mirror of
https://github.com/mailcow/mailcow-dockerized.git
synced 2026-02-18 23:26:24 +00:00
Compare commits
247 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c0be3347f8 | ||
|
|
caaa4a414d | ||
|
|
4f08c4ed7d | ||
|
|
c3d841340c | ||
|
|
b8cd00111f | ||
|
|
81cda80651 | ||
|
|
c1d4f04c22 | ||
|
|
82276cd1ca | ||
|
|
56ea4302ed | ||
|
|
c06112b26e | ||
|
|
aa5a4f0998 | ||
|
|
bf4f471cfd | ||
|
|
978bff9dbc | ||
|
|
869d9af7dd | ||
|
|
af10499ecb | ||
|
|
a1a4d8ff98 | ||
|
|
95d61e8aa2 | ||
|
|
ec8dd1a54f | ||
|
|
382ee34d0e | ||
|
|
0999c9e9ab | ||
|
|
0e76396f01 | ||
|
|
9bbac9f171 | ||
|
|
c485968e7f | ||
|
|
e727620bd3 | ||
|
|
71fa3ecebc | ||
|
|
70101d1187 | ||
|
|
c060c205d3 | ||
|
|
038b2efb75 | ||
|
|
1fe4cd03e9 | ||
|
|
12e02e67ff | ||
|
|
b6f57dfb78 | ||
|
|
3ebf2c2d2d | ||
|
|
1bac6f1ee7 | ||
|
|
67e7acd6bd | ||
|
|
910ce573d6 | ||
|
|
689336b3e1 | ||
|
|
01cf72cdef | ||
|
|
4cdb97c699 | ||
|
|
1bd795a9c6 | ||
|
|
39f29e6c30 | ||
|
|
1ab6af21e3 | ||
|
|
5d95c48e0d | ||
|
|
dbb9e474b0 | ||
|
|
f8eed8c786 | ||
|
|
ef010aa39c | ||
|
|
79171ea6f5 | ||
|
|
4e3294b273 | ||
|
|
32a6ecddb6 | ||
|
|
f3d9833ecf | ||
|
|
930ca76ea7 | ||
|
|
9a2887cf46 | ||
|
|
9950914086 | ||
|
|
470cfb0026 | ||
|
|
6c106b4e4d | ||
|
|
3d6253a2b2 | ||
|
|
b873812588 | ||
|
|
514fefd2ed | ||
|
|
6f9ee2d151 | ||
|
|
9832006141 | ||
|
|
0413d26855 | ||
|
|
7b29c1f304 | ||
|
|
ae3ef391ee | ||
|
|
7313f996d3 | ||
|
|
62d16c9e56 | ||
|
|
674b41ce08 | ||
|
|
1b833be760 | ||
|
|
88adb1adf5 | ||
|
|
ec472f13cf | ||
|
|
2e1d98cc7c | ||
|
|
07d7e3dc30 | ||
|
|
b0f5aee628 | ||
|
|
d3065612fd | ||
|
|
9912e41f78 | ||
|
|
04200c99a4 | ||
|
|
45666d2c4e | ||
|
|
9a806e64ce | ||
|
|
95e0608749 | ||
|
|
22a09b9795 | ||
|
|
04d5c43550 | ||
|
|
fbcb8cbeb9 | ||
|
|
0338a36ecf | ||
|
|
23fb5e2fca | ||
|
|
3507ff2773 | ||
|
|
a4970397f1 | ||
|
|
4132f6bd48 | ||
|
|
e6f83853ae | ||
|
|
586b3a2ed1 | ||
|
|
6af2addf3c | ||
|
|
f6eed6c441 | ||
|
|
b85837c803 | ||
|
|
653fc40d4c | ||
|
|
c17d80a6fd | ||
|
|
980bfa3aa0 | ||
|
|
664a954393 | ||
|
|
d5a27c4ccb | ||
|
|
6a8a2e2136 | ||
|
|
b859a52b8e | ||
|
|
10e0c42eff | ||
|
|
f47df263d7 | ||
|
|
2642d9109e | ||
|
|
6708b94ebb | ||
|
|
79cf0abc6e | ||
|
|
7de70322d6 | ||
|
|
417835dea8 | ||
|
|
3dcacc4187 | ||
|
|
69f0552d4f | ||
|
|
c443a9400a | ||
|
|
5c9f387d94 | ||
|
|
e9414d17e4 | ||
|
|
6bfa58611e | ||
|
|
df4d3bb6e0 | ||
|
|
e31b6d9a07 | ||
|
|
455ef084b4 | ||
|
|
c2948735f2 | ||
|
|
24c62b2f09 | ||
|
|
7da088c931 | ||
|
|
1ef0149076 | ||
|
|
922d173540 | ||
|
|
fd088cb504 | ||
|
|
721ee2394e | ||
|
|
c217be06c6 | ||
|
|
871c422ec1 | ||
|
|
3cc28af607 | ||
|
|
796e131c3a | ||
|
|
dd160cd508 | ||
|
|
732b321962 | ||
|
|
c51a769aec | ||
|
|
45a61755a5 | ||
|
|
769c57c355 | ||
|
|
2e7eb7c0fd | ||
|
|
4c83147d01 | ||
|
|
ca0bec4fc2 | ||
|
|
6f50dd17da | ||
|
|
4a331929d0 | ||
|
|
748bc893b6 | ||
|
|
e462602ddc | ||
|
|
4e0f435d12 | ||
|
|
46f0581936 | ||
|
|
20f04ecf6b | ||
|
|
ff43799763 | ||
|
|
85ca197615 | ||
|
|
d06d23bbaf | ||
|
|
702ed85dfd | ||
|
|
8abe74a562 | ||
|
|
2f8a181281 | ||
|
|
5c5287ca21 | ||
|
|
83ba8d5840 | ||
|
|
ce219668cf | ||
|
|
5b1b49a418 | ||
|
|
8978a9ad79 | ||
|
|
5f4a4fd759 | ||
|
|
171c591da4 | ||
|
|
9133b9899c | ||
|
|
701c9fb1b4 | ||
|
|
eabd22188b | ||
|
|
7028619742 | ||
|
|
c915bf2ee2 | ||
|
|
011edd5ac9 | ||
|
|
7ba3de4ced | ||
|
|
8ead77083f | ||
|
|
b2774fb50b | ||
|
|
4440bd46ad | ||
|
|
28985973eb | ||
|
|
f2c4697ca3 | ||
|
|
383b5affb5 | ||
|
|
ed4dcff63b | ||
|
|
caca32bbba | ||
|
|
d31e74c778 | ||
|
|
6c00e29276 | ||
|
|
9940c503a2 | ||
|
|
4b2862cb3c | ||
|
|
a36485f0f1 | ||
|
|
78168ee80a | ||
|
|
610609378f | ||
|
|
260906e350 | ||
|
|
2891bbf82a | ||
|
|
eb26bcbc94 | ||
|
|
bb3c2fb4fe | ||
|
|
ef0f366d1c | ||
|
|
84e230de8f | ||
|
|
f67a12d157 | ||
|
|
34b48eedfc | ||
|
|
0d900d4fc8 | ||
|
|
642ac6d02c | ||
|
|
eb84847a5b | ||
|
|
4db1569c93 | ||
|
|
94c1a6c4e1 | ||
|
|
6dc90186f9 | ||
|
|
0cfcde673c | ||
|
|
ed5be5d7dc | ||
|
|
ac90ecaf4f | ||
|
|
fed3fc9514 | ||
|
|
35b9940db4 | ||
|
|
ece940b000 | ||
|
|
4b5fd0b50a | ||
|
|
5aa9498f65 | ||
|
|
690d511e54 | ||
|
|
e2a2b42139 | ||
|
|
4bbda8006d | ||
|
|
a281746958 | ||
|
|
cec51b6162 | ||
|
|
107c5d2e7d | ||
|
|
00c025f31a | ||
|
|
9b6388d0d0 | ||
|
|
2f25fcad77 | ||
|
|
7067e2c714 | ||
|
|
9f3cdfa713 | ||
|
|
529acf5ff6 | ||
|
|
0371edcf5e | ||
|
|
d20254d4ee | ||
|
|
befecfc31d | ||
|
|
004fcf092b | ||
|
|
a487fcd0bd | ||
|
|
17e38a05f0 | ||
|
|
c503abfe40 | ||
|
|
73929db796 | ||
|
|
fb0685fa71 | ||
|
|
df36670c7c | ||
|
|
3f9215678d | ||
|
|
0ac0e5c252 | ||
|
|
af61c82077 | ||
|
|
c066273c79 | ||
|
|
0c3e53e3a9 | ||
|
|
5ca10d1cde | ||
|
|
7907d43af7 | ||
|
|
d198f1d3f8 | ||
|
|
102226723e | ||
|
|
2efaccf038 | ||
|
|
aa7b6fa4a9 | ||
|
|
714727a129 | ||
|
|
4e5e264e3e | ||
|
|
267c81b42e | ||
|
|
f2f3fbe497 | ||
|
|
6ba650820f | ||
|
|
baa6286471 | ||
|
|
be8537d165 | ||
|
|
737fced7be | ||
|
|
5a532df8ce | ||
|
|
f8ce7a71e6 | ||
|
|
2e876bda9a | ||
|
|
d2e5926cce | ||
|
|
e3b576be67 | ||
|
|
0f7e359686 | ||
|
|
b9a0b2db6d | ||
|
|
93b876c473 | ||
|
|
92c2aa2023 | ||
|
|
9351cf24fe |
69
.github/ISSUE_TEMPLATE/Bug_report.yml
vendored
69
.github/ISSUE_TEMPLATE/Bug_report.yml
vendored
@@ -11,22 +11,35 @@ body:
|
||||
required: true
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: I've found a bug and checked that ...
|
||||
description: Prior to placing the issue, please check following:** *(fill out each checkbox with an `X` once done)*
|
||||
label: Checklist prior issue creation
|
||||
description: Prior to creating the issue...
|
||||
options:
|
||||
- label: ... I understand that not following the below instructions will result in immediate closure and/or deletion of my issue.
|
||||
- label: I understand that failure to follow below instructions may cause this issue to be closed.
|
||||
required: true
|
||||
- label: ... I have understood that this bug report is dedicated for bugs, and not for support-related inquiries.
|
||||
- label: I understand that vague, incomplete or inaccurate information may cause this issue to be closed.
|
||||
required: true
|
||||
- label: ... I have understood that answers are voluntary and community-driven, and not commercial support.
|
||||
- label: I understand that this form is intended solely for reporting software bugs and not for support-related inquiries.
|
||||
required: true
|
||||
- label: ... I have verified that my issue has not been already answered in the past. I also checked previous [issues](https://github.com/mailcow/mailcow-dockerized/issues).
|
||||
- label: I understand that all responses are voluntary and community-driven, and do not constitute commercial support.
|
||||
required: true
|
||||
- label: I confirm that I have reviewed previous [issues](https://github.com/mailcow/mailcow-dockerized/issues) to ensure this matter has not already been addressed.
|
||||
required: true
|
||||
- label: I confirm that my environment meets all [prerequisite requirements](https://docs.mailcow.email/getstarted/prerequisite-system/) as specified in the official documentation.
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Description
|
||||
description: Please provide a brief description of the bug in 1-2 sentences. If applicable, add screenshots to help explain your problem. Very useful for bugs in mailcow UI.
|
||||
render: plain text
|
||||
description: Please provide a brief description of the bug. If applicable, add screenshots to help explain your problem. (Very useful for bugs in mailcow UI.)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: "Steps to reproduce:"
|
||||
description: "Please describe the steps to reproduce the bug. Screenshots can be added, if helpful."
|
||||
placeholder: |-
|
||||
1. ...
|
||||
2. ...
|
||||
3. ...
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
@@ -36,45 +49,36 @@ body:
|
||||
render: plain text
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: "Steps to reproduce:"
|
||||
description: "Please describe the steps to reproduce the bug. Screenshots can be added, if helpful."
|
||||
render: plain text
|
||||
placeholder: |-
|
||||
1. ...
|
||||
2. ...
|
||||
3. ...
|
||||
validations:
|
||||
required: true
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
## System information
|
||||
### In this stage we would kindly ask you to attach general system information about your setup.
|
||||
In this stage we would kindly ask you to attach general system information about your setup.
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: "Which branch are you using?"
|
||||
description: "#### `git rev-parse --abbrev-ref HEAD`"
|
||||
description: "#### Run: `git rev-parse --abbrev-ref HEAD`"
|
||||
multiple: false
|
||||
options:
|
||||
- master
|
||||
- master (stable)
|
||||
- staging
|
||||
- nightly
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: "Which architecture are you using?"
|
||||
description: "#### `uname -m`"
|
||||
description: "#### Run: `uname -m`"
|
||||
multiple: false
|
||||
options:
|
||||
- x86
|
||||
- x86_64
|
||||
- ARM64 (aarch64)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
attributes:
|
||||
label: "Operating System:"
|
||||
description: "#### Run: `lsb_release -ds`"
|
||||
placeholder: "e.g. Ubuntu 22.04 LTS"
|
||||
validations:
|
||||
required: true
|
||||
@@ -93,43 +97,44 @@ body:
|
||||
- type: input
|
||||
attributes:
|
||||
label: "Virtualization technology:"
|
||||
placeholder: "KVM, VMware, Xen, etc - **LXC and OpenVZ are not supported**"
|
||||
description: "LXC and OpenVZ are not supported!"
|
||||
placeholder: "KVM, VMware ESXi, Xen, etc"
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
attributes:
|
||||
label: "Docker version:"
|
||||
description: "#### `docker version`"
|
||||
description: "#### Run: `docker version`"
|
||||
placeholder: "20.10.21"
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
attributes:
|
||||
label: "docker-compose version or docker compose version:"
|
||||
description: "#### `docker-compose version` or `docker compose version`"
|
||||
description: "#### Run: `docker-compose version` or `docker compose version`"
|
||||
placeholder: "v2.12.2"
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
attributes:
|
||||
label: "mailcow version:"
|
||||
description: "#### ```git describe --tags `git rev-list --tags --max-count=1` ```"
|
||||
placeholder: "2022-08"
|
||||
description: "#### Run: ```git describe --tags `git rev-list --tags --max-count=1` ```"
|
||||
placeholder: "2022-08x"
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
attributes:
|
||||
label: "Reverse proxy:"
|
||||
placeholder: "e.g. Nginx/Traefik"
|
||||
placeholder: "e.g. nginx/Traefik, or none"
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: "Logs of git diff:"
|
||||
description: "#### Output of `git diff origin/master`, any other changes to the code? If so, **please post them**:"
|
||||
description: "#### Output of `git diff origin/master`, any other changes to the code? Sanitize if needed. If so, **please post them**:"
|
||||
render: plain text
|
||||
validations:
|
||||
required: true
|
||||
required: false
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: "Logs of iptables -L -vn:"
|
||||
|
||||
@@ -14,7 +14,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Mark/Close Stale Issues and Pull Requests 🗑️
|
||||
uses: actions/stale@v10.0.0
|
||||
uses: actions/stale@v10.1.1
|
||||
with:
|
||||
repo-token: ${{ secrets.STALE_ACTION_PAT }}
|
||||
days-before-stale: 60
|
||||
|
||||
4
.github/workflows/image_builds.yml
vendored
4
.github/workflows/image_builds.yml
vendored
@@ -15,7 +15,7 @@ jobs:
|
||||
images:
|
||||
- "acme-mailcow"
|
||||
- "clamd-mailcow"
|
||||
- "dockerapi-mailcow"
|
||||
- "controller-mailcow"
|
||||
- "dovecot-mailcow"
|
||||
- "netfilter-mailcow"
|
||||
- "olefy-mailcow"
|
||||
@@ -27,7 +27,7 @@ jobs:
|
||||
- "watchdog-mailcow"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions/checkout@v6
|
||||
- name: Setup Docker
|
||||
run: |
|
||||
curl -sSL https://get.docker.com/ | CHANNEL=stable sudo sh
|
||||
|
||||
4
.github/workflows/pr_to_nightly.yml
vendored
4
.github/workflows/pr_to_nightly.yml
vendored
@@ -8,11 +8,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Run the Action
|
||||
uses: devops-infra/action-pull-request@v0.6.1
|
||||
uses: devops-infra/action-pull-request@v1.0.2
|
||||
with:
|
||||
github_token: ${{ secrets.PRTONIGHTLY_ACTION_PAT }}
|
||||
title: Automatic PR to nightly from ${{ github.event.repository.updated_at}}
|
||||
|
||||
2
.github/workflows/rebuild_backup_image.yml
vendored
2
.github/workflows/rebuild_backup_image.yml
vendored
@@ -13,7 +13,7 @@ jobs:
|
||||
packages: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
@@ -15,14 +15,14 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v5
|
||||
uses: actions/checkout@v6
|
||||
|
||||
- name: Generate postscreen_access.cidr
|
||||
run: |
|
||||
bash helper-scripts/update_postscreen_whitelist.sh
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v8
|
||||
with:
|
||||
token: ${{ secrets.mailcow_action_Update_postscreen_access_cidr_pat }}
|
||||
commit-message: update postscreen_access.cidr
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
# Contribution Guidelines
|
||||
**_Last modified on 15th August 2024_**
|
||||
**_Last modified on 12th November 2025_**
|
||||
|
||||
First of all, thank you for wanting to provide a bugfix or a new feature for the mailcow community, it's because of your help that the project can continue to grow!
|
||||
|
||||
As we want to keep mailcow's development structured we setup these Guidelines which helps you to create your issue/pull request accordingly.
|
||||
|
||||
**PLEASE NOTE, THAT WE MIGHT CLOSE ISSUES/PULL REQUESTS IF THEY DON'T FULLFIL OUR WRITTEN GUIDELINES WRITTEN INSIDE THIS DOCUMENT**. So please check this guidelines before you propose a Issue/Pull Request.
|
||||
**PLEASE NOTE, THAT WE WILL CLOSE ISSUES/PULL REQUESTS IF THEY DON'T FULFILL OUR WRITTEN GUIDELINES WRITTEN INSIDE THIS DOCUMENT**. So please check this guidelines before you propose a Issue/Pull Request.
|
||||
|
||||
## Topics
|
||||
|
||||
@@ -27,14 +27,18 @@ However, please note the following regarding pull requests:
|
||||
6. Please **ALWAYS** create the actual pull request against the staging branch and **NEVER** directly against the master branch. *If you forget to do this, our moobot will remind you to switch the branch to staging.*
|
||||
7. Wait for a merge commit: It may happen that we do not accept your pull request immediately or sometimes not at all for various reasons. Please do not be disappointed if this is the case. We always endeavor to incorporate any meaningful changes from the community into the mailcow project.
|
||||
8. If you are planning larger and therefore more complex pull requests, it would be advisable to first announce this in a separate issue and then start implementing it after the idea has been accepted in order to avoid unnecessary frustration and effort!
|
||||
9. If your PR requires a Docker image rebuild (changes to Dockerfiles or files in data/Dockerfiles/), update the image tag in docker-compose.yml. Use the base-image versioning (e.g. ghcr.io/mailcow/sogo:5.12.4 → :5.12.5 for version bumps; append a letter for patch fixes, e.g. :5.12.4a). Follow this scheme.
|
||||
|
||||
---
|
||||
|
||||
## Issue Reporting
|
||||
**_Last modified on 15th August 2024_**
|
||||
**_Last modified on 12th November 2025_**
|
||||
|
||||
If you plan to report a issue within mailcow please read and understand the following rules:
|
||||
|
||||
### Security disclosures / Security-related fixes
|
||||
- Security vulnerabilities and security fixes must always be reported confidentially first to the contact address specified in SECURITY.md before they are integrated, published, or publicly disclosed in issues/PRs. Please wait for a response from the specified contact to ensure coordinated and responsible disclosure.
|
||||
|
||||
### Issue Reporting Guidelines
|
||||
|
||||
1. **ONLY** use the issue tracker for bug reports or improvement requests and NOT for support questions. For support questions you can either contact the [mailcow community on Telegram](https://docs.mailcow.email/#community-support-and-chat) or the mailcow team directly in exchange for a [support fee](https://docs.mailcow.email/#commercial-support).
|
||||
|
||||
@@ -17,7 +17,13 @@ caller="${BASH_SOURCE[1]##*/}"
|
||||
|
||||
get_installed_tools(){
|
||||
for bin in openssl curl docker git awk sha1sum grep cut jq; do
|
||||
if [[ -z $(command -v ${bin}) ]]; then echo "Cannot find ${bin}, exiting..."; exit 1; fi
|
||||
if [[ -z $(command -v ${bin}) ]]; then
|
||||
echo "Error: Cannot find command '${bin}'. Cannot proceed."
|
||||
echo "Solution: Please review system requirements and install requirements. Then, re-run the script."
|
||||
echo "See System Requirements: https://docs.mailcow.email/getstarted/install/"
|
||||
echo "Exiting..."
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
if grep --help 2>&1 | head -n 1 | grep -q -i "busybox"; then echo -e "${LIGHT_RED}BusyBox grep detected, please install gnu grep, \"apk add --no-cache --upgrade grep\"${NC}"; exit 1; fi
|
||||
@@ -32,45 +38,45 @@ get_docker_version(){
|
||||
}
|
||||
|
||||
get_compose_type(){
|
||||
if docker compose > /dev/null 2>&1; then
|
||||
if docker compose version --short | grep -e "^2." -e "^v2." > /dev/null 2>&1; then
|
||||
COMPOSE_VERSION=native
|
||||
COMPOSE_COMMAND="docker compose"
|
||||
if [[ "$caller" == "update.sh" ]]; then
|
||||
sed -i 's/^DOCKER_COMPOSE_VERSION=.*/DOCKER_COMPOSE_VERSION=native/' "$SCRIPT_DIR/mailcow.conf"
|
||||
fi
|
||||
echo -e "\e[33mFound Docker Compose Plugin (native).\e[0m"
|
||||
echo -e "\e[33mSetting the DOCKER_COMPOSE_VERSION Variable to native\e[0m"
|
||||
sleep 2
|
||||
echo -e "\e[33mNotice: You'll have to update this Compose Version via your Package Manager manually!\e[0m"
|
||||
else
|
||||
echo -e "\e[31mCannot find Docker Compose with a Version Higher than 2.X.X.\e[0m"
|
||||
echo -e "\e[31mPlease update/install it manually regarding to this doc site: https://docs.mailcow.email/install/\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
elif docker-compose > /dev/null 2>&1; then
|
||||
if ! [[ $(alias docker-compose 2> /dev/null) ]] ; then
|
||||
if docker-compose version --short | grep "^2." > /dev/null 2>&1; then
|
||||
COMPOSE_VERSION=standalone
|
||||
COMPOSE_COMMAND="docker-compose"
|
||||
if [[ "$caller" == "update.sh" ]]; then
|
||||
sed -i 's/^DOCKER_COMPOSE_VERSION=.*/DOCKER_COMPOSE_VERSION=standalone/' "$SCRIPT_DIR/mailcow.conf"
|
||||
fi
|
||||
echo -e "\e[33mFound Docker Compose Standalone.\e[0m"
|
||||
echo -e "\e[33mSetting the DOCKER_COMPOSE_VERSION Variable to standalone\e[0m"
|
||||
sleep 2
|
||||
echo -e "\e[33mNotice: For an automatic update of docker-compose please use the update_compose.sh scripts located at the helper-scripts folder.\e[0m"
|
||||
else
|
||||
echo -e "\e[31mCannot find Docker Compose with a Version Higher than 2.X.X.\e[0m"
|
||||
echo -e "\e[31mPlease update/install manually regarding to this doc site: https://docs.mailcow.email/install/\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
if docker compose > /dev/null 2>&1; then
|
||||
if docker compose version --short | grep -e "^[2-9]\." -e "^v[2-9]\." -e "^[1-9][0-9]\." -e "^v[1-9][0-9]\." > /dev/null 2>&1; then
|
||||
COMPOSE_VERSION=native
|
||||
COMPOSE_COMMAND="docker compose"
|
||||
if [[ "$caller" == "update.sh" ]]; then
|
||||
sed -i 's/^DOCKER_COMPOSE_VERSION=.*/DOCKER_COMPOSE_VERSION=native/' "$SCRIPT_DIR/mailcow.conf"
|
||||
fi
|
||||
echo -e "\e[33mFound Docker Compose Plugin (native).\e[0m"
|
||||
echo -e "\e[33mSetting the DOCKER_COMPOSE_VERSION Variable to native\e[0m"
|
||||
sleep 2
|
||||
echo -e "\e[33mNotice: You'll have to update this Compose Version via your Package Manager manually!\e[0m"
|
||||
else
|
||||
echo -e "\e[31mCannot find Docker Compose.\e[0m"
|
||||
echo -e "\e[31mPlease install it regarding to this doc site: https://docs.mailcow.email/install/\e[0m"
|
||||
exit 1
|
||||
echo -e "\e[31mCannot find Docker Compose with a Version Higher than 2.X.X.\e[0m"
|
||||
echo -e "\e[31mPlease update/install it manually regarding to this doc site: https://docs.mailcow.email/install/\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
elif docker-compose > /dev/null 2>&1; then
|
||||
if ! [[ $(alias docker-compose 2> /dev/null) ]] ; then
|
||||
if docker-compose version --short | grep -e "^[2-9]\." -e "^[1-9][0-9]\." > /dev/null 2>&1; then
|
||||
COMPOSE_VERSION=standalone
|
||||
COMPOSE_COMMAND="docker-compose"
|
||||
if [[ "$caller" == "update.sh" ]]; then
|
||||
sed -i 's/^DOCKER_COMPOSE_VERSION=.*/DOCKER_COMPOSE_VERSION=standalone/' "$SCRIPT_DIR/mailcow.conf"
|
||||
fi
|
||||
echo -e "\e[33mFound Docker Compose Standalone.\e[0m"
|
||||
echo -e "\e[33mSetting the DOCKER_COMPOSE_VERSION Variable to standalone\e[0m"
|
||||
sleep 2
|
||||
echo -e "\e[33mNotice: For an automatic update of docker-compose please use the update_compose.sh scripts located at the helper-scripts folder.\e[0m"
|
||||
else
|
||||
echo -e "\e[31mCannot find Docker Compose with a Version Higher than 2.X.X.\e[0m"
|
||||
echo -e "\e[31mPlease update/install manually regarding to this doc site: https://docs.mailcow.email/install/\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo -e "\e[31mCannot find Docker Compose.\e[0m"
|
||||
echo -e "\e[31mPlease install it regarding to this doc site: https://docs.mailcow.email/install/\e[0m"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
detect_bad_asn() {
|
||||
@@ -221,4 +227,4 @@ detect_major_update() {
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,14 +5,65 @@
|
||||
|
||||
# 1) Check if the host supports IPv6
|
||||
get_ipv6_support() {
|
||||
if grep -qs '^1' /proc/sys/net/ipv6/conf/all/disable_ipv6 2>/dev/null \
|
||||
|| ! ip -6 route show default &>/dev/null; then
|
||||
# ---- helper: probe external IPv6 connectivity without DNS ----
|
||||
_probe_ipv6_connectivity() {
|
||||
# Use literal, always-on IPv6 echo responders (no DNS required)
|
||||
local PROBE_IPS=("2001:4860:4860::8888" "2606:4700:4700::1111")
|
||||
local ip rc=1
|
||||
|
||||
for ip in "${PROBE_IPS[@]}"; do
|
||||
if command -v ping6 &>/dev/null; then
|
||||
ping6 -c1 -W2 "$ip" &>/dev/null || ping6 -c1 -w2 "$ip" &>/dev/null
|
||||
rc=$?
|
||||
elif command -v ping &>/dev/null; then
|
||||
ping -6 -c1 -W2 "$ip" &>/dev/null || ping -6 -c1 -w2 "$ip" &>/dev/null
|
||||
rc=$?
|
||||
else
|
||||
rc=1
|
||||
fi
|
||||
[[ $rc -eq 0 ]] && return 0
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
if [[ ! -f /proc/net/if_inet6 ]] || grep -qs '^1' /proc/sys/net/ipv6/conf/all/disable_ipv6 2>/dev/null; then
|
||||
DETECTED_IPV6=false
|
||||
echo -e "${YELLOW}IPv6 not detected on host – ${LIGHT_RED}disabling IPv6 support${YELLOW}.${NC}"
|
||||
else
|
||||
DETECTED_IPV6=true
|
||||
echo -e "IPv6 detected on host – ${LIGHT_GREEN}leaving IPv6 support enabled${YELLOW}.${NC}"
|
||||
echo -e "${YELLOW}IPv6 not detected on host – ${LIGHT_RED}IPv6 is administratively disabled${YELLOW}.${NC}"
|
||||
return
|
||||
fi
|
||||
|
||||
if ip -6 route show default 2>/dev/null | grep -qE '^default'; then
|
||||
echo -e "${YELLOW}Default IPv6 route found – testing external IPv6 connectivity...${NC}"
|
||||
if _probe_ipv6_connectivity; then
|
||||
DETECTED_IPV6=true
|
||||
echo -e "IPv6 detected on host – ${LIGHT_GREEN}leaving IPv6 support enabled${YELLOW}.${NC}"
|
||||
else
|
||||
DETECTED_IPV6=false
|
||||
echo -e "${YELLOW}Default IPv6 route present but external IPv6 connectivity failed – ${LIGHT_RED}disabling IPv6 support${YELLOW}.${NC}"
|
||||
fi
|
||||
return
|
||||
fi
|
||||
|
||||
if ip -6 addr show scope global 2>/dev/null | grep -q 'inet6'; then
|
||||
DETECTED_IPV6=false
|
||||
echo -e "${YELLOW}Global IPv6 address present but no default route – ${LIGHT_RED}disabling IPv6 support${YELLOW}.${NC}"
|
||||
return
|
||||
fi
|
||||
|
||||
if ip -6 addr show scope link 2>/dev/null | grep -q 'inet6'; then
|
||||
echo -e "${YELLOW}Only link-local IPv6 addresses found – testing external IPv6 connectivity...${NC}"
|
||||
if _probe_ipv6_connectivity; then
|
||||
DETECTED_IPV6=true
|
||||
echo -e "External IPv6 connectivity available – ${LIGHT_GREEN}leaving IPv6 support enabled${YELLOW}.${NC}"
|
||||
else
|
||||
DETECTED_IPV6=false
|
||||
echo -e "${YELLOW}Only link-local IPv6 present and no external connectivity – ${LIGHT_RED}disabling IPv6 support${YELLOW}.${NC}"
|
||||
fi
|
||||
return
|
||||
fi
|
||||
|
||||
DETECTED_IPV6=false
|
||||
echo -e "${YELLOW}IPv6 not detected on host – ${LIGHT_RED}disabling IPv6 support${YELLOW}.${NC}"
|
||||
}
|
||||
|
||||
# 2) Ensure Docker daemon.json has (or create) the required IPv6 settings
|
||||
@@ -21,7 +72,7 @@ docker_daemon_edit(){
|
||||
DOCKER_MAJOR=$(docker version --format '{{.Server.Version}}' 2>/dev/null | cut -d. -f1)
|
||||
MISSING=()
|
||||
|
||||
_has_kv() { grep -Eq "\"$1\"\s*:\s*$2" "$DOCKER_DAEMON_CONFIG" 2>/dev/null; }
|
||||
_has_kv() { grep -Eq "\"$1\"[[:space:]]*:[[:space:]]*$2" "$DOCKER_DAEMON_CONFIG" 2>/dev/null; }
|
||||
|
||||
if [[ -f "$DOCKER_DAEMON_CONFIG" ]]; then
|
||||
|
||||
@@ -38,12 +89,18 @@ docker_daemon_edit(){
|
||||
fi
|
||||
|
||||
# Gather missing keys
|
||||
! _has_kv ipv6 true && MISSING+=("ipv6: true")
|
||||
! grep -Eq '"fixed-cidr-v6"\s*:\s*".+"' "$DOCKER_DAEMON_CONFIG" \
|
||||
&& MISSING+=('fixed-cidr-v6: "fd00:dead:beef:c0::/80"')
|
||||
if [[ -n "$DOCKER_MAJOR" && "$DOCKER_MAJOR" -le 27 ]]; then
|
||||
! _has_kv ipv6 true && MISSING+=("ipv6: true")
|
||||
|
||||
# For Docker < 28, keep requiring fixed-cidr-v6 (default bridge needs it on old engines)
|
||||
if [[ -n "$DOCKER_MAJOR" && "$DOCKER_MAJOR" -lt 28 ]]; then
|
||||
! grep -Eq '"fixed-cidr-v6"[[:space:]]*:[[:space:]]*".+"' "$DOCKER_DAEMON_CONFIG" \
|
||||
&& MISSING+=('fixed-cidr-v6: "fd00:dead:beef:c0::/80"')
|
||||
fi
|
||||
|
||||
# For Docker < 27, ip6tables needed and was tied to experimental in older releases
|
||||
if [[ -n "$DOCKER_MAJOR" && "$DOCKER_MAJOR" -lt 27 ]]; then
|
||||
_has_kv ipv6 true && ! _has_kv ip6tables true && MISSING+=("ip6tables: true")
|
||||
! _has_kv experimental true && MISSING+=("experimental: true")
|
||||
! _has_kv experimental true && MISSING+=("experimental: true")
|
||||
fi
|
||||
|
||||
# Fix if needed
|
||||
@@ -60,9 +117,19 @@ docker_daemon_edit(){
|
||||
cp "$DOCKER_DAEMON_CONFIG" "${DOCKER_DAEMON_CONFIG}.bak"
|
||||
if command -v jq &>/dev/null; then
|
||||
TMP=$(mktemp)
|
||||
JQ_FILTER='.ipv6 = true | .["fixed-cidr-v6"] = "fd00:dead:beef:c0::/80"'
|
||||
[[ "$DOCKER_MAJOR" && "$DOCKER_MAJOR" -lt 27 ]] \
|
||||
&& JQ_FILTER+=' | .ip6tables = true | .experimental = true'
|
||||
# Base filter: ensure ipv6 = true
|
||||
JQ_FILTER='.ipv6 = true'
|
||||
|
||||
# Add fixed-cidr-v6 only for Docker < 28
|
||||
if [[ -n "$DOCKER_MAJOR" && "$DOCKER_MAJOR" -lt 28 ]]; then
|
||||
JQ_FILTER+=' | .["fixed-cidr-v6"] = (.["fixed-cidr-v6"] // "fd00:dead:beef:c0::/80")'
|
||||
fi
|
||||
|
||||
# Add ip6tables/experimental only for Docker < 27
|
||||
if [[ -n "$DOCKER_MAJOR" && "$DOCKER_MAJOR" -lt 27 ]]; then
|
||||
JQ_FILTER+=' | .ip6tables = true | .experimental = true'
|
||||
fi
|
||||
|
||||
jq "$JQ_FILTER" "$DOCKER_DAEMON_CONFIG" >"$TMP" && mv "$TMP" "$DOCKER_DAEMON_CONFIG"
|
||||
echo -e "${LIGHT_GREEN}daemon.json updated. Restarting Docker...${NC}"
|
||||
(command -v systemctl &>/dev/null && systemctl restart docker) || service docker restart
|
||||
@@ -88,6 +155,7 @@ docker_daemon_edit(){
|
||||
fi
|
||||
|
||||
if [[ $ans =~ ^[Yy]$ ]]; then
|
||||
mkdir -p "$(dirname "$DOCKER_DAEMON_CONFIG")"
|
||||
if [[ -n "$DOCKER_MAJOR" && "$DOCKER_MAJOR" -lt 27 ]]; then
|
||||
cat > "$DOCKER_DAEMON_CONFIG" <<EOF
|
||||
{
|
||||
@@ -97,12 +165,19 @@ docker_daemon_edit(){
|
||||
"experimental": true
|
||||
}
|
||||
EOF
|
||||
else
|
||||
elif [[ -n "$DOCKER_MAJOR" && "$DOCKER_MAJOR" -lt 28 ]]; then
|
||||
cat > "$DOCKER_DAEMON_CONFIG" <<EOF
|
||||
{
|
||||
"ipv6": true,
|
||||
"fixed-cidr-v6": "fd00:dead:beef:c0::/80"
|
||||
}
|
||||
EOF
|
||||
else
|
||||
# Docker 28+: ipv6 works without fixed-cidr-v6
|
||||
cat > "$DOCKER_DAEMON_CONFIG" <<EOF
|
||||
{
|
||||
"ipv6": true
|
||||
}
|
||||
EOF
|
||||
fi
|
||||
echo -e "${GREEN}Created $DOCKER_DAEMON_CONFIG with IPv6 settings.${NC}"
|
||||
@@ -122,7 +197,7 @@ configure_ipv6() {
|
||||
# detect manual override if mailcow.conf is present
|
||||
if [[ -n "$MAILCOW_CONF" && -f "$MAILCOW_CONF" ]] && grep -q '^ENABLE_IPV6=' "$MAILCOW_CONF"; then
|
||||
MANUAL_SETTING=$(grep '^ENABLE_IPV6=' "$MAILCOW_CONF" | cut -d= -f2)
|
||||
elif [[ -z "$MAILCOW_CONF" ]] && [[ ! -z "${ENABLE_IPV6:-}" ]]; then
|
||||
elif [[ -z "$MAILCOW_CONF" ]] && [[ -n "${ENABLE_IPV6:-}" ]]; then
|
||||
MANUAL_SETTING="$ENABLE_IPV6"
|
||||
else
|
||||
MANUAL_SETTING=""
|
||||
@@ -131,38 +206,34 @@ configure_ipv6() {
|
||||
get_ipv6_support
|
||||
|
||||
# if user manually set it, check for mismatch
|
||||
if [[ -n "$MANUAL_SETTING" ]]; then
|
||||
if [[ "$MANUAL_SETTING" == "false" && "$DETECTED_IPV6" == "true" ]]; then
|
||||
echo -e "${RED}ERROR: You have ENABLE_IPV6=false but your host and Docker support IPv6.${NC}"
|
||||
echo -e "${RED}This can create an open relay. Please set ENABLE_IPV6=true in your mailcow.conf and re-run.${NC}"
|
||||
exit 1
|
||||
elif [[ "$MANUAL_SETTING" == "true" && "$DETECTED_IPV6" == "false" ]]; then
|
||||
echo -e "${RED}ERROR: You have ENABLE_IPV6=true but your host does not support IPv6.${NC}"
|
||||
echo -e "${RED}Please disable or fix your host/Docker IPv6 support, or set ENABLE_IPV6=false.${NC}"
|
||||
exit 1
|
||||
if [[ "$DETECTED_IPV6" != "true" ]]; then
|
||||
if [[ -n "$MAILCOW_CONF" && -f "$MAILCOW_CONF" ]]; then
|
||||
if grep -q '^ENABLE_IPV6=' "$MAILCOW_CONF"; then
|
||||
sed -i 's/^ENABLE_IPV6=.*/ENABLE_IPV6=false/' "$MAILCOW_CONF"
|
||||
else
|
||||
echo "ENABLE_IPV6=false" >> "$MAILCOW_CONF"
|
||||
fi
|
||||
else
|
||||
return
|
||||
export IPV6_BOOL=false
|
||||
fi
|
||||
fi
|
||||
|
||||
# no manual override: proceed to set or export
|
||||
if [[ "$DETECTED_IPV6" == "true" ]]; then
|
||||
docker_daemon_edit
|
||||
else
|
||||
echo "Skipping Docker IPv6 configuration because host does not support IPv6."
|
||||
echo "Make sure to check if your docker daemon.json does not include \"enable_ipv6\": true if you do not want IPv6."
|
||||
echo "IPv6 configuration complete: ENABLE_IPV6=false"
|
||||
sleep 2
|
||||
return
|
||||
fi
|
||||
|
||||
# now write into mailcow.conf or export
|
||||
docker_daemon_edit
|
||||
|
||||
if [[ -n "$MAILCOW_CONF" && -f "$MAILCOW_CONF" ]]; then
|
||||
LINE="ENABLE_IPV6=$DETECTED_IPV6"
|
||||
if grep -q '^ENABLE_IPV6=' "$MAILCOW_CONF"; then
|
||||
sed -i "s/^ENABLE_IPV6=.*/$LINE/" "$MAILCOW_CONF"
|
||||
sed -i 's/^ENABLE_IPV6=.*/ENABLE_IPV6=true/' "$MAILCOW_CONF"
|
||||
else
|
||||
echo "$LINE" >> "$MAILCOW_CONF"
|
||||
echo "ENABLE_IPV6=true" >> "$MAILCOW_CONF"
|
||||
fi
|
||||
else
|
||||
export IPV6_BOOL="$DETECTED_IPV6"
|
||||
export IPV6_BOOL=true
|
||||
fi
|
||||
|
||||
echo "IPv6 configuration complete: ENABLE_IPV6=$DETECTED_IPV6"
|
||||
echo "IPv6 configuration complete: ENABLE_IPV6=true"
|
||||
}
|
||||
@@ -43,6 +43,7 @@ adapt_new_options() {
|
||||
"ALLOW_ADMIN_EMAIL_LOGIN"
|
||||
"SKIP_HTTP_VERIFICATION"
|
||||
"SOGO_EXPIRE_SESSION"
|
||||
"SOGO_URL_ENCRYPTION_KEY"
|
||||
"REDIS_PORT"
|
||||
"REDISPASS"
|
||||
"DOVECOT_MASTER_USER"
|
||||
@@ -94,7 +95,6 @@ adapt_new_options() {
|
||||
echo '# Max log lines per service to keep in Redis logs' >> mailcow.conf
|
||||
echo "LOG_LINES=9999" >> mailcow.conf
|
||||
;;
|
||||
|
||||
IPV4_NETWORK)
|
||||
echo '# Internal IPv4 /24 subnet, format n.n.n. (expands to n.n.n.0/24)' >> mailcow.conf
|
||||
echo "IPV4_NETWORK=172.22.1" >> mailcow.conf
|
||||
@@ -276,21 +276,22 @@ adapt_new_options() {
|
||||
echo '# A COMPLETE DOCKER STACK REBUILD (compose down && compose up -d) IS NEEDED TO APPLY THIS.' >> mailcow.conf
|
||||
echo ENABLE_IPV6=${IPV6_BOOL} >> mailcow.conf
|
||||
;;
|
||||
|
||||
SKIP_CLAMD)
|
||||
echo '# Skip ClamAV (clamd-mailcow) anti-virus (Rspamd will auto-detect a missing ClamAV container) - y/n' >> mailcow.conf
|
||||
echo 'SKIP_CLAMD=n' >> mailcow.conf
|
||||
;;
|
||||
|
||||
SKIP_OLEFY)
|
||||
echo '# Skip Olefy (olefy-mailcow) anti-virus for Office documents (Rspamd will auto-detect a missing Olefy container) - y/n' >> mailcow.conf
|
||||
echo 'SKIP_OLEFY=n' >> mailcow.conf
|
||||
;;
|
||||
|
||||
REDISPASS)
|
||||
echo "REDISPASS=$(LC_ALL=C </dev/urandom tr -dc A-Za-z0-9 2>/dev/null | head -c 28)" >> mailcow.conf
|
||||
;;
|
||||
|
||||
SOGO_URL_ENCRYPTION_KEY)
|
||||
echo '# SOGo URL encryption key (exactly 16 characters, limited to A–Z, a–z, 0–9)' >> mailcow.conf
|
||||
echo '# This key is used to encrypt email addresses within SOGo URLs' >> mailcow.conf
|
||||
echo "SOGO_URL_ENCRYPTION_KEY=$(LC_ALL=C </dev/urandom tr -dc A-Za-z0-9 2>/dev/null | head -c 16)" >> mailcow.conf
|
||||
;;
|
||||
*)
|
||||
echo "${option}=" >> mailcow.conf
|
||||
;;
|
||||
|
||||
@@ -48,11 +48,11 @@ if [[ "${SKIP_LETS_ENCRYPT}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
|
||||
exec $(readlink -f "$0")
|
||||
fi
|
||||
|
||||
log_f "Waiting for Docker API..."
|
||||
until ping dockerapi -c1 > /dev/null; do
|
||||
log_f "Waiting for Controller .."
|
||||
until ping controller -c1 > /dev/null; do
|
||||
sleep 1
|
||||
done
|
||||
log_f "Docker API OK"
|
||||
log_f "Controller OK"
|
||||
|
||||
log_f "Waiting for Postfix..."
|
||||
until ping postfix -c1 > /dev/null; do
|
||||
@@ -246,6 +246,25 @@ while true; do
|
||||
done
|
||||
VALIDATED_CONFIG_DOMAINS+=("${VALIDATED_CONFIG_DOMAINS_SUBDOMAINS[*]}")
|
||||
done
|
||||
|
||||
# Fetch alias domains where target domain has MTA-STS enabled
|
||||
if [[ ${AUTODISCOVER_SAN} == "y" ]]; then
|
||||
SQL_ALIAS_DOMAINS=$(mariadb --skip-ssl --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "SELECT ad.alias_domain FROM alias_domain ad INNER JOIN mta_sts m ON ad.target_domain = m.domain WHERE ad.active = 1 AND m.active = 1" -Bs)
|
||||
if [[ $? -eq 0 ]]; then
|
||||
while read alias_domain; do
|
||||
if [[ -z "${alias_domain}" ]]; then
|
||||
# ignore empty lines
|
||||
continue
|
||||
fi
|
||||
# Only add mta-sts subdomain for alias domains
|
||||
if [[ "mta-sts.${alias_domain}" != "${MAILCOW_HOSTNAME}" ]]; then
|
||||
if check_domain "mta-sts.${alias_domain}"; then
|
||||
VALIDATED_CONFIG_DOMAINS+=("mta-sts.${alias_domain}")
|
||||
fi
|
||||
fi
|
||||
done <<< "${SQL_ALIAS_DOMAINS}"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if check_domain ${MAILCOW_HOSTNAME}; then
|
||||
|
||||
@@ -2,32 +2,32 @@
|
||||
|
||||
# Reading container IDs
|
||||
# Wrapping as array to ensure trimmed content when calling $NGINX etc.
|
||||
NGINX=($(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"nginx-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" | tr "\n" " "))
|
||||
DOVECOT=($(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"dovecot-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" | tr "\n" " "))
|
||||
POSTFIX=($(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"postfix-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" | tr "\n" " "))
|
||||
NGINX=($(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"nginx-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" | tr "\n" " "))
|
||||
DOVECOT=($(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"dovecot-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" | tr "\n" " "))
|
||||
POSTFIX=($(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"postfix-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" | tr "\n" " "))
|
||||
|
||||
reload_nginx(){
|
||||
echo "Reloading Nginx..."
|
||||
NGINX_RELOAD_RET=$(curl -X POST --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${NGINX}/exec -d '{"cmd":"reload", "task":"nginx"}' --silent -H 'Content-type: application/json' | jq -r .type)
|
||||
NGINX_RELOAD_RET=$(curl -X POST --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${NGINX}/exec -d '{"cmd":"reload", "task":"nginx"}' --silent -H 'Content-type: application/json' | jq -r .type)
|
||||
[[ ${NGINX_RELOAD_RET} != 'success' ]] && { echo "Could not reload Nginx, restarting container..."; restart_container ${NGINX} ; }
|
||||
}
|
||||
|
||||
reload_dovecot(){
|
||||
echo "Reloading Dovecot..."
|
||||
DOVECOT_RELOAD_RET=$(curl -X POST --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${DOVECOT}/exec -d '{"cmd":"reload", "task":"dovecot"}' --silent -H 'Content-type: application/json' | jq -r .type)
|
||||
DOVECOT_RELOAD_RET=$(curl -X POST --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${DOVECOT}/exec -d '{"cmd":"reload", "task":"dovecot"}' --silent -H 'Content-type: application/json' | jq -r .type)
|
||||
[[ ${DOVECOT_RELOAD_RET} != 'success' ]] && { echo "Could not reload Dovecot, restarting container..."; restart_container ${DOVECOT} ; }
|
||||
}
|
||||
|
||||
reload_postfix(){
|
||||
echo "Reloading Postfix..."
|
||||
POSTFIX_RELOAD_RET=$(curl -X POST --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${POSTFIX}/exec -d '{"cmd":"reload", "task":"postfix"}' --silent -H 'Content-type: application/json' | jq -r .type)
|
||||
POSTFIX_RELOAD_RET=$(curl -X POST --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${POSTFIX}/exec -d '{"cmd":"reload", "task":"postfix"}' --silent -H 'Content-type: application/json' | jq -r .type)
|
||||
[[ ${POSTFIX_RELOAD_RET} != 'success' ]] && { echo "Could not reload Postfix, restarting container..."; restart_container ${POSTFIX} ; }
|
||||
}
|
||||
|
||||
restart_container(){
|
||||
for container in $*; do
|
||||
echo "Restarting ${container}..."
|
||||
C_REST_OUT=$(curl -X POST --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${container}/restart --silent | jq -r '.msg')
|
||||
C_REST_OUT=$(curl -X POST --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${container}/restart --silent | jq -r '.msg')
|
||||
echo "${C_REST_OUT}"
|
||||
done
|
||||
}
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
FROM debian:bookworm-slim
|
||||
FROM debian:trixie-slim
|
||||
|
||||
RUN apt update && apt install pigz -y --no-install-recommends
|
||||
RUN apt update && apt install pigz zstd -y --no-install-recommends
|
||||
@@ -6,22 +6,29 @@ ARG PIP_BREAK_SYSTEM_PACKAGES=1
|
||||
WORKDIR /app
|
||||
|
||||
RUN apk add --update --no-cache python3 \
|
||||
bash \
|
||||
py3-pip \
|
||||
openssl \
|
||||
tzdata \
|
||||
py3-psutil \
|
||||
py3-redis \
|
||||
py3-async-timeout \
|
||||
supervisor \
|
||||
curl \
|
||||
&& pip3 install --upgrade pip \
|
||||
fastapi \
|
||||
uvicorn \
|
||||
aiodocker \
|
||||
docker
|
||||
RUN mkdir /app/modules
|
||||
|
||||
COPY mailcow-adm/ /app/mailcow-adm/
|
||||
RUN pip3 install -r /app/mailcow-adm/requirements.txt
|
||||
|
||||
COPY api/ /app/api/
|
||||
|
||||
COPY docker-entrypoint.sh /app/
|
||||
COPY main.py /app/main.py
|
||||
COPY modules/ /app/modules/
|
||||
COPY supervisord.conf /etc/supervisor/supervisord.conf
|
||||
COPY stop-supervisor.sh /usr/local/sbin/stop-supervisor.sh
|
||||
|
||||
ENTRYPOINT ["/bin/sh", "/app/docker-entrypoint.sh"]
|
||||
CMD ["python", "main.py"]
|
||||
CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/supervisord.conf"]
|
||||
@@ -254,8 +254,8 @@ if __name__ == '__main__':
|
||||
app,
|
||||
host="0.0.0.0",
|
||||
port=443,
|
||||
ssl_certfile="/app/dockerapi_cert.pem",
|
||||
ssl_keyfile="/app/dockerapi_key.pem",
|
||||
ssl_certfile="/app/controller_cert.pem",
|
||||
ssl_keyfile="/app/controller_key.pem",
|
||||
log_level="info",
|
||||
loop="none"
|
||||
)
|
||||
9
data/Dockerfiles/controller/docker-entrypoint.sh
Executable file
9
data/Dockerfiles/controller/docker-entrypoint.sh
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/bin/bash
|
||||
|
||||
`openssl req -x509 -newkey rsa:4096 -sha256 -days 3650 -nodes \
|
||||
-keyout /app/controller_key.pem \
|
||||
-out /app/controller_cert.pem \
|
||||
-subj /CN=controller/O=mailcow \
|
||||
-addext subjectAltName=DNS:controller`
|
||||
|
||||
exec "$@"
|
||||
61
data/Dockerfiles/controller/mailcow-adm/mailcow-adm.py
Executable file
61
data/Dockerfiles/controller/mailcow-adm/mailcow-adm.py
Executable file
@@ -0,0 +1,61 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
from models.AliasModel import AliasModel
|
||||
from models.MailboxModel import MailboxModel
|
||||
from models.SyncjobModel import SyncjobModel
|
||||
from models.CalendarModel import CalendarModel
|
||||
from models.MailerModel import MailerModel
|
||||
from models.AddressbookModel import AddressbookModel
|
||||
from models.MaildirModel import MaildirModel
|
||||
from models.DomainModel import DomainModel
|
||||
from models.DomainadminModel import DomainadminModel
|
||||
from models.StatusModel import StatusModel
|
||||
|
||||
from modules.Utils import Utils
|
||||
|
||||
|
||||
|
||||
|
||||
def main():
|
||||
utils = Utils()
|
||||
|
||||
model_map = {
|
||||
MailboxModel.parser_command: MailboxModel,
|
||||
AliasModel.parser_command: AliasModel,
|
||||
SyncjobModel.parser_command: SyncjobModel,
|
||||
CalendarModel.parser_command: CalendarModel,
|
||||
AddressbookModel.parser_command: AddressbookModel,
|
||||
MailerModel.parser_command: MailerModel,
|
||||
MaildirModel.parser_command: MaildirModel,
|
||||
DomainModel.parser_command: DomainModel,
|
||||
DomainadminModel.parser_command: DomainadminModel,
|
||||
StatusModel.parser_command: StatusModel
|
||||
}
|
||||
|
||||
parser = argparse.ArgumentParser(description="mailcow Admin Tool")
|
||||
subparsers = parser.add_subparsers(dest="command", required=True)
|
||||
|
||||
for model in model_map.values():
|
||||
model.add_parser(subparsers)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
|
||||
for cmd, model_cls in model_map.items():
|
||||
if args.command == cmd and model_cls.has_required_args(args):
|
||||
instance = model_cls(**vars(args))
|
||||
action = getattr(instance, args.object, None)
|
||||
if callable(action):
|
||||
res = action()
|
||||
utils.pprint(res)
|
||||
sys.exit(0)
|
||||
|
||||
parser.print_help()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,140 @@
|
||||
from modules.Sogo import Sogo
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
class AddressbookModel(BaseModel):
|
||||
parser_command = "addressbook"
|
||||
required_args = {
|
||||
"add": [["username", "name"]],
|
||||
"delete": [["username", "name"]],
|
||||
"get": [["username", "name"]],
|
||||
"set_acl": [["username", "name", "sharee_email", "acl"]],
|
||||
"get_acl": [["username", "name"]],
|
||||
"delete_acl": [["username", "name", "sharee_email"]],
|
||||
"add_contact": [["username", "name", "contact_name", "contact_email", "type"]],
|
||||
"delete_contact": [["username", "name", "contact_name"]],
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
username=None,
|
||||
name=None,
|
||||
sharee_email=None,
|
||||
acl=None,
|
||||
subscribe=None,
|
||||
ics=None,
|
||||
contact_name=None,
|
||||
contact_email=None,
|
||||
type=None,
|
||||
**kwargs
|
||||
):
|
||||
self.sogo = Sogo(username)
|
||||
|
||||
self.name = name
|
||||
self.acl = acl
|
||||
self.sharee_email = sharee_email
|
||||
self.subscribe = subscribe
|
||||
self.ics = ics
|
||||
self.contact_name = contact_name
|
||||
self.contact_email = contact_email
|
||||
self.type = type
|
||||
|
||||
def add(self):
|
||||
"""
|
||||
Add a new addressbook.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
return self.sogo.addAddressbook(self.name)
|
||||
|
||||
def set_acl(self):
|
||||
"""
|
||||
Set ACL for the addressbook.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
addressbook_id = self.sogo.getAddressbookIdByName(self.name)
|
||||
if not addressbook_id:
|
||||
print(f"Addressbook '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
return self.sogo.setAddressbookACL(addressbook_id, self.sharee_email, self.acl, self.subscribe)
|
||||
|
||||
def delete_acl(self):
|
||||
"""
|
||||
Delete the addressbook ACL.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
addressbook_id = self.sogo.getAddressbookIdByName(self.name)
|
||||
if not addressbook_id:
|
||||
print(f"Addressbook '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
return self.sogo.deleteAddressbookACL(addressbook_id, self.sharee_email)
|
||||
|
||||
def get_acl(self):
|
||||
"""
|
||||
Get the ACL for the addressbook.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
addressbook_id = self.sogo.getAddressbookIdByName(self.name)
|
||||
if not addressbook_id:
|
||||
print(f"Addressbook '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
return self.sogo.getAddressbookACL(addressbook_id)
|
||||
|
||||
def add_contact(self):
|
||||
"""
|
||||
Add a new contact to the addressbook.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
addressbook_id = self.sogo.getAddressbookIdByName(self.name)
|
||||
if not addressbook_id:
|
||||
print(f"Addressbook '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
if self.type == "card":
|
||||
return self.sogo.addAddressbookContact(addressbook_id, self.contact_name, self.contact_email)
|
||||
elif self.type == "list":
|
||||
return self.sogo.addAddressbookContactList(addressbook_id, self.contact_name, self.contact_email)
|
||||
|
||||
def delete_contact(self):
|
||||
"""
|
||||
Delete a contact or contactlist from the addressbook.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
addressbook_id = self.sogo.getAddressbookIdByName(self.name)
|
||||
if not addressbook_id:
|
||||
print(f"Addressbook '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
return self.sogo.deleteAddressbookItem(addressbook_id, self.contact_name)
|
||||
|
||||
def get(self):
|
||||
"""
|
||||
Retrieve addressbooks list.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
return self.sogo.getAddressbookList()
|
||||
|
||||
def delete(self):
|
||||
"""
|
||||
Delete the addressbook.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
|
||||
addressbook_id = self.sogo.getAddressbookIdByName(self.name)
|
||||
if not addressbook_id:
|
||||
print(f"Addressbook '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
return self.sogo.deleteAddressbook(addressbook_id)
|
||||
|
||||
@classmethod
|
||||
def add_parser(cls, subparsers):
|
||||
parser = subparsers.add_parser(
|
||||
cls.parser_command,
|
||||
help="Manage addressbooks (add, delete, get, set_acl, get_acl, delete_acl, add_contact, delete_contact)"
|
||||
)
|
||||
parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: add, delete, get, set_acl, get_acl, delete_acl, add_contact, delete_contact")
|
||||
parser.add_argument("--username", required=True, help="Username of the addressbook owner (e.g. user@example.com)")
|
||||
parser.add_argument("--name", help="Addressbook name")
|
||||
parser.add_argument("--sharee-email", help="Email address to share the addressbook with")
|
||||
parser.add_argument("--acl", help="ACL rights for the sharee (e.g. r, w, rw)")
|
||||
parser.add_argument("--subscribe", action='store_true', help="Subscribe the sharee to the addressbook")
|
||||
parser.add_argument("--contact-name", help="Name of the contact or contactlist to add or delete")
|
||||
parser.add_argument("--contact-email", help="Email address of the contact to add")
|
||||
parser.add_argument("--type", choices=["card", "list"], help="Type of contact to add: card (single contact) or list (distribution list)")
|
||||
|
||||
107
data/Dockerfiles/controller/mailcow-adm/models/AliasModel.py
Normal file
107
data/Dockerfiles/controller/mailcow-adm/models/AliasModel.py
Normal file
@@ -0,0 +1,107 @@
|
||||
from modules.Mailcow import Mailcow
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
class AliasModel(BaseModel):
|
||||
parser_command = "alias"
|
||||
required_args = {
|
||||
"add": [["address", "goto"]],
|
||||
"delete": [["id"]],
|
||||
"get": [["id"]],
|
||||
"edit": [["id"]]
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
id=None,
|
||||
address=None,
|
||||
goto=None,
|
||||
active=None,
|
||||
sogo_visible=None,
|
||||
**kwargs
|
||||
):
|
||||
self.mailcow = Mailcow()
|
||||
|
||||
self.id = id
|
||||
self.address = address
|
||||
self.goto = goto
|
||||
self.active = active
|
||||
self.sogo_visible = sogo_visible
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data):
|
||||
return cls(
|
||||
address=data.get("address"),
|
||||
goto=data.get("goto"),
|
||||
active=data.get("active", None),
|
||||
sogo_visible=data.get("sogo_visible", None)
|
||||
)
|
||||
|
||||
def getAdd(self):
|
||||
"""
|
||||
Get the alias details as a dictionary for adding, sets default values.
|
||||
:return: Dictionary containing alias details.
|
||||
"""
|
||||
|
||||
alias = {
|
||||
"address": self.address,
|
||||
"goto": self.goto,
|
||||
"active": self.active if self.active is not None else 1,
|
||||
"sogo_visible": self.sogo_visible if self.sogo_visible is not None else 0
|
||||
}
|
||||
return {key: value for key, value in alias.items() if value is not None}
|
||||
|
||||
def getEdit(self):
|
||||
"""
|
||||
Get the alias details as a dictionary for editing, sets no default values.
|
||||
:return: Dictionary containing mailbox details.
|
||||
"""
|
||||
|
||||
alias = {
|
||||
"address": self.address,
|
||||
"goto": self.goto,
|
||||
"active": self.active,
|
||||
"sogo_visible": self.sogo_visible
|
||||
}
|
||||
return {key: value for key, value in alias.items() if value is not None}
|
||||
|
||||
def get(self):
|
||||
"""
|
||||
Get the mailbox details from the mailcow API.
|
||||
:return: Response from the mailcow API.
|
||||
"""
|
||||
return self.mailcow.getAlias(self.id)
|
||||
|
||||
def delete(self):
|
||||
"""
|
||||
Get the mailbox details from the mailcow API.
|
||||
:return: Response from the mailcow API.
|
||||
"""
|
||||
return self.mailcow.deleteAlias(self.id)
|
||||
|
||||
def add(self):
|
||||
"""
|
||||
Get the mailbox details from the mailcow API.
|
||||
:return: Response from the mailcow API.
|
||||
"""
|
||||
return self.mailcow.addAlias(self.getAdd())
|
||||
|
||||
def edit(self):
|
||||
"""
|
||||
Get the mailbox details from the mailcow API.
|
||||
:return: Response from the mailcow API.
|
||||
"""
|
||||
return self.mailcow.editAlias(self.id, self.getEdit())
|
||||
|
||||
@classmethod
|
||||
def add_parser(cls, subparsers):
|
||||
parser = subparsers.add_parser(
|
||||
cls.parser_command,
|
||||
help="Manage aliases (add, delete, get, edit)"
|
||||
)
|
||||
parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: add, delete, get, edit")
|
||||
parser.add_argument("--id", help="Alias object ID (required for get, edit, delete)")
|
||||
parser.add_argument("--address", help="Alias email address (e.g. alias@example.com)")
|
||||
parser.add_argument("--goto", help="Destination address(es), comma-separated (e.g. user1@example.com,user2@example.com)")
|
||||
parser.add_argument("--active", choices=["1", "0"], help="Activate (1) or deactivate (0) the alias")
|
||||
parser.add_argument("--sogo-visible", choices=["1", "0"], help="Show alias in SOGo addressbook (1 = yes, 0 = no)")
|
||||
|
||||
35
data/Dockerfiles/controller/mailcow-adm/models/BaseModel.py
Normal file
35
data/Dockerfiles/controller/mailcow-adm/models/BaseModel.py
Normal file
@@ -0,0 +1,35 @@
|
||||
class BaseModel:
|
||||
parser_command = ""
|
||||
required_args = {}
|
||||
|
||||
@classmethod
|
||||
def has_required_args(cls, args):
|
||||
"""
|
||||
Validate that all required arguments are present.
|
||||
"""
|
||||
object_name = args.object if hasattr(args, "object") else args.get("object")
|
||||
required_lists = cls.required_args.get(object_name, False)
|
||||
|
||||
if not required_lists:
|
||||
return False
|
||||
|
||||
for required_set in required_lists:
|
||||
result = True
|
||||
for required_args in required_set:
|
||||
if isinstance(args, dict):
|
||||
if not args.get(required_args):
|
||||
result = False
|
||||
break
|
||||
elif not hasattr(args, required_args):
|
||||
result = False
|
||||
break
|
||||
if result:
|
||||
break
|
||||
|
||||
if not result:
|
||||
print(f"Required arguments for '{object_name}': {required_lists}")
|
||||
return result
|
||||
|
||||
@classmethod
|
||||
def add_parser(cls, subparsers):
|
||||
pass
|
||||
111
data/Dockerfiles/controller/mailcow-adm/models/CalendarModel.py
Normal file
111
data/Dockerfiles/controller/mailcow-adm/models/CalendarModel.py
Normal file
@@ -0,0 +1,111 @@
|
||||
from modules.Sogo import Sogo
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
class CalendarModel(BaseModel):
|
||||
parser_command = "calendar"
|
||||
required_args = {
|
||||
"add": [["username", "name"]],
|
||||
"delete": [["username", "name"]],
|
||||
"get": [["username"]],
|
||||
"import_ics": [["username", "name", "ics"]],
|
||||
"set_acl": [["username", "name", "sharee_email", "acl"]],
|
||||
"get_acl": [["username", "name"]],
|
||||
"delete_acl": [["username", "name", "sharee_email"]],
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
username=None,
|
||||
name=None,
|
||||
sharee_email=None,
|
||||
acl=None,
|
||||
subscribe=None,
|
||||
ics=None,
|
||||
**kwargs
|
||||
):
|
||||
self.sogo = Sogo(username)
|
||||
|
||||
self.name = name
|
||||
self.acl = acl
|
||||
self.sharee_email = sharee_email
|
||||
self.subscribe = subscribe
|
||||
self.ics = ics
|
||||
|
||||
def add(self):
|
||||
"""
|
||||
Add a new calendar.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
return self.sogo.addCalendar(self.name)
|
||||
|
||||
def delete(self):
|
||||
"""
|
||||
Delete a calendar.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
calendar_id = self.sogo.getCalendarIdByName(self.name)
|
||||
if not calendar_id:
|
||||
print(f"Calendar '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
return self.sogo.deleteCalendar(calendar_id)
|
||||
|
||||
def get(self):
|
||||
"""
|
||||
Get the calendar details.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
return self.sogo.getCalendar()
|
||||
|
||||
def set_acl(self):
|
||||
"""
|
||||
Set ACL for the calendar.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
calendar_id = self.sogo.getCalendarIdByName(self.name)
|
||||
if not calendar_id:
|
||||
print(f"Calendar '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
return self.sogo.setCalendarACL(calendar_id, self.sharee_email, self.acl, self.subscribe)
|
||||
|
||||
def delete_acl(self):
|
||||
"""
|
||||
Delete the calendar ACL.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
calendar_id = self.sogo.getCalendarIdByName(self.name)
|
||||
if not calendar_id:
|
||||
print(f"Calendar '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
return self.sogo.deleteCalendarACL(calendar_id, self.sharee_email)
|
||||
|
||||
def get_acl(self):
|
||||
"""
|
||||
Get the ACL for the calendar.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
calendar_id = self.sogo.getCalendarIdByName(self.name)
|
||||
if not calendar_id:
|
||||
print(f"Calendar '{self.name}' not found for user '{self.username}'.")
|
||||
return None
|
||||
return self.sogo.getCalendarACL(calendar_id)
|
||||
|
||||
def import_ics(self):
|
||||
"""
|
||||
Import a calendar from an ICS file.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
return self.sogo.importCalendar(self.name, self.ics)
|
||||
|
||||
@classmethod
|
||||
def add_parser(cls, subparsers):
|
||||
parser = subparsers.add_parser(
|
||||
cls.parser_command,
|
||||
help="Manage calendars (add, delete, get, import_ics, set_acl, get_acl, delete_acl)"
|
||||
)
|
||||
parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: add, delete, get, import_ics, set_acl, get_acl, delete_acl")
|
||||
parser.add_argument("--username", required=True, help="Username of the calendar owner (e.g. user@example.com)")
|
||||
parser.add_argument("--name", help="Calendar name")
|
||||
parser.add_argument("--ics", help="Path to ICS file for import")
|
||||
parser.add_argument("--sharee-email", help="Email address to share the calendar with")
|
||||
parser.add_argument("--acl", help="ACL rights for the sharee (e.g. r, w, rw)")
|
||||
parser.add_argument("--subscribe", action='store_true', help="Subscribe the sharee to the calendar")
|
||||
162
data/Dockerfiles/controller/mailcow-adm/models/DomainModel.py
Normal file
162
data/Dockerfiles/controller/mailcow-adm/models/DomainModel.py
Normal file
@@ -0,0 +1,162 @@
|
||||
from modules.Mailcow import Mailcow
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
class DomainModel(BaseModel):
|
||||
parser_command = "domain"
|
||||
required_args = {
|
||||
"add": [["domain"]],
|
||||
"delete": [["domain"]],
|
||||
"get": [["domain"]],
|
||||
"edit": [["domain"]]
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
domain=None,
|
||||
active=None,
|
||||
aliases=None,
|
||||
backupmx=None,
|
||||
defquota=None,
|
||||
description=None,
|
||||
mailboxes=None,
|
||||
maxquota=None,
|
||||
quota=None,
|
||||
relay_all_recipients=None,
|
||||
rl_frame=None,
|
||||
rl_value=None,
|
||||
restart_sogo=None,
|
||||
tags=None,
|
||||
**kwargs
|
||||
):
|
||||
self.mailcow = Mailcow()
|
||||
|
||||
self.domain = domain
|
||||
self.active = active
|
||||
self.aliases = aliases
|
||||
self.backupmx = backupmx
|
||||
self.defquota = defquota
|
||||
self.description = description
|
||||
self.mailboxes = mailboxes
|
||||
self.maxquota = maxquota
|
||||
self.quota = quota
|
||||
self.relay_all_recipients = relay_all_recipients
|
||||
self.rl_frame = rl_frame
|
||||
self.rl_value = rl_value
|
||||
self.restart_sogo = restart_sogo
|
||||
self.tags = tags
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, data):
|
||||
return cls(
|
||||
domain=data.get("domain"),
|
||||
active=data.get("active", None),
|
||||
aliases=data.get("aliases", None),
|
||||
backupmx=data.get("backupmx", None),
|
||||
defquota=data.get("defquota", None),
|
||||
description=data.get("description", None),
|
||||
mailboxes=data.get("mailboxes", None),
|
||||
maxquota=data.get("maxquota", None),
|
||||
quota=data.get("quota", None),
|
||||
relay_all_recipients=data.get("relay_all_recipients", None),
|
||||
rl_frame=data.get("rl_frame", None),
|
||||
rl_value=data.get("rl_value", None),
|
||||
restart_sogo=data.get("restart_sogo", None),
|
||||
tags=data.get("tags", None)
|
||||
)
|
||||
|
||||
def getAdd(self):
|
||||
"""
|
||||
Get the domain details as a dictionary for adding, sets default values.
|
||||
:return: Dictionary containing domain details.
|
||||
"""
|
||||
domain = {
|
||||
"domain": self.domain,
|
||||
"active": self.active if self.active is not None else 1,
|
||||
"aliases": self.aliases if self.aliases is not None else 400,
|
||||
"backupmx": self.backupmx if self.backupmx is not None else 0,
|
||||
"defquota": self.defquota if self.defquota is not None else 3072,
|
||||
"description": self.description if self.description is not None else "",
|
||||
"mailboxes": self.mailboxes if self.mailboxes is not None else 10,
|
||||
"maxquota": self.maxquota if self.maxquota is not None else 10240,
|
||||
"quota": self.quota if self.quota is not None else 10240,
|
||||
"relay_all_recipients": self.relay_all_recipients if self.relay_all_recipients is not None else 0,
|
||||
"rl_frame": self.rl_frame,
|
||||
"rl_value": self.rl_value,
|
||||
"restart_sogo": self.restart_sogo if self.restart_sogo is not None else 0,
|
||||
"tags": self.tags if self.tags is not None else []
|
||||
}
|
||||
return {key: value for key, value in domain.items() if value is not None}
|
||||
|
||||
def getEdit(self):
|
||||
"""
|
||||
Get the domain details as a dictionary for editing, sets no default values.
|
||||
:return: Dictionary containing domain details.
|
||||
"""
|
||||
domain = {
|
||||
"domain": self.domain,
|
||||
"active": self.active,
|
||||
"aliases": self.aliases,
|
||||
"backupmx": self.backupmx,
|
||||
"defquota": self.defquota,
|
||||
"description": self.description,
|
||||
"mailboxes": self.mailboxes,
|
||||
"maxquota": self.maxquota,
|
||||
"quota": self.quota,
|
||||
"relay_all_recipients": self.relay_all_recipients,
|
||||
"rl_frame": self.rl_frame,
|
||||
"rl_value": self.rl_value,
|
||||
"restart_sogo": self.restart_sogo,
|
||||
"tags": self.tags
|
||||
}
|
||||
return {key: value for key, value in domain.items() if value is not None}
|
||||
|
||||
def get(self):
|
||||
"""
|
||||
Get the domain details from the mailcow API.
|
||||
:return: Response from the mailcow API.
|
||||
"""
|
||||
return self.mailcow.getDomain(self.domain)
|
||||
|
||||
def delete(self):
|
||||
"""
|
||||
Delete the domain from the mailcow API.
|
||||
:return: Response from the mailcow API.
|
||||
"""
|
||||
return self.mailcow.deleteDomain(self.domain)
|
||||
|
||||
def add(self):
|
||||
"""
|
||||
Add the domain to the mailcow API.
|
||||
:return: Response from the mailcow API.
|
||||
"""
|
||||
return self.mailcow.addDomain(self.getAdd())
|
||||
|
||||
def edit(self):
    """
    Update this domain via the mailcow API, using getEdit() to build
    the payload without defaults.

    :return: API response for the edit operation.
    """
    payload = self.getEdit()
    return self.mailcow.editDomain(self.domain, payload)
|
||||
|
||||
@classmethod
def add_parser(cls, subparsers):
    """
    Register the 'domain' sub-command and its CLI arguments.

    :param subparsers: argparse subparsers object to attach the command to.
    """
    parser = subparsers.add_parser(
        cls.parser_command,
        help="Manage domains (add, delete, get, edit)"
    )
    parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: add, delete, get, edit")
    parser.add_argument("--domain", required=True, help="Domain name (e.g. domain.tld)")
    parser.add_argument("--active", choices=["1", "0"], help="Activate (1) or deactivate (0) the domain")
    parser.add_argument("--aliases", help="Number of aliases allowed for the domain")
    parser.add_argument("--backupmx", choices=["1", "0"], help="Enable (1) or disable (0) backup MX")
    parser.add_argument("--defquota", help="Default quota for mailboxes in MB")
    parser.add_argument("--description", help="Description of the domain")
    parser.add_argument("--mailboxes", help="Number of mailboxes allowed for the domain")
    parser.add_argument("--maxquota", help="Maximum quota for the domain in MB")
    parser.add_argument("--quota", help="Quota used by the domain in MB")
    parser.add_argument("--relay-all-recipients", choices=["1", "0"], help="Relay all recipients (1 = yes, 0 = no)")
    parser.add_argument("--rl-frame", help="Rate limit frame (e.g., s, m, h)")
    parser.add_argument("--rl-value", help="Rate limit value")
    # Consistency fix: every other 1/0 flag restricts its input with
    # choices; --restart-sogo previously accepted arbitrary strings.
    parser.add_argument("--restart-sogo", choices=["1", "0"], help="Restart SOGo after changes (1 = yes, 0 = no)")
    parser.add_argument("--tags", nargs="*", help="Tags for the domain")
||||
|
||||
@@ -0,0 +1,106 @@
|
||||
from modules.Mailcow import Mailcow
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
class DomainadminModel(BaseModel):
    """CLI model for managing mailcow domain administrators (add, delete, get, edit)."""

    parser_command = "domainadmin"
    # Required CLI arguments per action; each inner list is one valid combination.
    required_args = {
        "add": [["username", "domains", "password"]],
        "delete": [["username"]],
        "get": [["username"]],
        "edit": [["username"]]
    }

    def __init__(
        self,
        username=None,
        domains=None,
        password=None,
        active=None,
        password2=None,
        **kwargs
    ):
        self.mailcow = Mailcow()

        self.username = username
        self.domains = domains
        self.password = password
        # Bug fix: from_dict passes password2=, which was previously swallowed
        # by **kwargs and ignored. The API expects the password twice; default
        # the confirmation to the password itself when not explicitly given.
        self.password2 = password2 if password2 is not None else password
        self.active = active

    @classmethod
    def from_dict(cls, data):
        """Build a model instance from a plain dictionary (e.g. parsed CLI/JSON input)."""
        return cls(
            username=data.get("username"),
            domains=data.get("domains"),
            password=data.get("password"),
            password2=data.get("password"),
            active=data.get("active", None),
        )

    def getAdd(self):
        """
        Get the domain admin details as a dictionary for adding, sets default values.

        :return: Dictionary containing domain admin details (None values removed).
        """
        domainadmin = {
            "username": self.username,
            "domains": self.domains,
            "password": self.password,
            "password2": self.password2,
            "active": self.active if self.active is not None else "1"
        }
        return {key: value for key, value in domainadmin.items() if value is not None}

    def getEdit(self):
        """
        Get the domain admin details as a dictionary for editing, sets no default values.

        :return: Dictionary containing domain admin details (None values removed).
        """
        domainadmin = {
            "username": self.username,
            "domains": self.domains,
            "password": self.password,
            "password2": self.password2,
            "active": self.active
        }
        return {key: value for key, value in domainadmin.items() if value is not None}

    def get(self):
        """
        Get the domain admin details from the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.getDomainadmin(self.username)

    def delete(self):
        """
        Delete the domain admin from the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.deleteDomainadmin(self.username)

    def add(self):
        """
        Add the domain admin to the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.addDomainadmin(self.getAdd())

    def edit(self):
        """
        Edit the domain admin in the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.editDomainadmin(self.username, self.getEdit())

    @classmethod
    def add_parser(cls, subparsers):
        """Register the 'domainadmin' sub-command and its CLI arguments."""
        parser = subparsers.add_parser(
            cls.parser_command,
            help="Manage domain admins (add, delete, get, edit)"
        )
        parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: add, delete, get, edit")
        parser.add_argument("--username", help="Username for the domain admin")
        parser.add_argument("--domains", help="Comma-separated list of domains")
        parser.add_argument("--password", help="Password for the domain admin")
        parser.add_argument("--active", choices=["1", "0"], help="Activate (1) or deactivate (0) the domain admin")
|
||||
|
||||
164
data/Dockerfiles/controller/mailcow-adm/models/MailboxModel.py
Normal file
164
data/Dockerfiles/controller/mailcow-adm/models/MailboxModel.py
Normal file
@@ -0,0 +1,164 @@
|
||||
from modules.Mailcow import Mailcow
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
class MailboxModel(BaseModel):
    """CLI model for managing mailcow mailboxes (add, delete, get, edit)."""

    parser_command = "mailbox"
    # Required CLI arguments per action; each inner list is one valid combination.
    required_args = {
        "add": [["username", "password"]],
        "delete": [["username"]],
        "get": [["username"]],
        "edit": [["username"]]
    }

    def __init__(
        self,
        password=None,
        username=None,
        domain=None,
        local_part=None,
        active=None,
        sogo_access=None,
        name=None,
        authsource=None,
        quota=None,
        force_pw_update=None,
        tls_enforce_in=None,
        tls_enforce_out=None,
        tags=None,
        sender_acl=None,
        **kwargs
    ):
        self.mailcow = Mailcow()

        # A mailbox can be addressed either by full username (user@domain)
        # or by separate local_part/domain values.
        if username is not None and "@" in username:
            self.username = username
            # maxsplit=1 avoids a ValueError on malformed addresses that
            # contain more than one '@'.
            self.local_part, self.domain = username.split("@", 1)
        elif local_part is not None and domain is not None:
            self.username = f"{local_part}@{domain}"
            self.local_part = local_part
            self.domain = domain
        else:
            # Bug fix: previously this branch produced the literal string
            # "None@None" when neither addressing form was supplied.
            self.username = username
            self.local_part = local_part
            self.domain = domain

        self.password = password
        self.password2 = password  # the API expects the password twice
        self.active = active
        self.sogo_access = sogo_access
        self.name = name
        self.authsource = authsource
        self.quota = quota
        self.force_pw_update = force_pw_update
        self.tls_enforce_in = tls_enforce_in
        self.tls_enforce_out = tls_enforce_out
        self.tags = tags
        self.sender_acl = sender_acl

    @classmethod
    def from_dict(cls, data):
        """Build a model instance from a plain dictionary (e.g. parsed CLI/JSON input)."""
        return cls(
            domain=data.get("domain"),
            local_part=data.get("local_part"),
            password=data.get("password"),
            active=data.get("active", None),
            sogo_access=data.get("sogo_access", None),
            name=data.get("name", None),
            authsource=data.get("authsource", None),
            quota=data.get("quota", None),
            force_pw_update=data.get("force_pw_update", None),
            tls_enforce_in=data.get("tls_enforce_in", None),
            tls_enforce_out=data.get("tls_enforce_out", None),
            tags=data.get("tags", None),
            sender_acl=data.get("sender_acl", None)
        )

    def getAdd(self):
        """
        Get the mailbox details as a dictionary for adding, sets default values.

        :return: Dictionary containing mailbox details (None values removed).
        """
        mailbox = {
            "domain": self.domain,
            "local_part": self.local_part,
            "password": self.password,
            "password2": self.password2,
            "active": self.active if self.active is not None else 1,
            "name": self.name if self.name is not None else "",
            "authsource": self.authsource if self.authsource is not None else "mailcow",
            "quota": self.quota if self.quota is not None else 0,
            "force_pw_update": self.force_pw_update if self.force_pw_update is not None else 0,
            "tls_enforce_in": self.tls_enforce_in if self.tls_enforce_in is not None else 0,
            "tls_enforce_out": self.tls_enforce_out if self.tls_enforce_out is not None else 0,
            "tags": self.tags if self.tags is not None else [],
            # Bug fix: these options were accepted by the CLI and __init__ but
            # silently dropped from the API payload. They are only sent when
            # explicitly set (None values are filtered below).
            # NOTE(review): sender_acl is passed through as given; confirm
            # whether the API expects a list rather than a comma-separated string.
            "sogo_access": self.sogo_access,
            "sender_acl": self.sender_acl
        }
        return {key: value for key, value in mailbox.items() if value is not None}

    def getEdit(self):
        """
        Get the mailbox details as a dictionary for editing, sets no default values.

        :return: Dictionary containing mailbox details (None values removed).
        """
        mailbox = {
            "domain": self.domain,
            "local_part": self.local_part,
            "password": self.password,
            "password2": self.password2,
            "active": self.active,
            "name": self.name,
            "authsource": self.authsource,
            "quota": self.quota,
            "force_pw_update": self.force_pw_update,
            "tls_enforce_in": self.tls_enforce_in,
            "tls_enforce_out": self.tls_enforce_out,
            "tags": self.tags,
            # Bug fix: previously omitted, so these could never be edited.
            "sogo_access": self.sogo_access,
            "sender_acl": self.sender_acl
        }
        return {key: value for key, value in mailbox.items() if value is not None}

    def get(self):
        """
        Get the mailbox details from the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.getMailbox(self.username)

    def delete(self):
        """
        Delete the mailbox from the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.deleteMailbox(self.username)

    def add(self):
        """
        Add the mailbox to the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.addMailbox(self.getAdd())

    def edit(self):
        """
        Edit the mailbox in the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.editMailbox(self.username, self.getEdit())

    @classmethod
    def add_parser(cls, subparsers):
        """Register the 'mailbox' sub-command and its CLI arguments."""
        parser = subparsers.add_parser(
            cls.parser_command,
            help="Manage mailboxes (add, delete, get, edit)"
        )
        parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: add, delete, get, edit")
        parser.add_argument("--username", help="Full email address of the mailbox (e.g. user@example.com)")
        parser.add_argument("--password", help="Password for the mailbox (required for add)")
        parser.add_argument("--active", choices=["1", "0"], help="Activate (1) or deactivate (0) the mailbox")
        parser.add_argument("--sogo-access", choices=["1", "0"], help="Redirect mailbox to SOGo after web login (1 = yes, 0 = no)")
        parser.add_argument("--name", help="Display name of the mailbox owner")
        parser.add_argument("--authsource", help="Authentication source (default: mailcow)")
        parser.add_argument("--quota", help="Mailbox quota in bytes (0 = unlimited)")
        parser.add_argument("--force-pw-update", choices=["1", "0"], help="Force password update on next login (1 = yes, 0 = no)")
        parser.add_argument("--tls-enforce-in", choices=["1", "0"], help="Enforce TLS for incoming emails (1 = yes, 0 = no)")
        parser.add_argument("--tls-enforce-out", choices=["1", "0"], help="Enforce TLS for outgoing emails (1 = yes, 0 = no)")
        parser.add_argument("--tags", help="Comma-separated list of tags for the mailbox")
        parser.add_argument("--sender-acl", help="Comma-separated list of allowed sender addresses for this mailbox")
|
||||
|
||||
@@ -0,0 +1,67 @@
|
||||
from modules.Dovecot import Dovecot
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
class MaildirModel(BaseModel):
    """CLI model for maildir maintenance via Dovecot (encrypt, decrypt, restore)."""

    parser_command = "maildir"
    # Required CLI arguments per action; each inner list is one valid combination.
    required_args = {
        "encrypt": [],
        "decrypt": [],
        "restore": [["username", "item"], ["list"]]
    }

    def __init__(
        self,
        username=None,
        source=None,
        item=None,
        overwrite=None,
        list=None,
        **kwargs
    ):
        self.dovecot = Dovecot()

        # Extra CLI options (e.g. --source-dir / --output-dir) arrive as kwargs.
        for key, value in kwargs.items():
            setattr(self, key, value)

        self.username = username
        self.source = source
        self.item = item
        self.overwrite = overwrite
        self.list = list  # NOTE: shadows builtins.list; kept for CLI compatibility

        # Bug fix: encrypt()/decrypt() read these attributes, but they only
        # existed when passed via kwargs, causing an AttributeError otherwise.
        self.source_dir = getattr(self, "source_dir", None)
        self.output_dir = getattr(self, "output_dir", None)

    def encrypt(self):
        """
        Encrypt the maildir for the specified user or all.

        :return: Response from Dovecot.
        """
        if self.source_dir is None:
            # Let Dovecot apply its own default source directory.
            # NOTE(review): assumes encryptMaildir mirrors decryptMaildir's
            # defaulted signature — confirm against modules/Dovecot.py.
            return self.dovecot.encryptMaildir(output_dir=self.output_dir)
        return self.dovecot.encryptMaildir(self.source_dir, self.output_dir)

    def decrypt(self):
        """
        Decrypt the maildir for the specified user or all.

        :return: Response from Dovecot.
        """
        if self.source_dir is None:
            # Let Dovecot fall back to its default source directory
            # rather than passing None as an explicit path.
            return self.dovecot.decryptMaildir(output_dir=self.output_dir)
        return self.dovecot.decryptMaildir(self.source_dir, self.output_dir)

    def restore(self):
        """
        Restore or List maildir data for the specified user.

        :return: Response from Dovecot.
        """
        if self.list:
            return self.dovecot.listDeletedMaildirs()
        return self.dovecot.restoreMaildir(self.username, self.item)

    @classmethod
    def add_parser(cls, subparsers):
        """Register the 'maildir' sub-command and its CLI arguments."""
        parser = subparsers.add_parser(
            cls.parser_command,
            help="Manage maildir (encrypt, decrypt, restore)"
        )
        parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: encrypt, decrypt, restore")
        parser.add_argument("--item", help="Item to restore")
        parser.add_argument("--username", help="Username to restore the item to")
        parser.add_argument("--list", action="store_true", help="List items to restore")
        parser.add_argument("--source-dir", help="Path to the source maildir to import/encrypt/decrypt")
        parser.add_argument("--output-dir", help="Directory to store encrypted/decrypted files inside the Dovecot container")
|
||||
@@ -0,0 +1,62 @@
|
||||
import json
|
||||
from models.BaseModel import BaseModel
|
||||
from modules.Mailer import Mailer
|
||||
|
||||
class MailerModel(BaseModel):
    """CLI model that sends a templated email through the local postfix container."""

    parser_command = "mail"
    # Required CLI arguments per action; each inner list is one valid combination.
    required_args = {
        "send": [["sender", "recipient", "subject", "body"]]
    }

    def __init__(
        self,
        sender=None,
        recipient=None,
        subject=None,
        body=None,
        context=None,
        **kwargs
    ):
        self.sender = sender
        self.recipient = recipient
        self.subject = subject
        self.body = body
        self.context = context

    def send(self):
        """
        Render the body template with the given context and send the mail.

        :return: The mailer's send result, or an error string when the
                 supplied context is not valid JSON.
        """
        if self.context is None:
            self.context = {}
        else:
            try:
                self.context = json.loads(self.context)
            except json.JSONDecodeError as e:
                return f"Invalid context JSON: {e}"

        mailer = Mailer(
            smtp_host="postfix-mailcow",
            smtp_port=25,
            username=self.sender,
            password="",
            use_tls=True
        )
        recipients = self.recipient.split(",")
        return mailer.send_mail(
            subject=self.subject,
            from_addr=self.sender,
            to_addrs=recipients,
            template=self.body,
            context=self.context
        )

    @classmethod
    def add_parser(cls, subparsers):
        """Register the 'mail' sub-command and its CLI arguments."""
        mail_parser = subparsers.add_parser(
            cls.parser_command,
            help="Send emails via SMTP"
        )
        mail_parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: send")
        mail_parser.add_argument("--sender", required=True, help="Email sender address")
        mail_parser.add_argument("--recipient", required=True, help="Email recipient address (comma-separated for multiple)")
        mail_parser.add_argument("--subject", required=True, help="Email subject")
        mail_parser.add_argument("--body", required=True, help="Email body (Jinja2 template supported)")
        mail_parser.add_argument("--context", help="Context for Jinja2 template rendering (JSON format)")
|
||||
@@ -0,0 +1,45 @@
|
||||
from modules.Mailcow import Mailcow
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
class StatusModel(BaseModel):
    """Read-only status queries against the mailcow API."""

    parser_command = "status"
    # Each action takes no additional arguments.
    required_args = {
        "version": [[]],
        "vmail": [[]],
        "containers": [[]]
    }

    def __init__(self, **kwargs):
        self.mailcow = Mailcow()

    def version(self):
        """
        Return the version of the mailcow instance.

        :return: Response from the mailcow API.
        """
        response = self.mailcow.getStatusVersion()
        return response

    def vmail(self):
        """
        Return the vmail details from the mailcow API.

        :return: Response from the mailcow API.
        """
        response = self.mailcow.getStatusVmail()
        return response

    def containers(self):
        """
        Return the status of containers in the mailcow instance.

        :return: Response from the mailcow API.
        """
        response = self.mailcow.getStatusContainers()
        return response

    @classmethod
    def add_parser(cls, subparsers):
        """Register the 'status' sub-command."""
        status_parser = subparsers.add_parser(
            cls.parser_command,
            help="Get information about mailcow (version, vmail, containers)"
        )
        status_parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: version, vmail, containers")
|
||||
221
data/Dockerfiles/controller/mailcow-adm/models/SyncjobModel.py
Normal file
221
data/Dockerfiles/controller/mailcow-adm/models/SyncjobModel.py
Normal file
@@ -0,0 +1,221 @@
|
||||
from modules.Mailcow import Mailcow
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
class SyncjobModel(BaseModel):
    """CLI model for managing imapsync jobs (add, delete, get, edit, run)."""

    parser_command = "syncjob"
    # Required CLI arguments per action; each inner list is one valid combination.
    required_args = {
        "add": [["username", "host1", "port1", "user1", "password1", "enc1"]],
        "delete": [["id"]],
        "get": [["username"]],
        "edit": [["id"]],
        "run": [["id"]]
    }

    def __init__(
        self,
        id=None,
        username=None,
        host1=None,
        port1=None,
        user1=None,
        password1=None,
        enc1=None,
        mins_interval=None,
        subfolder2=None,
        maxage=None,
        maxbytespersecond=None,
        timeout1=None,
        timeout2=None,
        exclude=None,
        custom_parameters=None,
        delete2duplicates=None,
        delete1=None,
        delete2=None,
        automap=None,
        skipcrossduplicates=None,
        subscribeall=None,
        active=None,
        force=None,
        **kwargs
    ):
        self.mailcow = Mailcow()

        for key, value in kwargs.items():
            setattr(self, key, value)

        # Bug fix: the CLI exposes --del1/--del2, which argparse delivers as
        # kwargs 'del1'/'del2'; previously those flags never reached
        # delete1/delete2 and were silently ignored.
        if delete1 is None:
            delete1 = kwargs.get("del1")
        if delete2 is None:
            delete2 = kwargs.get("del2")

        self.id = id
        self.username = username
        self.host1 = host1
        self.port1 = port1
        self.user1 = user1
        self.password1 = password1
        self.enc1 = enc1
        self.mins_interval = mins_interval
        self.subfolder2 = subfolder2
        self.maxage = maxage
        self.maxbytespersecond = maxbytespersecond
        self.timeout1 = timeout1
        self.timeout2 = timeout2
        self.exclude = exclude
        self.custom_parameters = custom_parameters
        self.delete2duplicates = delete2duplicates
        self.delete1 = delete1
        self.delete2 = delete2
        self.automap = automap
        self.skipcrossduplicates = skipcrossduplicates
        self.subscribeall = subscribeall
        self.active = active
        self.force = force

    @staticmethod
    def _as_flag(value):
        """Coerce a CLI/JSON flag value (1/0, "1"/"0", True, None) to int 1 or 0."""
        return 1 if value in (1, "1") else 0

    @classmethod
    def from_dict(cls, data):
        """Build a model instance from a plain dictionary (e.g. parsed CLI/JSON input)."""
        return cls(
            username=data.get("username"),
            host1=data.get("host1"),
            port1=data.get("port1"),
            user1=data.get("user1"),
            password1=data.get("password1"),
            enc1=data.get("enc1"),
            mins_interval=data.get("mins_interval", None),
            subfolder2=data.get("subfolder2", None),
            maxage=data.get("maxage", None),
            maxbytespersecond=data.get("maxbytespersecond", None),
            timeout1=data.get("timeout1", None),
            timeout2=data.get("timeout2", None),
            exclude=data.get("exclude", None),
            custom_parameters=data.get("custom_parameters", None),
            delete2duplicates=data.get("delete2duplicates", None),
            delete1=data.get("delete1", None),
            delete2=data.get("delete2", None),
            automap=data.get("automap", None),
            skipcrossduplicates=data.get("skipcrossduplicates", None),
            subscribeall=data.get("subscribeall", None),
            active=data.get("active", None),
        )

    def getAdd(self):
        """
        Get the sync job details as a dictionary for adding, sets default values.

        :return: Dictionary containing sync job details (None values removed).
        """
        syncjob = {
            "username": self.username,
            "host1": self.host1,
            "port1": self.port1,
            "user1": self.user1,
            "password1": self.password1,
            "enc1": self.enc1,
            "mins_interval": self.mins_interval if self.mins_interval is not None else 20,
            "subfolder2": self.subfolder2 if self.subfolder2 is not None else "",
            "maxage": self.maxage if self.maxage is not None else 0,
            "maxbytespersecond": self.maxbytespersecond if self.maxbytespersecond is not None else 0,
            "timeout1": self.timeout1 if self.timeout1 is not None else 600,
            "timeout2": self.timeout2 if self.timeout2 is not None else 600,
            "exclude": self.exclude if self.exclude is not None else "(?i)spam|(?i)junk",
            "custom_parameters": self.custom_parameters if self.custom_parameters is not None else "",
            # Bug fix: `1 if value else 0` treated the CLI string "0" as
            # truthy and sent 1; _as_flag normalizes strings correctly.
            "delete2duplicates": self._as_flag(self.delete2duplicates),
            "delete1": self._as_flag(self.delete1),
            "delete2": self._as_flag(self.delete2),
            "automap": self._as_flag(self.automap),
            "skipcrossduplicates": self._as_flag(self.skipcrossduplicates),
            "subscribeall": self._as_flag(self.subscribeall),
            "active": self._as_flag(self.active)
        }
        return {key: value for key, value in syncjob.items() if value is not None}

    def getEdit(self):
        """
        Get the sync job details as a dictionary for editing, sets no default values.

        :return: Dictionary containing sync job details (None values removed).
        """
        syncjob = {
            "username": self.username,
            "host1": self.host1,
            "port1": self.port1,
            "user1": self.user1,
            "password1": self.password1,
            "enc1": self.enc1,
            "mins_interval": self.mins_interval,
            "subfolder2": self.subfolder2,
            "maxage": self.maxage,
            "maxbytespersecond": self.maxbytespersecond,
            "timeout1": self.timeout1,
            "timeout2": self.timeout2,
            "exclude": self.exclude,
            "custom_parameters": self.custom_parameters,
            "delete2duplicates": self.delete2duplicates,
            "delete1": self.delete1,
            "delete2": self.delete2,
            "automap": self.automap,
            "skipcrossduplicates": self.skipcrossduplicates,
            "subscribeall": self.subscribeall,
            "active": self.active
        }
        return {key: value for key, value in syncjob.items() if value is not None}

    def get(self):
        """
        Get the sync job details from the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.getSyncjob(self.username)

    def delete(self):
        """
        Delete the sync job from the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.deleteSyncjob(self.id)

    def add(self):
        """
        Add the sync job to the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.addSyncjob(self.getAdd())

    def edit(self):
        """
        Edit the sync job in the mailcow API.

        :return: Response from the mailcow API.
        """
        return self.mailcow.editSyncjob(self.id, self.getEdit())

    def run(self):
        """
        Run the sync job.

        :return: Response from the mailcow API.
        """
        return self.mailcow.runSyncjob(self.id, force=self.force)

    @classmethod
    def add_parser(cls, subparsers):
        """Register the 'syncjob' sub-command and its CLI arguments."""
        parser = subparsers.add_parser(
            cls.parser_command,
            help="Manage sync jobs (add, delete, get, edit)"
        )
        parser.add_argument("object", choices=list(cls.required_args.keys()), help="Action to perform: add, delete, get, edit")
        parser.add_argument("--id", help="Syncjob object ID (required for edit, delete, run)")
        parser.add_argument("--username", help="Target mailbox username (e.g. user@example.com)")
        parser.add_argument("--host1", help="Source IMAP server hostname")
        parser.add_argument("--port1", help="Source IMAP server port")
        parser.add_argument("--user1", help="Source IMAP account username")
        parser.add_argument("--password1", help="Source IMAP account password")
        parser.add_argument("--enc1", choices=["PLAIN", "SSL", "TLS"], help="Encryption for source server connection")
        parser.add_argument("--mins-interval", help="Sync interval in minutes (default: 20)")
        parser.add_argument("--subfolder2", help="Destination subfolder (default: empty)")
        parser.add_argument("--maxage", help="Maximum mail age in days (default: 0 = unlimited)")
        parser.add_argument("--maxbytespersecond", help="Maximum bandwidth in bytes/sec (default: 0 = unlimited)")
        parser.add_argument("--timeout1", help="Timeout for source server in seconds (default: 600)")
        parser.add_argument("--timeout2", help="Timeout for destination server in seconds (default: 600)")
        parser.add_argument("--exclude", help="Regex pattern to exclude folders (default: (?i)spam|(?i)junk)")
        parser.add_argument("--custom-parameters", help="Additional imapsync parameters")
        parser.add_argument("--delete2duplicates", choices=["1", "0"], help="Delete duplicates on destination (1 = yes, 0 = no)")
        parser.add_argument("--del1", choices=["1", "0"], help="Delete mails on source after sync (1 = yes, 0 = no)")
        parser.add_argument("--del2", choices=["1", "0"], help="Delete mails on destination after sync (1 = yes, 0 = no)")
        parser.add_argument("--automap", choices=["1", "0"], help="Enable folder automapping (1 = yes, 0 = no)")
        parser.add_argument("--skipcrossduplicates", choices=["1", "0"], help="Skip cross-account duplicates (1 = yes, 0 = no)")
        parser.add_argument("--subscribeall", choices=["1", "0"], help="Subscribe to all folders (1 = yes, 0 = no)")
        parser.add_argument("--active", choices=["1", "0"], help="Activate syncjob (1 = yes, 0 = no)")
        parser.add_argument("--force", action="store_true", help="Force the syncjob to run even if it is not active")
|
||||
|
||||
128
data/Dockerfiles/controller/mailcow-adm/modules/Docker.py
Normal file
128
data/Dockerfiles/controller/mailcow-adm/modules/Docker.py
Normal file
@@ -0,0 +1,128 @@
|
||||
import docker
|
||||
from docker.errors import APIError
|
||||
|
||||
class Docker:
    """Thin wrapper around the docker SDK that returns standardized result dicts.

    Every public method returns a dict with keys "status", "exit_code",
    and "output".
    """

    # Success messages use the proper past tense per lifecycle action.
    _PAST_TENSE = {"start": "started", "stop": "stopped", "restart": "restarted"}

    def __init__(self):
        self.client = docker.from_env()

    @staticmethod
    def _error(kind, message):
        """Build a standardized error response."""
        return {
            "status": "error",
            "exit_code": kind,
            "output": message
        }

    def exec_command(self, container_name, cmd, user=None):
        """
        Execute a command in a container by its container name.

        :param container_name: The name of the container.
        :param cmd: The command to execute (as accepted by exec_run).
        :param user: The user to execute the command as (optional).
        :return: A standardized response with status, output, and exit_code.
        """
        try:
            for container in self.client.containers.list(filters={"name": container_name}):
                exec_result = container.exec_run(cmd, user=user)
                return {
                    "status": "success",
                    "exit_code": exec_result.exit_code,
                    "output": exec_result.output.decode("utf-8")
                }
            # Bug fix: previously fell through and implicitly returned None
            # when no container matched, crashing callers that .get() the result.
            return self._error("NotFound", f"No running container matches '{container_name}'.")
        except APIError as e:
            return self._error("APIError", str(e))
        except Exception as e:
            return self._error("Exception", str(e))

    def _lifecycle(self, container_name, action):
        """
        Run a lifecycle action ("start" | "stop" | "restart") on the first
        container whose name matches.

        :param container_name: The name of the container.
        :param action: Name of the container method to invoke.
        :return: A standardized response with status, output, and exit_code.
        """
        try:
            for container in self.client.containers.list(filters={"name": container_name}):
                getattr(container, action)()
                return {
                    "status": "success",
                    "exit_code": "0",
                    "output": f"Container '{container_name}' {self._PAST_TENSE[action]} successfully."
                }
            # Bug fix: previously returned None implicitly when nothing matched.
            return self._error("NotFound", f"No running container matches '{container_name}'.")
        except APIError as e:
            return self._error("APIError", str(e))
        except Exception as e:
            # Bug fix: start_container previously returned the key "error_type"
            # instead of "exit_code", breaking the standardized response shape.
            return self._error("Exception", str(e))

    def start_container(self, container_name):
        """
        Start a container by its container name.

        :param container_name: The name of the container.
        :return: A standardized response with status, output, and exit_code.
        """
        return self._lifecycle(container_name, "start")

    def stop_container(self, container_name):
        """
        Stop a container by its container name.

        :param container_name: The name of the container.
        :return: A standardized response with status, output, and exit_code.
        """
        return self._lifecycle(container_name, "stop")

    def restart_container(self, container_name):
        """
        Restart a container by its container name.

        :param container_name: The name of the container.
        :return: A standardized response with status, output, and exit_code.
        """
        return self._lifecycle(container_name, "restart")
|
||||
206
data/Dockerfiles/controller/mailcow-adm/modules/Dovecot.py
Normal file
206
data/Dockerfiles/controller/mailcow-adm/modules/Dovecot.py
Normal file
@@ -0,0 +1,206 @@
|
||||
import os
|
||||
|
||||
from modules.Docker import Docker
|
||||
|
||||
class Dovecot:
    """Maintenance helpers that run dovecot/doveadm tasks inside the
    'dovecot-mailcow' container through the shared Docker exec wrapper."""

    def __init__(self):
        # All commands below are executed inside the container via this wrapper.
        self.docker = Docker()

    def decryptMaildir(self, source_dir="/var/vmail/", output_dir=None):
        """
        Decrypt files in /var/vmail using doveadm if they are encrypted.
        :param source_dir: Directory inside the Dovecot container to scan.
        :param output_dir: Directory inside the Dovecot container to store decrypted files, Default overwrite.
        :return: "Done" (progress and errors are printed, not returned).
        """
        # Fixed mail_crypt key-pair locations inside the container.
        private_key = "/mail_crypt/ecprivkey.pem"
        public_key = "/mail_crypt/ecpubkey.pem"

        if output_dir:
            # Ensure the output directory exists inside the container
            mkdir_result = self.docker.exec_command("dovecot-mailcow", f"bash -c 'mkdir -p {output_dir} && chown vmail:vmail {output_dir}'")
            if mkdir_result.get("status") != "success":
                print(f"Error creating output directory: {mkdir_result.get('output')}")
                return

        # Maildir message files carry "S=<size>,W=<vsize>" in their name; the
        # regex limits the scan to those files.
        # NOTE(review): paths are interpolated unquoted into shell commands
        # throughout this method; file names with spaces would break — confirm.
        find_command = [
            "find", source_dir, "-type", "f", "-regextype", "egrep", "-regex", ".*S=.*W=.*"
        ]

        try:
            find_result = self.docker.exec_command("dovecot-mailcow", " ".join(find_command))
            if find_result.get("status") != "success":
                print(f"Error finding files: {find_result.get('output')}")
                return

            files = find_result.get("output", "").splitlines()

            for file in files:
                # mail_crypt-encrypted files start with the literal "CRYPTED" magic.
                head_command = f"head -c7 {file}"
                head_result = self.docker.exec_command("dovecot-mailcow", head_command)
                if head_result.get("status") == "success" and head_result.get("output", "").strip() == "CRYPTED":
                    if output_dir:
                        # Preserve the directory structure in the output directory
                        relative_path = os.path.relpath(file, source_dir)
                        output_file = os.path.join(output_dir, relative_path)
                        current_path = output_dir
                        for part in os.path.dirname(relative_path).split(os.sep):
                            current_path = os.path.join(current_path, part)
                            # NOTE(review): '[ ! -d x ] && mkdir x && ...' exits
                            # non-zero when the directory already exists, which is
                            # then treated as an error here — confirm intended.
                            mkdir_result = self.docker.exec_command("dovecot-mailcow", f"bash -c '[ ! -d {current_path} ] && mkdir {current_path} && chown vmail:vmail {current_path}'")
                            if mkdir_result.get("status") != "success":
                                print(f"Error creating directory {current_path}: {mkdir_result.get('output')}")
                                continue
                    else:
                        # Overwrite the original file
                        output_file = file

                    decrypt_command = (
                        f"bash -c 'doveadm fs get compress lz4:1:crypt:private_key_path={private_key}:public_key_path={public_key}:posix:prefix=/ {file} > {output_file}'"
                    )

                    decrypt_result = self.docker.exec_command("dovecot-mailcow", decrypt_command)
                    if decrypt_result.get("status") == "success":
                        print(f"Decrypted {file}")

                        # Verify the file size and set permissions; an empty
                        # output file is treated as a failed decrypt and removed.
                        size_check_command = f"bash -c '[ -s {output_file} ] && chmod 600 {output_file} && chown vmail:vmail {output_file} || rm -f {output_file}'"
                        size_check_result = self.docker.exec_command("dovecot-mailcow", size_check_command)
                        if size_check_result.get("status") != "success":
                            print(f"Error setting permissions for {output_file}: {size_check_result.get('output')}\n")

        except Exception as e:
            print(f"Error during decryption: {e}")

        return "Done"

    def encryptMaildir(self, source_dir="/var/vmail/", output_dir=None):
        """
        Encrypt files in /var/vmail using doveadm if they are not already encrypted.
        :param source_dir: Directory inside the Dovecot container to encrypt files.
        :param output_dir: Directory inside the Dovecot container to store encrypted files, Default overwrite.
        :return: "Done" (progress and errors are printed, not returned).
        """
        # Fixed mail_crypt key-pair locations inside the container.
        private_key = "/mail_crypt/ecprivkey.pem"
        public_key = "/mail_crypt/ecpubkey.pem"

        if output_dir:
            # Ensure the output directory exists inside the container
            mkdir_result = self.docker.exec_command("dovecot-mailcow", f"mkdir -p {output_dir}")
            if mkdir_result.get("status") != "success":
                print(f"Error creating output directory: {mkdir_result.get('output')}")
                return

        # Same candidate-file scan as decryptMaildir.
        find_command = [
            "find", source_dir, "-type", "f", "-regextype", "egrep", "-regex", ".*S=.*W=.*"
        ]

        try:
            find_result = self.docker.exec_command("dovecot-mailcow", " ".join(find_command))
            if find_result.get("status") != "success":
                print(f"Error finding files: {find_result.get('output')}")
                return

            files = find_result.get("output", "").splitlines()

            for file in files:
                # Only process files that are NOT already mail_crypt-encrypted.
                head_command = f"head -c7 {file}"
                head_result = self.docker.exec_command("dovecot-mailcow", head_command)
                if head_result.get("status") == "success" and head_result.get("output", "").strip() != "CRYPTED":
                    if output_dir:
                        # Preserve the directory structure in the output directory
                        relative_path = os.path.relpath(file, source_dir)
                        output_file = os.path.join(output_dir, relative_path)
                        current_path = output_dir
                        for part in os.path.dirname(relative_path).split(os.sep):
                            current_path = os.path.join(current_path, part)
                            # NOTE(review): same exits-non-zero-when-directory-exists
                            # pattern as in decryptMaildir — confirm intended.
                            mkdir_result = self.docker.exec_command("dovecot-mailcow", f"bash -c '[ ! -d {current_path} ] && mkdir {current_path} && chown vmail:vmail {current_path}'")
                            if mkdir_result.get("status") != "success":
                                print(f"Error creating directory {current_path}: {mkdir_result.get('output')}")
                                continue
                    else:
                        # Overwrite the original file
                        output_file = file

                    encrypt_command = (
                        f"bash -c 'doveadm fs put crypt private_key_path={private_key}:public_key_path={public_key}:posix:prefix=/ {file} {output_file}'"
                    )

                    encrypt_result = self.docker.exec_command("dovecot-mailcow", encrypt_command)
                    if encrypt_result.get("status") == "success":
                        print(f"Encrypted {file}")

                        # Set permissions (5000:5000 is the vmail uid:gid).
                        permissions_command = f"bash -c 'chmod 600 {output_file} && chown 5000:5000 {output_file}'"
                        permissions_result = self.docker.exec_command("dovecot-mailcow", permissions_command)
                        if permissions_result.get("status") != "success":
                            print(f"Error setting permissions for {output_file}: {permissions_result.get('output')}\n")

        except Exception as e:
            print(f"Error during encryption: {e}")

        return "Done"

    def listDeletedMaildirs(self, source_dir="/var/vmail/_garbage"):
        """
        List deleted maildirs in the specified garbage directory.
        :param source_dir: Directory to search for deleted maildirs.
        :return: List of maildirs. Each entry is a dict with keys
            'item' (index of the ls output line), 'time' (timestamp prefix of
            the garbage folder name), 'name' (original maildir name) and
            'has_index' (True when a matching '<name>_index' folder exists).
        """
        list_command = ["bash", "-c", f"ls -la {source_dir}"]

        try:
            result = self.docker.exec_command("dovecot-mailcow", list_command)
            if result.get("status") != "success":
                print(f"Error listing deleted maildirs: {result.get('output')}")
                return []

            lines = result.get("output", "").splitlines()
            maildirs = {}

            for idx, line in enumerate(lines):
                parts = line.split()
                # Garbage folders are named '<time>_<maildir>'; lines without an
                # underscore (ls header, '.', '..') are skipped.
                if "_" in line:
                    folder_name = parts[-1]
                    time, maildir = folder_name.split("_", 1)

                    if maildir.endswith("_index"):
                        # Fold '<name>_index' folders into their main entry.
                        main_item = maildir[:-6]
                        if main_item in maildirs:
                            maildirs[main_item]["has_index"] = True
                    else:
                        maildirs[maildir] = {"item": idx, "time": time, "name": maildir, "has_index": False}

            return list(maildirs.values())

        except Exception as e:
            print(f"Error during listing deleted maildirs: {e}")
            return []

    def restoreMaildir(self, username, item, source_dir="/var/vmail/_garbage"):
        """
        Restore a maildir item for a specific user from the deleted maildirs.
        :param username: Username (mailbox address) to restore the item to.
        :param item: Item to restore — the 'item' index reported by listDeletedMaildirs().
        :param source_dir: Directory containing deleted maildirs.
        :return: "Done" on success, otherwise an error dict.
        """
        # [local part, domain part] of the mailbox address.
        username_splitted = username.split("@")
        maildirs = self.listDeletedMaildirs()

        maildir = None
        for mdir in maildirs:
            if mdir["item"] == int(item):
                maildir = mdir
                break
        if not maildir:
            return {"status": "error", "message": "Maildir not found."}

        # Move the mail data back to /var/vmail/<domain>/<local part> and the
        # dovecot index back to /var/vmail_index/<address>.
        restore_command = f"mv {source_dir}/{maildir['time']}_{maildir['name']} /var/vmail/{username_splitted[1]}/{username_splitted[0]}"
        restore_index_command = f"mv {source_dir}/{maildir['time']}_{maildir['name']}_index /var/vmail_index/{username}"

        result = self.docker.exec_command("dovecot-mailcow", ["bash", "-c", restore_command])
        if result.get("status") != "success":
            return {"status": "error", "message": "Failed to restore maildir."}

        result = self.docker.exec_command("dovecot-mailcow", ["bash", "-c", restore_index_command])
        if result.get("status") != "success":
            return {"status": "error", "message": "Failed to restore maildir index."}

        return "Done"
|
||||
457
data/Dockerfiles/controller/mailcow-adm/modules/Mailcow.py
Normal file
457
data/Dockerfiles/controller/mailcow-adm/modules/Mailcow.py
Normal file
@@ -0,0 +1,457 @@
|
||||
import requests
|
||||
import urllib3
|
||||
import sys
|
||||
import os
|
||||
import subprocess
|
||||
import tempfile
|
||||
import mysql.connector
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime
|
||||
from modules.Docker import Docker
|
||||
|
||||
|
||||
class Mailcow:
    """Client for the mailcow HTTP API plus helpers that talk directly to the
    mailcow MySQL database and the dovecot container (sync jobs)."""

    def __init__(self):
        self.apiUrl = "/api/v1"
        self.ignore_ssl_errors = True

        # Requests go to nginx on its fixed in-network address; the Host header
        # is set to MAILCOW_HOSTNAME so the correct vhost answers.
        self.baseUrl = f"https://{os.getenv('IPv4_NETWORK', '172.22.1')}.247:{os.getenv('HTTPS_PORT', '443')}"
        self.host = os.getenv("MAILCOW_HOSTNAME", "")
        self.apiKey = ""
        if self.ignore_ssl_errors:
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        # Credentials for the mailcow MySQL database, reached via local socket.
        self.db_config = {
            'user': os.getenv('DBUSER'),
            'password': os.getenv('DBPASS'),
            'database': os.getenv('DBNAME'),
            'unix_socket': '/var/run/mysqld/mysqld.sock',
        }

        self.docker = Docker()


    # API Functions
    def addDomain(self, domain):
        """
        Add a domain to the mailcow instance.
        :param domain: Dictionary containing domain details.
        :return: Response from the mailcow API.
        """

        return self.post('/add/domain', domain)

    def addMailbox(self, mailbox):
        """
        Add a mailbox to the mailcow instance.
        :param mailbox: Dictionary containing mailbox details.
        :return: Response from the mailcow API.
        """

        return self.post('/add/mailbox', mailbox)

    def addAlias(self, alias):
        """
        Add an alias to the mailcow instance.
        :param alias: Dictionary containing alias details.
        :return: Response from the mailcow API.
        """

        return self.post('/add/alias', alias)

    def addSyncjob(self, syncjob):
        """
        Add a sync job to the mailcow instance.
        :param syncjob: Dictionary containing sync job details.
        :return: Response from the mailcow API.
        """

        return self.post('/add/syncjob', syncjob)

    def addDomainadmin(self, domainadmin):
        """
        Add a domain admin to the mailcow instance.
        :param domainadmin: Dictionary containing domain admin details.
        :return: Response from the mailcow API.
        """

        return self.post('/add/domain-admin', domainadmin)

    def deleteDomain(self, domain):
        """
        Delete a domain from the mailcow instance.
        :param domain: Name of the domain to delete.
        :return: Response from the mailcow API.
        """

        # The delete endpoints take a JSON list of items.
        items = [domain]
        return self.post('/delete/domain', items)

    def deleteAlias(self, id):
        """
        Delete an alias from the mailcow instance.
        :param id: ID of the alias to delete.
        :return: Response from the mailcow API.
        """

        items = [id]
        return self.post('/delete/alias', items)

    def deleteSyncjob(self, id):
        """
        Delete a sync job from the mailcow instance.
        :param id: ID of the sync job to delete.
        :return: Response from the mailcow API.
        """

        items = [id]
        return self.post('/delete/syncjob', items)

    def deleteMailbox(self, mailbox):
        """
        Delete a mailbox from the mailcow instance.
        :param mailbox: Name of the mailbox to delete.
        :return: Response from the mailcow API.
        """

        items = [mailbox]
        return self.post('/delete/mailbox', items)

    def deleteDomainadmin(self, username):
        """
        Delete a domain admin from the mailcow instance.
        :param username: Username of the domain admin to delete.
        :return: Response from the mailcow API.
        """

        items = [username]
        return self.post('/delete/domain-admin', items)

    def post(self, endpoint, data):
        """
        Make a POST request to the mailcow API.
        :param endpoint: The API endpoint to post to.
        :param data: Data to be sent in the POST request (JSON-serializable).
        :return: Parsed JSON response from the mailcow API.
        :raises requests.HTTPError: On a non-2xx status code.
        """

        url = f"{self.baseUrl}{self.apiUrl}/{endpoint.lstrip('/')}"
        headers = {
            "Content-Type": "application/json",
            "Host": self.host
        }
        # apiKey is empty by default; the header is only sent when one is set.
        if self.apiKey:
            headers["X-Api-Key"] = self.apiKey
        response = requests.post(
            url,
            json=data,
            headers=headers,
            verify=not self.ignore_ssl_errors
        )
        response.raise_for_status()
        return response.json()

    def getDomain(self, domain):
        """
        Get a domain from the mailcow instance.
        :param domain: Name of the domain to get.
        :return: Response from the mailcow API.
        """

        return self.get(f'/get/domain/{domain}')

    def getMailbox(self, username):
        """
        Get a mailbox from the mailcow instance.
        :param username: Mailbox address to get (e.g. "user@example.com").
        :return: Response from the mailcow API.
        """
        return self.get(f'/get/mailbox/{username}')

    def getAlias(self, id):
        """
        Get an alias from the mailcow instance.
        :param id: ID of the alias to get.
        :return: Response from the mailcow API.
        """
        return self.get(f'/get/alias/{id}')

    def getSyncjob(self, id):
        """
        Get a sync job from the mailcow instance.
        :param id: ID of the sync job to get.
        :return: Response from the mailcow API.
        """
        return self.get(f'/get/syncjobs/{id}')

    def getDomainadmin(self, username):
        """
        Get a domain admin from the mailcow instance.
        :param username: Username of the domain admin to get.
        :return: Response from the mailcow API.
        """
        return self.get(f'/get/domain-admin/{username}')

    def getStatusVersion(self):
        """
        Get the version of the mailcow instance.
        :return: Response from the mailcow API.
        """
        return self.get('/get/status/version')

    def getStatusVmail(self):
        """
        Get the vmail status from the mailcow instance.
        :return: Response from the mailcow API.
        """
        return self.get('/get/status/vmail')

    def getStatusContainers(self):
        """
        Get the status of containers from the mailcow instance.
        :return: Response from the mailcow API.
        """
        return self.get('/get/status/containers')

    def get(self, endpoint, params=None):
        """
        Make a GET request to the mailcow API.
        :param endpoint: The API endpoint to get from.
        :param params: Parameters to be sent in the GET request.
        :return: Parsed JSON response from the mailcow API.
        :raises requests.HTTPError: On a non-2xx status code.
        """

        url = f"{self.baseUrl}{self.apiUrl}/{endpoint.lstrip('/')}"
        headers = {
            "Content-Type": "application/json",
            "Host": self.host
        }
        if self.apiKey:
            headers["X-Api-Key"] = self.apiKey
        response = requests.get(
            url,
            params=params,
            headers=headers,
            verify=not self.ignore_ssl_errors
        )
        response.raise_for_status()
        return response.json()

    def editDomain(self, domain, attributes):
        """
        Edit an existing domain in the mailcow instance.
        :param domain: Name of the domain to edit
        :param attributes: Dictionary containing the new domain attributes.
        """

        items = [domain]
        return self.edit('/edit/domain', items, attributes)

    def editMailbox(self, mailbox, attributes):
        """
        Edit an existing mailbox in the mailcow instance.
        :param mailbox: Name of the mailbox to edit
        :param attributes: Dictionary containing the new mailbox attributes.
        """

        items = [mailbox]
        return self.edit('/edit/mailbox', items, attributes)

    def editAlias(self, alias, attributes):
        """
        Edit an existing alias in the mailcow instance.
        :param alias: Name of the alias to edit
        :param attributes: Dictionary containing the new alias attributes.
        """

        items = [alias]
        return self.edit('/edit/alias', items, attributes)

    def editSyncjob(self, syncjob, attributes):
        """
        Edit an existing sync job in the mailcow instance.
        :param syncjob: Name of the sync job to edit
        :param attributes: Dictionary containing the new sync job attributes.
        """

        items = [syncjob]
        return self.edit('/edit/syncjob', items, attributes)

    def editDomainadmin(self, username, attributes):
        """
        Edit an existing domain admin in the mailcow instance.
        :param username: Username of the domain admin to edit
        :param attributes: Dictionary containing the new domain admin attributes.
        """

        items = [username]
        return self.edit('/edit/domain-admin', items, attributes)

    def edit(self, endpoint, items, attributes):
        """
        Make a POST request to edit items in the mailcow API.
        :param endpoint: The API edit endpoint to post to.
        :param items: List of items to edit.
        :param attributes: Dictionary containing the new attributes for the items.
        :return: Parsed JSON response from the mailcow API.
        :raises requests.HTTPError: On a non-2xx status code.
        """

        url = f"{self.baseUrl}{self.apiUrl}/{endpoint.lstrip('/')}"
        headers = {
            "Content-Type": "application/json",
            "Host": self.host
        }
        if self.apiKey:
            headers["X-Api-Key"] = self.apiKey
        # The edit endpoints expect {"items": [...], "attr": {...}}.
        data = {
            "items": items,
            "attr": attributes
        }
        response = requests.post(
            url,
            json=data,
            headers=headers,
            verify=not self.ignore_ssl_errors
        )
        response.raise_for_status()
        return response.json()


    # System Functions
    def runSyncjob(self, id, force=False):
        """
        Run a sync job.
        :param id: ID of the sync job to run.
        :param force: Run the job even when it is flagged inactive in the database.
        :return: Response from the imapsync script.
        """

        creds_path = "/app/sieve.creds"

        conn = mysql.connector.connect(**self.db_config)
        cursor = conn.cursor(dictionary=True)

        # Master credentials ("user:pass") used for the local IMAP side.
        with open(creds_path, 'r') as file:
            master_user, master_pass = file.read().strip().split(':')

        query = ("SELECT * FROM imapsync WHERE id = %s")
        cursor.execute(query, (id,))

        success = False
        syncjob = cursor.fetchone()
        if not syncjob:
            cursor.close()
            conn.close()
            return f"Sync job with ID {id} not found."
        if syncjob['active'] == 0 and not force:
            cursor.close()
            conn.close()
            return f"Sync job with ID {id} is not active."

        # Encryption flag for the remote (host1) side; None means plain.
        enc1_flag = "--tls1" if syncjob['enc1'] == "TLS" else "--ssl1" if syncjob['enc1'] == "SSL" else None


        # SECURITY(review): the passwords are interpolated unquoted into an
        # 'sh -c' command line; shell metacharacters in a password would break
        # the command or inject — confirm these values are trusted.
        passfile1_path = f"/tmp/passfile1_{id}.txt"
        passfile2_path = f"/tmp/passfile2_{id}.txt"
        passfile1_cmd = [
            "sh", "-c",
            f"echo {syncjob['password1']} > {passfile1_path}"
        ]
        passfile2_cmd = [
            "sh", "-c",
            f"echo {master_pass} > {passfile2_path}"
        ]

        self.docker.exec_command("dovecot-mailcow", passfile1_cmd)
        self.docker.exec_command("dovecot-mailcow", passfile2_cmd)

        imapsync_cmd = [
            "/usr/local/bin/imapsync",
            "--tmpdir", "/tmp",
            "--nofoldersizes",
            "--addheader"
        ]

        # Optional flags derived from the sync job's database row.
        if int(syncjob['timeout1']) > 0:
            imapsync_cmd.extend(['--timeout1', str(syncjob['timeout1'])])
        if int(syncjob['timeout2']) > 0:
            imapsync_cmd.extend(['--timeout2', str(syncjob['timeout2'])])
        if syncjob['exclude']:
            imapsync_cmd.extend(['--exclude', syncjob['exclude']])
        if syncjob['subfolder2']:
            imapsync_cmd.extend(['--subfolder2', syncjob['subfolder2']])
        if int(syncjob['maxage']) > 0:
            imapsync_cmd.extend(['--maxage', str(syncjob['maxage'])])
        if int(syncjob['maxbytespersecond']) > 0:
            imapsync_cmd.extend(['--maxbytespersecond', str(syncjob['maxbytespersecond'])])
        if int(syncjob['delete2duplicates']) == 1:
            imapsync_cmd.append("--delete2duplicates")
        if int(syncjob['subscribeall']) == 1:
            imapsync_cmd.append("--subscribeall")
        if int(syncjob['delete1']) == 1:
            imapsync_cmd.append("--delete")
        if int(syncjob['delete2']) == 1:
            imapsync_cmd.append("--delete2")
        if int(syncjob['automap']) == 1:
            imapsync_cmd.append("--automap")
        if int(syncjob['skipcrossduplicates']) == 1:
            imapsync_cmd.append("--skipcrossduplicates")
        if enc1_flag:
            imapsync_cmd.append(enc1_flag)

        # host2 is always the local dovecot; "user*master" is dovecot's
        # master-user login syntax.
        imapsync_cmd.extend([
            "--host1", syncjob['host1'],
            "--user1", syncjob['user1'],
            "--passfile1", passfile1_path,
            "--port1", str(syncjob['port1']),
            "--host2", "localhost",
            "--user2", f"{syncjob['user2']}*{master_user}",
            "--passfile2", passfile2_path
        ])

        if syncjob['dry'] == 1:
            imapsync_cmd.append("--dry")

        imapsync_cmd.extend([
            "--no-modulesversion",
            "--noreleasecheck"
        ])

        try:
            # Mark the job as running and clear the previous result.
            cursor.execute("UPDATE imapsync SET is_running = 1, success = NULL, exit_status = NULL WHERE id = %s", (id,))
            conn.commit()

            result = self.docker.exec_command("dovecot-mailcow", imapsync_cmd)
            print(result)

            # NOTE(review): exit_code is compared against int 0 here; confirm
            # Docker.exec_command returns an integer exit code.
            success = result['status'] == "success" and result['exit_code'] == 0
            cursor.execute(
                "UPDATE imapsync SET returned_text = %s, success = %s, exit_status = %s WHERE id = %s",
                (result['output'], int(success), result['exit_code'], id)
            )
            conn.commit()

        except Exception as e:
            cursor.execute(
                "UPDATE imapsync SET returned_text = %s, success = 0 WHERE id = %s",
                (str(e), id)
            )
            conn.commit()

        finally:
            # Always clear the running flag and remove the temporary pass files.
            cursor.execute("UPDATE imapsync SET last_run = NOW(), is_running = 0 WHERE id = %s", (id,))
            conn.commit()

            delete_passfile1_cmd = [
                "sh", "-c",
                f"rm -f {passfile1_path}"
            ]
            delete_passfile2_cmd = [
                "sh", "-c",
                f"rm -f {passfile2_path}"
            ]
            self.docker.exec_command("dovecot-mailcow", delete_passfile1_cmd)
            self.docker.exec_command("dovecot-mailcow", delete_passfile2_cmd)

        cursor.close()
        conn.close()

        return "Sync job completed successfully." if success else "Sync job failed."
|
||||
64
data/Dockerfiles/controller/mailcow-adm/modules/Mailer.py
Normal file
64
data/Dockerfiles/controller/mailcow-adm/modules/Mailer.py
Normal file
@@ -0,0 +1,64 @@
|
||||
import smtplib
|
||||
import json
|
||||
import os
|
||||
from email.mime.text import MIMEText
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from jinja2 import Environment, BaseLoader
|
||||
|
||||
class Mailer:
    """Minimal SMTP mailer that renders Jinja2 inline templates and sends the
    result as an HTML e-mail."""

    def __init__(self, smtp_host, smtp_port, username, password, use_tls=True):
        """
        :param smtp_host: SMTP server hostname.
        :param smtp_port: SMTP server port.
        :param username: Login user; authentication is skipped when empty.
        :param password: Login password; authentication is skipped when empty.
        :param use_tls: Upgrade the connection with STARTTLS (default True).
        """
        self.smtp_host = smtp_host
        self.smtp_port = smtp_port
        self.username = username
        self.password = password
        self.use_tls = use_tls
        self.server = None  # live smtplib.SMTP connection, None when disconnected
        self.env = Environment(loader=BaseLoader())  # renders templates passed as strings

    def connect(self):
        """Open the SMTP connection, optionally upgrade to TLS and authenticate."""
        print("Connecting to the SMTP server...")
        self.server = smtplib.SMTP(self.smtp_host, self.smtp_port)
        if self.use_tls:
            self.server.starttls()
            print("TLS activated!")
        if self.username and self.password:
            self.server.login(self.username, self.password)
            print("Authenticated!")

    def disconnect(self):
        """Close the SMTP connection if it is open; safe to call repeatedly."""
        if self.server:
            try:
                if self.server.sock:
                    self.server.quit()
            except smtplib.SMTPServerDisconnected:
                # The server already hung up; nothing left to close.
                pass
            finally:
                self.server = None

    def render_inline_template(self, template_string, context):
        """Render a Jinja2 template given as a string with the supplied context."""
        template = self.env.from_string(template_string)
        return template.render(context)

    def send_mail(self, subject, from_addr, to_addrs, template, context=None):
        """
        Render `template` with `context` and send it as an HTML mail.

        :param subject: Mail subject line.
        :param from_addr: Sender address.
        :param to_addrs: A single address or a list of addresses.
        :param template: Jinja2 template string; must be non-empty.
        :param context: Template variables (default: empty dict).
        :return: A "Success: ..." or "Failed: ..." status string.
        """
        # Use None as the default to avoid a shared mutable default argument.
        if context is None:
            context = {}
        try:
            if template == "":
                print("Cannot send email, template is empty!")
                return "Failed: Template is empty."

            body = self.render_inline_template(template, context)

            msg = MIMEMultipart()
            msg['From'] = from_addr
            msg['To'] = ', '.join(to_addrs) if isinstance(to_addrs, list) else to_addrs
            msg['Subject'] = subject
            msg.attach(MIMEText(body, 'html'))

            self.connect()
            self.server.sendmail(from_addr, to_addrs, msg.as_string())
            return f"Success: Email sent to {msg['To']}"
        except Exception as e:
            print(f"Error during send_mail: {type(e).__name__}: {e}")
            return f"Failed: {type(e).__name__}: {e}"
        finally:
            # Always tear the connection down, including on the early returns
            # above (this replaces the redundant pre-return disconnect).
            self.disconnect()
|
||||
51
data/Dockerfiles/controller/mailcow-adm/modules/Reader.py
Normal file
51
data/Dockerfiles/controller/mailcow-adm/modules/Reader.py
Normal file
@@ -0,0 +1,51 @@
|
||||
from jinja2 import Environment, Template
|
||||
import csv
|
||||
|
||||
def split_at(value, sep, idx):
    """Jinja2 filter: return the idx-th piece of value split on sep.

    Any failure (non-string value, index out of range, ...) yields ''.
    """
    try:
        pieces = value.split(sep)
        piece = pieces[idx]
    except Exception:
        piece = ''
    return piece
|
||||
|
||||
class Reader:
    """
    Reader class to handle reading and processing of CSV and JSON files for mailcow.
    """

    def __init__(self):
        pass

    def read_csv(self, file_path, delimiter=',', encoding='iso-8859-1'):
        """
        Read a CSV file and return a list of dictionaries.
        Each dictionary represents a row in the CSV file. Header names have
        spaces replaced with underscores so they are usable as Jinja2
        identifiers in mapping templates.

        :param file_path: Path to the CSV file.
        :param delimiter: Delimiter used in the CSV file (default: ',').
        :param encoding: File encoding (default: 'iso-8859-1').
        :return: List of dicts keyed by the normalized header names.
        """
        with open(file_path, mode='r', encoding=encoding) as file:
            reader = csv.DictReader(file, delimiter=delimiter)
            if reader.fieldnames is None:
                # Empty file: no header row means no data rows (previously a
                # TypeError when iterating None).
                return []
            reader.fieldnames = [h.replace(" ", "_") if h else h for h in reader.fieldnames]
            return [row for row in reader]

    def map_csv_data(self, data, mapping_file_path, encoding='iso-8859-1'):
        """
        Map CSV data to a specific structure based on the provided Jinja2 template file.
        :param data: List of dictionaries representing CSV rows.
        :param mapping_file_path: Path to the Jinja2 template file.
        :param encoding: Template file encoding (default: 'iso-8859-1').
        :return: List of dictionaries with mapped data (or the raw rendered
            string for rows that do not evaluate).
        """
        with open(mapping_file_path, 'r', encoding=encoding) as tpl_file:
            template_content = tpl_file.read()
        env = Environment()
        env.filters['split_at'] = split_at
        template = env.from_string(template_content)

        mapped_data = []
        for row in data:
            rendered = template.render(**row)
            try:
                # SECURITY: eval() executes arbitrary Python produced by the
                # mapping template — only use trusted mapping files.
                mapped_row = eval(rendered)
            except Exception:
                # Fall back to the raw rendered text when it is not a Python
                # expression.
                mapped_row = rendered
            mapped_data.append(mapped_row)
        return mapped_data
|
||||
512
data/Dockerfiles/controller/mailcow-adm/modules/Sogo.py
Normal file
512
data/Dockerfiles/controller/mailcow-adm/modules/Sogo.py
Normal file
@@ -0,0 +1,512 @@
|
||||
import requests
|
||||
import urllib3
|
||||
import os
|
||||
from uuid import uuid4
|
||||
from collections import defaultdict
|
||||
|
||||
|
||||
class Sogo:
|
||||
def __init__(self, username, password=""):
|
||||
self.apiUrl = "/SOGo/so"
|
||||
self.davUrl = "/SOGo/dav"
|
||||
self.ignore_ssl_errors = True
|
||||
|
||||
self.baseUrl = f"https://{os.getenv('IPv4_NETWORK', '172.22.1')}.247:{os.getenv('HTTPS_PORT', '443')}"
|
||||
self.host = os.getenv("MAILCOW_HOSTNAME", "")
|
||||
if self.ignore_ssl_errors:
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
self.username = username
|
||||
self.password = password
|
||||
|
||||
def addCalendar(self, calendar_name):
|
||||
"""
|
||||
Add a new calendar to the sogo instance.
|
||||
:param calendar_name: Name of the calendar to be created
|
||||
:return: Response from the sogo API.
|
||||
"""
|
||||
|
||||
res = self.post(f"/{self.username}/Calendar/createFolder", {
|
||||
"name": calendar_name
|
||||
})
|
||||
try:
|
||||
return res.json()
|
||||
except ValueError:
|
||||
return res.text
|
||||
|
||||
def getCalendarIdByName(self, calendar_name):
|
||||
"""
|
||||
Get the calendar ID by its name.
|
||||
:param calendar_name: Name of the calendar to find
|
||||
:return: Calendar ID if found, otherwise None.
|
||||
"""
|
||||
|
||||
res = self.get(f"/{self.username}/Calendar/calendarslist")
|
||||
try:
|
||||
for calendar in res.json()["calendars"]:
|
||||
if calendar['name'] == calendar_name:
|
||||
return calendar['id']
|
||||
except ValueError:
|
||||
return None
|
||||
return None
|
||||
|
||||
def getCalendar(self):
|
||||
"""
|
||||
Get calendar list.
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
|
||||
res = self.get(f"/{self.username}/Calendar/calendarslist")
|
||||
try:
|
||||
return res.json()
|
||||
except ValueError:
|
||||
return res.text
|
||||
|
||||
def deleteCalendar(self, calendar_id):
|
||||
"""
|
||||
Delete a calendar.
|
||||
:param calendar_id: ID of the calendar to be deleted
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
res = self.get(f"/{self.username}/Calendar/{calendar_id}/delete")
|
||||
return res.status_code == 204
|
||||
|
||||
def importCalendar(self, calendar_name, ics_file):
|
||||
"""
|
||||
Import a calendar from an ICS file.
|
||||
:param calendar_name: Name of the calendar to import into
|
||||
:param ics_file: Path to the ICS file to import
|
||||
:return: Response from SOGo API.
|
||||
"""
|
||||
|
||||
try:
|
||||
with open(ics_file, "rb") as f:
|
||||
pass
|
||||
except Exception as e:
|
||||
print(f"Could not open ICS file '{ics_file}': {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
new_calendar = self.addCalendar(calendar_name)
|
||||
selected_calendar = new_calendar.json()["id"]
|
||||
|
||||
url = f"{self.baseUrl}{self.apiUrl}/{self.username}/Calendar/{selected_calendar}/import"
|
||||
auth = (self.username, self.password)
|
||||
with open(ics_file, "rb") as f:
|
||||
files = {'icsFile': (ics_file, f, 'text/calendar')}
|
||||
res = requests.post(
|
||||
url,
|
||||
files=files,
|
||||
auth=auth,
|
||||
verify=not self.ignore_ssl_errors
|
||||
)
|
||||
try:
|
||||
return res.json()
|
||||
except ValueError:
|
||||
return res.text
|
||||
|
||||
return None
|
||||
|
||||
def setCalendarACL(self, calendar_id, sharee_email, acl="r", subscribe=False):
    """
    Set CalDAV calendar permissions for a user (sharee).

    :param calendar_id: ID of the calendar to share
    :param sharee_email: Email of the user to share with
    :param acl: "w" for write, "r" for read-only or combination "rw" for read-write
    :param subscribe: True will subscribe the sharee to the calendar
    :return: True when the rights were saved, an error string for an
             invalid acl value, or the decoded error body (JSON or text)
             of the first failing SOGo request.
    """

    def _error_body(res):
        # Decode the body of a failed SOGo response (JSON if possible).
        try:
            return res.json()
        except ValueError:
            return res.text

    # Validate the requested access level: only "r", "w" or a combination
    # of both is meaningful (the old length-only check let values like
    # "xy" pass and silently grant nothing).
    if not acl or len(acl) > 2 or not set(acl) <= {"r", "w"}:
        return "Invalid acl level specified. Use 'w', 'r' or combinations like 'rw'."

    rights = [{
        "c_email": sharee_email,
        "uid": sharee_email,
        "userClass": "normal-user",
        "rights": {
            "Public": "None",
            "Private": "None",
            "Confidential": "None",
            "canCreateObjects": 0,
            "canEraseObjects": 0
        }
    }]
    if "w" in acl:
        # Write access: allow creating and deleting calendar objects.
        rights[0]["rights"]["canCreateObjects"] = 1
        rights[0]["rights"]["canEraseObjects"] = 1
    if "r" in acl:
        # Read access: viewer role on all three CalDAV classifications.
        rights[0]["rights"]["Public"] = "Viewer"
        rights[0]["rights"]["Private"] = "Viewer"
        rights[0]["rights"]["Confidential"] = "Viewer"

    # The sharee must be added to the ACL list before rights can be saved.
    r_add = self.get(f"/{self.username}/Calendar/{calendar_id}/addUserInAcls?uid={sharee_email}")
    if not 200 <= r_add.status_code <= 299:
        return _error_body(r_add)

    r_save = self.post(f"/{self.username}/Calendar/{calendar_id}/saveUserRights", rights)
    if not 200 <= r_save.status_code <= 299:
        return _error_body(r_save)

    if subscribe:
        r_subscribe = self.get(f"/{self.username}/Calendar/{calendar_id}/subscribeUsers?uids={sharee_email}")
        if not 200 <= r_subscribe.status_code <= 299:
            return _error_body(r_subscribe)

    return r_save.status_code == 200
|
||||
|
||||
def getCalendarACL(self, calendar_id):
    """
    Fetch the ACL entries of a calendar.

    :param calendar_id: ID of the calendar to get ACL from
    :return: Parsed JSON body from the SOGo API, or the raw response text
             when the body is not valid JSON.
    """
    response = self.get(f"/{self.username}/Calendar/{calendar_id}/acls")
    try:
        decoded = response.json()
    except ValueError:
        return response.text
    return decoded
|
||||
|
||||
def deleteCalendarACL(self, calendar_id, sharee_email):
    """
    Delete a calendar ACL for a user (sharee).

    :param calendar_id: ID of the calendar to delete ACL from
    :param sharee_email: Email of the user whose ACL to delete
    :return: True if the SOGo API answered 204 No Content, otherwise False.
             (The old docstring claimed the raw response was returned.)
    """
    res = self.get(f"/{self.username}/Calendar/{calendar_id}/removeUserFromAcls?uid={sharee_email}")
    return res.status_code == 204
|
||||
|
||||
def addAddressbook(self, addressbook_name):
    """
    Create a new addressbook on the SOGo instance.

    :param addressbook_name: Name of the addressbook to be created
    :return: Parsed JSON response from the SOGo API, or the raw response
             text when the body is not valid JSON.
    """
    payload = {"name": addressbook_name}
    response = self.post(f"/{self.username}/Contacts/createFolder", payload)
    try:
        decoded = response.json()
    except ValueError:
        return response.text
    return decoded
|
||||
|
||||
def getAddressbookIdByName(self, addressbook_name):
    """
    Get the addressbook ID by its name.

    :param addressbook_name: Name of the addressbook to find
    :return: Addressbook ID if found, otherwise None.
    """
    res = self.get(f"/{self.username}/Contacts/addressbooksList")
    try:
        # Use .get so a JSON body without the "addressbooks" key yields
        # None instead of an uncaught KeyError (the old code only caught
        # ValueError).
        books = res.json().get("addressbooks", [])
    except ValueError:
        # Response body was not JSON.
        return None
    for addressbook in books:
        if addressbook.get('name') == addressbook_name:
            return addressbook.get('id')
    return None
|
||||
|
||||
def deleteAddressbook(self, addressbook_id):
    """
    Delete an addressbook.

    :param addressbook_id: ID of the addressbook to be deleted
    :return: True if the SOGo API answered 204 No Content, otherwise False.
             (The old docstring claimed the raw response was returned.)
    """
    res = self.get(f"/{self.username}/Contacts/{addressbook_id}/delete")
    return res.status_code == 204
|
||||
|
||||
def getAddressbookList(self):
    """
    Fetch the list of addressbooks for the current user.

    :return: Parsed JSON body from the SOGo API, or the raw response text
             when the body is not valid JSON.
    """
    response = self.get(f"/{self.username}/Contacts/addressbooksList")
    try:
        decoded = response.json()
    except ValueError:
        return response.text
    return decoded
|
||||
|
||||
def setAddressbookACL(self, addressbook_id, sharee_email, acl="r", subscribe=False):
    """
    Set CardDAV addressbook permissions for a user (sharee).

    :param addressbook_id: ID of the addressbook to share
    :param sharee_email: Email of the user to share with
    :param acl: "w" for write, "r" for read-only or combination "rw" for read-write
    :param subscribe: True will subscribe the sharee to the addressbook
    :return: True when the rights were saved, an error string for an
             invalid acl value, or the decoded error body (JSON or text)
             of the first failing SOGo request.
    """

    def _error_body(res):
        # Decode the body of a failed SOGo response (JSON if possible).
        try:
            return res.json()
        except ValueError:
            return res.text

    # Validate the requested access level: only "r", "w" or a combination
    # of both is accepted. The old length-only check let values like "xy"
    # pass, and the old print message mentioned an unsupported 's' flag.
    if not acl or len(acl) > 2 or not set(acl) <= {"r", "w"}:
        print("Invalid acl level specified. Use 'w', 'r' or combinations like 'rw'.")
        return "Invalid acl level specified. Use 'w', 'r' or combinations like 'rw'."

    rights = [{
        "c_email": sharee_email,
        "uid": sharee_email,
        "userClass": "normal-user",
        "rights": {
            "canCreateObjects": 0,
            "canEditObjects": 0,
            "canEraseObjects": 0,
            "canViewObjects": 0,
        }
    }]
    if "w" in acl:
        # Write access: allow creating, editing and deleting contacts.
        rights[0]["rights"]["canCreateObjects"] = 1
        rights[0]["rights"]["canEditObjects"] = 1
        rights[0]["rights"]["canEraseObjects"] = 1
    if "r" in acl:
        # Read access: allow viewing contacts.
        rights[0]["rights"]["canViewObjects"] = 1

    # The sharee must be added to the ACL list before rights can be saved.
    r_add = self.get(f"/{self.username}/Contacts/{addressbook_id}/addUserInAcls?uid={sharee_email}")
    if not 200 <= r_add.status_code <= 299:
        return _error_body(r_add)

    r_save = self.post(f"/{self.username}/Contacts/{addressbook_id}/saveUserRights", rights)
    if not 200 <= r_save.status_code <= 299:
        return _error_body(r_save)

    if subscribe:
        r_subscribe = self.get(f"/{self.username}/Contacts/{addressbook_id}/subscribeUsers?uids={sharee_email}")
        if not 200 <= r_subscribe.status_code <= 299:
            return _error_body(r_subscribe)

    return r_save.status_code == 200
|
||||
|
||||
def getAddressbookACL(self, addressbook_id):
    """
    Fetch the ACL entries of an addressbook.

    :param addressbook_id: ID of the addressbook to get ACL from
    :return: Parsed JSON body from the SOGo API, or the raw response text
             when the body is not valid JSON.
    """
    response = self.get(f"/{self.username}/Contacts/{addressbook_id}/acls")
    try:
        decoded = response.json()
    except ValueError:
        return response.text
    return decoded
|
||||
|
||||
def deleteAddressbookACL(self, addressbook_id, sharee_email):
    """
    Delete an addressbook ACL for a user (sharee).

    :param addressbook_id: ID of the addressbook to delete ACL from
    :param sharee_email: Email of the user whose ACL to delete
    :return: True if the SOGo API answered 204 No Content, otherwise False.
             (The old docstring claimed the raw response was returned.)
    """
    res = self.get(f"/{self.username}/Contacts/{addressbook_id}/removeUserFromAcls?uid={sharee_email}")
    return res.status_code == 204
|
||||
|
||||
def getAddressbookNewGuid(self, addressbook_id):
    """
    Request a new GUID for a SOGo addressbook entry.

    :param addressbook_id: ID of the addressbook
    :return: Parsed JSON body (expected to carry "id" and "pid"), or the
             raw response text when the body is not valid JSON.
    """
    response = self.get(f"/{self.username}/Contacts/{addressbook_id}/newguid")
    try:
        decoded = response.json()
    except ValueError:
        return response.text
    return decoded
|
||||
|
||||
def addAddressbookContact(self, addressbook_id, contact_name, contact_email):
    """
    Save a vCard as a contact in the specified addressbook.

    :param addressbook_id: ID of the addressbook
    :param contact_name: Name of the contact
    :param contact_email: Email of the contact
    :return: Parsed JSON response from SOGo, raw response text when the
             body is not JSON, or an error dict when no new contact GUID
             could be obtained.
    """
    vcard_id = self.getAddressbookNewGuid(addressbook_id)
    # getAddressbookNewGuid may return raw text on a non-JSON response;
    # previously that crashed below with a TypeError/KeyError.
    if not isinstance(vcard_id, dict) or "id" not in vcard_id or "pid" not in vcard_id:
        return {"status": "error", "message": f"Could not obtain a new GUID: {vcard_id}"}

    contact_data = {
        "id": vcard_id["id"],
        "pid": vcard_id["pid"],
        "c_cn": contact_name,
        "emails": [{
            "type": "pref",
            "value": contact_email
        }],
        "isNew": True,
        "c_component": "vcard",
    }

    endpoint = f"/{self.username}/Contacts/{addressbook_id}/{vcard_id['id']}/saveAsContact"
    res = self.post(endpoint, contact_data)
    try:
        return res.json()
    except ValueError:
        return res.text
|
||||
|
||||
def getAddressbookContacts(self, addressbook_id, contact_email=None):
    """
    Get contacts from the specified addressbook.

    :param addressbook_id: ID of the addressbook
    :param contact_email: Optional email or display name; when given, the
                          first matching contact dict is returned (an
                          empty dict when nothing matches).
    :return: List of contact dicts (or a single dict / {} when
             contact_email is given), [] for an empty view, or the raw
             response text when the body is not JSON.
    """
    res = self.get(f"/{self.username}/Contacts/{addressbook_id}/view")
    try:
        res_json = res.json()
    except ValueError:
        return res.text

    # First "headers" row holds the field names, the remaining rows hold
    # one contact each.
    headers = res_json.get("headers", [])
    if not headers or len(headers) < 2:
        return []

    field_names = headers[0]
    contacts = [dict(zip(field_names, row)) for row in headers[1:]]

    if contact_email:
        for c in contacts:
            # Match on email address or display name; use .get so rows
            # missing either field no longer raise KeyError.
            if contact_email in (c.get("c_mail"), c.get("c_cn")):
                return c
        return {}

    return contacts
|
||||
|
||||
def addAddressbookContactList(self, addressbook_id, contact_name, contact_email=None):
    """
    Add a new contact list (SOGo "vlist") to the addressbook.

    Members are picked from the addressbook's existing contacts by email
    address and embedded as references into the new list.

    :param addressbook_id: ID of the addressbook
    :param contact_name: Name of the contact list
    :param contact_email: Comma-separated emails to include in the list
    :return: Response from SOGo API.
    """
    # Domain part of the current user; used below as fallback source id
    # and container name for GAL-style entries.
    gal_domain = self.username.split("@")[-1]
    # Reserve a fresh id/pid pair for the list to be created.
    # NOTE(review): getAddressbookNewGuid can return raw text on a
    # non-JSON response, which would break the subscripts below — confirm.
    vlist_id = self.getAddressbookNewGuid(addressbook_id)
    contact_emails = contact_email.split(",") if contact_email else []
    # All existing contacts of the addressbook; list members are selected
    # from these. NOTE(review): may also be raw text on a non-JSON body.
    contacts = self.getAddressbookContacts(addressbook_id)

    refs = []
    for contact in contacts:
        if contact['c_mail'] in contact_emails:
            # Build a member reference; fall back to defaults for fields
            # the contact rows may not carry.
            refs.append({
                "refs": [],
                "categories": [],
                "c_screenname": contact.get("c_screenname", ""),
                "pid": contact.get("pid", vlist_id["pid"]),
                "id": contact.get("id", ""),
                "notes": [""],
                "empty": " ",
                "hasphoto": contact.get("hasphoto", 0),
                "c_cn": contact.get("c_cn", ""),
                "c_uid": contact.get("c_uid", None),
                "containername": contact.get("containername", f"GAL {gal_domain}"),  # or your addressbook name
                "sourceid": contact.get("sourceid", gal_domain),
                "c_component": contact.get("c_component", "vcard"),
                "c_sn": contact.get("c_sn", ""),
                "c_givenname": contact.get("c_givenname", ""),
                "c_name": contact.get("c_name", contact.get("id", "")),
                "c_telephonenumber": contact.get("c_telephonenumber", ""),
                "fn": contact.get("fn", ""),
                "c_mail": contact.get("c_mail", ""),
                "emails": contact.get("emails", []),
                "c_o": contact.get("c_o", ""),
                "reference": contact.get("id", ""),
                "birthday": contact.get("birthday", "")
            })

    # Envelope describing the new vlist itself.
    contact_data = {
        "refs": refs,
        "categories": [],
        "c_screenname": None,
        "pid": vlist_id["pid"],
        "c_component": "vlist",
        "notes": [""],
        "empty": " ",
        "isNew": True,
        "id": vlist_id["id"],
        "c_cn": contact_name,
        "birthday": ""
    }

    endpoint = f"/{self.username}/Contacts/{addressbook_id}/{vlist_id['id']}/saveAsList"
    res = self.post(endpoint, contact_data)
    try:
        return res.json()
    except ValueError:
        # Non-JSON body: hand back the raw text.
        return res.text
|
||||
|
||||
def deleteAddressbookItem(self, addressbook_id, contact_name):
    """
    Delete an addressbook item by the contact's name or email.

    :param addressbook_id: ID of the addressbook containing the item
    :param contact_name: Name (or email) of the contact to delete
    :return: True if the SOGo API answered 204 No Content, False on any
             other status, or None when the contact was not found.
    """
    contact = self.getAddressbookContacts(addressbook_id, contact_name)

    # getAddressbookContacts can return raw text on a non-JSON response;
    # the old `"id" not in res` did a substring check on that text and
    # could take the wrong branch. Only a dict with an "id" key
    # identifies a deletable contact.
    if not isinstance(contact, dict) or "id" not in contact:
        print(f"Contact '{contact_name}' not found in addressbook '{addressbook_id}'.")
        return None

    res = self.post(f"/{self.username}/Contacts/{addressbook_id}/batchDelete", {
        "uids": [contact["id"]],
    })
    return res.status_code == 204
|
||||
|
||||
def get(self, endpoint, params=None):
    """
    Issue a GET request against the mailcow API.

    :param endpoint: The API endpoint to get.
    :param params: Optional query parameters for the GET request.
    :return: The raw requests.Response object.
    """
    return requests.get(
        f"{self.baseUrl}{self.apiUrl}{endpoint}",
        params=params,
        auth=(self.username, self.password),
        headers={"Host": self.host},
        verify=not self.ignore_ssl_errors,
    )
|
||||
|
||||
def post(self, endpoint, data):
    """
    Issue a POST request with a JSON body against the mailcow API.

    :param endpoint: The API endpoint to post to.
    :param data: Payload serialized as JSON into the request body.
    :return: The raw requests.Response object.
    """
    return requests.post(
        f"{self.baseUrl}{self.apiUrl}{endpoint}",
        json=data,
        auth=(self.username, self.password),
        headers={"Host": self.host},
        verify=not self.ignore_ssl_errors,
    )
|
||||
|
||||
37
data/Dockerfiles/controller/mailcow-adm/modules/Utils.py
Normal file
37
data/Dockerfiles/controller/mailcow-adm/modules/Utils.py
Normal file
@@ -0,0 +1,37 @@
|
||||
import json
|
||||
import random
|
||||
import string
|
||||
|
||||
|
||||
class Utils:
    """Small helper collection: email normalization, password generation,
    and pretty-printing of JSON-ish data."""

    def __init__(self):
        # BUG FIX: was `def __init(self)` (missing trailing underscores),
        # which defined an ordinary method instead of the constructor.
        pass

    def normalize_email(self, email):
        """Transliterate German umlauts and sharp-s so the address is
        ASCII-safe (e.g. "ä" -> "ae", "ß" -> "ss")."""
        replacements = {
            "ä": "ae", "ö": "oe", "ü": "ue", "ß": "ss",
            "Ä": "Ae", "Ö": "Oe", "Ü": "Ue"
        }
        for orig, repl in replacements.items():
            email = email.replace(orig, repl)
        return email

    def generate_password(self, length=8):
        """Return a random alphanumeric password of the given length.

        NOTE(review): random.choices is not cryptographically secure;
        use the secrets module for security-sensitive credentials.
        """
        chars = string.ascii_letters + string.digits
        return ''.join(random.choices(chars, k=length))

    def pprint(self, data=""):
        """
        Pretty print a dictionary, list, or text.
        If data is a text containing JSON, it will be printed in a formatted way.
        """
        if isinstance(data, (dict, list)):
            print(json.dumps(data, indent=2, ensure_ascii=False))
        elif isinstance(data, str):
            try:
                json_data = json.loads(data)
                print(json.dumps(json_data, indent=2, ensure_ascii=False))
            except json.JSONDecodeError:
                print(data)
        else:
            print(data)
|
||||
4
data/Dockerfiles/controller/mailcow-adm/requirements.txt
Normal file
4
data/Dockerfiles/controller/mailcow-adm/requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
||||
jinja2
|
||||
requests
|
||||
mysql-connector-python
|
||||
pytest
|
||||
@@ -0,0 +1,94 @@
|
||||
import pytest
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
|
||||
from models.DomainModel import DomainModel
|
||||
from models.AliasModel import AliasModel
|
||||
|
||||
|
||||
def test_model():
    """
    End-to-end CRUD test for AliasModel against a running mailcow API:
    create a test domain, add an alias (expect success), add it again
    (expect failure), fetch it, edit it, delete it, then remove the
    domain again.

    NOTE(review): this is an integration test — it requires a reachable
    mailcow instance behind the models, not just the Python environment.
    """
    # Generate random alias
    random_alias = f"alias_test{os.urandom(4).hex()}@mailcow.local"

    # Create an instance of AliasModel
    model = AliasModel(
        address=random_alias,
        goto="test@mailcow.local,test2@mailcow.local"
    )

    # Test the parser_command attribute
    assert model.parser_command == "alias", "Parser command should be 'alias'"

    # add Domain for testing
    domain_model = DomainModel(domain="mailcow.local")
    domain_model.add()

    # 1. Alias add tests, should success
    r_add = model.add()
    assert isinstance(r_add, list), f"Expected a array but received: {json.dumps(r_add, indent=2)}"
    assert len(r_add) > 0, f"Wrong array received: {json.dumps(r_add, indent=2)}"
    assert "type" in r_add[0], f"'type' key missing in response: {json.dumps(r_add, indent=2)}"
    assert r_add[0]['type'] == "success", f"Wrong 'type' received: {r_add[0]['type']}\n{json.dumps(r_add, indent=2)}"
    assert "msg" in r_add[0], f"'msg' key missing in response: {json.dumps(r_add, indent=2)}"
    assert isinstance(r_add[0]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(r_add, indent=2)}"
    assert len(r_add[0]['msg']) > 0 and len(r_add[0]['msg']) <= 3, f"Wrong 'msg' array received: {json.dumps(r_add, indent=2)}"
    assert r_add[0]['msg'][0] == "alias_added", f"Wrong 'msg' received: {r_add[0]['msg'][0]}, expected: 'alias_added'\n{json.dumps(r_add, indent=2)}"

    # Assign created alias ID for further tests
    # (the API reports the new id as the third element of 'msg')
    model.id = r_add[0]['msg'][2]

    # 2. Alias add tests, should fail because the alias already exists
    r_add = model.add()
    assert isinstance(r_add, list), f"Expected a array but received: {json.dumps(r_add, indent=2)}"
    assert len(r_add) > 0, f"Wrong array received: {json.dumps(r_add, indent=2)}"
    assert "type" in r_add[0], f"'type' key missing in response: {json.dumps(r_add, indent=2)}"
    assert r_add[0]['type'] == "danger", f"Wrong 'type' received: {r_add[0]['type']}\n{json.dumps(r_add, indent=2)}"
    assert "msg" in r_add[0], f"'msg' key missing in response: {json.dumps(r_add, indent=2)}"
    assert isinstance(r_add[0]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(r_add, indent=2)}"
    assert len(r_add[0]['msg']) > 0 and len(r_add[0]['msg']) <= 2, f"Wrong 'msg' array received: {json.dumps(r_add, indent=2)}"
    assert r_add[0]['msg'][0] == "is_alias_or_mailbox", f"Wrong 'msg' received: {r_add[0]['msg'][0]}, expected: 'is_alias_or_mailbox'\n{json.dumps(r_add, indent=2)}"

    # 3. Alias get tests
    r_get = model.get()
    assert isinstance(r_get, dict), f"Expected a dict but received: {json.dumps(r_get, indent=2)}"
    assert "domain" in r_get, f"'domain' key missing in response: {json.dumps(r_get, indent=2)}"
    assert "goto" in r_get, f"'goto' key missing in response: {json.dumps(r_get, indent=2)}"
    assert "address" in r_get, f"'address' key missing in response: {json.dumps(r_get, indent=2)}"
    assert r_get['domain'] == model.address.split("@")[1], f"Wrong 'domain' received: {r_get['domain']}, expected: {model.address.split('@')[1]}\n{json.dumps(r_get, indent=2)}"
    assert r_get['goto'] == model.goto, f"Wrong 'goto' received: {r_get['goto']}, expected: {model.goto}\n{json.dumps(r_get, indent=2)}"
    assert r_get['address'] == model.address, f"Wrong 'address' received: {r_get['address']}, expected: {model.address}\n{json.dumps(r_get, indent=2)}"

    # 4. Alias edit tests
    model.goto = "test@mailcow.local"
    model.active = 0
    r_edit = model.edit()
    assert isinstance(r_edit, list), f"Expected a array but received: {json.dumps(r_edit, indent=2)}"
    assert len(r_edit) > 0, f"Wrong array received: {json.dumps(r_edit, indent=2)}"
    assert "type" in r_edit[0], f"'type' key missing in response: {json.dumps(r_edit, indent=2)}"
    assert r_edit[0]['type'] == "success", f"Wrong 'type' received: {r_edit[0]['type']}\n{json.dumps(r_edit, indent=2)}"
    assert "msg" in r_edit[0], f"'msg' key missing in response: {json.dumps(r_edit, indent=2)}"
    assert isinstance(r_edit[0]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(r_edit, indent=2)}"
    assert len(r_edit[0]['msg']) > 0 and len(r_edit[0]['msg']) <= 2, f"Wrong 'msg' array received: {json.dumps(r_edit, indent=2)}"
    assert r_edit[0]['msg'][0] == "alias_modified", f"Wrong 'msg' received: {r_edit[0]['msg'][0]}, expected: 'alias_modified'\n{json.dumps(r_edit, indent=2)}"

    # 5. Alias delete tests
    r_delete = model.delete()
    assert isinstance(r_delete, list), f"Expected a array but received: {json.dumps(r_delete, indent=2)}"
    assert len(r_delete) > 0, f"Wrong array received: {json.dumps(r_delete, indent=2)}"
    assert "type" in r_delete[0], f"'type' key missing in response: {json.dumps(r_delete, indent=2)}"
    assert r_delete[0]['type'] == "success", f"Wrong 'type' received: {r_delete[0]['type']}\n{json.dumps(r_delete, indent=2)}"
    assert "msg" in r_delete[0], f"'msg' key missing in response: {json.dumps(r_delete, indent=2)}"
    assert isinstance(r_delete[0]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(r_delete, indent=2)}"
    assert len(r_delete[0]['msg']) > 0 and len(r_delete[0]['msg']) <= 2, f"Wrong 'msg' array received: {json.dumps(r_delete, indent=2)}"
    assert r_delete[0]['msg'][0] == "alias_removed", f"Wrong 'msg' received: {r_delete[0]['msg'][0]}, expected: 'alias_removed'\n{json.dumps(r_delete, indent=2)}"

    # delete testing Domain
    domain_model.delete()
|
||||
|
||||
|
||||
# Allow running this test file directly, without pytest.
if __name__ == "__main__":
    print("Running AliasModel tests...")
    test_model()
    print("All tests passed!")
|
||||
@@ -0,0 +1,71 @@
|
||||
import pytest
|
||||
from models.BaseModel import BaseModel
|
||||
|
||||
|
||||
class Args:
    """Ad-hoc namespace object: every keyword argument becomes an attribute."""

    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)
|
||||
|
||||
|
||||
def test_has_required_args():
    """
    Unit test for BaseModel.has_required_args.

    required_args maps an object name to a list of alternative argument
    groups; the check is expected to pass when ALL args of AT LEAST ONE
    group are present. Both attribute-style (Args) and dict-style inputs
    are exercised, plus the edge case of an empty requirement group.
    """
    BaseModel.required_args = {
        "test_object": [["arg1"], ["arg2", "arg3"]],
    }

    # Test cases with Args object
    # Unknown object name: never satisfiable.
    args = Args(object="non_existent_object")
    assert BaseModel.has_required_args(args) == False

    # Known object but no args at all.
    args = Args(object="test_object")
    assert BaseModel.has_required_args(args) == False

    # First alternative group ("arg1") fully present.
    args = Args(object="test_object", arg1="value")
    assert BaseModel.has_required_args(args) == True

    # Second group only partially present.
    args = Args(object="test_object", arg2="value")
    assert BaseModel.has_required_args(args) == False

    args = Args(object="test_object", arg3="value")
    assert BaseModel.has_required_args(args) == False

    # Second alternative group ("arg2" + "arg3") fully present.
    args = Args(object="test_object", arg2="value", arg3="value")
    assert BaseModel.has_required_args(args) == True

    # Test cases with dict object
    args = {"object": "non_existent_object"}
    assert BaseModel.has_required_args(args) == False

    args = {"object": "test_object"}
    assert BaseModel.has_required_args(args) == False

    args = {"object": "test_object", "arg1": "value"}
    assert BaseModel.has_required_args(args) == True

    args = {"object": "test_object", "arg2": "value"}
    assert BaseModel.has_required_args(args) == False

    args = {"object": "test_object", "arg3": "value"}
    assert BaseModel.has_required_args(args) == False

    args = {"object": "test_object", "arg2": "value", "arg3": "value"}
    assert BaseModel.has_required_args(args) == True


    # Edge case: an empty requirement group means no args are required.
    BaseModel.required_args = {
        "test_object": [[]],
    }

    # Test cases with Args object
    args = Args(object="non_existent_object")
    assert BaseModel.has_required_args(args) == False

    args = Args(object="test_object")
    assert BaseModel.has_required_args(args) == True

    # Test cases with dict object
    args = {"object": "non_existent_object"}
    assert BaseModel.has_required_args(args) == False

    args = {"object": "test_object"}
    assert BaseModel.has_required_args(args) == True
|
||||
@@ -0,0 +1,74 @@
|
||||
import pytest
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
|
||||
from models.DomainModel import DomainModel
|
||||
|
||||
|
||||
def test_model():
    """
    End-to-end CRUD test for DomainModel against a running mailcow API:
    add a domain (expect success), add it again (expect failure), fetch
    it, edit it, then delete it.

    NOTE(review): this is an integration test — it requires a reachable
    mailcow instance behind the models, not just the Python environment.
    """
    # Create an instance of DomainModel
    model = DomainModel(
        domain="mailcow.local",
    )

    # Test the parser_command attribute
    assert model.parser_command == "domain", "Parser command should be 'domain'"

    # 1. Domain add tests, should success
    # (for domain creation the success entry is the SECOND element)
    r_add = model.add()
    assert isinstance(r_add, list), f"Expected a array but received: {json.dumps(r_add, indent=2)}"
    assert len(r_add) > 0 and len(r_add) >= 2, f"Wrong array received: {json.dumps(r_add, indent=2)}"
    assert "type" in r_add[1], f"'type' key missing in response: {json.dumps(r_add, indent=2)}"
    assert r_add[1]['type'] == "success", f"Wrong 'type' received: {r_add[1]['type']}\n{json.dumps(r_add, indent=2)}"
    assert "msg" in r_add[1], f"'msg' key missing in response: {json.dumps(r_add, indent=2)}"
    assert isinstance(r_add[1]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(r_add, indent=2)}"
    assert len(r_add[1]['msg']) > 0 and len(r_add[1]['msg']) <= 2, f"Wrong 'msg' array received: {json.dumps(r_add, indent=2)}"
    assert r_add[1]['msg'][0] == "domain_added", f"Wrong 'msg' received: {r_add[1]['msg'][0]}, expected: 'domain_added'\n{json.dumps(r_add, indent=2)}"

    # 2. Domain add tests, should fail because the domain already exists
    r_add = model.add()
    assert isinstance(r_add, list), f"Expected a array but received: {json.dumps(r_add, indent=2)}"
    assert len(r_add) > 0, f"Wrong array received: {json.dumps(r_add, indent=2)}"
    assert "type" in r_add[0], f"'type' key missing in response: {json.dumps(r_add, indent=2)}"
    assert r_add[0]['type'] == "danger", f"Wrong 'type' received: {r_add[0]['type']}\n{json.dumps(r_add, indent=2)}"
    assert "msg" in r_add[0], f"'msg' key missing in response: {json.dumps(r_add, indent=2)}"
    assert isinstance(r_add[0]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(r_add, indent=2)}"
    assert len(r_add[0]['msg']) > 0 and len(r_add[0]['msg']) <= 2, f"Wrong 'msg' array received: {json.dumps(r_add, indent=2)}"
    assert r_add[0]['msg'][0] == "domain_exists", f"Wrong 'msg' received: {r_add[0]['msg'][0]}, expected: 'domain_exists'\n{json.dumps(r_add, indent=2)}"

    # 3. Domain get tests
    r_get = model.get()
    assert isinstance(r_get, dict), f"Expected a dict but received: {json.dumps(r_get, indent=2)}"
    assert "domain_name" in r_get, f"'domain_name' key missing in response: {json.dumps(r_get, indent=2)}"
    assert r_get['domain_name'] == model.domain, f"Wrong 'domain_name' received: {r_get['domain_name']}, expected: {model.domain}\n{json.dumps(r_get, indent=2)}"

    # 4. Domain edit tests
    model.active = 0
    r_edit = model.edit()
    assert isinstance(r_edit, list), f"Expected a array but received: {json.dumps(r_edit, indent=2)}"
    assert len(r_edit) > 0, f"Wrong array received: {json.dumps(r_edit, indent=2)}"
    assert "type" in r_edit[0], f"'type' key missing in response: {json.dumps(r_edit, indent=2)}"
    assert r_edit[0]['type'] == "success", f"Wrong 'type' received: {r_edit[0]['type']}\n{json.dumps(r_edit, indent=2)}"
    assert "msg" in r_edit[0], f"'msg' key missing in response: {json.dumps(r_edit, indent=2)}"
    assert isinstance(r_edit[0]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(r_edit, indent=2)}"
    assert len(r_edit[0]['msg']) > 0 and len(r_edit[0]['msg']) <= 2, f"Wrong 'msg' array received: {json.dumps(r_edit, indent=2)}"
    assert r_edit[0]['msg'][0] == "domain_modified", f"Wrong 'msg' received: {r_edit[0]['msg'][0]}, expected: 'domain_modified'\n{json.dumps(r_edit, indent=2)}"

    # 5. Domain delete tests
    r_delete = model.delete()
    assert isinstance(r_delete, list), f"Expected a array but received: {json.dumps(r_delete, indent=2)}"
    assert len(r_delete) > 0, f"Wrong array received: {json.dumps(r_delete, indent=2)}"
    assert "type" in r_delete[0], f"'type' key missing in response: {json.dumps(r_delete, indent=2)}"
    assert r_delete[0]['type'] == "success", f"Wrong 'type' received: {r_delete[0]['type']}\n{json.dumps(r_delete, indent=2)}"
    assert "msg" in r_delete[0], f"'msg' key missing in response: {json.dumps(r_delete, indent=2)}"
    assert isinstance(r_delete[0]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(r_delete, indent=2)}"
    assert len(r_delete[0]['msg']) > 0 and len(r_delete[0]['msg']) <= 2, f"Wrong 'msg' array received: {json.dumps(r_delete, indent=2)}"
    assert r_delete[0]['msg'][0] == "domain_removed", f"Wrong 'msg' received: {r_delete[0]['msg'][0]}, expected: 'domain_removed'\n{json.dumps(r_delete, indent=2)}"
|
||||
|
||||
|
||||
# Allow running this test file directly, without pytest.
if __name__ == "__main__":
    print("Running DomainModel tests...")
    test_model()
    print("All tests passed!")
|
||||
@@ -0,0 +1,89 @@
|
||||
import pytest
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
|
||||
from models.DomainModel import DomainModel
|
||||
from models.DomainadminModel import DomainadminModel
|
||||
|
||||
|
||||
def test_model():
    """End-to-end CRUD test for DomainadminModel against a live mailcow API.

    Creates a throwaway domain admin (add), verifies duplicate detection,
    reads it back (get), deactivates it (edit), removes it (delete), and
    cleans up the helper domain afterwards.
    """

    def _assert_action(resp, expected_type, expected_msg, max_msg=2):
        # Every write action returns a list of result objects; the first one
        # carries a 'type' ("success"/"danger") and a 'msg' list whose first
        # element is the machine-readable message key.
        assert isinstance(resp, list), f"Expected a array but received: {json.dumps(resp, indent=2)}"
        assert len(resp) > 0, f"Wrong array received: {json.dumps(resp, indent=2)}"
        assert "type" in resp[0], f"'type' key missing in response: {json.dumps(resp, indent=2)}"
        assert resp[0]['type'] == expected_type, f"Wrong 'type' received: {resp[0]['type']}\n{json.dumps(resp, indent=2)}"
        assert "msg" in resp[0], f"'msg' key missing in response: {json.dumps(resp, indent=2)}"
        assert isinstance(resp[0]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(resp, indent=2)}"
        assert 0 < len(resp[0]['msg']) <= max_msg, f"Wrong 'msg' array received: {json.dumps(resp, indent=2)}"
        assert resp[0]['msg'][0] == expected_msg, f"Wrong 'msg' received: {resp[0]['msg'][0]}, expected: '{expected_msg}'\n{json.dumps(resp, indent=2)}"

    # Random credentials so repeated runs do not collide with leftovers.
    random_username = f"dadmin_test{os.urandom(4).hex()}"
    random_password = os.urandom(4).hex()

    # Create an instance of DomainadminModel
    model = DomainadminModel(
        username=random_username,
        password=random_password,
        domains="mailcow.local",
    )

    # Test the parser_command attribute
    assert model.parser_command == "domainadmin", "Parser command should be 'domainadmin'"

    # The domain admin must be scoped to an existing domain.
    domain_model = DomainModel(domain="mailcow.local")
    domain_model.add()

    # 1. Domainadmin add tests, should success
    _assert_action(model.add(), "success", "domain_admin_added", max_msg=3)

    # 2. Domainadmin add tests, should fail because the domainadmin already exists
    _assert_action(model.add(), "danger", "object_exists")

    # 3. Domainadmin get tests
    r_get = model.get()
    assert isinstance(r_get, dict), f"Expected a dict but received: {json.dumps(r_get, indent=2)}"
    assert "selected_domains" in r_get, f"'selected_domains' key missing in response: {json.dumps(r_get, indent=2)}"
    assert "username" in r_get, f"'username' key missing in response: {json.dumps(r_get, indent=2)}"
    # Compare as sets: the API may reorder the comma-separated domain list.
    assert set(model.domains.replace(" ", "").split(",")) == set(r_get['selected_domains']), f"Wrong 'selected_domains' received: {r_get['selected_domains']}, expected: {model.domains}\n{json.dumps(r_get, indent=2)}"
    assert r_get['username'] == model.username, f"Wrong 'username' received: {r_get['username']}, expected: {model.username}\n{json.dumps(r_get, indent=2)}"

    # 4. Domainadmin edit tests (deactivate)
    model.active = 0
    _assert_action(model.edit(), "success", "domain_admin_modified")

    # 5. Domainadmin delete tests
    _assert_action(model.delete(), "success", "domain_admin_removed")

    # delete testing Domain
    domain_model.delete()
|
||||
|
||||
# Allow running this test file directly without a pytest runner.
if __name__ == "__main__":
    print("Running DomainadminModel tests...")
    test_model()
    print("All tests passed!")
|
||||
@@ -0,0 +1,89 @@
|
||||
import pytest
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
|
||||
from models.DomainModel import DomainModel
|
||||
from models.MailboxModel import MailboxModel
|
||||
|
||||
|
||||
def test_model():
    """End-to-end CRUD test for MailboxModel against a live mailcow API.

    Creates a throwaway mailbox (add), verifies duplicate detection, reads it
    back (get), deactivates it (edit), removes it (delete), and cleans up the
    helper domain afterwards.
    """

    def _assert_action(resp, expected_type, expected_msg, idx=0, max_msg=2):
        # Every write action returns a list of result objects; entry `idx`
        # carries a 'type' ("success"/"danger") and a 'msg' list whose first
        # element is the machine-readable message key.
        assert isinstance(resp, list), f"Expected a array but received: {json.dumps(resp, indent=2)}"
        # Guard the index explicitly: the original only checked len > 0 before
        # reading resp[1], which raised IndexError on a short response instead
        # of failing with a readable assertion message.
        assert len(resp) > idx, f"Wrong array received: {json.dumps(resp, indent=2)}"
        assert "type" in resp[idx], f"'type' key missing in response: {json.dumps(resp, indent=2)}"
        assert resp[idx]['type'] == expected_type, f"Wrong 'type' received: {resp[idx]['type']}\n{json.dumps(resp, indent=2)}"
        assert "msg" in resp[idx], f"'msg' key missing in response: {json.dumps(resp, indent=2)}"
        assert isinstance(resp[idx]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(resp, indent=2)}"
        assert 0 < len(resp[idx]['msg']) <= max_msg, f"Wrong 'msg' array received: {json.dumps(resp, indent=2)}"
        assert resp[idx]['msg'][0] == expected_msg, f"Wrong 'msg' received: {resp[idx]['msg'][0]}, expected: '{expected_msg}'\n{json.dumps(resp, indent=2)}"

    # Random credentials so repeated runs do not collide with leftovers.
    random_username = f"mbox_test{os.urandom(4).hex()}@mailcow.local"
    random_password = os.urandom(4).hex()

    # Create an instance of MailboxModel
    model = MailboxModel(
        username=random_username,
        password=random_password
    )

    # Test the parser_command attribute
    assert model.parser_command == "mailbox", "Parser command should be 'mailbox'"

    # The mailbox needs an existing domain.
    domain_model = DomainModel(domain="mailcow.local")
    domain_model.add()

    # 1. Mailbox add tests, should success.
    # The mailbox status is carried by the SECOND result entry (index 1).
    r_add = model.add()
    _assert_action(r_add, "success", "mailbox_added", idx=1, max_msg=3)
    assert len(r_add) <= 2, f"Wrong array received: {json.dumps(r_add, indent=2)}"

    # 2. Mailbox add tests, should fail because the mailbox already exists
    _assert_action(model.add(), "danger", "object_exists")

    # 3. Mailbox get tests
    r_get = model.get()
    assert isinstance(r_get, dict), f"Expected a dict but received: {json.dumps(r_get, indent=2)}"
    assert "domain" in r_get, f"'domain' key missing in response: {json.dumps(r_get, indent=2)}"
    assert "local_part" in r_get, f"'local_part' key missing in response: {json.dumps(r_get, indent=2)}"
    assert r_get['domain'] == model.domain, f"Wrong 'domain' received: {r_get['domain']}, expected: {model.domain}\n{json.dumps(r_get, indent=2)}"
    assert r_get['local_part'] == model.local_part, f"Wrong 'local_part' received: {r_get['local_part']}, expected: {model.local_part}\n{json.dumps(r_get, indent=2)}"

    # 4. Mailbox edit tests (deactivate)
    model.active = 0
    _assert_action(model.edit(), "success", "mailbox_modified")

    # 5. Mailbox delete tests
    _assert_action(model.delete(), "success", "mailbox_removed")

    # delete testing Domain
    domain_model.delete()
|
||||
|
||||
|
||||
# Allow running this test file directly without a pytest runner.
if __name__ == "__main__":
    print("Running MailboxModel tests...")
    test_model()
    print("All tests passed!")
|
||||
@@ -0,0 +1,39 @@
|
||||
import pytest
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
|
||||
from models.StatusModel import StatusModel
|
||||
|
||||
|
||||
def test_model():
    """Smoke-test the read-only StatusModel endpoints (version, vmail, containers)."""
    # Create an instance of StatusModel
    model = StatusModel()

    # Test the parser_command attribute
    assert model.parser_command == "status", "Parser command should be 'status'"

    # 1. Status version tests: a dict carrying the mailcow version string.
    r_version = model.version()
    assert isinstance(r_version, dict), f"Expected a dict but received: {json.dumps(r_version, indent=2)}"
    assert "version" in r_version, f"'version' key missing in response: {json.dumps(r_version, indent=2)}"

    # 2. Status vmail tests: disk-usage report for the mail volume.
    r_vmail = model.vmail()
    assert isinstance(r_vmail, dict), f"Expected a dict but received: {json.dumps(r_vmail, indent=2)}"
    for key in ("type", "disk", "used", "total", "used_percent"):
        assert key in r_vmail, f"'{key}' key missing in response: {json.dumps(r_vmail, indent=2)}"

    # 3. Status containers tests: per-container state map.
    r_containers = model.containers()
    assert isinstance(r_containers, dict), f"Expected a dict but received: {json.dumps(r_containers, indent=2)}"
|
||||
|
||||
|
||||
# Allow running this test file directly without a pytest runner.
if __name__ == "__main__":
    print("Running StatusModel tests...")
    test_model()
    print("All tests passed!")
|
||||
@@ -0,0 +1,106 @@
|
||||
import pytest
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../")))
|
||||
from models.DomainModel import DomainModel
|
||||
from models.MailboxModel import MailboxModel
|
||||
from models.SyncjobModel import SyncjobModel
|
||||
|
||||
|
||||
def test_model():
    """End-to-end CRUD test for SyncjobModel against a live mailcow API.

    Creates a helper domain and mailbox, attaches a throwaway syncjob (add),
    verifies duplicate detection, reads it back (get), activates it (edit),
    removes it (delete), and cleans up the helpers afterwards.
    """

    def _assert_action(resp, expected_type, expected_msg, max_msg=2):
        # Every write action returns a list of result objects; the first one
        # carries a 'type' ("success"/"danger") and a 'msg' list whose first
        # element is the machine-readable message key.
        assert isinstance(resp, list), f"Expected a array but received: {json.dumps(resp, indent=2)}"
        assert len(resp) > 0, f"Wrong array received: {json.dumps(resp, indent=2)}"
        assert "type" in resp[0], f"'type' key missing in response: {json.dumps(resp, indent=2)}"
        assert resp[0]['type'] == expected_type, f"Wrong 'type' received: {resp[0]['type']}\n{json.dumps(resp, indent=2)}"
        assert "msg" in resp[0], f"'msg' key missing in response: {json.dumps(resp, indent=2)}"
        assert isinstance(resp[0]['msg'], list), f"Expected a 'msg' array but received: {json.dumps(resp, indent=2)}"
        assert 0 < len(resp[0]['msg']) <= max_msg, f"Wrong 'msg' array received: {json.dumps(resp, indent=2)}"
        assert resp[0]['msg'][0] == expected_msg, f"Wrong 'msg' received: {resp[0]['msg'][0]}, expected: '{expected_msg}'\n{json.dumps(resp, indent=2)}"

    # Generate random Mailbox credentials. The username was previously
    # hard-coded despite the "random" intent, so repeated runs could collide
    # with leftovers from an aborted run.
    random_username = f"mbox_test{os.urandom(4).hex()}@mailcow.local"
    random_password = os.urandom(4).hex()

    # Create an instance of SyncjobModel
    model = SyncjobModel(
        username=random_username,
        host1="mailcow.local",
        port1=993,
        user1="testuser@mailcow.local",
        password1="testpassword",
        enc1="SSL",
    )

    # Test the parser_command attribute
    assert model.parser_command == "syncjob", "Parser command should be 'syncjob'"

    # add Domain and Mailbox for testing
    domain_model = DomainModel(domain="mailcow.local")
    domain_model.add()
    mbox_model = MailboxModel(username=random_username, password=random_password)
    mbox_model.add()

    # 1. Syncjob add tests, should success
    r_add = model.add()
    _assert_action(r_add, "success", "mailbox_modified", max_msg=3)
    # Guard before reading msg[2]: the original only asserted len(msg) <= 3,
    # so a shorter 'msg' raised IndexError instead of a readable assertion.
    assert len(r_add[0]['msg']) >= 3, f"Wrong 'msg' array received: {json.dumps(r_add, indent=2)}"
    # The new syncjob's ID is the third 'msg' element; keep it for get/edit/delete.
    model.id = r_add[0]['msg'][2]

    # 2. Syncjob add tests, should fail because the syncjob already exists
    _assert_action(model.add(), "danger", "object_exists")

    # 3. Syncjob get tests
    r_get = model.get()
    assert isinstance(r_get, list), f"Expected a list but received: {json.dumps(r_get, indent=2)}"
    # Guard against an empty list before indexing r_get[0].
    assert len(r_get) > 0, f"Expected a non-empty list but received: {json.dumps(r_get, indent=2)}"
    for key in ("user2", "host1", "port1", "user1", "enc1"):
        assert key in r_get[0], f"'{key}' key missing in response: {json.dumps(r_get, indent=2)}"
    assert r_get[0]['user2'] == model.username, f"Wrong 'user2' received: {r_get[0]['user2']}, expected: {model.username}\n{json.dumps(r_get, indent=2)}"
    assert r_get[0]['host1'] == model.host1, f"Wrong 'host1' received: {r_get[0]['host1']}, expected: {model.host1}\n{json.dumps(r_get, indent=2)}"
    assert r_get[0]['port1'] == model.port1, f"Wrong 'port1' received: {r_get[0]['port1']}, expected: {model.port1}\n{json.dumps(r_get, indent=2)}"
    assert r_get[0]['user1'] == model.user1, f"Wrong 'user1' received: {r_get[0]['user1']}, expected: {model.user1}\n{json.dumps(r_get, indent=2)}"
    assert r_get[0]['enc1'] == model.enc1, f"Wrong 'enc1' received: {r_get[0]['enc1']}, expected: {model.enc1}\n{json.dumps(r_get, indent=2)}"

    # 4. Syncjob edit tests (activate)
    model.active = 1
    _assert_action(model.edit(), "success", "mailbox_modified")

    # 5. Syncjob delete tests
    _assert_action(model.delete(), "success", "deleted_syncjob")

    # delete testing Domain and Mailbox
    mbox_model.delete()
    domain_model.delete()
|
||||
|
||||
|
||||
# Allow running this test file directly without a pytest runner.
if __name__ == "__main__":
    print("Running SyncjobModel tests...")
    test_model()
    print("All tests passed!")
|
||||
8
data/Dockerfiles/controller/stop-supervisor.sh
Executable file
8
data/Dockerfiles/controller/stop-supervisor.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash
# Supervisor event listener: shut down supervisord (and thereby the whole
# container) as soon as any supervised program stops, exits, or goes fatal.
# Wired up via [eventlistener:processes] in supervisord.conf.

# Tell supervisord we are ready to receive events.
printf "READY\n";

while read -r line; do
  # -r prevents backslash-escape mangling of the event payload.
  echo "Processing Event: $line" >&2;
  # SIGQUIT asks supervisord to stop all programs and exit.
  kill -3 "$(cat "/var/run/supervisord.pid")"
done < /dev/stdin
|
||||
17
data/Dockerfiles/controller/supervisord.conf
Normal file
17
data/Dockerfiles/controller/supervisord.conf
Normal file
@@ -0,0 +1,17 @@
|
||||
[supervisord]
; Run in the foreground so the container stays attached to supervisord.
nodaemon=true
user=root
; PID file read by stop-supervisor.sh to signal shutdown.
pidfile=/var/run/supervisord.pid

[program:api]
command=python /app/api/main.py
autostart=true
autorestart=true
; Forward program output to the container's stdout/stderr.
stdout_logfile=/dev/stdout
stderr_logfile=/dev/stderr
; Disable log rotation; /dev/stdout and /dev/stderr are not seekable.
stdout_logfile_maxbytes=0
stderr_logfile_maxbytes=0

[eventlistener:processes]
; Shut down supervisord (and thus the container) when any program dies,
; so the orchestrator can restart the container cleanly.
command=/usr/local/sbin/stop-supervisor.sh
events=PROCESS_STATE_STOPPED, PROCESS_STATE_EXITED, PROCESS_STATE_FATAL
|
||||
@@ -1,9 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
`openssl req -x509 -newkey rsa:4096 -sha256 -days 3650 -nodes \
|
||||
-keyout /app/dockerapi_key.pem \
|
||||
-out /app/dockerapi_cert.pem \
|
||||
-subj /CN=dockerapi/O=mailcow \
|
||||
-addext subjectAltName=DNS:dockerapi`
|
||||
|
||||
exec "$@"
|
||||
@@ -3,7 +3,7 @@ FROM alpine:3.21
|
||||
LABEL maintainer="The Infrastructure Company GmbH <info@servercow.de>"
|
||||
|
||||
# renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=^(?<version>.*)$
|
||||
ARG GOSU_VERSION=1.17
|
||||
ARG GOSU_VERSION=1.19
|
||||
|
||||
ENV LANG=C.UTF-8
|
||||
ENV LC_ALL=C.UTF-8
|
||||
|
||||
@@ -204,16 +204,17 @@ EOF
|
||||
# Create random master Password for SOGo SSO
|
||||
RAND_PASS=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 32 | head -n 1)
|
||||
echo -n ${RAND_PASS} > /etc/phpfpm/sogo-sso.pass
|
||||
# Creating additional creds file for SOGo notify crons (calendars, etc)
|
||||
echo -n ${RAND_USER}@mailcow.local:${RAND_PASS} > /etc/sogo/cron.creds
|
||||
cat <<EOF > /etc/dovecot/sogo-sso.conf
|
||||
# Autogenerated by mailcow
|
||||
passdb {
|
||||
driver = static
|
||||
args = allow_real_nets=${IPV4_NETWORK}.248/32 password={plain}${RAND_PASS}
|
||||
args = allow_nets=${IPV4_NETWORK}.248/32 password={plain}${RAND_PASS}
|
||||
}
|
||||
EOF
|
||||
|
||||
# Creating additional creds file for SOGo notify crons (calendars, etc) (dummy user, sso password)
|
||||
echo -n ${RAND_USER}@mailcow.local:${RAND_PASS} > /etc/sogo/cron.creds
|
||||
|
||||
if [[ "${MASTER}" =~ ^([nN][oO]|[nN])+$ ]]; then
|
||||
# Toggling MASTER will result in a rebuild of containers, so the quota script will be recreated
|
||||
cat <<'EOF' > /usr/local/bin/quota_notify.py
|
||||
|
||||
@@ -25,11 +25,11 @@ sed -i -e 's/\([^\\]\)\$\([^\/]\)/\1\\$\2/g' /etc/rspamd/custom/sa-rules
|
||||
|
||||
if [[ "$(cat /etc/rspamd/custom/sa-rules | md5sum | cut -d' ' -f1)" != "${HASH_SA_RULES}" ]]; then
|
||||
CONTAINER_NAME=rspamd-mailcow
|
||||
CONTAINER_ID=$(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | \
|
||||
CONTAINER_ID=$(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | \
|
||||
jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | \
|
||||
jq -rc "select( .name | tostring | contains(\"${CONTAINER_NAME}\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id")
|
||||
if [[ ! -z ${CONTAINER_ID} ]]; then
|
||||
curl --silent --insecure -XPOST --connect-timeout 15 --max-time 120 https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/restart
|
||||
curl --silent --insecure -XPOST --connect-timeout 15 --max-time 120 https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/restart
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/sh
|
||||
|
||||
backend=iptables
|
||||
backend=nftables
|
||||
|
||||
nft list table ip filter &>/dev/null
|
||||
nftables_found=$?
|
||||
|
||||
@@ -449,6 +449,11 @@ if __name__ == '__main__':
|
||||
tables = NFTables(chain_name, logger)
|
||||
else:
|
||||
logger.logInfo('Using IPTables backend')
|
||||
logger.logWarn(
|
||||
"DEPRECATION: iptables-legacy is deprecated and will be removed in future releases. "
|
||||
"Please switch to nftables on your host to ensure complete compatibility."
|
||||
)
|
||||
time.sleep(5)
|
||||
tables = IPTables(chain_name, logger)
|
||||
|
||||
clear()
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import time
|
||||
import json
|
||||
import datetime
|
||||
|
||||
class Logger:
|
||||
def __init__(self):
|
||||
@@ -8,17 +9,28 @@ class Logger:
|
||||
def set_redis(self, redis):
|
||||
self.r = redis
|
||||
|
||||
def _format_timestamp(self):
|
||||
# Local time with milliseconds
|
||||
return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
def log(self, priority, message):
|
||||
tolog = {}
|
||||
tolog['time'] = int(round(time.time()))
|
||||
tolog['priority'] = priority
|
||||
tolog['message'] = message
|
||||
print(message)
|
||||
# build redis-friendly dict
|
||||
tolog = {
|
||||
'time': int(round(time.time())), # keep raw timestamp for Redis
|
||||
'priority': priority,
|
||||
'message': message
|
||||
}
|
||||
|
||||
# print human-readable message with timestamp
|
||||
ts = self._format_timestamp()
|
||||
print(f"{ts} {priority.upper()}: {message}", flush=True)
|
||||
|
||||
# also push JSON to Redis if connected
|
||||
if self.r is not None:
|
||||
try:
|
||||
self.r.lpush('NETFILTER_LOG', json.dumps(tolog, ensure_ascii=False))
|
||||
except Exception as ex:
|
||||
print('Failed logging to redis: %s' % (ex))
|
||||
print(f'{ts} WARN: Failed logging to redis: {ex}', flush=True)
|
||||
|
||||
def logWarn(self, message):
|
||||
self.log('warn', message)
|
||||
@@ -27,4 +39,4 @@ class Logger:
|
||||
self.log('crit', message)
|
||||
|
||||
def logInfo(self, message):
|
||||
self.log('info', message)
|
||||
self.log('info', message)
|
||||
@@ -10,7 +10,7 @@ def includes_conf(env, template_vars):
|
||||
server_name_config = f"server_name {template_vars['MAILCOW_HOSTNAME']} autodiscover.* autoconfig.* {' '.join(template_vars['ADDITIONAL_SERVER_NAMES'])};"
|
||||
listen_plain_config = f"listen {template_vars['HTTP_PORT']};"
|
||||
listen_ssl_config = f"listen {template_vars['HTTPS_PORT']};"
|
||||
if not template_vars['ENABLE_IPV6']:
|
||||
if template_vars['ENABLE_IPV6']:
|
||||
listen_plain_config += f"\nlisten [::]:{template_vars['HTTP_PORT']};"
|
||||
listen_ssl_config += f"\nlisten [::]:{template_vars['HTTPS_PORT']} ssl;"
|
||||
listen_ssl_config += "\nhttp2 on;"
|
||||
|
||||
@@ -3,15 +3,15 @@ FROM php:8.2-fpm-alpine3.21
|
||||
LABEL maintainer = "The Infrastructure Company GmbH <info@servercow.de>"
|
||||
|
||||
# renovate: datasource=github-tags depName=krakjoe/apcu versioning=semver-coerced extractVersion=^v(?<version>.*)$
|
||||
ARG APCU_PECL_VERSION=5.1.26
|
||||
ARG APCU_PECL_VERSION=5.1.28
|
||||
# renovate: datasource=github-tags depName=Imagick/imagick versioning=semver-coerced extractVersion=(?<version>.*)$
|
||||
ARG IMAGICK_PECL_VERSION=3.8.0
|
||||
ARG IMAGICK_PECL_VERSION=3.8.1
|
||||
# renovate: datasource=github-tags depName=php/pecl-mail-mailparse versioning=semver-coerced extractVersion=^v(?<version>.*)$
|
||||
ARG MAILPARSE_PECL_VERSION=3.1.8
|
||||
ARG MAILPARSE_PECL_VERSION=3.1.9
|
||||
# renovate: datasource=github-tags depName=php-memcached-dev/php-memcached versioning=semver-coerced extractVersion=^v(?<version>.*)$
|
||||
ARG MEMCACHED_PECL_VERSION=3.3.0
|
||||
ARG MEMCACHED_PECL_VERSION=3.4.0
|
||||
# renovate: datasource=github-tags depName=phpredis/phpredis versioning=semver-coerced extractVersion=(?<version>.*)$
|
||||
ARG REDIS_PECL_VERSION=6.2.0
|
||||
ARG REDIS_PECL_VERSION=6.3.0
|
||||
# renovate: datasource=github-tags depName=composer/composer versioning=semver-coerced extractVersion=(?<version>.*)$
|
||||
ARG COMPOSER_VERSION=2.8.6
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ session.save_path = "tcp://'${REDIS_HOST}':'${REDIS_PORT}'?auth='${REDISPASS}'"
|
||||
# Check mysql_upgrade (master and slave)
|
||||
CONTAINER_ID=
|
||||
until [[ ! -z "${CONTAINER_ID}" ]] && [[ "${CONTAINER_ID}" =~ ^[[:alnum:]]*$ ]]; do
|
||||
CONTAINER_ID=$(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" 2> /dev/null | jq -rc "select( .name | tostring | contains(\"mysql-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" 2> /dev/null)
|
||||
CONTAINER_ID=$(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" 2> /dev/null | jq -rc "select( .name | tostring | contains(\"mysql-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" 2> /dev/null)
|
||||
echo "Could not get mysql-mailcow container id... trying again"
|
||||
sleep 2
|
||||
done
|
||||
@@ -44,7 +44,7 @@ until [[ ${SQL_UPGRADE_STATUS} == 'success' ]]; do
|
||||
echo "Tried to upgrade MySQL and failed, giving up after ${SQL_LOOP_C} retries and starting container (oops, not good)"
|
||||
break
|
||||
fi
|
||||
SQL_FULL_UPGRADE_RETURN=$(curl --silent --insecure -XPOST https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/exec -d '{"cmd":"system", "task":"mysql_upgrade"}' --silent -H 'Content-type: application/json')
|
||||
SQL_FULL_UPGRADE_RETURN=$(curl --silent --insecure -XPOST https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/exec -d '{"cmd":"system", "task":"mysql_upgrade"}' --silent -H 'Content-type: application/json')
|
||||
SQL_UPGRADE_STATUS=$(echo ${SQL_FULL_UPGRADE_RETURN} | jq -r .type)
|
||||
SQL_LOOP_C=$((SQL_LOOP_C+1))
|
||||
echo "SQL upgrade iteration #${SQL_LOOP_C}"
|
||||
@@ -69,12 +69,12 @@ done
|
||||
|
||||
# doing post-installation stuff, if SQL was upgraded (master and slave)
|
||||
if [ ${SQL_CHANGED} -eq 1 ]; then
|
||||
POSTFIX=$(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" 2> /dev/null | jq -rc "select( .name | tostring | contains(\"postfix-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" 2> /dev/null)
|
||||
POSTFIX=$(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" 2> /dev/null | jq -rc "select( .name | tostring | contains(\"postfix-mailcow\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id" 2> /dev/null)
|
||||
if [[ -z "${POSTFIX}" ]] || ! [[ "${POSTFIX}" =~ ^[[:alnum:]]*$ ]]; then
|
||||
echo "Could not determine Postfix container ID, skipping Postfix restart."
|
||||
else
|
||||
echo "Restarting Postfix"
|
||||
curl -X POST --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${POSTFIX}/restart | jq -r '.msg'
|
||||
curl -X POST --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${POSTFIX}/restart | jq -r '.msg'
|
||||
echo "Sleeping 5 seconds..."
|
||||
sleep 5
|
||||
fi
|
||||
@@ -83,7 +83,7 @@ fi
|
||||
# Check mysql tz import (master and slave)
|
||||
TZ_CHECK=$(mariadb --skip-ssl --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "SELECT CONVERT_TZ('2019-11-02 23:33:00','Europe/Berlin','UTC') AS time;" -BN 2> /dev/null)
|
||||
if [[ -z ${TZ_CHECK} ]] || [[ "${TZ_CHECK}" == "NULL" ]]; then
|
||||
SQL_FULL_TZINFO_IMPORT_RETURN=$(curl --silent --insecure -XPOST https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/exec -d '{"cmd":"system", "task":"mysql_tzinfo_to_sql"}' --silent -H 'Content-type: application/json')
|
||||
SQL_FULL_TZINFO_IMPORT_RETURN=$(curl --silent --insecure -XPOST https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/exec -d '{"cmd":"system", "task":"mysql_tzinfo_to_sql"}' --silent -H 'Content-type: application/json')
|
||||
echo "MySQL mysql_tzinfo_to_sql - debug output:"
|
||||
echo ${SQL_FULL_TZINFO_IMPORT_RETURN}
|
||||
fi
|
||||
@@ -167,7 +167,7 @@ DELIMITER //
|
||||
CREATE EVENT clean_spamalias
|
||||
ON SCHEDULE EVERY 1 DAY DO
|
||||
BEGIN
|
||||
DELETE FROM spamalias WHERE validity < UNIX_TIMESTAMP();
|
||||
DELETE FROM spamalias WHERE validity < UNIX_TIMESTAMP() AND permanent = 0;
|
||||
END;
|
||||
//
|
||||
DELIMITER ;
|
||||
|
||||
@@ -4,7 +4,7 @@ WORKDIR /src
|
||||
ENV CGO_ENABLED=0 \
|
||||
GO111MODULE=on \
|
||||
NOOPT=1 \
|
||||
VERSION=1.8.14
|
||||
VERSION=1.8.22
|
||||
|
||||
RUN git clone --branch v${VERSION} https://github.com/Zuplu/postfix-tlspol && \
|
||||
cd /src/postfix-tlspol && \
|
||||
|
||||
@@ -329,14 +329,17 @@ query = SELECT goto FROM alias
|
||||
SELECT id FROM alias
|
||||
WHERE address='%s'
|
||||
AND (active='1' OR active='2')
|
||||
AND sender_allowed='1'
|
||||
), (
|
||||
SELECT id FROM alias
|
||||
WHERE address='@%d'
|
||||
AND (active='1' OR active='2')
|
||||
AND sender_allowed='1'
|
||||
)
|
||||
)
|
||||
)
|
||||
AND active='1'
|
||||
AND sender_allowed='1'
|
||||
AND (domain IN
|
||||
(SELECT domain FROM domain
|
||||
WHERE domain='%d'
|
||||
@@ -390,7 +393,7 @@ hosts = unix:/var/run/mysqld/mysqld.sock
|
||||
dbname = ${DBNAME}
|
||||
query = SELECT goto FROM spamalias
|
||||
WHERE address='%s'
|
||||
AND validity >= UNIX_TIMESTAMP()
|
||||
AND (validity >= UNIX_TIMESTAMP() OR permanent != 0)
|
||||
EOF
|
||||
|
||||
if [ ! -f /opt/postfix/conf/dns_blocklists.cf ]; then
|
||||
@@ -524,4 +527,4 @@ if [[ $? != 0 ]]; then
|
||||
else
|
||||
postfix -c /opt/postfix/conf start
|
||||
sleep 126144000
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
FROM debian:bookworm-slim
|
||||
FROM debian:trixie-slim
|
||||
LABEL maintainer="The Infrastructure Company GmbH <info@servercow.de>"
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG RSPAMD_VER=rspamd_3.12.1-1~6dbfca2fa
|
||||
ARG CODENAME=bookworm
|
||||
ARG RSPAMD_VER=rspamd_3.14.2-82~90302bc
|
||||
ARG CODENAME=trixie
|
||||
ENV LC_ALL=C
|
||||
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
@@ -14,8 +14,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
dnsutils \
|
||||
netcat-traditional \
|
||||
wget \
|
||||
redis-tools \
|
||||
procps \
|
||||
redis-tools \
|
||||
procps \
|
||||
nano \
|
||||
lua-cjson \
|
||||
&& arch=$(arch | sed s/aarch64/arm64/ | sed s/x86_64/amd64/) \
|
||||
|
||||
@@ -6,7 +6,7 @@ ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG DEBIAN_VERSION=bookworm
|
||||
ARG SOGO_DEBIAN_REPOSITORY=https://packagingv2.sogo.nu/sogo-nightly-debian/
|
||||
# renovate: datasource=github-releases depName=tianon/gosu versioning=semver-coerced extractVersion=^(?<version>.*)$
|
||||
ARG GOSU_VERSION=1.17
|
||||
ARG GOSU_VERSION=1.19
|
||||
ENV LC_ALL=C
|
||||
|
||||
# Prerequisites
|
||||
|
||||
@@ -24,6 +24,10 @@ while [[ "${DBV_NOW}" != "${DBV_NEW}" ]]; do
|
||||
done
|
||||
echo "DB schema is ${DBV_NOW}"
|
||||
|
||||
if [[ "${MASTER}" =~ ^([yY][eE][sS]|[yY])+$ ]]; then
|
||||
mariadb --skip-ssl --socket=/var/run/mysqld/mysqld.sock -u ${DBUSER} -p${DBPASS} ${DBNAME} -e "DROP TRIGGER IF EXISTS sogo_update_password"
|
||||
fi
|
||||
|
||||
# cat /dev/urandom seems to hang here occasionally and is not recommended anyway, better use openssl
|
||||
RAND_PASS=$(openssl rand -base64 16 | tr -dc _A-Z-a-z-0-9)
|
||||
|
||||
|
||||
@@ -200,12 +200,12 @@ get_container_ip() {
|
||||
else
|
||||
sleep 0.5
|
||||
# get long container id for exact match
|
||||
CONTAINER_ID=($(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring == \"${1}\") | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id"))
|
||||
CONTAINER_ID=($(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring == \"${1}\") | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id"))
|
||||
# returned id can have multiple elements (if scaled), shuffle for random test
|
||||
CONTAINER_ID=($(printf "%s\n" "${CONTAINER_ID[@]}" | shuf))
|
||||
if [[ ! -z ${CONTAINER_ID} ]]; then
|
||||
for matched_container in "${CONTAINER_ID[@]}"; do
|
||||
CONTAINER_IPS=($(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${matched_container}/json | jq -r '.NetworkSettings.Networks[].IPAddress'))
|
||||
CONTAINER_IPS=($(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${matched_container}/json | jq -r '.NetworkSettings.Networks[].IPAddress'))
|
||||
for ip_match in "${CONTAINER_IPS[@]}"; do
|
||||
# grep will do nothing if one of these vars is empty
|
||||
[[ -z ${ip_match} ]] && continue
|
||||
@@ -1075,15 +1075,15 @@ while true; do
|
||||
done
|
||||
) &
|
||||
|
||||
# Monitor dockerapi
|
||||
# Monitor controller
|
||||
(
|
||||
while true; do
|
||||
while nc -z dockerapi 443; do
|
||||
while nc -z controller 443; do
|
||||
sleep 3
|
||||
done
|
||||
log_msg "Cannot find dockerapi-mailcow, waiting to recover..."
|
||||
log_msg "Cannot find controller-mailcow, waiting to recover..."
|
||||
kill -STOP ${BACKGROUND_TASKS[*]}
|
||||
until nc -z dockerapi 443; do
|
||||
until nc -z controller 443; do
|
||||
sleep 3
|
||||
done
|
||||
kill -CONT ${BACKGROUND_TASKS[*]}
|
||||
@@ -1143,12 +1143,12 @@ while true; do
|
||||
elif [[ ${com_pipe_answer} =~ .+-mailcow ]]; then
|
||||
kill -STOP ${BACKGROUND_TASKS[*]}
|
||||
sleep 10
|
||||
CONTAINER_ID=$(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"${com_pipe_answer}\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id")
|
||||
CONTAINER_ID=$(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/json | jq -r ".[] | {name: .Config.Labels[\"com.docker.compose.service\"], project: .Config.Labels[\"com.docker.compose.project\"], id: .Id}" | jq -rc "select( .name | tostring | contains(\"${com_pipe_answer}\")) | select( .project | tostring | contains(\"${COMPOSE_PROJECT_NAME,,}\")) | .id")
|
||||
if [[ ! -z ${CONTAINER_ID} ]]; then
|
||||
if [[ "${com_pipe_answer}" == "php-fpm-mailcow" ]]; then
|
||||
HAS_INITDB=$(curl --silent --insecure -XPOST https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/top | jq '.msg.Processes[] | contains(["php -c /usr/local/etc/php -f /web/inc/init_db.inc.php"])' | grep true)
|
||||
HAS_INITDB=$(curl --silent --insecure -XPOST https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/top | jq '.msg.Processes[] | contains(["php -c /usr/local/etc/php -f /web/inc/init_db.inc.php"])' | grep true)
|
||||
fi
|
||||
S_RUNNING=$(($(date +%s) - $(curl --silent --insecure https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/json | jq .State.StartedAt | xargs -n1 date +%s -d)))
|
||||
S_RUNNING=$(($(date +%s) - $(curl --silent --insecure https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/json | jq .State.StartedAt | xargs -n1 date +%s -d)))
|
||||
if [ ${S_RUNNING} -lt 360 ]; then
|
||||
log_msg "Container is running for less than 360 seconds, skipping action..."
|
||||
elif [[ ! -z ${HAS_INITDB} ]]; then
|
||||
@@ -1156,7 +1156,7 @@ while true; do
|
||||
sleep 60
|
||||
else
|
||||
log_msg "Sending restart command to ${CONTAINER_ID}..."
|
||||
curl --silent --insecure -XPOST https://dockerapi.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/restart
|
||||
curl --silent --insecure -XPOST https://controller.${COMPOSE_PROJECT_NAME}_mailcow-network/containers/${CONTAINER_ID}/restart
|
||||
notify_error "${com_pipe_answer}"
|
||||
log_msg "Wait for restarted container to settle and continue watching..."
|
||||
sleep 35
|
||||
|
||||
@@ -80,14 +80,21 @@ if ($isSOGoRequest) {
|
||||
}
|
||||
if ($result === false){
|
||||
// If it's a SOGo Request, don't check for protocol access
|
||||
$service = ($isSOGoRequest) ? false : array($post['service'] => true);
|
||||
$result = apppass_login($post['username'], $post['password'], $service, array(
|
||||
if ($isSOGoRequest) {
|
||||
$service = 'SOGO';
|
||||
$post['service'] = 'NONE';
|
||||
} else {
|
||||
$service = $post['service'];
|
||||
}
|
||||
|
||||
$result = apppass_login($post['username'], $post['password'], array(
|
||||
'service' => $post['service'],
|
||||
'is_internal' => true,
|
||||
'remote_addr' => $post['real_rip']
|
||||
));
|
||||
if ($result) {
|
||||
error_log('MAILCOWAUTH: App auth for user ' . $post['username'] . " with service " . $post['service'] . " from IP " . $post['real_rip']);
|
||||
set_sasl_log($post['username'], $post['real_rip'], $post['service']);
|
||||
error_log('MAILCOWAUTH: App auth for user ' . $post['username'] . " with service " . $service . " from IP " . $post['real_rip']);
|
||||
set_sasl_log($post['username'], $post['real_rip'], $service);
|
||||
}
|
||||
}
|
||||
if ($result === false){
|
||||
|
||||
@@ -13,6 +13,7 @@ events {
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
server_tokens off;
|
||||
|
||||
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
@@ -78,7 +79,7 @@ http {
|
||||
{%endif%}
|
||||
listen {{ HTTPS_PORT }}{% if NGINX_USE_PROXY_PROTOCOL %} proxy_protocol{%endif%} ssl;
|
||||
|
||||
{% if not DISABLE_IPv6 %}
|
||||
{% if ENABLE_IPV6 %}
|
||||
{% if not HTTP_REDIRECT %}
|
||||
listen [::]:{{ HTTP_PORT }}{% if NGINX_USE_PROXY_PROTOCOL %} proxy_protocol{%endif%};
|
||||
{%endif%}
|
||||
@@ -105,7 +106,7 @@ http {
|
||||
{%endif%}
|
||||
listen {{ HTTPS_PORT }}{% if NGINX_USE_PROXY_PROTOCOL %} proxy_protocol{%endif%} ssl;
|
||||
|
||||
{% if not DISABLE_IPv6 %}
|
||||
{% if ENABLE_IPV6 %}
|
||||
{% if not HTTP_REDIRECT %}
|
||||
listen [::]:{{ HTTP_PORT }}{% if NGINX_USE_PROXY_PROTOCOL %} proxy_protocol{%endif%};
|
||||
{%endif%}
|
||||
@@ -126,7 +127,7 @@ http {
|
||||
# rspamd dynmaps:
|
||||
server {
|
||||
listen 8081;
|
||||
{% if not DISABLE_IPv6 %}
|
||||
{% if ENABLE_IPV6 %}
|
||||
listen [::]:8081;
|
||||
{%endif%}
|
||||
index index.php index.html;
|
||||
@@ -199,7 +200,7 @@ http {
|
||||
{%endif%}
|
||||
listen {{ HTTPS_PORT }}{% if NGINX_USE_PROXY_PROTOCOL %} proxy_protocol{%endif%} ssl;
|
||||
|
||||
{% if not DISABLE_IPv6 %}
|
||||
{% if ENABLE_IPV6 %}
|
||||
{% if not HTTP_REDIRECT %}
|
||||
listen [::]:{{ HTTP_PORT }}{% if NGINX_USE_PROXY_PROTOCOL %} proxy_protocol{%endif%};
|
||||
{%endif%}
|
||||
|
||||
@@ -14,7 +14,6 @@ ssl_session_tickets off;
|
||||
|
||||
add_header Strict-Transport-Security "max-age=15768000;";
|
||||
add_header X-Content-Type-Options nosniff;
|
||||
add_header X-XSS-Protection "1; mode=block";
|
||||
add_header X-Robots-Tag none;
|
||||
add_header X-Download-Options noopen;
|
||||
add_header X-Frame-Options "SAMEORIGIN" always;
|
||||
@@ -186,6 +185,7 @@ location ^~ /Microsoft-Server-ActiveSync {
|
||||
auth_request_set $user $upstream_http_x_user;
|
||||
auth_request_set $auth $upstream_http_x_auth;
|
||||
auth_request_set $auth_type $upstream_http_x_auth_type;
|
||||
auth_request_set $real_ip $remote_addr;
|
||||
proxy_set_header x-webobjects-remote-user "$user";
|
||||
proxy_set_header Authorization "$auth";
|
||||
proxy_set_header x-webobjects-auth-type "$auth_type";
|
||||
@@ -211,6 +211,7 @@ location ^~ /SOGo {
|
||||
auth_request_set $user $upstream_http_x_user;
|
||||
auth_request_set $auth $upstream_http_x_auth;
|
||||
auth_request_set $auth_type $upstream_http_x_auth_type;
|
||||
auth_request_set $real_ip $remote_addr;
|
||||
proxy_set_header x-webobjects-remote-user "$user";
|
||||
proxy_set_header Authorization "$auth";
|
||||
proxy_set_header x-webobjects-auth-type "$auth_type";
|
||||
@@ -233,6 +234,7 @@ location ^~ /SOGo {
|
||||
auth_request_set $user $upstream_http_x_user;
|
||||
auth_request_set $auth $upstream_http_x_auth;
|
||||
auth_request_set $auth_type $upstream_http_x_auth_type;
|
||||
auth_request_set $real_ip $remote_addr;
|
||||
proxy_set_header x-webobjects-remote-user "$user";
|
||||
proxy_set_header Authorization "$auth";
|
||||
proxy_set_header x-webobjects-auth-type "$auth_type";
|
||||
|
||||
@@ -1,7 +1,16 @@
|
||||
; NOTE: Restart phpfpm on ANY manual changes to PHP files!
|
||||
|
||||
; opcache
|
||||
opcache.enable=1
|
||||
opcache.enable_cli=1
|
||||
opcache.interned_strings_buffer=16
|
||||
opcache.max_accelerated_files=10000
|
||||
opcache.memory_consumption=128
|
||||
opcache.save_comments=1
|
||||
opcache.revalidate_freq=1
|
||||
opcache.validate_timestamps=0
|
||||
|
||||
; JIT
|
||||
; Disabled for now due to some PHP segmentation faults observed
|
||||
; in certain environments. Possibly some PHP or PHP extension bug.
|
||||
opcache.jit=disable
|
||||
opcache.jit_buffer_size=0
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
# Whitelist generated by Postwhite v3.4 on Mon Sep 1 00:23:07 UTC 2025
|
||||
# Whitelist generated by Postwhite v3.4 on Thu Jan 1 00:24:01 UTC 2026
|
||||
# https://github.com/stevejenkins/postwhite/
|
||||
# 2165 total rules
|
||||
# 2105 total rules
|
||||
2a00:1450:4000::/36 permit
|
||||
2a01:111:f400::/48 permit
|
||||
2a01:111:f403::/49 permit
|
||||
2a01:111:f403:8000::/50 permit
|
||||
2a01:111:f403:2800::/53 permit
|
||||
2a01:111:f403:8000::/51 permit
|
||||
2a01:111:f403::/49 permit
|
||||
2a01:111:f403:c000::/51 permit
|
||||
2a01:111:f403:d000::/53 permit
|
||||
2a01:111:f403:f000::/52 permit
|
||||
2a01:238:20a:202:5370::1 permit
|
||||
2a01:238:20a:202:5372::1 permit
|
||||
@@ -28,7 +29,9 @@
|
||||
2a01:b747:3005:200::/56 permit
|
||||
2a01:b747:3006:200::/56 permit
|
||||
2a02:a60:0:5::/64 permit
|
||||
2a0f:f640::/56 permit
|
||||
2c0f:fb50:4000::/36 permit
|
||||
2.207.151.53 permit
|
||||
2.207.217.30 permit
|
||||
3.64.237.68 permit
|
||||
3.65.3.180 permit
|
||||
@@ -49,13 +52,11 @@
|
||||
8.25.194.0/23 permit
|
||||
8.25.196.0/23 permit
|
||||
8.36.116.0/24 permit
|
||||
8.39.54.0/23 permit
|
||||
8.39.54.250/31 permit
|
||||
8.39.144.0/24 permit
|
||||
8.40.222.0/23 permit
|
||||
8.40.222.250/31 permit
|
||||
12.130.86.238 permit
|
||||
13.107.246.40 permit
|
||||
13.107.213.38 permit
|
||||
13.107.246.38 permit
|
||||
13.108.16.0/20 permit
|
||||
13.110.208.0/21 permit
|
||||
13.110.209.0/24 permit
|
||||
13.110.216.0/22 permit
|
||||
@@ -167,13 +168,13 @@
|
||||
34.215.104.144 permit
|
||||
34.218.115.239 permit
|
||||
34.225.212.172 permit
|
||||
34.241.242.183 permit
|
||||
35.83.148.184 permit
|
||||
35.155.198.111 permit
|
||||
35.158.23.94 permit
|
||||
35.161.32.253 permit
|
||||
35.162.73.231 permit
|
||||
35.167.93.243 permit
|
||||
35.174.145.124 permit
|
||||
35.176.132.251 permit
|
||||
35.205.92.9 permit
|
||||
35.228.216.85 permit
|
||||
@@ -183,7 +184,6 @@
|
||||
37.218.249.47 permit
|
||||
37.218.251.62 permit
|
||||
39.156.163.64/29 permit
|
||||
40.90.65.81 permit
|
||||
40.92.0.0/15 permit
|
||||
40.92.0.0/16 permit
|
||||
40.107.0.0/16 permit
|
||||
@@ -191,7 +191,6 @@
|
||||
40.233.64.216 permit
|
||||
40.233.83.78 permit
|
||||
40.233.88.28 permit
|
||||
43.239.212.33 permit
|
||||
44.206.138.57 permit
|
||||
44.210.169.44 permit
|
||||
44.217.45.156 permit
|
||||
@@ -271,12 +270,8 @@
|
||||
50.56.130.221 permit
|
||||
50.56.130.222 permit
|
||||
50.112.246.219 permit
|
||||
51.77.79.158 permit
|
||||
51.83.17.38 permit
|
||||
51.89.119.103 permit
|
||||
52.1.14.157 permit
|
||||
52.5.230.59 permit
|
||||
52.6.74.205 permit
|
||||
52.12.53.23 permit
|
||||
52.13.214.179 permit
|
||||
52.26.1.71 permit
|
||||
@@ -300,15 +295,6 @@
|
||||
52.94.124.0/28 permit
|
||||
52.95.48.152/29 permit
|
||||
52.95.49.88/29 permit
|
||||
52.96.91.34 permit
|
||||
52.96.111.82 permit
|
||||
52.96.172.98 permit
|
||||
52.96.214.50 permit
|
||||
52.96.222.194 permit
|
||||
52.96.222.226 permit
|
||||
52.96.223.2 permit
|
||||
52.96.228.130 permit
|
||||
52.96.229.242 permit
|
||||
52.100.0.0/15 permit
|
||||
52.102.0.0/16 permit
|
||||
52.103.0.0/17 permit
|
||||
@@ -324,8 +310,6 @@
|
||||
52.234.172.96/28 permit
|
||||
52.235.253.128 permit
|
||||
52.236.28.240/28 permit
|
||||
54.36.149.183 permit
|
||||
54.38.221.122 permit
|
||||
54.90.148.255 permit
|
||||
54.165.19.38 permit
|
||||
54.174.52.0/24 permit
|
||||
@@ -344,7 +328,6 @@
|
||||
54.244.54.130 permit
|
||||
54.244.242.0/24 permit
|
||||
54.255.61.23 permit
|
||||
56.124.6.228 permit
|
||||
57.103.64.0/18 permit
|
||||
57.129.93.249 permit
|
||||
62.13.128.0/24 permit
|
||||
@@ -405,31 +388,11 @@
|
||||
64.207.219.143 permit
|
||||
64.233.160.0/19 permit
|
||||
65.52.80.137 permit
|
||||
65.54.51.64/26 permit
|
||||
65.54.61.64/26 permit
|
||||
65.54.121.120/29 permit
|
||||
65.54.190.0/24 permit
|
||||
65.54.241.0/24 permit
|
||||
65.55.29.77 permit
|
||||
65.55.33.64/28 permit
|
||||
65.55.34.0/24 permit
|
||||
65.55.42.224/28 permit
|
||||
65.55.52.224/27 permit
|
||||
65.55.78.128/25 permit
|
||||
65.55.81.48/28 permit
|
||||
65.55.90.0/24 permit
|
||||
65.55.94.0/25 permit
|
||||
65.55.111.0/24 permit
|
||||
65.55.113.64/26 permit
|
||||
65.55.116.0/25 permit
|
||||
65.55.126.0/25 permit
|
||||
65.55.174.0/25 permit
|
||||
65.55.178.128/27 permit
|
||||
65.55.234.192/26 permit
|
||||
65.110.161.77 permit
|
||||
65.123.29.213 permit
|
||||
65.123.29.220 permit
|
||||
65.154.166.0/24 permit
|
||||
65.212.180.36 permit
|
||||
66.102.0.0/20 permit
|
||||
66.119.150.192/26 permit
|
||||
@@ -546,7 +509,6 @@
|
||||
69.169.224.0/20 permit
|
||||
69.171.232.0/24 permit
|
||||
69.171.244.0/23 permit
|
||||
70.37.151.128/25 permit
|
||||
70.42.149.35 permit
|
||||
72.3.185.0/24 permit
|
||||
72.14.192.0/18 permit
|
||||
@@ -643,7 +605,6 @@
|
||||
74.208.4.220 permit
|
||||
74.208.4.221 permit
|
||||
74.209.250.0/24 permit
|
||||
75.2.70.75 permit
|
||||
76.223.128.0/19 permit
|
||||
76.223.176.0/20 permit
|
||||
77.238.176.0/24 permit
|
||||
@@ -686,6 +647,8 @@
|
||||
82.165.159.45 permit
|
||||
82.165.159.130 permit
|
||||
82.165.159.131 permit
|
||||
85.9.206.169 permit
|
||||
85.9.210.45 permit
|
||||
85.158.136.0/21 permit
|
||||
85.215.255.39 permit
|
||||
85.215.255.40 permit
|
||||
@@ -737,8 +700,6 @@
|
||||
91.198.2.0/24 permit
|
||||
91.211.240.0/22 permit
|
||||
94.236.119.0/26 permit
|
||||
94.245.112.0/27 permit
|
||||
94.245.112.10/31 permit
|
||||
95.131.104.0/21 permit
|
||||
95.217.114.154 permit
|
||||
96.43.144.0/20 permit
|
||||
@@ -1231,19 +1192,14 @@
|
||||
98.139.245.208/30 permit
|
||||
98.139.245.212/31 permit
|
||||
99.78.197.208/28 permit
|
||||
99.83.190.102 permit
|
||||
103.9.96.0/22 permit
|
||||
103.28.42.0/24 permit
|
||||
103.122.78.238 permit
|
||||
103.151.192.0/23 permit
|
||||
103.168.172.128/27 permit
|
||||
103.237.104.0/22 permit
|
||||
104.43.243.237 permit
|
||||
104.44.112.128/25 permit
|
||||
104.47.0.0/17 permit
|
||||
104.47.20.0/23 permit
|
||||
104.47.75.0/24 permit
|
||||
104.47.108.0/23 permit
|
||||
104.130.96.0/28 permit
|
||||
104.130.122.0/23 permit
|
||||
106.10.144.64/27 permit
|
||||
@@ -1378,15 +1334,9 @@
|
||||
108.174.6.215 permit
|
||||
108.175.18.45 permit
|
||||
108.175.30.45 permit
|
||||
108.177.96.0/20 permit
|
||||
108.179.144.0/20 permit
|
||||
109.224.244.0/24 permit
|
||||
109.237.142.0/24 permit
|
||||
111.221.23.128/25 permit
|
||||
111.221.26.0/27 permit
|
||||
111.221.66.0/25 permit
|
||||
111.221.69.128/25 permit
|
||||
111.221.112.0/21 permit
|
||||
112.19.199.64/29 permit
|
||||
112.19.242.64/29 permit
|
||||
116.214.12.47 permit
|
||||
@@ -1404,9 +1354,6 @@
|
||||
117.120.16.0/21 permit
|
||||
119.42.242.52/31 permit
|
||||
119.42.242.156 permit
|
||||
121.244.91.48 permit
|
||||
121.244.91.52 permit
|
||||
122.15.156.182 permit
|
||||
123.126.78.64/29 permit
|
||||
124.108.96.24/31 permit
|
||||
124.108.96.28/31 permit
|
||||
@@ -1434,6 +1381,7 @@
|
||||
128.245.248.0/21 permit
|
||||
129.41.77.70 permit
|
||||
129.41.169.249 permit
|
||||
129.77.16.0/20 permit
|
||||
129.80.5.164 permit
|
||||
129.80.64.36 permit
|
||||
129.80.67.121 permit
|
||||
@@ -1450,6 +1398,7 @@
|
||||
129.153.194.228 permit
|
||||
129.154.255.129 permit
|
||||
129.158.56.255 permit
|
||||
129.158.62.153 permit
|
||||
129.159.22.159 permit
|
||||
129.159.87.137 permit
|
||||
129.213.195.191 permit
|
||||
@@ -1470,21 +1419,8 @@
|
||||
134.170.141.64/26 permit
|
||||
134.170.143.0/24 permit
|
||||
134.170.174.0/24 permit
|
||||
135.84.80.0/24 permit
|
||||
135.84.81.0/24 permit
|
||||
135.84.82.0/24 permit
|
||||
135.84.83.0/24 permit
|
||||
135.84.216.0/22 permit
|
||||
136.143.160.0/24 permit
|
||||
136.143.161.0/24 permit
|
||||
136.143.162.0/24 permit
|
||||
136.143.176.0/24 permit
|
||||
136.143.177.0/24 permit
|
||||
136.143.178.49 permit
|
||||
136.143.182.0/23 permit
|
||||
136.143.184.0/24 permit
|
||||
136.143.188.0/24 permit
|
||||
136.143.190.0/23 permit
|
||||
136.146.128.0/20 permit
|
||||
136.147.128.0/20 permit
|
||||
136.147.135.0/24 permit
|
||||
136.147.176.0/20 permit
|
||||
@@ -1499,9 +1435,10 @@
|
||||
139.138.46.219 permit
|
||||
139.138.57.55 permit
|
||||
139.138.58.119 permit
|
||||
139.167.79.86 permit
|
||||
139.180.17.0/24 permit
|
||||
140.238.148.191 permit
|
||||
141.148.55.217 permit
|
||||
141.148.91.244 permit
|
||||
141.148.159.229 permit
|
||||
141.193.32.0/23 permit
|
||||
141.193.184.32/27 permit
|
||||
@@ -1544,8 +1481,10 @@
|
||||
148.105.0.0/16 permit
|
||||
148.105.8.0/21 permit
|
||||
149.72.0.0/16 permit
|
||||
149.72.234.184 permit
|
||||
149.72.248.236 permit
|
||||
149.97.173.180 permit
|
||||
150.136.21.199 permit
|
||||
150.230.98.160 permit
|
||||
151.145.38.14 permit
|
||||
152.67.105.195 permit
|
||||
@@ -1555,20 +1494,7 @@
|
||||
155.248.220.138 permit
|
||||
155.248.234.149 permit
|
||||
155.248.237.141 permit
|
||||
157.55.0.192/26 permit
|
||||
157.55.1.128/26 permit
|
||||
157.55.2.0/25 permit
|
||||
157.55.9.128/25 permit
|
||||
157.55.11.0/25 permit
|
||||
157.55.49.0/25 permit
|
||||
157.55.61.0/24 permit
|
||||
157.55.157.128/25 permit
|
||||
157.55.225.0/25 permit
|
||||
157.56.24.0/25 permit
|
||||
157.56.120.128/26 permit
|
||||
157.56.232.0/21 permit
|
||||
157.56.240.0/20 permit
|
||||
157.56.248.0/21 permit
|
||||
157.58.30.128/25 permit
|
||||
157.58.196.96/29 permit
|
||||
157.58.249.3 permit
|
||||
@@ -1599,6 +1525,7 @@
|
||||
159.183.0.0/16 permit
|
||||
159.183.68.71 permit
|
||||
159.183.79.38 permit
|
||||
159.183.129.172 permit
|
||||
160.1.62.192 permit
|
||||
161.38.192.0/20 permit
|
||||
161.38.204.0/22 permit
|
||||
@@ -1616,12 +1543,13 @@
|
||||
163.114.134.16 permit
|
||||
163.114.135.16 permit
|
||||
163.116.128.0/17 permit
|
||||
163.192.116.87 permit
|
||||
163.192.125.176 permit
|
||||
163.192.196.146 permit
|
||||
163.192.204.161 permit
|
||||
164.152.23.32 permit
|
||||
164.152.25.241 permit
|
||||
164.177.132.168/30 permit
|
||||
165.173.128.0/24 permit
|
||||
165.173.180.250/31 permit
|
||||
165.173.182.250/31 permit
|
||||
166.78.68.0/22 permit
|
||||
166.78.68.221 permit
|
||||
166.78.69.169 permit
|
||||
@@ -1651,28 +1579,12 @@
|
||||
168.245.12.252 permit
|
||||
168.245.46.9 permit
|
||||
168.245.127.231 permit
|
||||
169.148.129.0/24 permit
|
||||
169.148.131.0/24 permit
|
||||
169.148.138.0/24 permit
|
||||
169.148.142.10 permit
|
||||
169.148.144.0/25 permit
|
||||
169.148.144.10 permit
|
||||
169.148.146.0/23 permit
|
||||
169.148.175.3 permit
|
||||
169.148.188.0/24 permit
|
||||
169.148.188.182 permit
|
||||
170.9.232.254 permit
|
||||
170.10.128.0/24 permit
|
||||
170.10.129.0/24 permit
|
||||
170.10.132.56/29 permit
|
||||
170.10.132.64/29 permit
|
||||
170.10.133.0/24 permit
|
||||
172.217.0.0/20 permit
|
||||
172.217.32.0/20 permit
|
||||
172.217.128.0/19 permit
|
||||
172.217.160.0/20 permit
|
||||
172.217.192.0/19 permit
|
||||
172.253.56.0/21 permit
|
||||
172.253.112.0/20 permit
|
||||
173.0.84.0/29 permit
|
||||
173.0.84.224/27 permit
|
||||
173.0.94.244/30 permit
|
||||
@@ -1816,6 +1728,7 @@
|
||||
194.97.212.12 permit
|
||||
194.106.220.0/23 permit
|
||||
194.113.24.0/22 permit
|
||||
194.113.42.0/26 permit
|
||||
194.154.193.192/27 permit
|
||||
195.4.92.0/23 permit
|
||||
195.54.172.0/23 permit
|
||||
@@ -1829,6 +1742,7 @@
|
||||
198.61.254.21 permit
|
||||
198.61.254.231 permit
|
||||
198.178.234.57 permit
|
||||
198.202.211.1 permit
|
||||
198.244.48.0/20 permit
|
||||
198.244.56.107 permit
|
||||
198.244.56.108 permit
|
||||
@@ -1850,16 +1764,7 @@
|
||||
199.16.156.0/22 permit
|
||||
199.33.145.1 permit
|
||||
199.33.145.32 permit
|
||||
199.34.22.36 permit
|
||||
199.59.148.0/22 permit
|
||||
199.67.80.2 permit
|
||||
199.67.80.20 permit
|
||||
199.67.82.2 permit
|
||||
199.67.82.20 permit
|
||||
199.67.84.0/24 permit
|
||||
199.67.86.0/24 permit
|
||||
199.67.88.0/24 permit
|
||||
199.67.90.0/24 permit
|
||||
199.101.161.130 permit
|
||||
199.101.162.0/25 permit
|
||||
199.122.120.0/21 permit
|
||||
@@ -1912,12 +1817,9 @@
|
||||
204.14.232.64/28 permit
|
||||
204.14.234.64/28 permit
|
||||
204.75.142.0/24 permit
|
||||
204.79.197.212 permit
|
||||
204.92.114.187 permit
|
||||
204.92.114.203 permit
|
||||
204.92.114.204/31 permit
|
||||
204.141.32.0/23 permit
|
||||
204.141.42.0/23 permit
|
||||
204.216.164.202 permit
|
||||
204.220.160.0/21 permit
|
||||
204.220.168.0/21 permit
|
||||
@@ -1940,24 +1842,13 @@
|
||||
206.165.246.80/29 permit
|
||||
206.191.224.0/19 permit
|
||||
206.246.157.1 permit
|
||||
207.46.4.128/25 permit
|
||||
207.46.22.35 permit
|
||||
207.46.50.72 permit
|
||||
207.46.50.82 permit
|
||||
207.46.50.192/26 permit
|
||||
207.46.50.224 permit
|
||||
207.46.52.71 permit
|
||||
207.46.52.79 permit
|
||||
207.46.58.128/25 permit
|
||||
207.46.116.128/29 permit
|
||||
207.46.117.0/24 permit
|
||||
207.46.132.128/27 permit
|
||||
207.46.198.0/25 permit
|
||||
207.46.200.0/27 permit
|
||||
207.67.38.0/24 permit
|
||||
207.67.98.192/27 permit
|
||||
207.68.176.0/26 permit
|
||||
207.68.176.96/27 permit
|
||||
207.97.204.96/29 permit
|
||||
207.126.144.0/20 permit
|
||||
207.171.160.0/19 permit
|
||||
@@ -2112,8 +2003,6 @@
|
||||
213.199.128.145 permit
|
||||
213.199.138.181 permit
|
||||
213.199.138.191 permit
|
||||
213.199.161.128/27 permit
|
||||
213.199.177.0/26 permit
|
||||
216.17.150.242 permit
|
||||
216.17.150.251 permit
|
||||
216.24.224.0/20 permit
|
||||
@@ -2141,7 +2030,6 @@
|
||||
216.39.62.60/31 permit
|
||||
216.39.62.136/29 permit
|
||||
216.39.62.144/31 permit
|
||||
216.58.192.0/19 permit
|
||||
216.66.217.240/29 permit
|
||||
216.71.138.33 permit
|
||||
216.71.152.207 permit
|
||||
@@ -2205,21 +2093,18 @@
|
||||
2603:1030:20e:3::23c permit
|
||||
2603:1030:b:3::152 permit
|
||||
2603:1030:c02:8::14 permit
|
||||
2607:13c0:0001:0000:0000:0000:0000:7000/116 permit
|
||||
2607:13c0:0002:0000:0000:0000:0000:1000/116 permit
|
||||
2607:13c0:0004:0000:0000:0000:0000:0000/116 permit
|
||||
2607:f8b0:4000::/36 permit
|
||||
2620:109:c003:104::215 permit
|
||||
2620:109:c003:104::/64 permit
|
||||
2620:109:c006:104::215 permit
|
||||
2620:109:c003:104::215 permit
|
||||
2620:109:c006:104::/64 permit
|
||||
2620:109:c006:104::215 permit
|
||||
2620:109:c00d:104::/64 permit
|
||||
2620:10d:c090:400::8:1 permit
|
||||
2620:10d:c091:400::8:1 permit
|
||||
2620:10d:c09b:400::8:1 permit
|
||||
2620:10d:c09c:400::8:1 permit
|
||||
2620:119:50c0:207::215 permit
|
||||
2620:119:50c0:207::/64 permit
|
||||
2620:119:50c0:207::215 permit
|
||||
2800:3f0:4000::/36 permit
|
||||
49.12.4.251 permit # checks.mailcow.email
|
||||
2a01:4f8:c17:7906::10 permit # checks.mailcow.email
|
||||
|
||||
@@ -133,7 +133,7 @@ try {
|
||||
error_log("ALIAS EXPANDER: http pipe: goto address " . $goto . " is an alias branch for " . $goto_branch . PHP_EOL);
|
||||
$goto_branch_array = explode(',', $goto_branch);
|
||||
} else {
|
||||
$stmt = $pdo->prepare("SELECT `target_domain` FROM `alias_domain` WHERE `alias_domain` = :domain AND `active` AND '1'");
|
||||
$stmt = $pdo->prepare("SELECT `target_domain` FROM `alias_domain` WHERE `alias_domain` = :domain AND `active` = '1'");
|
||||
$stmt->execute(array(':domain' => $parsed_goto['domain']));
|
||||
$goto_branch = $stmt->fetch(PDO::FETCH_ASSOC)['target_domain'];
|
||||
if ($goto_branch) {
|
||||
|
||||
@@ -146,8 +146,171 @@ rspamd_config:register_symbol({
|
||||
return false
|
||||
end
|
||||
|
||||
-- Helper function to parse IPv6 into 8 segments
|
||||
local function ipv6_to_segments(ip_str)
|
||||
-- Remove zone identifier if present (e.g., %eth0)
|
||||
ip_str = ip_str:gsub("%%.*$", "")
|
||||
|
||||
local segments = {}
|
||||
|
||||
-- Handle :: compression
|
||||
if ip_str:find('::') then
|
||||
local before, after = ip_str:match('^(.*)::(.*)$')
|
||||
before = before or ''
|
||||
after = after or ''
|
||||
|
||||
local before_parts = {}
|
||||
local after_parts = {}
|
||||
|
||||
if before ~= '' then
|
||||
for seg in before:gmatch('[^:]+') do
|
||||
table.insert(before_parts, tonumber(seg, 16) or 0)
|
||||
end
|
||||
end
|
||||
|
||||
if after ~= '' then
|
||||
for seg in after:gmatch('[^:]+') do
|
||||
table.insert(after_parts, tonumber(seg, 16) or 0)
|
||||
end
|
||||
end
|
||||
|
||||
-- Add before segments
|
||||
for _, seg in ipairs(before_parts) do
|
||||
table.insert(segments, seg)
|
||||
end
|
||||
|
||||
-- Add compressed zeros
|
||||
local zeros_needed = 8 - #before_parts - #after_parts
|
||||
for i = 1, zeros_needed do
|
||||
table.insert(segments, 0)
|
||||
end
|
||||
|
||||
-- Add after segments
|
||||
for _, seg in ipairs(after_parts) do
|
||||
table.insert(segments, seg)
|
||||
end
|
||||
else
|
||||
-- No compression
|
||||
for seg in ip_str:gmatch('[^:]+') do
|
||||
table.insert(segments, tonumber(seg, 16) or 0)
|
||||
end
|
||||
end
|
||||
|
||||
-- Ensure we have exactly 8 segments
|
||||
while #segments < 8 do
|
||||
table.insert(segments, 0)
|
||||
end
|
||||
|
||||
return segments
|
||||
end
|
||||
|
||||
-- Generate all common IPv6 notations
|
||||
local function get_ipv6_variants(ip_str)
|
||||
local variants = {}
|
||||
local seen = {}
|
||||
|
||||
local function add_variant(v)
|
||||
if v and not seen[v] then
|
||||
table.insert(variants, v)
|
||||
seen[v] = true
|
||||
end
|
||||
end
|
||||
|
||||
-- For IPv4, just return the original
|
||||
if not ip_str:find(':') then
|
||||
add_variant(ip_str)
|
||||
return variants
|
||||
end
|
||||
|
||||
local segments = ipv6_to_segments(ip_str)
|
||||
|
||||
-- 1. Fully expanded form (all zeros shown as 0000)
|
||||
local expanded_parts = {}
|
||||
for _, seg in ipairs(segments) do
|
||||
table.insert(expanded_parts, string.format('%04x', seg))
|
||||
end
|
||||
add_variant(table.concat(expanded_parts, ':'))
|
||||
|
||||
-- 2. Standard form (no leading zeros, but all segments present)
|
||||
local standard_parts = {}
|
||||
for _, seg in ipairs(segments) do
|
||||
table.insert(standard_parts, string.format('%x', seg))
|
||||
end
|
||||
add_variant(table.concat(standard_parts, ':'))
|
||||
|
||||
-- 3. Find all possible :: compressions
|
||||
-- RFC 5952: compress the longest run of consecutive zeros
|
||||
-- But we need to check all possibilities since Redis might have any form
|
||||
|
||||
-- Find all zero runs
|
||||
local zero_runs = {}
|
||||
local in_run = false
|
||||
local run_start = 0
|
||||
local run_length = 0
|
||||
|
||||
for i = 1, 8 do
|
||||
if segments[i] == 0 then
|
||||
if not in_run then
|
||||
in_run = true
|
||||
run_start = i
|
||||
run_length = 1
|
||||
else
|
||||
run_length = run_length + 1
|
||||
end
|
||||
else
|
||||
if in_run then
|
||||
if run_length >= 1 then -- Allow single zero compression too
|
||||
table.insert(zero_runs, {start = run_start, length = run_length})
|
||||
end
|
||||
in_run = false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Don't forget the last run
|
||||
if in_run and run_length >= 1 then
|
||||
table.insert(zero_runs, {start = run_start, length = run_length})
|
||||
end
|
||||
|
||||
-- Generate variant for each zero run compression
|
||||
for _, run in ipairs(zero_runs) do
|
||||
local parts = {}
|
||||
|
||||
-- Before compression
|
||||
for i = 1, run.start - 1 do
|
||||
table.insert(parts, string.format('%x', segments[i]))
|
||||
end
|
||||
|
||||
-- The compression
|
||||
if run.start == 1 then
|
||||
table.insert(parts, '')
|
||||
table.insert(parts, '')
|
||||
elseif run.start + run.length - 1 == 8 then
|
||||
table.insert(parts, '')
|
||||
table.insert(parts, '')
|
||||
else
|
||||
table.insert(parts, '')
|
||||
end
|
||||
|
||||
-- After compression
|
||||
for i = run.start + run.length, 8 do
|
||||
table.insert(parts, string.format('%x', segments[i]))
|
||||
end
|
||||
|
||||
local compressed = table.concat(parts, ':'):gsub('::+', '::')
|
||||
add_variant(compressed)
|
||||
end
|
||||
|
||||
return variants
|
||||
end
|
||||
|
||||
local from_ip_string = tostring(ip)
|
||||
ip_check_table = {from_ip_string}
|
||||
local ip_check_table = {}
|
||||
|
||||
-- Add all variants of the exact IP
|
||||
for _, variant in ipairs(get_ipv6_variants(from_ip_string)) do
|
||||
table.insert(ip_check_table, variant)
|
||||
end
|
||||
|
||||
local maxbits = 128
|
||||
local minbits = 32
|
||||
@@ -155,10 +318,18 @@ rspamd_config:register_symbol({
|
||||
maxbits = 32
|
||||
minbits = 8
|
||||
end
|
||||
|
||||
-- Add all CIDR notations with variants
|
||||
for i=maxbits,minbits,-1 do
|
||||
local nip = ip:apply_mask(i):to_string() .. "/" .. i
|
||||
table.insert(ip_check_table, nip)
|
||||
local masked_ip = ip:apply_mask(i)
|
||||
local cidr_base = masked_ip:to_string()
|
||||
|
||||
for _, variant in ipairs(get_ipv6_variants(cidr_base)) do
|
||||
local cidr = variant .. "/" .. i
|
||||
table.insert(ip_check_table, cidr)
|
||||
end
|
||||
end
|
||||
|
||||
local function keep_spam_cb(err, data)
|
||||
if err then
|
||||
rspamd_logger.infox(rspamd_config, "keep_spam query request for ip %s returned invalid or empty data (\"%s\") or error (\"%s\")", ip, data, err)
|
||||
@@ -166,12 +337,15 @@ rspamd_config:register_symbol({
|
||||
else
|
||||
for k,v in pairs(data) do
|
||||
if (v and v ~= userdata and v == '1') then
|
||||
rspamd_logger.infox(rspamd_config, "found ip in keep_spam map, setting pre-result")
|
||||
rspamd_logger.infox(rspamd_config, "found ip %s (checked as: %s) in keep_spam map, setting pre-result accept", from_ip_string, ip_check_table[k])
|
||||
task:set_pre_result('accept', 'ip matched with forward hosts', 'keep_spam')
|
||||
task:set_flag('no_stat')
|
||||
return
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
table.insert(ip_check_table, 1, 'KEEP_SPAM')
|
||||
local redis_ret_user = rspamd_redis_make_request(task,
|
||||
redis_params, -- connect params
|
||||
@@ -210,6 +384,7 @@ rspamd_config:register_symbol({
|
||||
rspamd_config:register_symbol({
|
||||
name = 'TAG_MOO',
|
||||
type = 'postfilter',
|
||||
flags = 'ignore_passthrough',
|
||||
callback = function(task)
|
||||
local util = require("rspamd_util")
|
||||
local rspamd_logger = require "rspamd_logger"
|
||||
@@ -218,9 +393,6 @@ rspamd_config:register_symbol({
|
||||
local rcpts = task:get_recipients('smtp')
|
||||
local lua_util = require "lua_util"
|
||||
|
||||
local tagged_rcpt = task:get_symbol("TAGGED_RCPT")
|
||||
local mailcow_domain = task:get_symbol("RCPT_MAILCOW_DOMAIN")
|
||||
|
||||
local function remove_moo_tag()
|
||||
local moo_tag_header = task:get_header('X-Moo-Tag', false)
|
||||
if moo_tag_header then
|
||||
@@ -231,101 +403,149 @@ rspamd_config:register_symbol({
|
||||
return true
|
||||
end
|
||||
|
||||
if tagged_rcpt and tagged_rcpt[1].options and mailcow_domain then
|
||||
local tag = tagged_rcpt[1].options[1]
|
||||
rspamd_logger.infox("found tag: %s", tag)
|
||||
local action = task:get_metric_action('default')
|
||||
rspamd_logger.infox("metric action now: %s", action)
|
||||
-- Check if we have exactly one recipient
|
||||
if not (rcpts and #rcpts == 1) then
|
||||
rspamd_logger.infox("TAG_MOO: not exactly one rcpt (%s), removing moo tag", rcpts and #rcpts or 0)
|
||||
remove_moo_tag()
|
||||
return
|
||||
end
|
||||
|
||||
if action ~= 'no action' and action ~= 'greylist' then
|
||||
rspamd_logger.infox("skipping tag handler for action: %s", action)
|
||||
remove_moo_tag()
|
||||
return true
|
||||
local rcpt_addr = rcpts[1]['addr']
|
||||
local rcpt_user = rcpts[1]['user']
|
||||
local rcpt_domain = rcpts[1]['domain']
|
||||
|
||||
-- Check if recipient has a tag (contains '+')
|
||||
local tag = nil
|
||||
if rcpt_user:find('%+') then
|
||||
local base_user, tag_part = rcpt_user:match('^(.-)%+(.+)$')
|
||||
if base_user and tag_part then
|
||||
tag = tag_part
|
||||
rspamd_logger.infox("TAG_MOO: found tag in recipient: %s (base: %s, tag: %s)", rcpt_addr, base_user, tag)
|
||||
end
|
||||
end
|
||||
|
||||
local function http_callback(err_message, code, body, headers)
|
||||
if body ~= nil and body ~= "" then
|
||||
rspamd_logger.infox(rspamd_config, "expanding rcpt to \"%s\"", body)
|
||||
if not tag then
|
||||
rspamd_logger.infox("TAG_MOO: no tag found in recipient %s, removing moo tag", rcpt_addr)
|
||||
remove_moo_tag()
|
||||
return
|
||||
end
|
||||
|
||||
local function tag_callback_subject(err, data)
|
||||
if err or type(data) ~= 'string' then
|
||||
rspamd_logger.infox(rspamd_config, "subject tag handler rcpt %s returned invalid or empty data (\"%s\") or error (\"%s\") - trying subfolder tag handler...", body, data, err)
|
||||
-- Optional: Check if domain is a mailcow domain
|
||||
-- When KEEP_SPAM is active, RCPT_MAILCOW_DOMAIN might not be set
|
||||
-- If the mail is being delivered, we can assume it's valid
|
||||
local mailcow_domain = task:get_symbol("RCPT_MAILCOW_DOMAIN")
|
||||
if not mailcow_domain then
|
||||
rspamd_logger.infox("TAG_MOO: RCPT_MAILCOW_DOMAIN not set (possibly due to pre-result), proceeding anyway for domain %s", rcpt_domain)
|
||||
end
|
||||
|
||||
local function tag_callback_subfolder(err, data)
|
||||
if err or type(data) ~= 'string' then
|
||||
rspamd_logger.infox(rspamd_config, "subfolder tag handler for rcpt %s returned invalid or empty data (\"%s\") or error (\"%s\")", body, data, err)
|
||||
remove_moo_tag()
|
||||
else
|
||||
rspamd_logger.infox("Add X-Moo-Tag header")
|
||||
task:set_milter_reply({
|
||||
add_headers = {['X-Moo-Tag'] = 'YES'}
|
||||
})
|
||||
end
|
||||
end
|
||||
local action = task:get_metric_action('default')
|
||||
rspamd_logger.infox("TAG_MOO: metric action: %s", action)
|
||||
|
||||
local redis_ret_subfolder = rspamd_redis_make_request(task,
|
||||
redis_params, -- connect params
|
||||
body, -- hash key
|
||||
false, -- is write
|
||||
tag_callback_subfolder, --callback
|
||||
'HGET', -- command
|
||||
{'RCPT_WANTS_SUBFOLDER_TAG', body} -- arguments
|
||||
)
|
||||
if not redis_ret_subfolder then
|
||||
rspamd_logger.infox(rspamd_config, "cannot make request to load tag handler for rcpt")
|
||||
-- Check if we have a pre-result (e.g., from KEEP_SPAM or POSTMASTER_HANDLER)
|
||||
local allow_processing = false
|
||||
|
||||
if task.has_pre_result then
|
||||
local has_pre, pre_action = task:has_pre_result()
|
||||
if has_pre then
|
||||
rspamd_logger.infox("TAG_MOO: pre-result detected: %s", tostring(pre_action))
|
||||
if pre_action == 'accept' then
|
||||
allow_processing = true
|
||||
rspamd_logger.infox("TAG_MOO: pre-result is accept, will process")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
-- Allow processing for mild actions or when we have pre-result accept
|
||||
if not allow_processing and action ~= 'no action' and action ~= 'greylist' then
|
||||
rspamd_logger.infox("TAG_MOO: skipping tag handler for action: %s", action)
|
||||
remove_moo_tag()
|
||||
return true
|
||||
end
|
||||
|
||||
rspamd_logger.infox("TAG_MOO: processing allowed")
|
||||
|
||||
local function http_callback(err_message, code, body, headers)
|
||||
if body ~= nil and body ~= "" then
|
||||
rspamd_logger.infox(rspamd_config, "TAG_MOO: expanding rcpt to \"%s\"", body)
|
||||
|
||||
local function tag_callback_subject(err, data)
|
||||
if err or type(data) ~= 'string' or data == '' then
|
||||
rspamd_logger.infox(rspamd_config, "TAG_MOO: subject tag handler rcpt %s returned invalid or empty data (\"%s\") or error (\"%s\") - trying subfolder tag handler...", body, data, err)
|
||||
|
||||
local function tag_callback_subfolder(err, data)
|
||||
if err or type(data) ~= 'string' or data == '' then
|
||||
rspamd_logger.infox(rspamd_config, "TAG_MOO: subfolder tag handler for rcpt %s returned invalid or empty data (\"%s\") or error (\"%s\")", body, data, err)
|
||||
remove_moo_tag()
|
||||
else
|
||||
rspamd_logger.infox("TAG_MOO: User wants subfolder tag, adding X-Moo-Tag header")
|
||||
task:set_milter_reply({
|
||||
add_headers = {['X-Moo-Tag'] = 'YES'}
|
||||
})
|
||||
end
|
||||
|
||||
else
|
||||
rspamd_logger.infox("user wants subject modified for tagged mail")
|
||||
local sbj = task:get_header('Subject')
|
||||
new_sbj = '=?UTF-8?B?' .. tostring(util.encode_base64('[' .. tag .. '] ' .. sbj)) .. '?='
|
||||
task:set_milter_reply({
|
||||
remove_headers = {
|
||||
['Subject'] = 1,
|
||||
['X-Moo-Tag'] = 0
|
||||
},
|
||||
add_headers = {['Subject'] = new_sbj}
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
local redis_ret_subject = rspamd_redis_make_request(task,
|
||||
redis_params, -- connect params
|
||||
body, -- hash key
|
||||
false, -- is write
|
||||
tag_callback_subject, --callback
|
||||
'HGET', -- command
|
||||
{'RCPT_WANTS_SUBJECT_TAG', body} -- arguments
|
||||
)
|
||||
if not redis_ret_subject then
|
||||
rspamd_logger.infox(rspamd_config, "cannot make request to load tag handler for rcpt")
|
||||
remove_moo_tag()
|
||||
end
|
||||
|
||||
end
|
||||
end
|
||||
|
||||
if rcpts and #rcpts == 1 then
|
||||
for _,rcpt in ipairs(rcpts) do
|
||||
local rcpt_split = rspamd_str_split(rcpt['addr'], '@')
|
||||
if #rcpt_split == 2 then
|
||||
if rcpt_split[1] == 'postmaster' then
|
||||
rspamd_logger.infox(rspamd_config, "not expanding postmaster alias")
|
||||
local redis_ret_subfolder = rspamd_redis_make_request(task,
|
||||
redis_params, -- connect params
|
||||
body, -- hash key
|
||||
false, -- is write
|
||||
tag_callback_subfolder, --callback
|
||||
'HGET', -- command
|
||||
{'RCPT_WANTS_SUBFOLDER_TAG', body} -- arguments
|
||||
)
|
||||
if not redis_ret_subfolder then
|
||||
rspamd_logger.infox(rspamd_config, "TAG_MOO: cannot make request to load tag handler for rcpt")
|
||||
remove_moo_tag()
|
||||
else
|
||||
rspamd_http.request({
|
||||
task=task,
|
||||
url='http://nginx:8081/aliasexp.php',
|
||||
body='',
|
||||
callback=http_callback,
|
||||
headers={Rcpt=rcpt['addr']},
|
||||
})
|
||||
end
|
||||
|
||||
else
|
||||
rspamd_logger.infox("TAG_MOO: user wants subject modified for tagged mail")
|
||||
local sbj = task:get_header('Subject') or ''
|
||||
new_sbj = '=?UTF-8?B?' .. tostring(util.encode_base64('[' .. tag .. '] ' .. sbj)) .. '?='
|
||||
task:set_milter_reply({
|
||||
remove_headers = {
|
||||
['Subject'] = 1,
|
||||
['X-Moo-Tag'] = 0
|
||||
},
|
||||
add_headers = {['Subject'] = new_sbj}
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
local redis_ret_subject = rspamd_redis_make_request(task,
|
||||
redis_params, -- connect params
|
||||
body, -- hash key
|
||||
false, -- is write
|
||||
tag_callback_subject, --callback
|
||||
'HGET', -- command
|
||||
{'RCPT_WANTS_SUBJECT_TAG', body} -- arguments
|
||||
)
|
||||
if not redis_ret_subject then
|
||||
rspamd_logger.infox(rspamd_config, "TAG_MOO: cannot make request to load tag handler for rcpt")
|
||||
remove_moo_tag()
|
||||
end
|
||||
else
|
||||
rspamd_logger.infox("TAG_MOO: alias expansion returned empty body")
|
||||
remove_moo_tag()
|
||||
end
|
||||
end
|
||||
|
||||
local rcpt_split = rspamd_str_split(rcpt_addr, '@')
|
||||
if #rcpt_split == 2 then
|
||||
if rcpt_split[1]:match('^postmaster') then
|
||||
rspamd_logger.infox(rspamd_config, "TAG_MOO: not expanding postmaster alias")
|
||||
remove_moo_tag()
|
||||
else
|
||||
rspamd_logger.infox("TAG_MOO: requesting alias expansion for %s", rcpt_addr)
|
||||
rspamd_http.request({
|
||||
task=task,
|
||||
url='http://nginx:8081/aliasexp.php',
|
||||
body='',
|
||||
callback=http_callback,
|
||||
headers={Rcpt=rcpt_addr},
|
||||
})
|
||||
end
|
||||
else
|
||||
rspamd_logger.infox("TAG_MOO: invalid rcpt format")
|
||||
remove_moo_tag()
|
||||
end
|
||||
end,
|
||||
@@ -335,6 +555,7 @@ rspamd_config:register_symbol({
|
||||
rspamd_config:register_symbol({
|
||||
name = 'BCC',
|
||||
type = 'postfilter',
|
||||
flags = 'ignore_passthrough',
|
||||
callback = function(task)
|
||||
local util = require("rspamd_util")
|
||||
local rspamd_http = require "rspamd_http"
|
||||
@@ -363,11 +584,13 @@ rspamd_config:register_symbol({
|
||||
local email_content = tostring(task:get_content())
|
||||
email_content = string.gsub(email_content, "\r\n%.", "\r\n..")
|
||||
-- send mail
|
||||
local from_smtp = task:get_from('smtp')
|
||||
local from_addr = (from_smtp and from_smtp[1] and from_smtp[1].addr) or 'mailer-daemon@localhost'
|
||||
lua_smtp.sendmail({
|
||||
task = task,
|
||||
host = os.getenv("IPV4_NETWORK") .. '.253',
|
||||
port = 591,
|
||||
from = task:get_from(stp)[1].addr,
|
||||
from = from_addr,
|
||||
recipients = bcc_dest,
|
||||
helo = 'bcc',
|
||||
timeout = 20,
|
||||
@@ -397,27 +620,41 @@ rspamd_config:register_symbol({
|
||||
end
|
||||
|
||||
local action = task:get_metric_action('default')
|
||||
rspamd_logger.infox("metric action now: %s", action)
|
||||
rspamd_logger.infox("BCC: metric action: %s", action)
|
||||
|
||||
-- Check for pre-result accept (e.g., from KEEP_SPAM)
|
||||
local allow_bcc = false
|
||||
if task.has_pre_result then
|
||||
local has_pre, pre_action = task:has_pre_result()
|
||||
if has_pre and pre_action == 'accept' then
|
||||
allow_bcc = true
|
||||
rspamd_logger.infox("BCC: pre-result accept detected, will send BCC")
|
||||
end
|
||||
end
|
||||
|
||||
-- Allow BCC for mild actions or when we have pre-result accept
|
||||
if not allow_bcc and action ~= 'no action' and action ~= 'add header' and action ~= 'rewrite subject' then
|
||||
rspamd_logger.infox("BCC: skipping for action: %s", action)
|
||||
return
|
||||
end
|
||||
|
||||
local function rcpt_callback(err_message, code, body, headers)
|
||||
if err_message == nil and code == 201 and body ~= nil then
|
||||
if action == 'no action' or action == 'add header' or action == 'rewrite subject' then
|
||||
send_mail(task, body)
|
||||
end
|
||||
rspamd_logger.infox("BCC: sending BCC to %s for rcpt match", body)
|
||||
send_mail(task, body)
|
||||
end
|
||||
end
|
||||
|
||||
local function from_callback(err_message, code, body, headers)
|
||||
if err_message == nil and code == 201 and body ~= nil then
|
||||
if action == 'no action' or action == 'add header' or action == 'rewrite subject' then
|
||||
send_mail(task, body)
|
||||
end
|
||||
rspamd_logger.infox("BCC: sending BCC to %s for from match", body)
|
||||
send_mail(task, body)
|
||||
end
|
||||
end
|
||||
|
||||
if rcpt_table then
|
||||
for _,e in ipairs(rcpt_table) do
|
||||
rspamd_logger.infox(rspamd_config, "checking bcc for rcpt address %s", e)
|
||||
rspamd_logger.infox(rspamd_config, "BCC: checking bcc for rcpt address %s", e)
|
||||
rspamd_http.request({
|
||||
task=task,
|
||||
url='http://nginx:8081/bcc.php',
|
||||
@@ -430,7 +667,7 @@ rspamd_config:register_symbol({
|
||||
|
||||
if from_table then
|
||||
for _,e in ipairs(from_table) do
|
||||
rspamd_logger.infox(rspamd_config, "checking bcc for from address %s", e)
|
||||
rspamd_logger.infox(rspamd_config, "BCC: checking bcc for from address %s", e)
|
||||
rspamd_http.request({
|
||||
task=task,
|
||||
url='http://nginx:8081/bcc.php',
|
||||
@@ -441,7 +678,7 @@ rspamd_config:register_symbol({
|
||||
end
|
||||
end
|
||||
|
||||
return true
|
||||
-- Don't return true to avoid symbol being logged
|
||||
end,
|
||||
priority = 20
|
||||
})
|
||||
@@ -708,4 +945,4 @@ rspamd_config:register_symbol({
|
||||
return true
|
||||
end,
|
||||
priority = 1
|
||||
})
|
||||
})
|
||||
@@ -182,7 +182,7 @@ foreach (json_decode($rcpts, true) as $rcpt) {
|
||||
error_log("RCPT RESOVLER: http pipe: goto address " . $goto . " is an alias branch for " . $goto_branch . PHP_EOL);
|
||||
$goto_branch_array = explode(',', $goto_branch);
|
||||
} else {
|
||||
$stmt = $pdo->prepare("SELECT `target_domain` FROM `alias_domain` WHERE `alias_domain` = :domain AND `active` AND '1'");
|
||||
$stmt = $pdo->prepare("SELECT `target_domain` FROM `alias_domain` WHERE `alias_domain` = :domain AND `active` = '1'");
|
||||
$stmt->execute(array(':domain' => $parsed_goto['domain']));
|
||||
$goto_branch = $stmt->fetch(PDO::FETCH_ASSOC)['target_domain'];
|
||||
if ($goto_branch) {
|
||||
|
||||
@@ -167,7 +167,7 @@ foreach (json_decode($rcpts, true) as $rcpt) {
|
||||
error_log("RCPT RESOVLER: http pipe: goto address " . $goto . " is an alias branch for " . $goto_branch . PHP_EOL);
|
||||
$goto_branch_array = explode(',', $goto_branch);
|
||||
} else {
|
||||
$stmt = $pdo->prepare("SELECT `target_domain` FROM `alias_domain` WHERE `alias_domain` = :domain AND `active` AND '1'");
|
||||
$stmt = $pdo->prepare("SELECT `target_domain` FROM `alias_domain` WHERE `alias_domain` = :domain AND `active` = '1'");
|
||||
$stmt->execute(array(':domain' => $parsed_goto['domain']));
|
||||
$goto_branch = $stmt->fetch(PDO::FETCH_ASSOC)['target_domain'];
|
||||
if ($goto_branch) {
|
||||
|
||||
@@ -86,6 +86,12 @@
|
||||
SOGoMaximumFailedLoginInterval = 900;
|
||||
SOGoFailedLoginBlockInterval = 900;
|
||||
|
||||
// Enable SOGo URL Description for GDPR compliance, this may cause some issues with calendars and contacts. Also uncomment the encryption key below to use it.
|
||||
//SOGoURLEncryptionEnabled = NO;
|
||||
|
||||
// Set a 16 character encryption key for SOGo URL Description, change this to your own value
|
||||
//SOGoURLPathEncryptionKey = "SOGoSuperSecret0";
|
||||
|
||||
GCSChannelCollectionTimer = 60;
|
||||
GCSChannelExpireAge = 60;
|
||||
|
||||
|
||||
@@ -5352,9 +5352,9 @@ paths:
|
||||
started_at: "2019-12-22T21:00:01.622856172Z"
|
||||
state: running
|
||||
type: info
|
||||
dockerapi-mailcow:
|
||||
container: dockerapi-mailcow
|
||||
image: "mailcow/dockerapi:1.36"
|
||||
controller-mailcow:
|
||||
container: controller-mailcow
|
||||
image: "mailcow/controller:1.36"
|
||||
started_at: "2019-12-22T20:59:59.984797808Z"
|
||||
state: running
|
||||
type: info
|
||||
|
||||
@@ -29,8 +29,8 @@ header('Content-Type: application/xml');
|
||||
<clientConfig version="1.1">
|
||||
<emailProvider id="<?=$mailcow_hostname; ?>">
|
||||
<domain>%EMAILDOMAIN%</domain>
|
||||
<displayName>A mailcow mail server</displayName>
|
||||
<displayShortName>mail server</displayShortName>
|
||||
<displayName><?=$autodiscover_config['displayName']; ?></displayName>
|
||||
<displayShortName><?=$autodiscover_config['displayShortName']; ?></displayShortName>
|
||||
|
||||
<incomingServer type="imap">
|
||||
<hostname><?=$autodiscover_config['imap']['server']; ?></hostname>
|
||||
|
||||
@@ -7,6 +7,8 @@ if(file_exists('inc/vars.local.inc.php')) {
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/functions.inc.php';
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/functions.auth.inc.php';
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/sessions.inc.php';
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/functions.mailbox.inc.php';
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/functions.ratelimit.inc.php';
|
||||
$default_autodiscover_config = $autodiscover_config;
|
||||
$autodiscover_config = array_merge($default_autodiscover_config, $autodiscover_config);
|
||||
|
||||
@@ -77,7 +79,7 @@ if (empty($_SERVER['PHP_AUTH_USER']) || empty($_SERVER['PHP_AUTH_PW'])) {
|
||||
exit(0);
|
||||
}
|
||||
|
||||
$login_role = check_login($login_user, $login_pass, array('eas' => TRUE));
|
||||
$login_role = check_login($login_user, $login_pass, array('service' => 'EAS'));
|
||||
|
||||
if ($login_role === "user") {
|
||||
header("Content-Type: application/xml");
|
||||
|
||||
@@ -129,7 +129,16 @@ if (isset($_SESSION['mailcow_cc_role']) && ($_SESSION['mailcow_cc_role'] == "adm
|
||||
);
|
||||
}
|
||||
|
||||
$mta_sts = mailbox('get', 'mta_sts', $domain);
|
||||
// Check if domain is an alias domain and get target domain's MTA-STS
|
||||
$alias_domain_details = mailbox('get', 'alias_domain_details', $domain);
|
||||
$mta_sts_domain = $domain;
|
||||
|
||||
if ($alias_domain_details !== false && !empty($alias_domain_details['target_domain'])) {
|
||||
// This is an alias domain, check target domain for MTA-STS
|
||||
$mta_sts_domain = $alias_domain_details['target_domain'];
|
||||
}
|
||||
|
||||
$mta_sts = mailbox('get', 'mta_sts', $mta_sts_domain);
|
||||
if (count($mta_sts) > 0 && $mta_sts['active'] == 1) {
|
||||
if (!in_array($domain, $alias_domains)) {
|
||||
$records[] = array(
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<?php
|
||||
function app_passwd($_action, $_data = null) {
|
||||
global $pdo;
|
||||
global $lang;
|
||||
global $pdo;
|
||||
global $lang;
|
||||
$_data_log = $_data;
|
||||
!isset($_data_log['app_passwd']) ?: $_data_log['app_passwd'] = '*';
|
||||
!isset($_data_log['app_passwd2']) ?: $_data_log['app_passwd2'] = '*';
|
||||
@@ -43,20 +43,7 @@ function app_passwd($_action, $_data = null) {
|
||||
);
|
||||
return false;
|
||||
}
|
||||
if (!preg_match('/' . $GLOBALS['PASSWD_REGEP'] . '/', $password)) {
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'danger',
|
||||
'log' => array(__FUNCTION__, $_action, $_data_log),
|
||||
'msg' => 'password_complexity'
|
||||
);
|
||||
return false;
|
||||
}
|
||||
if ($password != $password2) {
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'danger',
|
||||
'log' => array(__FUNCTION__, $_action, $_data_log),
|
||||
'msg' => 'password_mismatch'
|
||||
);
|
||||
if (password_check($password, $password2) !== true) {
|
||||
return false;
|
||||
}
|
||||
$password_hashed = hash_password($password);
|
||||
@@ -88,15 +75,15 @@ function app_passwd($_action, $_data = null) {
|
||||
'log' => array(__FUNCTION__, $_action, $_data_log),
|
||||
'msg' => 'app_passwd_added'
|
||||
);
|
||||
break;
|
||||
break;
|
||||
case 'edit':
|
||||
$ids = (array)$_data['id'];
|
||||
foreach ($ids as $id) {
|
||||
$is_now = app_passwd('details', $id);
|
||||
if (!empty($is_now)) {
|
||||
$app_name = (!empty($_data['app_name'])) ? $_data['app_name'] : $is_now['name'];
|
||||
$password = (!empty($_data['password'])) ? $_data['password'] : null;
|
||||
$password2 = (!empty($_data['password2'])) ? $_data['password2'] : null;
|
||||
$password = (!empty($_data['app_passwd'])) ? $_data['app_passwd'] : null;
|
||||
$password2 = (!empty($_data['app_passwd2'])) ? $_data['app_passwd2'] : null;
|
||||
if (isset($_data['protocols'])) {
|
||||
$protocols = (array)$_data['protocols'];
|
||||
$imap_access = (in_array('imap_access', $protocols)) ? 1 : 0;
|
||||
@@ -126,20 +113,7 @@ function app_passwd($_action, $_data = null) {
|
||||
}
|
||||
$app_name = htmlspecialchars(trim($app_name));
|
||||
if (!empty($password) && !empty($password2)) {
|
||||
if (!preg_match('/' . $GLOBALS['PASSWD_REGEP'] . '/', $password)) {
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'danger',
|
||||
'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr),
|
||||
'msg' => 'password_complexity'
|
||||
);
|
||||
continue;
|
||||
}
|
||||
if ($password != $password2) {
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'danger',
|
||||
'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr),
|
||||
'msg' => 'password_mismatch'
|
||||
);
|
||||
if (password_check($password, $password2) !== true) {
|
||||
continue;
|
||||
}
|
||||
$password_hashed = hash_password($password);
|
||||
@@ -182,7 +156,7 @@ function app_passwd($_action, $_data = null) {
|
||||
'msg' => array('object_modified', htmlspecialchars(implode(', ', $ids)))
|
||||
);
|
||||
}
|
||||
break;
|
||||
break;
|
||||
case 'delete':
|
||||
$ids = (array)$_data['id'];
|
||||
foreach ($ids as $id) {
|
||||
@@ -213,19 +187,17 @@ function app_passwd($_action, $_data = null) {
|
||||
'msg' => array('app_passwd_removed', htmlspecialchars($id))
|
||||
);
|
||||
}
|
||||
break;
|
||||
break;
|
||||
case 'get':
|
||||
$app_passwds = array();
|
||||
$stmt = $pdo->prepare("SELECT `id`, `name` FROM `app_passwd` WHERE `mailbox` = :username");
|
||||
$stmt->execute(array(':username' => $username));
|
||||
$app_passwds = $stmt->fetchAll(PDO::FETCH_ASSOC);
|
||||
return $app_passwds;
|
||||
break;
|
||||
break;
|
||||
case 'details':
|
||||
$app_passwd_data = array();
|
||||
$stmt = $pdo->prepare("SELECT *
|
||||
FROM `app_passwd`
|
||||
WHERE `id` = :id");
|
||||
$stmt = $pdo->prepare("SELECT * FROM `app_passwd` WHERE `id` = :id");
|
||||
$stmt->execute(array(':id' => $_data));
|
||||
$app_passwd_data = $stmt->fetch(PDO::FETCH_ASSOC);
|
||||
if (empty($app_passwd_data)) {
|
||||
@@ -237,6 +209,6 @@ function app_passwd($_action, $_data = null) {
|
||||
}
|
||||
$app_passwd_data['name'] = htmlspecialchars(trim($app_passwd_data['name']));
|
||||
return $app_passwd_data;
|
||||
break;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
<?php
|
||||
function check_login($user, $pass, $app_passwd_data = false, $extra = null) {
|
||||
function check_login($user, $pass, $extra = null) {
|
||||
global $pdo;
|
||||
global $redis;
|
||||
|
||||
$is_internal = $extra['is_internal'];
|
||||
$role = $extra['role'];
|
||||
$extra['service'] = !isset($extra['service']) ? 'NONE' : $extra['service'];
|
||||
|
||||
// Try validate admin
|
||||
if (!isset($role) || $role == "admin") {
|
||||
@@ -25,34 +26,20 @@ function check_login($user, $pass, $app_passwd_data = false, $extra = null) {
|
||||
|
||||
// Try validate app password
|
||||
if (!isset($role) || $role == "app") {
|
||||
$result = apppass_login($user, $pass, $app_passwd_data);
|
||||
$result = apppass_login($user, $pass, $extra);
|
||||
if ($result !== false) {
|
||||
if ($app_passwd_data['eas'] === true) {
|
||||
$service = 'EAS';
|
||||
} elseif ($app_passwd_data['dav'] === true) {
|
||||
$service = 'DAV';
|
||||
} else {
|
||||
$service = 'NONE';
|
||||
}
|
||||
$real_rip = ($_SERVER['HTTP_X_REAL_IP'] ?? $_SERVER['REMOTE_ADDR']);
|
||||
set_sasl_log($user, $real_rip, $service, $pass);
|
||||
set_sasl_log($user, $real_rip, $extra['service'], $pass);
|
||||
return $result;
|
||||
}
|
||||
}
|
||||
|
||||
// Try validate user
|
||||
if (!isset($role) || $role == "user") {
|
||||
$result = user_login($user, $pass);
|
||||
$result = user_login($user, $pass, $extra);
|
||||
if ($result !== false) {
|
||||
if ($app_passwd_data['eas'] === true) {
|
||||
$service = 'EAS';
|
||||
} elseif ($app_passwd_data['dav'] === true) {
|
||||
$service = 'DAV';
|
||||
} else {
|
||||
$service = 'MAILCOWUI';
|
||||
}
|
||||
$real_rip = ($_SERVER['HTTP_X_REAL_IP'] ?? $_SERVER['REMOTE_ADDR']);
|
||||
set_sasl_log($user, $real_rip, $service);
|
||||
set_sasl_log($user, $real_rip, $extra['service']);
|
||||
return $result;
|
||||
}
|
||||
}
|
||||
@@ -193,7 +180,7 @@ function user_login($user, $pass, $extra = null){
|
||||
global $iam_settings;
|
||||
|
||||
$is_internal = $extra['is_internal'];
|
||||
$service = $extra['service'];
|
||||
$extra['service'] = !isset($extra['service']) ? 'NONE' : $extra['service'];
|
||||
|
||||
if (!filter_var($user, FILTER_VALIDATE_EMAIL) && !ctype_alnum(str_replace(array('_', '.', '-'), '', $user))) {
|
||||
if (!$is_internal){
|
||||
@@ -236,10 +223,10 @@ function user_login($user, $pass, $extra = null){
|
||||
$row = $stmt->fetch(PDO::FETCH_ASSOC);
|
||||
|
||||
if (!empty($row)) {
|
||||
// check if user has access to service (imap, smtp, pop3, sieve) if service is set
|
||||
// check if user has access to service (imap, smtp, pop3, sieve, dav, eas) if service is set
|
||||
$row['attributes'] = json_decode($row['attributes'], true);
|
||||
if (isset($service)) {
|
||||
$key = strtolower($service) . "_access";
|
||||
if ($extra['service'] != 'NONE') {
|
||||
$key = strtolower($extra['service']) . "_access";
|
||||
if (isset($row['attributes'][$key]) && $row['attributes'][$key] != '1') {
|
||||
return false;
|
||||
}
|
||||
@@ -253,8 +240,8 @@ function user_login($user, $pass, $extra = null){
|
||||
|
||||
// check if user has access to service (imap, smtp, pop3, sieve) if service is set
|
||||
$row['attributes'] = json_decode($row['attributes'], true);
|
||||
if (isset($service)) {
|
||||
$key = strtolower($service) . "_access";
|
||||
if ($extra['service'] != 'NONE') {
|
||||
$key = strtolower($extra['service']) . "_access";
|
||||
if (isset($row['attributes'][$key]) && $row['attributes'][$key] != '1') {
|
||||
return false;
|
||||
}
|
||||
@@ -408,7 +395,7 @@ function user_login($user, $pass, $extra = null){
|
||||
|
||||
return false;
|
||||
}
|
||||
function apppass_login($user, $pass, $app_passwd_data, $extra = null){
|
||||
function apppass_login($user, $pass, $extra = null){
|
||||
global $pdo;
|
||||
|
||||
$is_internal = $extra['is_internal'];
|
||||
@@ -424,20 +411,8 @@ function apppass_login($user, $pass, $app_passwd_data, $extra = null){
|
||||
return false;
|
||||
}
|
||||
|
||||
$protocol = false;
|
||||
if ($app_passwd_data['eas']){
|
||||
$protocol = 'eas';
|
||||
} else if ($app_passwd_data['dav']){
|
||||
$protocol = 'dav';
|
||||
} else if ($app_passwd_data['smtp']){
|
||||
$protocol = 'smtp';
|
||||
} else if ($app_passwd_data['imap']){
|
||||
$protocol = 'imap';
|
||||
} else if ($app_passwd_data['sieve']){
|
||||
$protocol = 'sieve';
|
||||
} else if ($app_passwd_data['pop3']){
|
||||
$protocol = 'pop3';
|
||||
} else if (!$is_internal) {
|
||||
$extra['service'] = !isset($extra['service']) ? 'NONE' : $extra['service'];
|
||||
if (!$is_internal && $extra['service'] == 'NONE') {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -458,7 +433,7 @@ function apppass_login($user, $pass, $app_passwd_data, $extra = null){
|
||||
$rows = $stmt->fetchAll(PDO::FETCH_ASSOC);
|
||||
|
||||
foreach ($rows as $row) {
|
||||
if ($protocol && $row[$protocol . '_access'] != '1'){
|
||||
if ($extra['service'] != 'NONE' && $row[strtolower($extra['service']) . '_access'] != '1'){
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
@@ -325,8 +325,10 @@ function customize($_action, $_item, $_data = null) {
|
||||
break;
|
||||
case 'ui_texts':
|
||||
try {
|
||||
$data['title_name'] = ($title_name = $redis->get('TITLE_NAME')) ? $title_name : 'mailcow UI';
|
||||
$data['main_name'] = ($main_name = $redis->get('MAIN_NAME')) ? $main_name : 'mailcow UI';
|
||||
$mailcow_hostname = strtolower(getenv("MAILCOW_HOSTNAME"));
|
||||
|
||||
$data['title_name'] = ($title_name = $redis->get('TITLE_NAME')) ? $title_name : "$mailcow_hostname - mail UI";
|
||||
$data['main_name'] = ($main_name = $redis->get('MAIN_NAME')) ? $main_name : "$mailcow_hostname - mail UI";
|
||||
$data['apps_name'] = ($apps_name = $redis->get('APPS_NAME')) ? $apps_name : $lang['header']['apps'];
|
||||
$data['help_text'] = ($help_text = $redis->get('HELP_TEXT')) ? $help_text : false;
|
||||
if (!empty($redis->get('UI_IMPRESS'))) {
|
||||
|
||||
@@ -4,12 +4,12 @@ function docker($action, $service_name = null, $attr1 = null, $attr2 = null, $ex
|
||||
global $redis;
|
||||
$curl = curl_init();
|
||||
curl_setopt($curl, CURLOPT_HTTPHEADER,array('Content-Type: application/json' ));
|
||||
// We are using our mail certificates for dockerapi, the names will not match, the certs are trusted anyway
|
||||
// We are using our mail certificates for controller, the names will not match, the certs are trusted anyway
|
||||
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
|
||||
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
|
||||
switch($action) {
|
||||
case 'get_id':
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://dockerapi:443/containers/json');
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://controller:443/containers/json');
|
||||
curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1);
|
||||
curl_setopt($curl, CURLOPT_POST, 0);
|
||||
curl_setopt($curl, CURLOPT_TIMEOUT, $DOCKER_TIMEOUT);
|
||||
@@ -35,7 +35,7 @@ function docker($action, $service_name = null, $attr1 = null, $attr2 = null, $ex
|
||||
return false;
|
||||
break;
|
||||
case 'containers':
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://dockerapi:443/containers/json');
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://controller:443/containers/json');
|
||||
curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1);
|
||||
curl_setopt($curl, CURLOPT_POST, 0);
|
||||
curl_setopt($curl, CURLOPT_TIMEOUT, $DOCKER_TIMEOUT);
|
||||
@@ -63,7 +63,7 @@ function docker($action, $service_name = null, $attr1 = null, $attr2 = null, $ex
|
||||
break;
|
||||
case 'info':
|
||||
if (empty($service_name)) {
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://dockerapi:443/containers/json');
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://controller:443/containers/json');
|
||||
curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1);
|
||||
curl_setopt($curl, CURLOPT_POST, 0);
|
||||
curl_setopt($curl, CURLOPT_TIMEOUT, $DOCKER_TIMEOUT);
|
||||
@@ -71,7 +71,7 @@ function docker($action, $service_name = null, $attr1 = null, $attr2 = null, $ex
|
||||
else {
|
||||
$container_id = docker('get_id', $service_name);
|
||||
if (ctype_xdigit($container_id)) {
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://dockerapi:443/containers/' . $container_id . '/json');
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://controller:443/containers/' . $container_id . '/json');
|
||||
}
|
||||
else {
|
||||
return false;
|
||||
@@ -102,7 +102,7 @@ function docker($action, $service_name = null, $attr1 = null, $attr2 = null, $ex
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (isset($decoded_response['Config']['Labels']['com.docker.compose.project'])
|
||||
if (isset($decoded_response['Config']['Labels']['com.docker.compose.project'])
|
||||
&& strtolower($decoded_response['Config']['Labels']['com.docker.compose.project']) == strtolower(getenv('COMPOSE_PROJECT_NAME'))) {
|
||||
unset($container['Config']['Env']);
|
||||
$out[$decoded_response['Config']['Labels']['com.docker.compose.service']]['State'] = $decoded_response['State'];
|
||||
@@ -123,7 +123,7 @@ function docker($action, $service_name = null, $attr1 = null, $attr2 = null, $ex
|
||||
if (!empty($attr1)) {
|
||||
$container_id = docker('get_id', $service_name);
|
||||
if (ctype_xdigit($container_id) && ctype_alnum($attr1)) {
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://dockerapi:443/containers/' . $container_id . '/' . $attr1);
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://controller:443/containers/' . $container_id . '/' . $attr1);
|
||||
curl_setopt($curl, CURLOPT_POST, 1);
|
||||
curl_setopt($curl, CURLOPT_TIMEOUT, $DOCKER_TIMEOUT);
|
||||
if (!empty($attr2)) {
|
||||
@@ -157,7 +157,7 @@ function docker($action, $service_name = null, $attr1 = null, $attr2 = null, $ex
|
||||
}
|
||||
|
||||
$container_id = $service_name;
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://dockerapi:443/container/' . $container_id . '/stats/update');
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://controller:443/container/' . $container_id . '/stats/update');
|
||||
curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1);
|
||||
curl_setopt($curl, CURLOPT_POST, 1);
|
||||
curl_setopt($curl, CURLOPT_TIMEOUT, $DOCKER_TIMEOUT);
|
||||
@@ -175,7 +175,7 @@ function docker($action, $service_name = null, $attr1 = null, $attr2 = null, $ex
|
||||
return false;
|
||||
break;
|
||||
case 'host_stats':
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://dockerapi:443/host/stats');
|
||||
curl_setopt($curl, CURLOPT_URL, 'https://controller:443/host/stats');
|
||||
curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1);
|
||||
curl_setopt($curl, CURLOPT_POST, 0);
|
||||
curl_setopt($curl, CURLOPT_TIMEOUT, $DOCKER_TIMEOUT);
|
||||
|
||||
@@ -205,6 +205,42 @@ function password_complexity($_action, $_data = null) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
function password_generate(){
|
||||
$password_complexity = password_complexity('get');
|
||||
$min_length = max(16, intval($password_complexity['length']));
|
||||
|
||||
$lowercase = range('a', 'z');
|
||||
$uppercase = range('A', 'Z');
|
||||
$digits = range(0, 9);
|
||||
$special_chars = str_split('!@#$%^&*()?=');
|
||||
|
||||
$password = [
|
||||
$lowercase[random_int(0, count($lowercase) - 1)],
|
||||
$uppercase[random_int(0, count($uppercase) - 1)],
|
||||
$digits[random_int(0, count($digits) - 1)],
|
||||
$special_chars[random_int(0, count($special_chars) - 1)],
|
||||
];
|
||||
|
||||
$all = array_merge($lowercase, $uppercase, $digits, $special_chars);
|
||||
|
||||
while (count($password) < $min_length) {
|
||||
$password[] = $all[random_int(0, count($all) - 1)];
|
||||
}
|
||||
|
||||
// Cryptographically secure shuffle using Fisher-Yates algorithm
|
||||
$count = count($password);
|
||||
for ($i = $count - 1; $i > 0; $i--) {
|
||||
$j = random_int(0, $i);
|
||||
$temp = $password[$i];
|
||||
$password[$i] = $password[$j];
|
||||
$password[$j] = $temp;
|
||||
}
|
||||
|
||||
return implode('', $password);
|
||||
|
||||
}
|
||||
|
||||
function password_check($password1, $password2) {
|
||||
$password_complexity = password_complexity('get');
|
||||
|
||||
@@ -488,6 +524,16 @@ function sys_mail($_data) {
|
||||
'msg' => 'Mass mail job completed, sent ' . count($rcpts) . ' mails'
|
||||
);
|
||||
}
|
||||
function get_remote_ip($use_x_real_ip = true) {
|
||||
$remote = $_SERVER['REMOTE_ADDR'];
|
||||
if ($use_x_real_ip && !empty($_SERVER['HTTP_X_REAL_IP'])) {
|
||||
$remote = $_SERVER['HTTP_X_REAL_IP'];
|
||||
}
|
||||
if (filter_var($remote, FILTER_VALIDATE_IP) === false) {
|
||||
$remote = '0.0.0.0';
|
||||
}
|
||||
return $remote;
|
||||
}
|
||||
function logger($_data = false) {
|
||||
/*
|
||||
logger() will be called as last function
|
||||
@@ -814,6 +860,32 @@ function verify_hash($hash, $password) {
|
||||
$hash = $components[4];
|
||||
return hash_equals(hash_pbkdf2('sha1', $password, $salt, $rounds), $hash);
|
||||
|
||||
case "PBKDF2-SHA512":
|
||||
// Handle FreeIPA-style hash: {PBKDF2-SHA512}10000$<base64_salt>$<base64_hash>
|
||||
$components = explode('$', $hash);
|
||||
if (count($components) !== 3) return false;
|
||||
|
||||
// 1st part: iteration count (integer)
|
||||
$iterations = intval($components[0]);
|
||||
if ($iterations <= 0) return false;
|
||||
|
||||
// 2nd part: salt (base64-encoded)
|
||||
$salt = $components[1];
|
||||
// 3rd part: hash (base64-encoded)
|
||||
$stored_hash_b64 = $components[2];
|
||||
|
||||
// Decode salt and hash from base64
|
||||
$salt_bin = base64_decode($salt, true);
|
||||
$hash_bin = base64_decode($stored_hash_b64, true);
|
||||
if ($salt_bin === false || $hash_bin === false) return false;
|
||||
// Get length of hash in bytes
|
||||
$hash_len = strlen($hash_bin);
|
||||
if ($hash_len === 0) return false;
|
||||
|
||||
// Calculate PBKDF2-SHA512 hash for provided password
|
||||
$test_hash = hash_pbkdf2('sha512', $password, $salt_bin, $iterations, $hash_len, true);
|
||||
return hash_equals($hash_bin, $test_hash);
|
||||
|
||||
case "PLAIN-MD4":
|
||||
return hash_equals(hash('md4', $password), $hash);
|
||||
|
||||
@@ -1006,7 +1078,7 @@ function edit_user_account($_data) {
|
||||
update_sogo_static_view();
|
||||
}
|
||||
// edit password recovery email
|
||||
elseif (isset($pw_recovery_email)) {
|
||||
elseif (!empty($password_old) && isset($pw_recovery_email)) {
|
||||
if (!isset($_SESSION['acl']['pw_reset']) || $_SESSION['acl']['pw_reset'] != "1" ) {
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'danger',
|
||||
@@ -1016,6 +1088,21 @@ function edit_user_account($_data) {
|
||||
return false;
|
||||
}
|
||||
|
||||
$stmt = $pdo->prepare("SELECT `password` FROM `mailbox`
|
||||
WHERE `kind` NOT REGEXP 'location|thing|group'
|
||||
AND `username` = :user AND authsource = 'mailcow'");
|
||||
$stmt->execute(array(':user' => $username));
|
||||
$row = $stmt->fetch(PDO::FETCH_ASSOC);
|
||||
|
||||
if (!verify_hash($row['password'], $password_old)) {
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'danger',
|
||||
'log' => array(__FUNCTION__, $_data_log),
|
||||
'msg' => 'access_denied'
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
$pw_recovery_email = (!filter_var($pw_recovery_email, FILTER_VALIDATE_EMAIL)) ? '' : $pw_recovery_email;
|
||||
$stmt = $pdo->prepare("UPDATE `mailbox` SET `attributes` = JSON_SET(`attributes`, '$.recovery_email', :recovery_email)
|
||||
WHERE `username` = :username AND authsource = 'mailcow'");
|
||||
@@ -1107,11 +1194,21 @@ function user_get_alias_details($username) {
|
||||
}
|
||||
return $data;
|
||||
}
|
||||
function is_valid_domain_name($domain_name) {
|
||||
function is_valid_domain_name($domain_name, $options = array()) {
|
||||
if (empty($domain_name)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Convert domain name to ASCII for validation
|
||||
$domain_name = idn_to_ascii($domain_name, 0, INTL_IDNA_VARIANT_UTS46);
|
||||
|
||||
if (isset($options['allow_wildcard']) && $options['allow_wildcard'] == true) {
|
||||
// Remove '*.' if wildcard subdomains are allowed
|
||||
if (strpos($domain_name, '*.') === 0) {
|
||||
$domain_name = substr($domain_name, 2);
|
||||
}
|
||||
}
|
||||
|
||||
return (preg_match("/^([a-z\d](-*[a-z\d])*)(\.([a-z\d](-*[a-z\d])*))*$/i", $domain_name)
|
||||
&& preg_match("/^.{1,253}$/", $domain_name)
|
||||
&& preg_match("/^[^\.]{1,63}(\.[^\.]{1,63})*$/", $domain_name));
|
||||
|
||||
@@ -49,6 +49,12 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
// Default to 1 yr
|
||||
$_data["validity"] = 8760;
|
||||
}
|
||||
if (isset($_data["permanent"]) && filter_var($_data["permanent"], FILTER_VALIDATE_BOOL)) {
|
||||
$permanent = 1;
|
||||
}
|
||||
else {
|
||||
$permanent = 0;
|
||||
}
|
||||
$domain = $_data['domain'];
|
||||
$description = $_data['description'];
|
||||
$valid_domains[] = mailbox('get', 'mailbox_details', $username)['domain'];
|
||||
@@ -65,13 +71,14 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
return false;
|
||||
}
|
||||
$validity = strtotime("+" . $_data["validity"] . " hour");
|
||||
$stmt = $pdo->prepare("INSERT INTO `spamalias` (`address`, `description`, `goto`, `validity`) VALUES
|
||||
(:address, :description, :goto, :validity)");
|
||||
$stmt = $pdo->prepare("INSERT INTO `spamalias` (`address`, `description`, `goto`, `validity`, `permanent`) VALUES
|
||||
(:address, :description, :goto, :validity, :permanent)");
|
||||
$stmt->execute(array(
|
||||
':address' => readable_random_string(rand(rand(3, 9), rand(3, 9))) . '.' . readable_random_string(rand(rand(3, 9), rand(3, 9))) . '@' . $domain,
|
||||
':description' => $description,
|
||||
':goto' => $username,
|
||||
':validity' => $validity
|
||||
':validity' => $validity,
|
||||
':permanent' => $permanent
|
||||
));
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'success',
|
||||
@@ -468,10 +475,11 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
':delete2duplicates' => $delete2duplicates,
|
||||
':active' => $active,
|
||||
));
|
||||
$id = $pdo->lastInsertId();
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'success',
|
||||
'log' => array(__FUNCTION__, $_action, $_type, $_data_log, $_attr),
|
||||
'msg' => array('mailbox_modified', $username)
|
||||
'msg' => array('mailbox_modified', $username, $id)
|
||||
);
|
||||
break;
|
||||
case 'domain':
|
||||
@@ -688,6 +696,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$gotos = array_map('trim', preg_split( "/( |,|;|\n)/", $_data['goto']));
|
||||
$internal = intval($_data['internal']);
|
||||
$active = intval($_data['active']);
|
||||
$sender_allowed = intval($_data['sender_allowed']);
|
||||
$sogo_visible = intval($_data['sogo_visible']);
|
||||
$goto_null = intval($_data['goto_null']);
|
||||
$goto_spam = intval($_data['goto_spam']);
|
||||
@@ -843,8 +852,8 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
);
|
||||
continue;
|
||||
}
|
||||
$stmt = $pdo->prepare("INSERT INTO `alias` (`address`, `public_comment`, `private_comment`, `goto`, `domain`, `sogo_visible`, `internal`, `active`)
|
||||
VALUES (:address, :public_comment, :private_comment, :goto, :domain, :sogo_visible, :internal, :active)");
|
||||
$stmt = $pdo->prepare("INSERT INTO `alias` (`address`, `public_comment`, `private_comment`, `goto`, `domain`, `sogo_visible`, `internal`, `sender_allowed`, `active`)
|
||||
VALUES (:address, :public_comment, :private_comment, :goto, :domain, :sogo_visible, :internal, :sender_allowed, :active)");
|
||||
if (!filter_var($address, FILTER_VALIDATE_EMAIL) === true) {
|
||||
$stmt->execute(array(
|
||||
':address' => '@'.$domain,
|
||||
@@ -855,6 +864,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
':domain' => $domain,
|
||||
':sogo_visible' => $sogo_visible,
|
||||
':internal' => $internal,
|
||||
':sender_allowed' => $sender_allowed,
|
||||
':active' => $active
|
||||
));
|
||||
}
|
||||
@@ -867,6 +877,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
':domain' => $domain,
|
||||
':sogo_visible' => $sogo_visible,
|
||||
':internal' => $internal,
|
||||
':sender_allowed' => $sender_allowed,
|
||||
':active' => $active
|
||||
));
|
||||
}
|
||||
@@ -1068,6 +1079,8 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$_data['pop3_access'] = (in_array('pop3', $_data['protocol_access'])) ? 1 : 0;
|
||||
$_data['smtp_access'] = (in_array('smtp', $_data['protocol_access'])) ? 1 : 0;
|
||||
$_data['sieve_access'] = (in_array('sieve', $_data['protocol_access'])) ? 1 : 0;
|
||||
$_data['eas_access'] = (in_array('eas', $_data['protocol_access'])) ? 1 : 0;
|
||||
$_data['dav_access'] = (in_array('dav', $_data['protocol_access'])) ? 1 : 0;
|
||||
}
|
||||
$active = (isset($_data['active'])) ? intval($_data['active']) : intval($MAILBOX_DEFAULT_ATTRIBUTES['active']);
|
||||
$force_pw_update = (isset($_data['force_pw_update'])) ? intval($_data['force_pw_update']) : intval($MAILBOX_DEFAULT_ATTRIBUTES['force_pw_update']);
|
||||
@@ -1078,6 +1091,8 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$pop3_access = (isset($_data['pop3_access'])) ? intval($_data['pop3_access']) : intval($MAILBOX_DEFAULT_ATTRIBUTES['pop3_access']);
|
||||
$smtp_access = (isset($_data['smtp_access'])) ? intval($_data['smtp_access']) : intval($MAILBOX_DEFAULT_ATTRIBUTES['smtp_access']);
|
||||
$sieve_access = (isset($_data['sieve_access'])) ? intval($_data['sieve_access']) : intval($MAILBOX_DEFAULT_ATTRIBUTES['sieve_access']);
|
||||
$eas_access = (isset($_data['eas_access'])) ? intval($_data['eas_access']) : intval($MAILBOX_DEFAULT_ATTRIBUTES['eas_access']);
|
||||
$dav_access = (isset($_data['dav_access'])) ? intval($_data['dav_access']) : intval($MAILBOX_DEFAULT_ATTRIBUTES['dav_access']);
|
||||
$relayhost = (isset($_data['relayhost'])) ? intval($_data['relayhost']) : 0;
|
||||
$quarantine_notification = (isset($_data['quarantine_notification'])) ? strval($_data['quarantine_notification']) : strval($MAILBOX_DEFAULT_ATTRIBUTES['quarantine_notification']);
|
||||
$quarantine_category = (isset($_data['quarantine_category'])) ? strval($_data['quarantine_category']) : strval($MAILBOX_DEFAULT_ATTRIBUTES['quarantine_category']);
|
||||
@@ -1096,6 +1111,8 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
'pop3_access' => strval($pop3_access),
|
||||
'smtp_access' => strval($smtp_access),
|
||||
'sieve_access' => strval($sieve_access),
|
||||
'eas_access' => strval($eas_access),
|
||||
'dav_access' => strval($dav_access),
|
||||
'relayhost' => strval($relayhost),
|
||||
'passwd_update' => time(),
|
||||
'mailbox_format' => strval($MAILBOX_DEFAULT_ATTRIBUTES['mailbox_format']),
|
||||
@@ -1446,7 +1463,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
}
|
||||
foreach ($mx as $index => $mx_domain) {
|
||||
$mx_domain = idn_to_ascii(strtolower(trim($mx_domain)), 0, INTL_IDNA_VARIANT_UTS46);
|
||||
if (!is_valid_domain_name($mx_domain)) {
|
||||
if (!is_valid_domain_name($mx_domain, array('allow_wildcard' => true))) {
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'danger',
|
||||
'log' => array(__FUNCTION__, $_action, $_type, $_data, $_attr),
|
||||
@@ -1714,12 +1731,16 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$attr['pop3_access'] = (in_array('pop3', $_data['protocol_access'])) ? 1 : 0;
|
||||
$attr['smtp_access'] = (in_array('smtp', $_data['protocol_access'])) ? 1 : 0;
|
||||
$attr['sieve_access'] = (in_array('sieve', $_data['protocol_access'])) ? 1 : 0;
|
||||
$attr['eas_access'] = (in_array('eas', $_data['protocol_access'])) ? 1 : 0;
|
||||
$attr['dav_access'] = (in_array('dav', $_data['protocol_access'])) ? 1 : 0;
|
||||
}
|
||||
else {
|
||||
$attr['imap_access'] = intval($MAILBOX_DEFAULT_ATTRIBUTES['imap_access']);
|
||||
$attr['pop3_access'] = intval($MAILBOX_DEFAULT_ATTRIBUTES['pop3_access']);
|
||||
$attr['smtp_access'] = intval($MAILBOX_DEFAULT_ATTRIBUTES['smtp_access']);
|
||||
$attr['sieve_access'] = intval($MAILBOX_DEFAULT_ATTRIBUTES['sieve_access']);
|
||||
$attr['eas_access'] = intval($MAILBOX_DEFAULT_ATTRIBUTES['eas_access']);
|
||||
$attr['dav_access'] = intval($MAILBOX_DEFAULT_ATTRIBUTES['dav_access']);
|
||||
}
|
||||
if (isset($_data['acl'])) {
|
||||
$_data['acl'] = (array)$_data['acl'];
|
||||
@@ -2103,15 +2124,23 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
);
|
||||
continue;
|
||||
}
|
||||
if (empty($_data['validity'])) {
|
||||
if (empty($_data['validity']) && empty($_data['permanent'])) {
|
||||
continue;
|
||||
}
|
||||
$validity = round((int)time() + ($_data['validity'] * 3600));
|
||||
$stmt = $pdo->prepare("UPDATE `spamalias` SET `validity` = :validity WHERE
|
||||
if (isset($_data['permanent']) && filter_var($_data['permanent'], FILTER_VALIDATE_BOOL)) {
|
||||
$permanent = 1;
|
||||
$validity = 0;
|
||||
}
|
||||
else if (isset($_data['validity'])) {
|
||||
$permanent = 0;
|
||||
$validity = round((int)time() + ($_data['validity'] * 3600));
|
||||
}
|
||||
$stmt = $pdo->prepare("UPDATE `spamalias` SET `validity` = :validity, `permanent` = :permanent WHERE
|
||||
`address` = :address");
|
||||
$stmt->execute(array(
|
||||
':address' => $address,
|
||||
':validity' => $validity
|
||||
':validity' => $validity,
|
||||
':permanent' => $permanent
|
||||
));
|
||||
$_SESSION['return'][] = array(
|
||||
'type' => 'success',
|
||||
@@ -2486,6 +2515,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
if (!empty($is_now)) {
|
||||
$internal = (isset($_data['internal'])) ? intval($_data['internal']) : $is_now['internal'];
|
||||
$active = (isset($_data['active'])) ? intval($_data['active']) : $is_now['active'];
|
||||
$sender_allowed = (isset($_data['sender_allowed'])) ? intval($_data['sender_allowed']) : $is_now['sender_allowed'];
|
||||
$sogo_visible = (isset($_data['sogo_visible'])) ? intval($_data['sogo_visible']) : $is_now['sogo_visible'];
|
||||
$goto_null = (isset($_data['goto_null'])) ? intval($_data['goto_null']) : 0;
|
||||
$goto_spam = (isset($_data['goto_spam'])) ? intval($_data['goto_spam']) : 0;
|
||||
@@ -2671,6 +2701,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
`goto` = :goto,
|
||||
`sogo_visible`= :sogo_visible,
|
||||
`internal`= :internal,
|
||||
`sender_allowed`= :sender_allowed,
|
||||
`active`= :active
|
||||
WHERE `id` = :id");
|
||||
$stmt->execute(array(
|
||||
@@ -2681,6 +2712,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
':goto' => $goto,
|
||||
':sogo_visible' => $sogo_visible,
|
||||
':internal' => $internal,
|
||||
':sender_allowed' => $sender_allowed,
|
||||
':active' => $active,
|
||||
':id' => $is_now['id']
|
||||
));
|
||||
@@ -3028,6 +3060,8 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$_data['pop3_access'] = (in_array('pop3', $_data['protocol_access'])) ? 1 : 0;
|
||||
$_data['smtp_access'] = (in_array('smtp', $_data['protocol_access'])) ? 1 : 0;
|
||||
$_data['sieve_access'] = (in_array('sieve', $_data['protocol_access'])) ? 1 : 0;
|
||||
$_data['eas_access'] = (in_array('eas', $_data['protocol_access'])) ? 1 : 0;
|
||||
$_data['dav_access'] = (in_array('dav', $_data['protocol_access'])) ? 1 : 0;
|
||||
}
|
||||
if (!empty($is_now)) {
|
||||
$active = (isset($_data['active'])) ? intval($_data['active']) : $is_now['active'];
|
||||
@@ -3037,6 +3071,8 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
(int)$pop3_access = (isset($_data['pop3_access']) && hasACLAccess("protocol_access")) ? intval($_data['pop3_access']) : intval($is_now['attributes']['pop3_access']);
|
||||
(int)$smtp_access = (isset($_data['smtp_access']) && hasACLAccess("protocol_access")) ? intval($_data['smtp_access']) : intval($is_now['attributes']['smtp_access']);
|
||||
(int)$sieve_access = (isset($_data['sieve_access']) && hasACLAccess("protocol_access")) ? intval($_data['sieve_access']) : intval($is_now['attributes']['sieve_access']);
|
||||
(int)$eas_access = (isset($_data['eas_access']) && hasACLAccess("protocol_access")) ? intval($_data['eas_access']) : intval($is_now['attributes']['eas_access']);
|
||||
(int)$dav_access = (isset($_data['dav_access']) && hasACLAccess("protocol_access")) ? intval($_data['dav_access']) : intval($is_now['attributes']['dav_access']);
|
||||
(int)$relayhost = (isset($_data['relayhost']) && hasACLAccess("mailbox_relayhost")) ? intval($_data['relayhost']) : intval($is_now['attributes']['relayhost']);
|
||||
(int)$quota_m = (isset_has_content($_data['quota'])) ? intval($_data['quota']) : ($is_now['quota'] / 1048576);
|
||||
$name = (!empty($_data['name'])) ? ltrim(rtrim($_data['name'], '>'), '<') : $is_now['name'];
|
||||
@@ -3170,9 +3206,10 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
}
|
||||
if (isset($_data['sender_acl'])) {
|
||||
// Get sender_acl items set by admin
|
||||
$current_sender_acls = mailbox('get', 'sender_acl_handles', $username);
|
||||
$sender_acl_admin = array_merge(
|
||||
mailbox('get', 'sender_acl_handles', $username)['sender_acl_domains']['ro'],
|
||||
mailbox('get', 'sender_acl_handles', $username)['sender_acl_addresses']['ro']
|
||||
$current_sender_acls['sender_acl_domains']['ro'],
|
||||
$current_sender_acls['sender_acl_addresses']['ro']
|
||||
);
|
||||
// Get sender_acl items from POST array
|
||||
// Set sender_acl_domain_admin to empty array if sender_acl contains "default" to trigger a reset
|
||||
@@ -3260,16 +3297,25 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$stmt->execute(array(
|
||||
':username' => $username
|
||||
));
|
||||
$fixed_sender_aliases = mailbox('get', 'sender_acl_handles', $username)['fixed_sender_aliases'];
|
||||
$sender_acl_handles = mailbox('get', 'sender_acl_handles', $username);
|
||||
$fixed_sender_aliases_allowed = $sender_acl_handles['fixed_sender_aliases_allowed'];
|
||||
$fixed_sender_aliases_blocked = $sender_acl_handles['fixed_sender_aliases_blocked'];
|
||||
|
||||
foreach ($sender_acl_merged as $sender_acl) {
|
||||
$domain = ltrim($sender_acl, '@');
|
||||
if (is_valid_domain_name($domain)) {
|
||||
$sender_acl = '@' . $domain;
|
||||
}
|
||||
// Don't add if allowed by alias
|
||||
if (in_array($sender_acl, $fixed_sender_aliases)) {
|
||||
|
||||
// Always add to sender_acl table to create explicit permission
|
||||
// Skip only if it's in allowed list (would be redundant)
|
||||
// But DO add if it's in blocked list (creates override)
|
||||
if (in_array($sender_acl, $fixed_sender_aliases_allowed)) {
|
||||
// Skip: already allowed by sender_allowed=1, no need for sender_acl entry
|
||||
continue;
|
||||
}
|
||||
|
||||
// Add to sender_acl (either override for blocked aliases, or grant for selectable ones)
|
||||
$stmt = $pdo->prepare("INSERT INTO `sender_acl` (`send_as`, `logged_in_as`)
|
||||
VALUES (:sender_acl, :username)");
|
||||
$stmt->execute(array(
|
||||
@@ -3320,6 +3366,8 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
`attributes` = JSON_SET(`attributes`, '$.pop3_access', :pop3_access),
|
||||
`attributes` = JSON_SET(`attributes`, '$.relayhost', :relayhost),
|
||||
`attributes` = JSON_SET(`attributes`, '$.smtp_access', :smtp_access),
|
||||
`attributes` = JSON_SET(`attributes`, '$.eas_access', :eas_access),
|
||||
`attributes` = JSON_SET(`attributes`, '$.dav_access', :dav_access),
|
||||
`attributes` = JSON_SET(`attributes`, '$.recovery_email', :recovery_email),
|
||||
`attributes` = JSON_SET(`attributes`, '$.attribute_hash', :attribute_hash)
|
||||
WHERE `username` = :username");
|
||||
@@ -3334,6 +3382,8 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
':pop3_access' => $pop3_access,
|
||||
':sieve_access' => $sieve_access,
|
||||
':smtp_access' => $smtp_access,
|
||||
':eas_access' => $eas_access,
|
||||
':dav_access' => $dav_access,
|
||||
':recovery_email' => $pw_recovery_email,
|
||||
':relayhost' => $relayhost,
|
||||
':username' => $username,
|
||||
@@ -3716,6 +3766,8 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$attr['pop3_access'] = (in_array('pop3', $_data['protocol_access'])) ? 1 : 0;
|
||||
$attr['smtp_access'] = (in_array('smtp', $_data['protocol_access'])) ? 1 : 0;
|
||||
$attr['sieve_access'] = (in_array('sieve', $_data['protocol_access'])) ? 1 : 0;
|
||||
$attr['eas_access'] = (in_array('eas', $_data['protocol_access'])) ? 1 : 0;
|
||||
$attr['dav_access'] = (in_array('dav', $_data['protocol_access'])) ? 1 : 0;
|
||||
}
|
||||
else {
|
||||
foreach ($is_now as $key => $value){
|
||||
@@ -3897,7 +3949,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
foreach ($mx as $index => $mx_domain) {
|
||||
$mx_domain = idn_to_ascii(strtolower(trim($mx_domain)), 0, INTL_IDNA_VARIANT_UTS46);
|
||||
$invalid_mx = false;
|
||||
if (!is_valid_domain_name($mx_domain)) {
|
||||
if (!is_valid_domain_name($mx_domain, array('allow_wildcard' => true))) {
|
||||
$invalid_mx = $mx_domain;
|
||||
break;
|
||||
}
|
||||
@@ -4145,13 +4197,22 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$data['sender_acl_addresses']['rw'] = array();
|
||||
$data['sender_acl_addresses']['selectable'] = array();
|
||||
$data['fixed_sender_aliases'] = array();
|
||||
$data['fixed_sender_aliases_allowed'] = array();
|
||||
$data['fixed_sender_aliases_blocked'] = array();
|
||||
$data['external_sender_aliases'] = array();
|
||||
// Fixed addresses
|
||||
$stmt = $pdo->prepare("SELECT `address` FROM `alias` WHERE `goto` REGEXP :goto AND `address` NOT LIKE '@%'");
|
||||
// Fixed addresses - split by sender_allowed status
|
||||
$stmt = $pdo->prepare("SELECT `address`, `sender_allowed` FROM `alias` WHERE `goto` REGEXP :goto AND `address` NOT LIKE '@%'");
|
||||
$stmt->execute(array(':goto' => '(^|,)'.preg_quote($_data, '/').'($|,)'));
|
||||
$rows = $stmt->fetchAll(PDO::FETCH_ASSOC);
|
||||
while ($row = array_shift($rows)) {
|
||||
// Keep old array for backward compatibility
|
||||
$data['fixed_sender_aliases'][] = $row['address'];
|
||||
// Split into allowed/blocked for proper display
|
||||
if ($row['sender_allowed'] == '1') {
|
||||
$data['fixed_sender_aliases_allowed'][] = $row['address'];
|
||||
} else {
|
||||
$data['fixed_sender_aliases_blocked'][] = $row['address'];
|
||||
}
|
||||
}
|
||||
$stmt = $pdo->prepare("SELECT CONCAT(`local_part`, '@', `alias_domain`.`alias_domain`) AS `alias_domain_alias` FROM `mailbox`, `alias_domain`
|
||||
WHERE `alias_domain`.`target_domain` = `mailbox`.`domain`
|
||||
@@ -4584,10 +4645,12 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
`description`,
|
||||
`validity`,
|
||||
`created`,
|
||||
`modified`
|
||||
`modified`,
|
||||
`permanent`
|
||||
FROM `spamalias`
|
||||
WHERE `goto` = :username
|
||||
AND `validity` >= :unixnow");
|
||||
AND (`validity` >= :unixnow
|
||||
OR `permanent` != 0)");
|
||||
$stmt->execute(array(':username' => $_data, ':unixnow' => time()));
|
||||
$tladata = $stmt->fetchAll(PDO::FETCH_ASSOC);
|
||||
return $tladata;
|
||||
@@ -4709,6 +4772,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
`internal`,
|
||||
`active`,
|
||||
`sogo_visible`,
|
||||
`sender_allowed`,
|
||||
`created`,
|
||||
`modified`
|
||||
FROM `alias`
|
||||
@@ -4742,6 +4806,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$aliasdata['active_int'] = $row['active'];
|
||||
$aliasdata['sogo_visible'] = $row['sogo_visible'];
|
||||
$aliasdata['sogo_visible_int'] = $row['sogo_visible'];
|
||||
$aliasdata['sender_allowed'] = $row['sender_allowed'];
|
||||
$aliasdata['created'] = $row['created'];
|
||||
$aliasdata['modified'] = $row['modified'];
|
||||
if (!hasDomainAccess($_SESSION['mailcow_cc_username'], $_SESSION['mailcow_cc_role'], $aliasdata['domain'])) {
|
||||
@@ -5162,7 +5227,7 @@ function mailbox($_action, $_type, $_data = null, $_extra = null) {
|
||||
$stmt = $pdo->prepare("SELECT COALESCE(SUM(`quota`), 0) as `in_use` FROM `mailbox` WHERE (`kind` = '' OR `kind` = NULL) AND `domain` = :domain AND `username` != :username");
|
||||
$stmt->execute(array(':domain' => $row['domain'], ':username' => $_data));
|
||||
$MailboxUsage = $stmt->fetch(PDO::FETCH_ASSOC);
|
||||
$stmt = $pdo->prepare("SELECT IFNULL(COUNT(`address`), 0) AS `sa_count` FROM `spamalias` WHERE `goto` = :address AND `validity` >= :unixnow");
|
||||
$stmt = $pdo->prepare("SELECT IFNULL(COUNT(`address`), 0) AS `sa_count` FROM `spamalias` WHERE `goto` = :address AND (`validity` >= :unixnow OR `permanent` != 0)");
|
||||
$stmt->execute(array(':address' => $_data, ':unixnow' => time()));
|
||||
$SpamaliasUsage = $stmt->fetch(PDO::FETCH_ASSOC);
|
||||
$mailboxdata['max_new_quota'] = ($DomainQuota['quota'] * 1048576) - $MailboxUsage['in_use'];
|
||||
|
||||
@@ -62,7 +62,11 @@ if ($app_links_processed){
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Workaround to get text with <br> straight to twig.
|
||||
// Using "nl2br" doesn't work with Twig as it would escape everything by default.
|
||||
if (isset($UI_TEXTS["ui_footer"])) {
|
||||
$UI_TEXTS["ui_footer"] = nl2br($UI_TEXTS["ui_footer"]);
|
||||
}
|
||||
|
||||
$globalVariables = [
|
||||
'mailcow_hostname' => getenv('MAILCOW_HOSTNAME'),
|
||||
|
||||
@@ -4,7 +4,7 @@ function init_db_schema()
|
||||
try {
|
||||
global $pdo;
|
||||
|
||||
$db_version = "19082025_1436";
|
||||
$db_version = "28012026_1000";
|
||||
|
||||
$stmt = $pdo->query("SHOW TABLES LIKE 'versions'");
|
||||
$num_results = count($stmt->fetchAll(PDO::FETCH_ASSOC));
|
||||
@@ -185,6 +185,7 @@ function init_db_schema()
|
||||
"public_comment" => "TEXT",
|
||||
"sogo_visible" => "TINYINT(1) NOT NULL DEFAULT '1'",
|
||||
"internal" => "TINYINT(1) NOT NULL DEFAULT '0'",
|
||||
"sender_allowed" => "TINYINT(1) NOT NULL DEFAULT '1'",
|
||||
"active" => "TINYINT(1) NOT NULL DEFAULT '1'"
|
||||
),
|
||||
"keys" => array(
|
||||
@@ -554,7 +555,8 @@ function init_db_schema()
|
||||
"description" => "TEXT NOT NULL",
|
||||
"created" => "DATETIME(0) NOT NULL DEFAULT NOW(0)",
|
||||
"modified" => "DATETIME ON UPDATE CURRENT_TIMESTAMP",
|
||||
"validity" => "INT(11)"
|
||||
"validity" => "INT(11)",
|
||||
"permanent" => "TINYINT(1) NOT NULL DEFAULT '0'"
|
||||
),
|
||||
"keys" => array(
|
||||
"primary" => array(
|
||||
@@ -1337,6 +1339,14 @@ function init_db_schema()
|
||||
$pdo->query($create);
|
||||
}
|
||||
|
||||
// Clear old app_passwd log entries
|
||||
$pdo->exec("DELETE FROM logs
|
||||
WHERE role != 'unauthenticated'
|
||||
AND JSON_EXTRACT(`call`, '$[0]') = 'app_passwd'
|
||||
AND JSON_EXTRACT(`call`, '$[1]') = 'edit'
|
||||
AND (JSON_CONTAINS_PATH(`call`, 'one', '$[2].password')
|
||||
OR JSON_CONTAINS_PATH(`call`, 'one', '$[2].password2'));");
|
||||
|
||||
// Mitigate imapsync argument injection issue
|
||||
$pdo->query("UPDATE `imapsync` SET `custom_params` = ''
|
||||
WHERE `custom_params` LIKE '%pipemess%'
|
||||
@@ -1385,6 +1395,8 @@ function init_db_schema()
|
||||
$pdo->query("UPDATE `mailbox` SET `attributes` = JSON_SET(`attributes`, '$.imap_access', \"1\") WHERE JSON_VALUE(`attributes`, '$.imap_access') IS NULL;");
|
||||
$pdo->query("UPDATE `mailbox` SET `attributes` = JSON_SET(`attributes`, '$.pop3_access', \"1\") WHERE JSON_VALUE(`attributes`, '$.pop3_access') IS NULL;");
|
||||
$pdo->query("UPDATE `mailbox` SET `attributes` = JSON_SET(`attributes`, '$.smtp_access', \"1\") WHERE JSON_VALUE(`attributes`, '$.smtp_access') IS NULL;");
|
||||
$pdo->query("UPDATE `mailbox` SET `attributes` = JSON_SET(`attributes`, '$.eas_access', \"1\") WHERE JSON_VALUE(`attributes`, '$.eas_access') IS NULL;");
|
||||
$pdo->query("UPDATE `mailbox` SET `attributes` = JSON_SET(`attributes`, '$.dav_access', \"1\") WHERE JSON_VALUE(`attributes`, '$.dav_access') IS NULL;");
|
||||
$pdo->query("UPDATE `mailbox` SET `attributes` = JSON_SET(`attributes`, '$.mailbox_format', \"maildir:\") WHERE JSON_VALUE(`attributes`, '$.mailbox_format') IS NULL;");
|
||||
$pdo->query("UPDATE `mailbox` SET `attributes` = JSON_SET(`attributes`, '$.quarantine_notification', \"never\") WHERE JSON_VALUE(`attributes`, '$.quarantine_notification') IS NULL;");
|
||||
$pdo->query("UPDATE `mailbox` SET `attributes` = JSON_SET(`attributes`, '$.quarantine_category', \"reject\") WHERE JSON_VALUE(`attributes`, '$.quarantine_category') IS NULL;");
|
||||
|
||||
@@ -105,11 +105,11 @@ http_response_code(500);
|
||||
<?php
|
||||
exit;
|
||||
}
|
||||
// Stop when dockerapi is not available
|
||||
if (fsockopen("tcp://dockerapi", 443, $errno, $errstr) === false) {
|
||||
// Stop when controller is not available
|
||||
if (fsockopen("tcp://controller", 443, $errno, $errstr) === false) {
|
||||
http_response_code(500);
|
||||
?>
|
||||
<center style='font-family:sans-serif;'>Connection to dockerapi container failed.<br /><br />The following error was reported:<br/><?=$errno;?> - <?=$errstr;?></center>
|
||||
<center style='font-family:sans-serif;'>Connection to controller container failed.<br /><br />The following error was reported:<br/><?=$errno;?> - <?=$errstr;?></center>
|
||||
<?php
|
||||
exit;
|
||||
}
|
||||
@@ -121,7 +121,7 @@ class mailcowPdo extends OAuth2\Storage\Pdo {
|
||||
$this->config['user_table'] = 'mailbox';
|
||||
}
|
||||
public function checkUserCredentials($username, $password) {
|
||||
if (check_login($username, $password) == 'user') {
|
||||
if (check_login($username, $password, array("role" => "user", "service" => "NONE")) == 'user') {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@@ -165,14 +165,6 @@ if(!$DEV_MODE) {
|
||||
set_exception_handler('exception_handler');
|
||||
}
|
||||
|
||||
// TODO: Move function
|
||||
function get_remote_ip() {
|
||||
$remote = $_SERVER['REMOTE_ADDR'];
|
||||
if (filter_var($remote, FILTER_VALIDATE_IP) === false) {
|
||||
return '0.0.0.0';
|
||||
}
|
||||
return $remote;
|
||||
}
|
||||
|
||||
// Load core functions first
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/functions.inc.php';
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
<?php
|
||||
// Start session
|
||||
if (session_status() !== PHP_SESSION_ACTIVE) {
|
||||
session_name($SESSION_NAME);
|
||||
ini_set("session.cookie_httponly", 1);
|
||||
ini_set("session.cookie_samesite", $SESSION_SAMESITE_POLICY);
|
||||
ini_set('session.gc_maxlifetime', $SESSION_LIFETIME);
|
||||
}
|
||||
|
||||
@@ -68,7 +70,7 @@ if (!empty($_SERVER['HTTP_X_API_KEY'])) {
|
||||
}
|
||||
else {
|
||||
$redis->publish("F2B_CHANNEL", "mailcow UI: Invalid password for API_USER by " . $_SERVER['REMOTE_ADDR']);
|
||||
error_log("mailcow UI: Invalid password for " . $user . " by " . $_SERVER['REMOTE_ADDR']);
|
||||
error_log("mailcow UI: Invalid password for API_USER by " . $_SERVER['REMOTE_ADDR']);
|
||||
http_response_code(401);
|
||||
echo json_encode(array(
|
||||
'type' => 'error',
|
||||
@@ -80,7 +82,7 @@ if (!empty($_SERVER['HTTP_X_API_KEY'])) {
|
||||
}
|
||||
else {
|
||||
$redis->publish("F2B_CHANNEL", "mailcow UI: Invalid password for API_USER by " . $_SERVER['REMOTE_ADDR']);
|
||||
error_log("mailcow UI: Invalid password for " . $user . " by " . $_SERVER['REMOTE_ADDR']);
|
||||
error_log("mailcow UI: Invalid password for API_USER by " . $_SERVER['REMOTE_ADDR']);
|
||||
http_response_code(401);
|
||||
echo json_encode(array(
|
||||
'type' => 'error',
|
||||
@@ -90,6 +92,16 @@ if (!empty($_SERVER['HTTP_X_API_KEY'])) {
|
||||
exit();
|
||||
}
|
||||
}
|
||||
else {
|
||||
$remote = get_remote_ip(false);
|
||||
$docker_ipv4_network = getenv('IPV4_NETWORK');
|
||||
if ($remote == "{$docker_ipv4_network}.246") {
|
||||
$_SESSION['mailcow_cc_username'] = 'Controller';
|
||||
$_SESSION['mailcow_cc_role'] = 'admin';
|
||||
$_SESSION['mailcow_cc_api'] = true;
|
||||
$_SESSION['mailcow_cc_api_access'] = 'rw';
|
||||
}
|
||||
}
|
||||
|
||||
// Handle logouts
|
||||
if (isset($_POST["logout"])) {
|
||||
|
||||
@@ -44,7 +44,7 @@ if (isset($_GET["cancel_tfa_login"])) {
|
||||
|
||||
if (isset($_POST["login_user"]) && isset($_POST["pass_user"])) {
|
||||
$login_user = strtolower(trim($_POST["login_user"]));
|
||||
$as = check_login($login_user, $_POST["pass_user"], false, array("role" => "admin"));
|
||||
$as = check_login($login_user, $_POST["pass_user"], array("role" => "admin", "service" => "MAILCOWUI"));
|
||||
|
||||
if ($as == "admin") {
|
||||
session_regenerate_id(true);
|
||||
|
||||
@@ -55,7 +55,7 @@ if (isset($_GET["cancel_tfa_login"])) {
|
||||
|
||||
if (isset($_POST["login_user"]) && isset($_POST["pass_user"])) {
|
||||
$login_user = strtolower(trim($_POST["login_user"]));
|
||||
$as = check_login($login_user, $_POST["pass_user"], false, array("role" => "domain_admin"));
|
||||
$as = check_login($login_user, $_POST["pass_user"], array("role" => "domain_admin", "service" => "MAILCOWUI"));
|
||||
|
||||
if ($as == "domainadmin") {
|
||||
session_regenerate_id(true);
|
||||
|
||||
@@ -80,7 +80,7 @@ if (isset($_POST["verify_tfa_login"])) {
|
||||
intval($user_details['attributes']['force_pw_update']) != 1 &&
|
||||
getenv('SKIP_SOGO') != "y" &&
|
||||
!$is_dual) {
|
||||
header("Location: /SOGo/so/{$_SESSION['mailcow_cc_username']}");
|
||||
header("Location: /SOGo/so/");
|
||||
die();
|
||||
} else {
|
||||
header("Location: /user");
|
||||
@@ -119,7 +119,7 @@ if (isset($_GET["cancel_tfa_login"])) {
|
||||
|
||||
if (isset($_POST["login_user"]) && isset($_POST["pass_user"])) {
|
||||
$login_user = strtolower(trim($_POST["login_user"]));
|
||||
$as = check_login($login_user, $_POST["pass_user"], false, array("role" => "user"));
|
||||
$as = check_login($login_user, $_POST["pass_user"], array("role" => "user", "service" => "MAILCOWUI"));
|
||||
|
||||
if ($as == "user") {
|
||||
set_user_loggedin_session($login_user);
|
||||
@@ -146,7 +146,7 @@ if (isset($_POST["login_user"]) && isset($_POST["pass_user"])) {
|
||||
intval($user_details['attributes']['force_pw_update']) != 1 &&
|
||||
getenv('SKIP_SOGO') != "y" &&
|
||||
!$is_dual) {
|
||||
header("Location: /SOGo/so/{$login_user}");
|
||||
header("Location: /SOGo/so/");
|
||||
die();
|
||||
} else {
|
||||
header("Location: /user");
|
||||
|
||||
@@ -33,6 +33,8 @@ if ($https_port === FALSE) {
|
||||
//$https_port = 1234;
|
||||
// Other settings =>
|
||||
$autodiscover_config = array(
|
||||
'displayName' => 'A mailcow mail server',
|
||||
'displayShortName' => 'mail server',
|
||||
// General autodiscover service type: "activesync" or "imap"
|
||||
// emClient uses autodiscover, but does not support ActiveSync. mailcow excludes emClient from ActiveSync.
|
||||
// With SOGo disabled, the type will always fallback to imap. CalDAV and CardDAV will be excluded, too.
|
||||
@@ -85,7 +87,7 @@ $AVAILABLE_LANGUAGES = array(
|
||||
// 'ca-es' => 'Català (Catalan)',
|
||||
'bg-bg' => 'Български (Bulgarian)',
|
||||
'cs-cz' => 'Čeština (Czech)',
|
||||
'da-dk' => 'Danish (Dansk)',
|
||||
'da-dk' => 'Dansk (Danish)',
|
||||
'de-de' => 'Deutsch (German)',
|
||||
'en-gb' => 'English',
|
||||
'es-es' => 'Español (Spanish)',
|
||||
@@ -110,6 +112,7 @@ $AVAILABLE_LANGUAGES = array(
|
||||
'sv-se' => 'Svenska (Swedish)',
|
||||
'tr-tr' => 'Türkçe (Turkish)',
|
||||
'uk-ua' => 'Українська (Ukrainian)',
|
||||
'vi-vn' => 'Tiếng Việt (Vietnamese)',
|
||||
'zh-cn' => '简体中文 (Simplified Chinese)',
|
||||
'zh-tw' => '繁體中文 (Traditional Chinese)',
|
||||
);
|
||||
@@ -153,6 +156,13 @@ $LOG_PAGINATION_SIZE = 50;
|
||||
// Session lifetime in seconds
|
||||
$SESSION_LIFETIME = 10800;
|
||||
|
||||
// Session SameSite Policy
|
||||
// Use "None", "Lax" or "Strict"
|
||||
$SESSION_SAMESITE_POLICY = "Lax";
|
||||
|
||||
// Name of the session cookie
|
||||
$SESSION_NAME = "MCSESSID";
|
||||
|
||||
// Label for OTP devices
|
||||
$OTP_LABEL = "mailcow UI";
|
||||
|
||||
@@ -207,6 +217,12 @@ $MAILBOX_DEFAULT_ATTRIBUTES['smtp_access'] = true;
|
||||
// Mailbox has sieve access by default
|
||||
$MAILBOX_DEFAULT_ATTRIBUTES['sieve_access'] = true;
|
||||
|
||||
// Mailbox has ActiveSync/EAS access by default
|
||||
$MAILBOX_DEFAULT_ATTRIBUTES['eas_access'] = true;
|
||||
|
||||
// Mailbox has CalDAV/CardDAV (DAV) access by default
|
||||
$MAILBOX_DEFAULT_ATTRIBUTES['dav_access'] = true;
|
||||
|
||||
// Mailbox receives notifications about...
|
||||
// "add_header" - mail that was put into the Junk folder
|
||||
// "reject" - mail that was rejected
|
||||
|
||||
@@ -12,7 +12,7 @@ elseif (isset($_SESSION['mailcow_cc_role']) && $_SESSION['mailcow_cc_role'] == '
|
||||
$user_details = mailbox("get", "mailbox_details", $_SESSION['mailcow_cc_username']);
|
||||
$is_dual = (!empty($_SESSION["dual-login"]["username"])) ? true : false;
|
||||
if (intval($user_details['attributes']['sogo_access']) == 1 && !$is_dual && getenv('SKIP_SOGO') != "y") {
|
||||
header("Location: /SOGo/so/{$_SESSION['mailcow_cc_username']}");
|
||||
header("Location: /SOGo/so/");
|
||||
} else {
|
||||
header("Location: /user");
|
||||
}
|
||||
@@ -27,6 +27,12 @@ elseif (isset($_SESSION['mailcow_cc_role']) && $_SESSION['mailcow_cc_role'] == '
|
||||
exit();
|
||||
}
|
||||
|
||||
$host = strtolower($_SERVER['HTTP_HOST'] ?? '');
|
||||
if (str_starts_with($host, 'autodiscover.') || str_starts_with($host, 'autoconfig.')) {
|
||||
http_response_code(404);
|
||||
exit();
|
||||
}
|
||||
|
||||
require_once $_SERVER['DOCUMENT_ROOT'] . '/inc/header.inc.php';
|
||||
$_SESSION['return_to'] = $_SERVER['REQUEST_URI'];
|
||||
$_SESSION['index_query_string'] = $_SERVER['QUERY_STRING'];
|
||||
|
||||
@@ -22,8 +22,8 @@ $(document).ready(function() {
|
||||
$.notify({message: msg},{z_index: 20000, delay: auto_hide, type: type,placement: {from: "bottom",align: "right"},animate: {enter: 'animated fadeInUp',exit: 'animated fadeOutDown'}});
|
||||
}
|
||||
|
||||
$(".generate_password").click(async function( event ) {
|
||||
try {
|
||||
$(".generate_password").click(async function( event ) {
|
||||
try {
|
||||
var password_policy = await window.fetch("/api/v1/get/passwordpolicy", { method:'GET', cache:'no-cache' });
|
||||
var password_policy = await password_policy.json();
|
||||
random_passwd_length = password_policy.length;
|
||||
@@ -48,7 +48,11 @@ $(document).ready(function() {
|
||||
})
|
||||
}
|
||||
$(".rot-enc").html(function(){
|
||||
return str_rot13($(this).html())
|
||||
footer_html = $(this).html();
|
||||
footer_html = footer_html.replace(/</g, '<').replace(/>/g, '>')
|
||||
.replace(/&/g, '&').replace(/&nzc;/g, '&')
|
||||
.replace(/"/g, '"').replace(/'/g, "'");
|
||||
return str_rot13(footer_html)
|
||||
});
|
||||
// https://stackoverflow.com/questions/4399005/implementing-jquerys-shake-effect-with-animate
|
||||
function shake(div,interval,distance,times) {
|
||||
@@ -125,7 +129,7 @@ $(document).ready(function() {
|
||||
}
|
||||
});
|
||||
})();
|
||||
|
||||
|
||||
// responsive tabs, scroll to opened tab
|
||||
$(document).on("shown.bs.collapse shown.bs.tab", function (e) {
|
||||
var target = $(e.target);
|
||||
@@ -409,4 +413,4 @@ function copyToClipboard(id) {
|
||||
// only works with https connections
|
||||
navigator.clipboard.writeText(copyText.value);
|
||||
mailcow_alert_box(lang.copy_to_clipboard, "success");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,7 +54,16 @@ jQuery(function($){
|
||||
$.get("/inc/ajax/show_rspamd_global_filters.php");
|
||||
$("#confirm_show_rspamd_global_filters").hide();
|
||||
$("#rspamd_global_filters").removeClass("d-none");
|
||||
localStorage.setItem('rspamd_global_filters_confirmed', 'true');
|
||||
});
|
||||
|
||||
$(document).ready(function() {
|
||||
if (localStorage.getItem('rspamd_global_filters_confirmed') === 'true') {
|
||||
$("#confirm_show_rspamd_global_filters").hide();
|
||||
$("#rspamd_global_filters").removeClass("d-none");
|
||||
}
|
||||
});
|
||||
|
||||
$("#super_delete").click(function() { return confirm(lang.queue_ays); });
|
||||
|
||||
$(".refresh_table").on('click', function(e) {
|
||||
@@ -715,7 +724,6 @@ jQuery(function($){
|
||||
$('.app_hide').off('change');
|
||||
$('.app_hide').on('change', function (e) {
|
||||
var value = $(this).is(':checked') ? '1' : '0';
|
||||
console.log(value)
|
||||
$(this).parent().children(':first-child').val(value);
|
||||
})
|
||||
}
|
||||
|
||||
@@ -47,8 +47,6 @@ $(document).ready(function() {
|
||||
window.fetch("/api/v1/get/status/host/ip", { method:'GET', cache:'no-cache' }).then(function(response) {
|
||||
return response.json();
|
||||
}).then(function(data) {
|
||||
console.log(data);
|
||||
|
||||
// display host ips
|
||||
if (data.ipv4)
|
||||
$("#host_ipv4").text(data.ipv4);
|
||||
@@ -1007,7 +1005,7 @@ jQuery(function($){
|
||||
"data-order": cellData.sortBy,
|
||||
"data-sort": cellData.sortBy
|
||||
});
|
||||
},
|
||||
},
|
||||
render: function (data) {
|
||||
return data.value;
|
||||
}
|
||||
@@ -1032,7 +1030,7 @@ jQuery(function($){
|
||||
"data-order": cellData.sortBy,
|
||||
"data-sort": cellData.sortBy
|
||||
});
|
||||
},
|
||||
},
|
||||
render: function (data) {
|
||||
return data.value;
|
||||
}
|
||||
@@ -1348,8 +1346,6 @@ function update_stats(timeout=5){
|
||||
window.fetch("/api/v1/get/status/host", {method:'GET',cache:'no-cache'}).then(function(response) {
|
||||
return response.json();
|
||||
}).then(function(data) {
|
||||
console.log(data);
|
||||
|
||||
if (data){
|
||||
// display table data
|
||||
$("#host_date").text(data.system_time);
|
||||
@@ -1399,8 +1395,6 @@ function update_container_stats(timeout=5){
|
||||
var diskIOCtx = Chart.getChart(container + "_DiskIOChart");
|
||||
var netIOCtx = Chart.getChart(container + "_NetIOChart");
|
||||
|
||||
console.log(container);
|
||||
console.log(data);
|
||||
prev_stats = null;
|
||||
if (data.length >= 2){
|
||||
prev_stats = data[data.length -2];
|
||||
|
||||
@@ -66,7 +66,6 @@ $(document).ready(function() {
|
||||
// load tags
|
||||
if ($('#tags').length){
|
||||
var tagsEl = $('#tags').parent().find('.tag-values')[0];
|
||||
console.log($(tagsEl).val())
|
||||
var tags = JSON.parse($(tagsEl).val());
|
||||
$(tagsEl).val("");
|
||||
|
||||
|
||||
@@ -352,6 +352,12 @@ $(document).ready(function() {
|
||||
if (template.sieve_access == 1){
|
||||
protocol_access.push("sieve");
|
||||
}
|
||||
if (template.eas_access == 1){
|
||||
protocol_access.push("eas");
|
||||
}
|
||||
if (template.dav_access == 1){
|
||||
protocol_access.push("dav");
|
||||
}
|
||||
$('#protocol_access').selectpicker('val', protocol_access);
|
||||
|
||||
var acl = [];
|
||||
@@ -933,6 +939,8 @@ jQuery(function($){
|
||||
item.imap_access = '<i class="text-' + (item.attributes.imap_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.imap_access == 1 ? 'check-lg' : 'x-lg') + '"></i>';
|
||||
item.smtp_access = '<i class="text-' + (item.attributes.smtp_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.smtp_access == 1 ? 'check-lg' : 'x-lg') + '"></i>';
|
||||
item.sieve_access = '<i class="text-' + (item.attributes.sieve_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.sieve_access == 1 ? 'check-lg' : 'x-lg') + '"></i>';
|
||||
item.eas_access = '<i class="text-' + (item.attributes.eas_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.eas_access == 1 ? 'check-lg' : 'x-lg') + '"></i>';
|
||||
item.dav_access = '<i class="text-' + (item.attributes.dav_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.dav_access == 1 ? 'check-lg' : 'x-lg') + '"></i>';
|
||||
if (item.attributes.quarantine_notification === 'never') {
|
||||
item.quarantine_notification = lang.never;
|
||||
} else if (item.attributes.quarantine_notification === 'hourly') {
|
||||
@@ -1096,6 +1104,18 @@ jQuery(function($){
|
||||
defaultContent: '',
|
||||
className: 'none'
|
||||
},
|
||||
{
|
||||
title: 'EAS',
|
||||
data: 'eas_access',
|
||||
defaultContent: '',
|
||||
className: 'none'
|
||||
},
|
||||
{
|
||||
title: 'DAV',
|
||||
data: 'dav_access',
|
||||
defaultContent: '',
|
||||
className: 'none'
|
||||
},
|
||||
{
|
||||
title: lang.quarantine_notification,
|
||||
data: 'quarantine_notification',
|
||||
@@ -1209,6 +1229,8 @@ jQuery(function($){
|
||||
item.attributes.imap_access = '<i class="text-' + (item.attributes.imap_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.imap_access == 1 ? 'check-lg' : 'x-lg') + '"><span class="sorting-value">' + (item.attributes.imap_access == 1 ? '1' : '0') + '</span></i>';
|
||||
item.attributes.smtp_access = '<i class="text-' + (item.attributes.smtp_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.smtp_access == 1 ? 'check-lg' : 'x-lg') + '"><span class="sorting-value">' + (item.attributes.smtp_access == 1 ? '1' : '0') + '</span></i>';
|
||||
item.attributes.sieve_access = '<i class="text-' + (item.attributes.sieve_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.sieve_access == 1 ? 'check-lg' : 'x-lg') + '"><span class="sorting-value">' + (item.attributes.sieve_access == 1 ? '1' : '0') + '</span></i>';
|
||||
item.attributes.eas_access = '<i class="text-' + (item.attributes.eas_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.eas_access == 1 ? 'check-lg' : 'x-lg') + '"><span class="sorting-value">' + (item.attributes.eas_access == 1 ? '1' : '0') + '</span></i>';
|
||||
item.attributes.dav_access = '<i class="text-' + (item.attributes.dav_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.dav_access == 1 ? 'check-lg' : 'x-lg') + '"><span class="sorting-value">' + (item.attributes.dav_access == 1 ? '1' : '0') + '</span></i>';
|
||||
item.attributes.sogo_access = '<i class="text-' + (item.attributes.sogo_access == 1 ? 'success' : 'danger') + ' bi bi-' + (item.attributes.sogo_access == 1 ? 'check-lg' : 'x-lg') + '"><span class="sorting-value">' + (item.attributes.sogo_access == 1 ? '1' : '0') + '</span></i>';
|
||||
if (item.attributes.quarantine_notification === 'never') {
|
||||
item.attributes.quarantine_notification = lang.never;
|
||||
@@ -1317,6 +1339,16 @@ jQuery(function($){
|
||||
data: 'attributes.sieve_access',
|
||||
defaultContent: '',
|
||||
},
|
||||
{
|
||||
title: 'EAS',
|
||||
data: 'attributes.eas_access',
|
||||
defaultContent: '',
|
||||
},
|
||||
{
|
||||
title: 'DAV',
|
||||
data: 'attributes.dav_access',
|
||||
defaultContent: '',
|
||||
},
|
||||
{
|
||||
title: 'SOGO',
|
||||
data: 'attributes.sogo_access',
|
||||
@@ -1949,11 +1981,6 @@ jQuery(function($){
|
||||
defaultContent: '',
|
||||
responsivePriority: 5,
|
||||
},
|
||||
{
|
||||
title: lang.bcc_destinations,
|
||||
data: 'bcc_dest',
|
||||
defaultContent: ''
|
||||
},
|
||||
{
|
||||
title: lang.sogo_visible,
|
||||
data: 'sogo_visible',
|
||||
|
||||
@@ -97,7 +97,7 @@ jQuery(function($){
|
||||
var datetime = new Date(item.datetime.replace(/-/g, "/"));
|
||||
var local_datetime = datetime.toLocaleDateString(undefined, {year: "numeric", month: "2-digit", day: "2-digit", hour: "2-digit", minute: "2-digit", second: "2-digit"});
|
||||
var service = '<div class="badge bg-secondary">' + item.service.toUpperCase() + '</div>';
|
||||
var app_password = item.app_password ? ' <a href="/edit/app-passwd/' + item.app_password + '"><i class="bi bi-app-indicator"></i> ' + escapeHtml(item.app_password_name || "App") + '</a>' : '';
|
||||
var app_password = item.app_password ? ' <a href="/edit/app-passwd/' + item.app_password + '"><i class="bi bi-key-fill"></i><span class="ms-1">' + escapeHtml(item.app_password_name || "App") + '</span></a>' : '';
|
||||
var real_rip = item.real_rip.startsWith("Web") ? item.real_rip : '<a href="https://bgp.tools/prefix/' + item.real_rip + '" target="_blank">' + item.real_rip + "</a>";
|
||||
var ip_location = item.location ? ' <span class="flag-icon flag-icon-' + item.location.toLowerCase() + '"></span>' : '';
|
||||
var ip_data = real_rip + ip_location + app_password;
|
||||
@@ -105,10 +105,9 @@ jQuery(function($){
|
||||
$(".last-sasl-login").append(`
|
||||
<li class="list-group-item d-flex justify-content-between align-items-start">
|
||||
<div class="ms-2 me-auto d-flex flex-column">
|
||||
<div class="fw-bold">` + real_rip + `</div>
|
||||
<small class="fst-italic mt-2">` + service + ` ` + local_datetime + `</small>
|
||||
<div class="fw-bold">` + ip_location + real_rip + `</div>
|
||||
<small class="fst-italic mt-2">` + service + ` ` + local_datetime + `</small>` + app_password + `
|
||||
</div>
|
||||
<span>` + ip_location + `</span>
|
||||
</li>
|
||||
`);
|
||||
})
|
||||
@@ -169,7 +168,6 @@ jQuery(function($){
|
||||
type: "GET",
|
||||
url: "/api/v1/get/time_limited_aliases",
|
||||
dataSrc: function(data){
|
||||
console.log(data);
|
||||
$.each(data, function (i, item) {
|
||||
if (acl_data.spam_alias === 1) {
|
||||
item.action = '<div class="btn-group">' +
|
||||
@@ -177,6 +175,10 @@ jQuery(function($){
|
||||
'</div>';
|
||||
item.chkbox = '<input type="checkbox" class="form-check-input" data-id="tla" name="multi_select" value="' + encodeURIComponent(item.address) + '" />';
|
||||
item.address = escapeHtml(item.address);
|
||||
item.validity = {
|
||||
value: item.validity,
|
||||
permanent: item.permanent
|
||||
};
|
||||
}
|
||||
else {
|
||||
item.chkbox = '<input type="checkbox" class="form-check-input" disabled />';
|
||||
@@ -220,9 +222,21 @@ jQuery(function($){
|
||||
title: lang.alias_valid_until,
|
||||
data: 'validity',
|
||||
defaultContent: '',
|
||||
createdCell: function(td, cellData) {
|
||||
createSortableDate(td, cellData)
|
||||
}
|
||||
render: function (data, type) {
|
||||
var date = new Date(data.value ? data.value * 1000 : 0);
|
||||
switch (type) {
|
||||
case "sort":
|
||||
if (data.permanent) {
|
||||
return 0;
|
||||
}
|
||||
return date.getTime();
|
||||
default:
|
||||
if (data.permanent) {
|
||||
return lang.forever;
|
||||
}
|
||||
return date.toLocaleDateString(LOCALE, DATETIME_FORMAT);
|
||||
}
|
||||
},
|
||||
},
|
||||
{
|
||||
title: lang.created_on,
|
||||
@@ -262,7 +276,6 @@ jQuery(function($){
|
||||
type: "GET",
|
||||
url: '/api/v1/get/syncjobs/' + encodeURIComponent(mailcow_cc_username) + '/no_log',
|
||||
dataSrc: function(data){
|
||||
console.log(data);
|
||||
$.each(data, function (i, item) {
|
||||
item.user1 = escapeHtml(item.user1);
|
||||
item.log = '<a href="#syncjobLogModal" data-bs-toggle="modal" data-syncjob-id="' + item.id + '">' + lang.open_logs + '</a>'
|
||||
@@ -418,7 +431,6 @@ jQuery(function($){
|
||||
type: "GET",
|
||||
url: '/api/v1/get/app-passwd/all',
|
||||
dataSrc: function(data){
|
||||
console.log(data);
|
||||
$.each(data, function (i, item) {
|
||||
item.name = escapeHtml(item.name)
|
||||
item.protocols = []
|
||||
@@ -514,7 +526,6 @@ jQuery(function($){
|
||||
type: "GET",
|
||||
url: '/api/v1/get/policy_wl_mailbox',
|
||||
dataSrc: function(data){
|
||||
console.log(data);
|
||||
$.each(data, function (i, item) {
|
||||
if (validateEmail(item.object)) {
|
||||
item.chkbox = '<input type="checkbox" class="form-check-input" data-id="policy_wl_mailbox" name="multi_select" value="' + item.prefid + '" />';
|
||||
@@ -585,7 +596,6 @@ jQuery(function($){
|
||||
type: "GET",
|
||||
url: '/api/v1/get/policy_bl_mailbox',
|
||||
dataSrc: function(data){
|
||||
console.log(data);
|
||||
$.each(data, function (i, item) {
|
||||
if (validateEmail(item.object)) {
|
||||
item.chkbox = '<input type="checkbox" class="form-check-input" data-id="policy_bl_mailbox" name="multi_select" value="' + item.prefid + '" />';
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user