Mirror of https://github.com/domainaware/parsedmarc.git
Synced 2026-03-04 22:06:26 +00:00

Compare commits: 8.19.1...copilot/dr (78 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 4219306365 | |
| | a6e009c149 | |
| | 33384bd612 | |
| | 33eb2aaf62 | |
| | 1387fb4899 | |
| | 4d97bd25aa | |
| | 17a612df0c | |
| | 221bc332ef | |
| | a2a75f7a81 | |
| | 50fcb51577 | |
| | dd9ef90773 | |
| | 0e3a4b0f06 | |
| | 343b53ef18 | |
| | 792079a3e8 | |
| | 1f3a1fc843 | |
| | 34fa0c145d | |
| | 6719a06388 | |
| | eafa435868 | |
| | 5d772c3b36 | |
| | 72cabbef23 | |
| | 3d74cd6ac0 | |
| | d1ac59a016 | |
| | 7fdd53008f | |
| | 35331d4b84 | |
| | de9edd3590 | |
| | abf4bdba13 | |
| | 7b842740f5 | |
| | ebe3ccf40a | |
| | 808285658f | |
| | bc1dae29bd | |
| | 4b904444e5 | |
| | 3608bce344 | |
| | fe809c4c3f | |
| | a76c2f9621 | |
| | bb8f4002bf | |
| | b5773c6b4a | |
| | b99bd67225 | |
| | af9ad568ec | |
| | 748164d177 | |
| | 487e5e1149 | |
| | 73010cf964 | |
| | a4a5475aa8 | |
| | dab78880df | |
| | fb54e3b742 | |
| | 6799f10364 | |
| | 445c9565a4 | |
| | 4b786846ae | |
| | 23ae563cd8 | |
| | cdd000e675 | |
| | 7d58abc67b | |
| | a18ae439de | |
| | d7061330a8 | |
| | 9d5654b8ec | |
| | a0e0070dd0 | |
| | cf3b7f2c29 | |
| | d312522ab7 | |
| | 888d717476 | |
| | 1127f65fbb | |
| | d017dfcddf | |
| | 5fae99aacc | |
| | ba57368ac3 | |
| | dc6ee5de98 | |
| | 158d63d205 | |
| | f1933b906c | |
| | 4b98d795ff | |
| | b1356f7dfc | |
| | 1969196e1a | |
| | 553f15f6a9 | |
| | 1fc9f638e2 | |
| | 48bff504b4 | |
| | 681b7cbf85 | |
| | 0922d6e83a | |
| | baf3f95fb1 | |
| | a51f945305 | |
| | 55dbf8e3db | |
| | 00267c9847 | |
| | 51356175e1 | |
| | 3be10d30dd | |
.github/workflows/docker.yml (vendored): 10 lines changed
@@ -24,11 +24,11 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5

       - name: Docker meta
         id: meta
-        uses: docker/metadata-action@v3
+        uses: docker/metadata-action@v5
         with:
           images: |
             ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
@@ -40,16 +40,14 @@ jobs:
             type=semver,pattern={{major}}.{{minor}}

       - name: Log in to the Container registry
-        # https://github.com/docker/login-action/releases/tag/v2.0.0
-        uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b
+        uses: docker/login-action@v3
         with:
           registry: ${{ env.REGISTRY }}
           username: ${{ github.actor }}
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build and push Docker image
-        # https://github.com/docker/build-push-action/releases/tag/v3.0.0
-        uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8
+        uses: docker/build-push-action@v6
         with:
           context: .
           push: ${{ github.event_name == 'release' }}
.github/workflows/python-tests.yml (vendored): 14 lines changed
@@ -15,7 +15,7 @@ jobs:

     services:
       elasticsearch:
-        image: elasticsearch:8.18.2
+        image: elasticsearch:8.19.7
         env:
           discovery.type: single-node
           cluster.name: parsedmarc-cluster
@@ -30,18 +30,18 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
+        python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v6
         with:
           python-version: ${{ matrix.python-version }}
       - name: Install system dependencies
         run: |
-          sudo apt-get update
-          sudo apt-get install -y libemail-outlook-message-perl
+          sudo apt-get -q update
+          sudo apt-get -qy install libemail-outlook-message-perl
       - name: Install Python dependencies
         run: |
           python -m pip install --upgrade pip
@@ -65,6 +65,6 @@ jobs:
         run: |
           hatch build
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v4
+        uses: codecov/codecov-action@v5
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
.vscode/launch.json (vendored): 13 lines changed
@@ -19,20 +19,11 @@
             "console": "integratedTerminal"
         },
         {
-            "name": "sample.eml",
+            "name": "sample",
             "type": "debugpy",
             "request": "launch",
             "module": "parsedmarc.cli",
-            "args": ["samples/private/sample.eml"]
-        },
-        {
-            "name": "find_sus_domains.py",
-            "type": "debugpy",
-            "request": "launch",
-            "program": "find_sus_domains.py",
-            "args": ["-i", "unknown_domains.txt", "-o", "sus_domains.csv"],
-            "cwd": "${workspaceFolder}/parsedmarc/resources/maps",
-            "console": "integratedTerminal"
+            "args": ["samples/private/sample"]
         },
         {
             "name": "sortlists.py",
.vscode/settings.json (vendored): 296 lines changed
@@ -1,144 +1,166 @@
 {
+    "[python]": {
+        "editor.defaultFormatter": "charliermarsh.ruff",
+        "editor.formatOnSave": true,
+
+        // Let Ruff handle lint fixes + import sorting on save
+        "editor.codeActionsOnSave": {
+            "source.fixAll.ruff": "explicit",
+            "source.organizeImports.ruff": "explicit"
+        }
+    },
     "markdownlint.config": {
         "MD024": false
     },
     "cSpell.words": [
         "adkim",
         "akamaiedge",
         "amsmath",
         "andrewmcgilvray",
         "arcname",
         "aspf",
         "autoclass",
         "automodule",
         "backported",
         "bellsouth",
         "boto",
         "brakhane",
         "Brightmail",
         "CEST",
         "CHACHA",
         "checkdmarc",
         "Codecov",
         "confnew",
         "dateparser",
         "dateutil",
         "Davmail",
         "DBIP",
         "dearmor",
         "deflist",
         "devel",
         "DMARC",
         "Dmarcian",
         "dnspython",
         "dollarmath",
         "dpkg",
         "exampleuser",
         "expiringdict",
         "fieldlist",
+        "GELF",
         "genindex",
         "geoip",
         "geoipupdate",
         "Geolite",
         "geolocation",
         "githubpages",
         "Grafana",
         "hostnames",
         "htpasswd",
         "httpasswd",
         "httplib",
+        "ifhost",
         "IMAP",
         "imapclient",
         "infile",
         "Interaktive",
         "IPDB",
         "journalctl",
+        "kafkaclient",
         "keepalive",
         "keyout",
         "keyrings",
         "Leeman",
         "libemail",
         "linkify",
         "LISTSERV",
+        "loganalytics",
         "lxml",
         "mailparser",
         "mailrelay",
         "mailsuite",
         "maxdepth",
+        "MAXHEADERS",
         "maxmind",
         "mbox",
         "mfrom",
+        "mhdw",
         "michaeldavie",
         "mikesiegel",
+        "Mimecast",
         "mitigations",
         "MMDB",
         "modindex",
         "msgconvert",
         "msgraph",
         "MSSP",
+        "multiprocess",
         "Munge",
         "ndjson",
         "newkey",
         "Nhcm",
         "nojekyll",
         "nondigest",
         "nosecureimap",
         "nosniff",
         "nwettbewerb",
         "opensearch",
+        "opensearchpy",
         "parsedmarc",
         "passsword",
+        "pbar",
         "Postorius",
         "premade",
+        "privatesuffix",
         "procs",
         "publicsuffix",
         "publicsuffixlist",
         "publixsuffix",
         "pygelf",
         "pypy",
         "pytest",
         "quickstart",
         "Reindex",
         "replyto",
         "reversename",
         "Rollup",
         "Rpdm",
         "SAMEORIGIN",
         "sdist",
         "Servernameone",
         "setuptools",
         "smartquotes",
         "SMTPTLS",
         "sortlists",
         "sortmaps",
         "sourcetype",
         "STARTTLS",
         "tasklist",
         "timespan",
         "tlsa",
         "tlsrpt",
         "toctree",
         "TQDDM",
         "tqdm",
         "truststore",
         "Übersicht",
         "uids",
+        "Uncategorized",
         "unparasable",
         "uper",
         "urllib",
         "Valimail",
         "venv",
         "Vhcw",
         "viewcode",
         "virtualenv",
         "WBITS",
         "webmail",
         "Wettbewerber",
         "Whalen",
         "whitespaces",
         "xennn",
         "xmltodict",
         "xpack",
         "zscholl"
     ],
 }
CHANGELOG.md: 655 lines changed (diff suppressed because it is too large)
README.md: 31 lines changed
@@ -23,11 +23,10 @@ ProofPoint Email Fraud Defense, and Valimail.

 ## Help Wanted

-This project is maintained by one developer. Please consider
-reviewing the open
-[issues](https://github.com/domainaware/parsedmarc/issues) to see how
-you can contribute code, documentation, or user support. Assistance on
-the pinned issues would be particularly helpful.
+This project is maintained by one developer. Please consider reviewing the open
+[issues](https://github.com/domainaware/parsedmarc/issues) to see how you can
+contribute code, documentation, or user support. Assistance on the pinned
+issues would be particularly helpful.

 Thanks to all
 [contributors](https://github.com/domainaware/parsedmarc/graphs/contributors)!
@@ -42,6 +41,24 @@ Thanks to all
 - Consistent data structures
 - Simple JSON and/or CSV output
 - Optionally email the results
-- Optionally send the results to Elasticsearch, Opensearch, and/or Splunk, for use
-  with premade dashboards
+- Optionally send the results to Elasticsearch, Opensearch, and/or Splunk, for
+  use with premade dashboards
 - Optionally send reports to Apache Kafka
+
+## Python Compatibility
+
+This project supports the following Python versions, which are either actively maintained or are the default versions
+for RHEL or Debian.
+
+| Version | Supported | Reason |
+|---------|-----------|------------------------------------------------------------|
+| < 3.6   | ❌        | End of Life (EOL) |
+| 3.6     | ❌        | Used in RHEL 8, but not supported by project dependencies |
+| 3.7     | ❌        | End of Life (EOL) |
+| 3.8     | ❌        | End of Life (EOL) |
+| 3.9     | ❌        | Used in Debian 11 and RHEL 9, but not supported by project dependencies |
+| 3.10    | ✅        | Actively maintained |
+| 3.11    | ✅        | Actively maintained; supported until June 2028 (Debian 12) |
+| 3.12    | ✅        | Actively maintained; supported until May 2035 (RHEL 10) |
+| 3.13    | ✅        | Actively maintained; supported until June 2030 (Debian 13) |
+| 3.14    | ✅        | Actively maintained |
build.sh: 3 lines changed
@@ -9,12 +9,11 @@ fi
 . venv/bin/activate
 pip install .[build]
 ruff format .
-ruff check .
 cd docs
 make clean
 make html
 touch build/html/.nojekyll
-if [ -d "./../parsedmarc-docs" ]; then
+if [ -d "../../parsedmarc-docs" ]; then
   cp -rf build/html/* ../../parsedmarc-docs/
 fi
 cd ..
ci.ini: 1 line changed
@@ -3,6 +3,7 @@ save_aggregate = True
 save_forensic = True
 save_smtp_tls = True
 debug = True
+offline = True

 [elasticsearch]
 hosts = http://localhost:9200
@@ -1,8 +1,6 @@
-version: '3.7'
-
 services:
   elasticsearch:
-    image: docker.elastic.co/elasticsearch/elasticsearch:8.3.1
+    image: docker.elastic.co/elasticsearch/elasticsearch:8.19.7
     environment:
       - network.host=127.0.0.1
       - http.host=0.0.0.0
@@ -14,7 +12,7 @@ services:
       - xpack.security.enabled=false
       - xpack.license.self_generated.type=basic
     ports:
-      - 127.0.0.1:9200:9200
+      - "127.0.0.1:9200:9200"
     ulimits:
       memlock:
         soft: -1
@@ -30,7 +28,7 @@ services:
         retries: 24

   opensearch:
-    image: opensearchproject/opensearch:2.18.0
+    image: opensearchproject/opensearch:2
     environment:
       - network.host=127.0.0.1
       - http.host=0.0.0.0
@@ -41,7 +39,7 @@ services:
       - bootstrap.memory_lock=true
       - OPENSEARCH_INITIAL_ADMIN_PASSWORD=${OPENSEARCH_INITIAL_ADMIN_PASSWORD}
     ports:
-      - 127.0.0.1:9201:9200
+      - "127.0.0.1:9201:9200"
     ulimits:
       memlock:
         soft: -1
@@ -28,6 +28,13 @@
    :members:
 ```

+## parsedmarc.types
+
+```{eval-rst}
+.. automodule:: parsedmarc.types
+   :members:
+```
+
 ## parsedmarc.utils

 ```{eval-rst}
@@ -20,7 +20,7 @@ from parsedmarc import __version__
 # -- Project information -----------------------------------------------------

 project = "parsedmarc"
-copyright = "2018 - 2023, Sean Whalen and contributors"
+copyright = "2018 - 2025, Sean Whalen and contributors"
 author = "Sean Whalen and contributors"

 # The version info for the project you're documenting, acts as replacement for
@@ -45,6 +45,24 @@ and Valimail.
   with premade dashboards
 - Optionally send reports to Apache Kafka

+## Python Compatibility
+
+This project supports the following Python versions, which are either actively maintained or are the default versions
+for RHEL or Debian.
+
+| Version | Supported | Reason |
+|---------|-----------|------------------------------------------------------------|
+| < 3.6   | ❌        | End of Life (EOL) |
+| 3.6     | ❌        | Used in RHEL 8, but not supported by project dependencies |
+| 3.7     | ❌        | End of Life (EOL) |
+| 3.8     | ❌        | End of Life (EOL) |
+| 3.9     | ❌        | Used in Debian 11 and RHEL 9, but not supported by project dependencies |
+| 3.10    | ✅        | Actively maintained |
+| 3.11    | ✅        | Actively maintained; supported until June 2028 (Debian 12) |
+| 3.12    | ✅        | Actively maintained; supported until May 2035 (RHEL 10) |
+| 3.13    | ✅        | Actively maintained; supported until June 2030 (Debian 13) |
+| 3.14    | ✅        | Actively maintained |
+
 ```{toctree}
 :caption: 'Contents'
 :maxdepth: 2
@@ -162,10 +162,10 @@ sudo -u parsedmarc virtualenv /opt/parsedmarc/venv
 ```

 CentOS/RHEL 8 systems use Python 3.6 by default, so on those systems
-explicitly tell `virtualenv` to use `python3.9` instead
+explicitly tell `virtualenv` to use `python3.10` instead

 ```bash
-sudo -u parsedmarc virtualenv -p python3.9 /opt/parsedmarc/venv
+sudo -u parsedmarc virtualenv -p python3.10 /opt/parsedmarc/venv
 ```

 Activate the virtualenv
@@ -199,7 +199,7 @@ sudo apt-get install libemail-outlook-message-perl
 [geoipupdate releases page on github]: https://github.com/maxmind/geoipupdate/releases
 [ip to country lite database]: https://db-ip.com/db/download/ip-to-country-lite
 [license keys]: https://www.maxmind.com/en/accounts/current/license-key
-[maxmind geoipupdate page]: https://dev.maxmind.com/geoip/geoipupdate/
+[maxmind geoipupdate page]: https://dev.maxmind.com/geoip/updating-databases/
 [maxmind geolite2 country database]: https://dev.maxmind.com/geoip/geolite2-free-geolocation-data
 [registering for a free geolite2 account]: https://www.maxmind.com/en/geolite2/signup
 [to comply with various privacy regulations]: https://blog.maxmind.com/2019/12/18/significant-changes-to-accessing-and-using-geolite2-databases/
@@ -23,6 +23,8 @@ of the report schema.
       "report_id": "9391651994964116463",
       "begin_date": "2012-04-27 20:00:00",
       "end_date": "2012-04-28 19:59:59",
+      "timespan_requires_normalization": false,
+      "original_timespan_seconds": 86399,
       "errors": []
     },
     "policy_published": {
@@ -39,8 +41,10 @@ of the report schema.
         "source": {
           "ip_address": "72.150.241.94",
           "country": "US",
-          "reverse_dns": "adsl-72-150-241-94.shv.bellsouth.net",
-          "base_domain": "bellsouth.net"
+          "reverse_dns": null,
+          "base_domain": null,
+          "name": null,
+          "type": null
         },
         "count": 2,
         "alignment": {
@@ -74,7 +78,10 @@ of the report schema.
             "result": "pass"
           }
         ]
-      }
+      },
+      "normalized_timespan": false,
+      "interval_begin": "2012-04-28 00:00:00",
+      "interval_end": "2012-04-28 23:59:59"
     }
   ]
 }
@@ -83,8 +90,10 @@ of the report schema.
 ### CSV aggregate report

 ```text
-xml_schema,org_name,org_email,org_extra_contact_info,report_id,begin_date,end_date,errors,domain,adkim,aspf,p,sp,pct,fo,source_ip_address,source_country,source_reverse_dns,source_base_domain,count,spf_aligned,dkim_aligned,dmarc_aligned,disposition,policy_override_reasons,policy_override_comments,envelope_from,header_from,envelope_to,dkim_domains,dkim_selectors,dkim_results,spf_domains,spf_scopes,spf_results
-draft,acme.com,noreply-dmarc-support@acme.com,http://acme.com/dmarc/support,9391651994964116463,2012-04-27 20:00:00,2012-04-28 19:59:59,,example.com,r,r,none,none,100,0,72.150.241.94,US,adsl-72-150-241-94.shv.bellsouth.net,bellsouth.net,2,True,False,True,none,,,example.com,example.com,,example.com,none,fail,example.com,mfrom,pass
+xml_schema,org_name,org_email,org_extra_contact_info,report_id,begin_date,end_date,normalized_timespan,errors,domain,adkim,aspf,p,sp,pct,fo,source_ip_address,source_country,source_reverse_dns,source_base_domain,source_name,source_type,count,spf_aligned,dkim_aligned,dmarc_aligned,disposition,policy_override_reasons,policy_override_comments,envelope_from,header_from,envelope_to,dkim_domains,dkim_selectors,dkim_results,spf_domains,spf_scopes,spf_results
+draft,acme.com,noreply-dmarc-support@acme.com,http://acme.com/dmarc/support,9391651994964116463,2012-04-28 00:00:00,2012-04-28 23:59:59,False,,example.com,r,r,none,none,100,0,72.150.241.94,US,,,,,2,True,False,True,none,,,example.com,example.com,,example.com,none,fail,example.com,mfrom,pass
+draft,acme.com,noreply-dmarc-support@acme.com,http://acme.com/dmarc/support,9391651994964116463,2012-04-28 00:00:00,2012-04-28 23:59:59,False,,example.com,r,r,none,none,100,0,72.150.241.94,US,,,,,2,True,False,True,none,,,example.com,example.com,,example.com,none,fail,example.com,mfrom,pass
 ```

 ## Sample forensic report output
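For intuition about the new `timespan_requires_normalization`, `normalized_timespan`, and `interval_begin`/`interval_end` fields above, here is a hypothetical sketch of the normalization idea: a report window longer than a threshold is split into calendar-day intervals, while the sample above (86,399 seconds, just under 24 hours) stays a single interval. `normalize_timespan` is an illustrative helper, not parsedmarc's actual implementation.

```python
from datetime import datetime, timedelta

# Hypothetical helper: split a report window into per-day intervals when
# it is longer than the threshold. Illustration only; parsedmarc's real
# logic may differ (e.g., in how it snaps intervals to day boundaries).
def normalize_timespan(begin, end, threshold_hours=24.0):
    duration_seconds = (end - begin).total_seconds()
    requires = duration_seconds > threshold_hours * 3600
    if not requires:
        return [(begin, end)], requires
    intervals = []
    day = begin.date()
    while day <= end.date():
        day_start = datetime.combine(day, datetime.min.time())
        day_end = day_start + timedelta(days=1) - timedelta(seconds=1)
        intervals.append((max(begin, day_start), min(end, day_end)))
        day += timedelta(days=1)
    return intervals, requires

begin = datetime(2012, 4, 27, 20, 0, 0)
end = datetime(2012, 4, 28, 19, 59, 59)
print(normalize_timespan(begin, end))  # 86,399 s -> one interval, requires=False
```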
@@ -4,47 +4,50 @@

 ```text
 usage: parsedmarc [-h] [-c CONFIG_FILE] [--strip-attachment-payloads] [-o OUTPUT]
-                  [--aggregate-json-filename AGGREGATE_JSON_FILENAME]
-                  [--forensic-json-filename FORENSIC_JSON_FILENAME]
-                  [--aggregate-csv-filename AGGREGATE_CSV_FILENAME]
-                  [--forensic-csv-filename FORENSIC_CSV_FILENAME]
-                  [-n NAMESERVERS [NAMESERVERS ...]] [-t DNS_TIMEOUT] [--offline]
-                  [-s] [--verbose] [--debug] [--log-file LOG_FILE] [-v]
+                  [--aggregate-json-filename AGGREGATE_JSON_FILENAME] [--forensic-json-filename FORENSIC_JSON_FILENAME]
+                  [--smtp-tls-json-filename SMTP_TLS_JSON_FILENAME] [--aggregate-csv-filename AGGREGATE_CSV_FILENAME]
+                  [--forensic-csv-filename FORENSIC_CSV_FILENAME] [--smtp-tls-csv-filename SMTP_TLS_CSV_FILENAME]
+                  [-n NAMESERVERS [NAMESERVERS ...]] [-t DNS_TIMEOUT] [--offline] [-s] [-w] [--verbose] [--debug]
+                  [--log-file LOG_FILE] [--no-prettify-json] [-v]
                   [file_path ...]

 Parses DMARC reports

 positional arguments:
-  file_path             one or more paths to aggregate or forensic report
-                        files, emails, or mbox files'
+  file_path             one or more paths to aggregate or forensic report files, emails, or mbox files'

-optional arguments:
+options:
   -h, --help            show this help message and exit
   -c CONFIG_FILE, --config-file CONFIG_FILE
                         a path to a configuration file (--silent implied)
   --strip-attachment-payloads
                         remove attachment payloads from forensic report output
   -o OUTPUT, --output OUTPUT
                         write output files to the given directory
   --aggregate-json-filename AGGREGATE_JSON_FILENAME
                         filename for the aggregate JSON output file
   --forensic-json-filename FORENSIC_JSON_FILENAME
                         filename for the forensic JSON output file
+  --smtp-tls-json-filename SMTP_TLS_JSON_FILENAME
+                        filename for the SMTP TLS JSON output file
   --aggregate-csv-filename AGGREGATE_CSV_FILENAME
                         filename for the aggregate CSV output file
   --forensic-csv-filename FORENSIC_CSV_FILENAME
                         filename for the forensic CSV output file
+  --smtp-tls-csv-filename SMTP_TLS_CSV_FILENAME
+                        filename for the SMTP TLS CSV output file
   -n NAMESERVERS [NAMESERVERS ...], --nameservers NAMESERVERS [NAMESERVERS ...]
                         nameservers to query
   -t DNS_TIMEOUT, --dns_timeout DNS_TIMEOUT
-                        number of seconds to wait for an answer from DNS
-                        (default: 2.0)
+                        number of seconds to wait for an answer from DNS (default: 2.0)
   --offline             do not make online queries for geolocation or DNS
-  -s, --silent          only print errors and warnings
+  -s, --silent          only print errors
+  -w, --warnings        print warnings in addition to errors
   --verbose             more verbose output
   --debug               print debugging information
   --log-file LOG_FILE   output logging to a file
+  --no-prettify-json    output JSON in a single line without indentation
   -v, --version         show program's version number and exit
 ```

 :::{note}
@@ -169,7 +172,7 @@ The full set of configuration options are:
     IDLE response or the number of seconds until the next
     mail check (Default: `30`)
   - `since` - str: Search for messages since certain time. (Examples: `5m|3h|2d|1w`)
-    Acceptable units - {"m":"minutes", "h":"hours", "d":"days", "w":"weeks"}).
+    Acceptable units - {"m":"minutes", "h":"hours", "d":"days", "w":"weeks"}.
     Defaults to `1d` if incorrect value is provided.
 - `imap`
   - `host` - str: The IMAP server hostname or IP address
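For illustration, a hypothetical parser for the documented `since` shorthand; `parse_since` is not a parsedmarc function and only mirrors the documented units and the `1d` fallback for invalid input:

```python
import re
from datetime import timedelta

# Map the documented unit letters to timedelta keyword arguments.
UNITS = {"m": "minutes", "h": "hours", "d": "days", "w": "weeks"}

def parse_since(value):
    # Accept strings like "5m", "3h", "2d", "1w".
    match = re.fullmatch(r"(\d+)([mhdw])", value.strip())
    if not match:
        return timedelta(days=1)  # documented default for bad values
    amount, unit = int(match.group(1)), match.group(2)
    return timedelta(**{UNITS[unit]: amount})

print(parse_since("3h"))    # 3:00:00
print(parse_since("oops"))  # 1 day, 0:00:00
```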
@@ -254,7 +257,7 @@ The full set of configuration options are:
   :::
   - `user` - str: Basic auth username
   - `password` - str: Basic auth password
-  - `apiKey` - str: API key
+  - `api_key` - str: API key
   - `ssl` - bool: Use an encrypted SSL/TLS connection
     (Default: `True`)
   - `timeout` - float: Timeout in seconds (Default: 60)
@@ -277,7 +280,7 @@ The full set of configuration options are:
   :::
   - `user` - str: Basic auth username
   - `password` - str: Basic auth password
-  - `apiKey` - str: API key
+  - `api_key` - str: API key
   - `ssl` - bool: Use an encrypted SSL/TLS connection
     (Default: `True`)
   - `timeout` - float: Timeout in seconds (Default: 60)
@@ -333,7 +336,59 @@ The full set of configuration options are:
   - `secret_access_key` - str: The secret access key (Optional)
 - `syslog`
   - `server` - str: The Syslog server name or IP address
-  - `port` - int: The UDP port to use (Default: `514`)
+  - `port` - int: The port to use (Default: `514`)
+  - `protocol` - str: The protocol to use: `udp`, `tcp`, or `tls` (Default: `udp`)
+  - `cafile_path` - str: Path to CA certificate file for TLS server verification (Optional)
+  - `certfile_path` - str: Path to client certificate file for TLS authentication (Optional)
+  - `keyfile_path` - str: Path to client private key file for TLS authentication (Optional)
+  - `timeout` - float: Connection timeout in seconds for TCP/TLS (Default: `5.0`)
+  - `retry_attempts` - int: Number of retry attempts for failed connections (Default: `3`)
+  - `retry_delay` - int: Delay in seconds between retry attempts (Default: `5`)
+
+  **Example UDP configuration (default):**
+
+  ```ini
+  [syslog]
+  server = syslog.example.com
+  port = 514
+  ```
+
+  **Example TCP configuration:**
+
+  ```ini
+  [syslog]
+  server = syslog.example.com
+  port = 6514
+  protocol = tcp
+  timeout = 10.0
+  retry_attempts = 5
+  ```
+
+  **Example TLS configuration with server verification:**
+
+  ```ini
+  [syslog]
+  server = syslog.example.com
+  port = 6514
+  protocol = tls
+  cafile_path = /path/to/ca-cert.pem
+  timeout = 10.0
+  ```
+
+  **Example TLS configuration with mutual authentication:**
+
+  ```ini
+  [syslog]
+  server = syslog.example.com
+  port = 6514
+  protocol = tls
+  cafile_path = /path/to/ca-cert.pem
+  certfile_path = /path/to/client-cert.pem
+  keyfile_path = /path/to/client-key.pem
+  timeout = 10.0
+  retry_attempts = 3
+  retry_delay = 5
+  ```
 - `gmail_api`
   - `credentials_file` - str: Path to file containing the
     credentials, None to disable (Default: `None`)
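The new TLS options documented above map naturally onto Python's `ssl` module. A hedged sketch of how such a connection is typically opened follows; `open_tls_syslog` is a hypothetical helper, not parsedmarc's actual sender:

```python
import socket
import ssl

# Sketch: open a TLS connection to a syslog server, optionally presenting
# a client certificate for mutual authentication. Illustration only.
def open_tls_syslog(server, port=6514, cafile_path=None,
                    certfile_path=None, keyfile_path=None, timeout=5.0):
    # Verify the server against the given CA bundle (or system defaults).
    context = ssl.create_default_context(cafile=cafile_path)
    if certfile_path:
        # Mutual TLS: present a client certificate and private key.
        context.load_cert_chain(certfile=certfile_path, keyfile=keyfile_path)
    raw = socket.create_connection((server, port), timeout=timeout)
    return context.wrap_socket(raw, server_hostname=server)

# sock = open_tls_syslog("syslog.example.com", cafile_path="/path/to/ca-cert.pem")
# sock.sendall(b"<14>parsedmarc: test message\n")
```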
(File diff suppressed because one or more lines are too long)
(File diff suppressed because it is too large)
@@ -3,54 +3,55 @@

 """A CLI for parsing DMARC reports"""

-from argparse import Namespace, ArgumentParser
+import http.client
+import json
+import logging
 import os
+import sys
+from argparse import ArgumentParser, Namespace
 from configparser import ConfigParser
 from glob import glob
-import logging
-import math
-import yaml
-from collections import OrderedDict
-import json
-from ssl import CERT_NONE, create_default_context
 from multiprocessing import Pipe, Process
-import sys
-import http.client
+from ssl import CERT_NONE, create_default_context

+import yaml
 from tqdm import tqdm

 from parsedmarc import (
-    get_dmarc_reports_from_mailbox,
-    watch_inbox,
-    parse_report_file,
-    get_dmarc_reports_from_mbox,
-    elastic,
-    opensearch,
-    kafkaclient,
-    splunk,
-    save_output,
-    email_results,
+    SEEN_AGGREGATE_REPORT_IDS,
+    InvalidDMARCReport,
     ParserError,
     __version__,
-    InvalidDMARCReport,
-    s3,
-    syslog,
-    loganalytics,
+    elastic,
+    email_results,
     gelf,
+    get_dmarc_reports_from_mailbox,
+    get_dmarc_reports_from_mbox,
+    kafkaclient,
+    loganalytics,
+    opensearch,
+    parse_report_file,
+    s3,
+    save_output,
+    splunk,
+    syslog,
+    watch_inbox,
     webhook,
 )
+from parsedmarc.log import logger
 from parsedmarc.mail import (
-    IMAPConnection,
-    MSGraphConnection,
     GmailConnection,
+    IMAPConnection,
     MaildirConnection,
+    MSGraphConnection,
 )
 from parsedmarc.mail.graph import AuthMethod
-
-from parsedmarc.log import logger
-from parsedmarc.utils import is_mbox, get_reverse_dns, get_base_domain
-from parsedmarc import SEEN_AGGREGATE_REPORT_IDS
-
-http.client._MAXHEADERS = 200  # pylint:disable=protected-access
+from parsedmarc.types import ParsingResults
+from parsedmarc.utils import get_base_domain, get_reverse_dns, is_mbox
+
+# Increase the max header limit for very large emails. `_MAXHEADERS` is a
+# private stdlib attribute and may not exist in type stubs.
+setattr(http.client, "_MAXHEADERS", 200)

 formatter = logging.Formatter(
     fmt="%(levelname)8s:%(filename)s:%(lineno)d:%(message)s",
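The hunk above also swaps the direct private-attribute assignment for `setattr()`, which is behaviorally identical but avoids a static-typing error, since `_MAXHEADERS` is not in `http.client`'s public stubs. A minimal demonstration of the equivalence (the 200-header limit and its rationale come from the diff's own comment about very large emails):

```python
import http.client

# Equivalent to `http.client._MAXHEADERS = 200`; header parsers built on
# http.client reject messages that exceed this limit, so raising it lets
# unusually header-heavy emails through.
setattr(http.client, "_MAXHEADERS", 200)
print(getattr(http.client, "_MAXHEADERS"))  # -> 200
```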
@@ -67,6 +68,48 @@ def _str_to_list(s):
     return list(map(lambda i: i.lstrip(), _list))


+def _configure_logging(log_level, log_file=None):
+    """
+    Configure logging for the current process.
+
+    This is needed for child processes to properly log messages.
+
+    Args:
+        log_level: The logging level (e.g., logging.DEBUG, logging.WARNING)
+        log_file: Optional path to log file
+    """
+    # Get the logger
+    from parsedmarc.log import logger
+
+    # Set the log level
+    logger.setLevel(log_level)
+
+    # Add StreamHandler with formatter if not already present
+    # Check if we already have a StreamHandler to avoid duplicates
+    # Use exact type check to distinguish from FileHandler subclass
+    has_stream_handler = any(type(h) is logging.StreamHandler for h in logger.handlers)
+
+    if not has_stream_handler:
+        formatter = logging.Formatter(
+            fmt="%(levelname)8s:%(filename)s:%(lineno)d:%(message)s",
+            datefmt="%Y-%m-%d:%H:%M:%S",
+        )
+        handler = logging.StreamHandler()
+        handler.setFormatter(formatter)
+        logger.addHandler(handler)
+
+    # Add FileHandler if log_file is specified
+    if log_file:
+        try:
+            fh = logging.FileHandler(log_file, "a")
+            formatter = logging.Formatter(
+                "%(asctime)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(message)s"
+            )
+            fh.setFormatter(formatter)
+            logger.addHandler(fh)
+        except (IOError, OSError, PermissionError) as error:
+            logger.warning("Unable to write to log file: {}".format(error))
+
+
 def cli_parse(
     file_path,
     sa,
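A note on why `_configure_logging` exists: worker processes started with multiprocessing's `spawn` method begin in a fresh interpreter and inherit none of the parent's logging handlers, so each child must set up its own. A self-contained sketch of the pattern (not parsedmarc code):

```python
import logging
from multiprocessing import Process

# With the "spawn" start method, a child process has no inherited
# handlers; anything it logs is lost unless it reconfigures logging
# itself, as _configure_logging does in the diff above.
def worker():
    logger = logging.getLogger("demo")
    logger.setLevel(logging.INFO)
    if not logger.handlers:  # avoid duplicate handlers, like the diff does
        logger.addHandler(logging.StreamHandler())
    logger.info("hello from the child process")

if __name__ == "__main__":
    p = Process(target=worker)
    p.start()
    p.join()
```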
@@ -77,9 +120,31 @@ def cli_parse(
     always_use_local_files,
     reverse_dns_map_path,
     reverse_dns_map_url,
+    normalize_timespan_threshold_hours,
     conn,
+    log_level=logging.ERROR,
+    log_file=None,
 ):
-    """Separated this function for multiprocessing"""
+    """Separated this function for multiprocessing
+
+    Args:
+        file_path: Path to the report file
+        sa: Strip attachment payloads flag
+        nameservers: List of nameservers
+        dns_timeout: DNS timeout
+        ip_db_path: Path to IP database
+        offline: Offline mode flag
+        always_use_local_files: Always use local files flag
+        reverse_dns_map_path: Path to reverse DNS map
+        reverse_dns_map_url: URL to reverse DNS map
+        normalize_timespan_threshold_hours: Timespan threshold
+        conn: Pipe connection for IPC
+        log_level: Logging level for this process
+        log_file: Optional path to log file
+    """
+    # Configure logging in this child process
+    _configure_logging(log_level, log_file)
+
     try:
         file_results = parse_report_file(
             file_path,
@@ -91,6 +156,7 @@ def cli_parse(
             nameservers=nameservers,
             dns_timeout=dns_timeout,
             strip_attachment_payloads=sa,
+            normalize_timespan_threshold_hours=normalize_timespan_threshold_hours,
         )
         conn.send([file_results, file_path])
     except ParserError as error:
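The hunk above threads a `Pipe` connection through `cli_parse` so it can run in a separate `Process` and ship its results back to the parent. A minimal, self-contained sketch of that pattern, with a stand-in for `parse_report_file`:

```python
from multiprocessing import Pipe, Process

# The child does the work and sends the result over one end of a pipe;
# the parent receives it on the other end. Simplified, no error handling.
def parse_one(path, conn):
    result = {"file": path, "ok": True}  # stand-in for parse_report_file()
    conn.send(result)
    conn.close()

if __name__ == "__main__":
    parent_conn, child_conn = Pipe()
    p = Process(target=parse_one, args=("report.xml", child_conn))
    p.start()
    print(parent_conn.recv())  # {'file': 'report.xml', 'ok': True}
    p.join()
```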
@@ -103,6 +169,7 @@ def _main():
     """Called when the module is executed"""

     def get_index_prefix(report):
+        domain = None
         if index_prefix_domain_map is None:
             return None
         if "policy_published" in report:
@@ -136,7 +203,7 @@ def _main():
             print(output_str)
         if opts.output:
             save_output(
-                results,
+                reports_,
                 output_directory=opts.output,
                 aggregate_json_filename=opts.aggregate_json_filename,
                 forensic_json_filename=opts.forensic_json_filename,
@@ -591,7 +658,7 @@ def _main():
         elasticsearch_monthly_indexes=False,
         elasticsearch_username=None,
         elasticsearch_password=None,
-        elasticsearch_apiKey=None,
+        elasticsearch_api_key=None,
         opensearch_hosts=None,
         opensearch_timeout=60,
         opensearch_number_of_shards=1,
@@ -603,7 +670,7 @@ def _main():
         opensearch_monthly_indexes=False,
         opensearch_username=None,
         opensearch_password=None,
-        opensearch_apiKey=None,
+        opensearch_api_key=None,
         kafka_hosts=None,
         kafka_username=None,
         kafka_password=None,
@@ -630,6 +697,13 @@ def _main():
         s3_secret_access_key=None,
         syslog_server=None,
         syslog_port=None,
+        syslog_protocol=None,
+        syslog_cafile_path=None,
+        syslog_certfile_path=None,
+        syslog_keyfile_path=None,
+        syslog_timeout=None,
+        syslog_retry_attempts=None,
+        syslog_retry_delay=None,
         gmail_api_credentials_file=None,
         gmail_api_token_file=None,
         gmail_api_include_spam_trash=False,
@@ -659,6 +733,7 @@ def _main():
         webhook_forensic_url=None,
         webhook_smtp_tls_url=None,
         webhook_timeout=60,
+        normalize_timespan_threshold_hours=24.0,
     )
     args = arg_parser.parse_args()

@@ -674,16 +749,19 @@ def _main():
     if "general" in config.sections():
         general_config = config["general"]
         if "silent" in general_config:
-            if general_config["silent"].lower() == "false":
-                opts.silent = False
+            opts.silent = bool(general_config.getboolean("silent"))
+        if "normalize_timespan_threshold_hours" in general_config:
+            opts.normalize_timespan_threshold_hours = general_config.getfloat(
+                "normalize_timespan_threshold_hours"
+            )
         if "index_prefix_domain_map" in general_config:
             with open(general_config["index_prefix_domain_map"]) as f:
                 index_prefix_domain_map = yaml.safe_load(f)
         if "offline" in general_config:
-            opts.offline = general_config.getboolean("offline")
+            opts.offline = bool(general_config.getboolean("offline"))
         if "strip_attachment_payloads" in general_config:
-            opts.strip_attachment_payloads = general_config.getboolean(
-                "strip_attachment_payloads"
+            opts.strip_attachment_payloads = bool(
+                general_config.getboolean("strip_attachment_payloads")
             )
         if "output" in general_config:
             opts.output = general_config["output"]
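The `silent` fix above replaces a string comparison that could only ever set the option to `False`; `getboolean()` understands the usual INI truthy/falsy spellings, and the `bool()` wrapper satisfies type checkers, whose stubs allow `getboolean()` to return `None`. A quick demonstration:

```python
from configparser import ConfigParser

# getboolean() accepts 1/yes/true/on and 0/no/false/off, so it is safer
# than comparing the raw string value.
config = ConfigParser()
config.read_string("""
[general]
silent = yes
offline = 0
""")
general = config["general"]
print(bool(general.getboolean("silent")))    # True
print(bool(general.getboolean("offline")))   # False
print(general["silent"].lower() == "false")  # False -- the old comparison
```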
@@ -701,6 +779,8 @@ def _main():
             opts.smtp_tls_csv_filename = general_config["smtp_tls_csv_filename"]
         if "dns_timeout" in general_config:
             opts.dns_timeout = general_config.getfloat("dns_timeout")
+            if opts.dns_timeout is None:
+                opts.dns_timeout = 2
         if "dns_test_address" in general_config:
             opts.dns_test_address = general_config["dns_test_address"]
         if "nameservers" in general_config:
@@ -723,19 +803,19 @@ def _main():
             )
             exit(-1)
         if "save_aggregate" in general_config:
-            opts.save_aggregate = general_config["save_aggregate"]
+            opts.save_aggregate = bool(general_config.getboolean("save_aggregate"))
         if "save_forensic" in general_config:
-            opts.save_forensic = general_config["save_forensic"]
+            opts.save_forensic = bool(general_config.getboolean("save_forensic"))
         if "save_smtp_tls" in general_config:
-            opts.save_smtp_tls = general_config["save_smtp_tls"]
+            opts.save_smtp_tls = bool(general_config.getboolean("save_smtp_tls"))
         if "debug" in general_config:
-            opts.debug = general_config.getboolean("debug")
+            opts.debug = bool(general_config.getboolean("debug"))
         if "verbose" in general_config:
-            opts.verbose = general_config.getboolean("verbose")
+            opts.verbose = bool(general_config.getboolean("verbose"))
         if "silent" in general_config:
-            opts.silent = general_config.getboolean("silent")
+            opts.silent = bool(general_config.getboolean("silent"))
         if "warnings" in general_config:
-            opts.warnings = general_config.getboolean("warnings")
+            opts.warnings = bool(general_config.getboolean("warnings"))
         if "log_file" in general_config:
             opts.log_file = general_config["log_file"]
         if "n_procs" in general_config:
@@ -745,15 +825,15 @@ def _main():
         else:
             opts.ip_db_path = None
         if "always_use_local_files" in general_config:
-            opts.always_use_local_files = general_config.getboolean(
-                "always_use_local_files"
+            opts.always_use_local_files = bool(
+                general_config.getboolean("always_use_local_files")
             )
         if "reverse_dns_map_path" in general_config:
             opts.reverse_dns_map_path = general_config["reverse_dns_path"]
         if "reverse_dns_map_url" in general_config:
             opts.reverse_dns_map_url = general_config["reverse_dns_url"]
         if "prettify_json" in general_config:
-            opts.prettify_json = general_config.getboolean("prettify_json")
+            opts.prettify_json = bool(general_config.getboolean("prettify_json"))

     if "mailbox" in config.sections():
         mailbox_config = config["mailbox"]
@@ -764,11 +844,11 @@ def _main():
         if "archive_folder" in mailbox_config:
             opts.mailbox_archive_folder = mailbox_config["archive_folder"]
         if "watch" in mailbox_config:
-            opts.mailbox_watch = mailbox_config.getboolean("watch")
+            opts.mailbox_watch = bool(mailbox_config.getboolean("watch"))
         if "delete" in mailbox_config:
-            opts.mailbox_delete = mailbox_config.getboolean("delete")
+            opts.mailbox_delete = bool(mailbox_config.getboolean("delete"))
         if "test" in mailbox_config:
-            opts.mailbox_test = mailbox_config.getboolean("test")
+            opts.mailbox_test = bool(mailbox_config.getboolean("test"))
         if "batch_size" in mailbox_config:
             opts.mailbox_batch_size = mailbox_config.getint("batch_size")
         if "check_timeout" in mailbox_config:
@@ -792,14 +872,15 @@ def _main():
         if "port" in imap_config:
             opts.imap_port = imap_config.getint("port")
         if "timeout" in imap_config:
-            opts.imap_timeout = imap_config.getfloat("timeout")
+            opts.imap_timeout = imap_config.getint("timeout")
         if "max_retries" in imap_config:
             opts.imap_max_retries = imap_config.getint("max_retries")
         if "ssl" in imap_config:
-            opts.imap_ssl = imap_config.getboolean("ssl")
+            opts.imap_ssl = bool(imap_config.getboolean("ssl"))
         if "skip_certificate_verification" in imap_config:
-            imap_verify = imap_config.getboolean("skip_certificate_verification")
-            opts.imap_skip_certificate_verification = imap_verify
+            opts.imap_skip_certificate_verification = bool(
+                imap_config.getboolean("skip_certificate_verification")
+            )
         if "user" in imap_config:
             opts.imap_user = imap_config["user"]
         else:
@@ -827,7 +908,7 @@ def _main():
                 "section instead."
             )
         if "watch" in imap_config:
-            opts.mailbox_watch = imap_config.getboolean("watch")
+            opts.mailbox_watch = bool(imap_config.getboolean("watch"))
             logger.warning(
                 "Use of the watch option in the imap "
                 "configuration section has been deprecated. "
@@ -842,7 +923,7 @@ def _main():
                 "section instead."
             )
         if "test" in imap_config:
-            opts.mailbox_test = imap_config.getboolean("test")
+            opts.mailbox_test = bool(imap_config.getboolean("test"))
             logger.warning(
                 "Use of the test option in the imap "
                 "configuration section has been deprecated. "
@@ -936,8 +1017,8 @@ def _main():
             opts.graph_url = graph_config["graph_url"]

         if "allow_unencrypted_storage" in graph_config:
-            opts.graph_allow_unencrypted_storage = graph_config.getboolean(
-                "allow_unencrypted_storage"
+            opts.graph_allow_unencrypted_storage = bool(
+                graph_config.getboolean("allow_unencrypted_storage")
             )

     if "elasticsearch" in config:
@@ -965,18 +1046,22 @@ def _main():
|
|||||||
if "index_prefix" in elasticsearch_config:
|
if "index_prefix" in elasticsearch_config:
|
||||||
opts.elasticsearch_index_prefix = elasticsearch_config["index_prefix"]
|
opts.elasticsearch_index_prefix = elasticsearch_config["index_prefix"]
|
||||||
if "monthly_indexes" in elasticsearch_config:
|
if "monthly_indexes" in elasticsearch_config:
|
||||||
monthly = elasticsearch_config.getboolean("monthly_indexes")
|
monthly = bool(elasticsearch_config.getboolean("monthly_indexes"))
|
||||||
opts.elasticsearch_monthly_indexes = monthly
|
opts.elasticsearch_monthly_indexes = monthly
|
||||||
if "ssl" in elasticsearch_config:
|
if "ssl" in elasticsearch_config:
|
||||||
opts.elasticsearch_ssl = elasticsearch_config.getboolean("ssl")
|
opts.elasticsearch_ssl = bool(elasticsearch_config.getboolean("ssl"))
|
||||||
if "cert_path" in elasticsearch_config:
|
if "cert_path" in elasticsearch_config:
|
||||||
opts.elasticsearch_ssl_cert_path = elasticsearch_config["cert_path"]
|
opts.elasticsearch_ssl_cert_path = elasticsearch_config["cert_path"]
|
||||||
if "user" in elasticsearch_config:
|
if "user" in elasticsearch_config:
|
||||||
opts.elasticsearch_username = elasticsearch_config["user"]
|
opts.elasticsearch_username = elasticsearch_config["user"]
|
||||||
if "password" in elasticsearch_config:
|
if "password" in elasticsearch_config:
|
||||||
opts.elasticsearch_password = elasticsearch_config["password"]
|
opts.elasticsearch_password = elasticsearch_config["password"]
|
||||||
|
# Until 8.20
|
||||||
if "apiKey" in elasticsearch_config:
|
if "apiKey" in elasticsearch_config:
|
||||||
opts.elasticsearch_apiKey = elasticsearch_config["apiKey"]
|
opts.elasticsearch_apiKey = elasticsearch_config["apiKey"]
|
||||||
|
# Since 8.20
|
||||||
|
if "api_key" in elasticsearch_config:
|
||||||
|
opts.elasticsearch_apiKey = elasticsearch_config["api_key"]
|
||||||
|
|
||||||
if "opensearch" in config:
|
if "opensearch" in config:
|
||||||
opensearch_config = config["opensearch"]
|
opensearch_config = config["opensearch"]
|
||||||
@@ -1001,18 +1086,22 @@ def _main():
        if "index_prefix" in opensearch_config:
            opts.opensearch_index_prefix = opensearch_config["index_prefix"]
        if "monthly_indexes" in opensearch_config:
-            monthly = opensearch_config.getboolean("monthly_indexes")
+            monthly = bool(opensearch_config.getboolean("monthly_indexes"))
            opts.opensearch_monthly_indexes = monthly
        if "ssl" in opensearch_config:
-            opts.opensearch_ssl = opensearch_config.getboolean("ssl")
+            opts.opensearch_ssl = bool(opensearch_config.getboolean("ssl"))
        if "cert_path" in opensearch_config:
            opts.opensearch_ssl_cert_path = opensearch_config["cert_path"]
        if "user" in opensearch_config:
            opts.opensearch_username = opensearch_config["user"]
        if "password" in opensearch_config:
            opts.opensearch_password = opensearch_config["password"]
+        # Until 8.20
        if "apiKey" in opensearch_config:
            opts.opensearch_apiKey = opensearch_config["apiKey"]
+        # Since 8.20
+        if "api_key" in opensearch_config:
+            opts.opensearch_apiKey = opensearch_config["api_key"]

    if "splunk_hec" in config.sections():
        hec_config = config["splunk_hec"]
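Both spellings of the API key option are read, and because api_key is checked second it wins when both are present. A small demonstration of that precedence, with placeholder values:

    from configparser import ConfigParser

    config = ConfigParser()
    config.read_string(
        "[elasticsearch]\n"
        "apiKey = old-spelling\n"   # accepted until 8.20
        "api_key = new-spelling\n"  # preferred since 8.20
    )
    es = config["elasticsearch"]

    api_key = None
    if "apiKey" in es:
        api_key = es["apiKey"]
    if "api_key" in es:  # read after apiKey, so it takes precedence
        api_key = es["api_key"]
    print(api_key)  # new-spelling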
@@ -1054,9 +1143,11 @@ def _main():
        if "password" in kafka_config:
            opts.kafka_password = kafka_config["password"]
        if "ssl" in kafka_config:
-            opts.kafka_ssl = kafka_config.getboolean("ssl")
+            opts.kafka_ssl = bool(kafka_config.getboolean("ssl"))
        if "skip_certificate_verification" in kafka_config:
-            kafka_verify = kafka_config.getboolean("skip_certificate_verification")
+            kafka_verify = bool(
+                kafka_config.getboolean("skip_certificate_verification")
+            )
            opts.kafka_skip_certificate_verification = kafka_verify
        if "aggregate_topic" in kafka_config:
            opts.kafka_aggregate_topic = kafka_config["aggregate_topic"]
@@ -1088,9 +1179,11 @@ def _main():
        if "port" in smtp_config:
            opts.smtp_port = smtp_config.getint("port")
        if "ssl" in smtp_config:
-            opts.smtp_ssl = smtp_config.getboolean("ssl")
+            opts.smtp_ssl = bool(smtp_config.getboolean("ssl"))
        if "skip_certificate_verification" in smtp_config:
-            smtp_verify = smtp_config.getboolean("skip_certificate_verification")
+            smtp_verify = bool(
+                smtp_config.getboolean("skip_certificate_verification")
+            )
            opts.smtp_skip_certificate_verification = smtp_verify
        if "user" in smtp_config:
            opts.smtp_user = smtp_config["user"]
@@ -1153,28 +1246,54 @@ def _main():
            opts.syslog_port = syslog_config["port"]
        else:
            opts.syslog_port = 514
+        if "protocol" in syslog_config:
+            opts.syslog_protocol = syslog_config["protocol"]
+        else:
+            opts.syslog_protocol = "udp"
+        if "cafile_path" in syslog_config:
+            opts.syslog_cafile_path = syslog_config["cafile_path"]
+        if "certfile_path" in syslog_config:
+            opts.syslog_certfile_path = syslog_config["certfile_path"]
+        if "keyfile_path" in syslog_config:
+            opts.syslog_keyfile_path = syslog_config["keyfile_path"]
+        if "timeout" in syslog_config:
+            opts.syslog_timeout = float(syslog_config["timeout"])
+        else:
+            opts.syslog_timeout = 5.0
+        if "retry_attempts" in syslog_config:
+            opts.syslog_retry_attempts = int(syslog_config["retry_attempts"])
+        else:
+            opts.syslog_retry_attempts = 3
+        if "retry_delay" in syslog_config:
+            opts.syslog_retry_delay = int(syslog_config["retry_delay"])
+        else:
+            opts.syslog_retry_delay = 5

    if "gmail_api" in config.sections():
        gmail_api_config = config["gmail_api"]
        opts.gmail_api_credentials_file = gmail_api_config.get("credentials_file")
        opts.gmail_api_token_file = gmail_api_config.get("token_file", ".token")
-        opts.gmail_api_include_spam_trash = gmail_api_config.getboolean(
-            "include_spam_trash", False
+        opts.gmail_api_include_spam_trash = bool(
+            gmail_api_config.getboolean("include_spam_trash", False)
        )
-        opts.gmail_api_paginate_messages = gmail_api_config.getboolean(
-            "paginate_messages", True
+        opts.gmail_api_paginate_messages = bool(
+            gmail_api_config.getboolean("paginate_messages", True)
        )
        opts.gmail_api_scopes = gmail_api_config.get(
            "scopes", default_gmail_api_scope
        )
        opts.gmail_api_scopes = _str_to_list(opts.gmail_api_scopes)
        if "oauth2_port" in gmail_api_config:
-            opts.gmail_api_oauth2_port = gmail_api_config.get("oauth2_port", 8080)
+            opts.gmail_api_oauth2_port = gmail_api_config.getint(
+                "oauth2_port", 8080
+            )

    if "maildir" in config.sections():
        maildir_api_config = config["maildir"]
        opts.maildir_path = maildir_api_config.get("maildir_path")
-        opts.maildir_create = maildir_api_config.get("maildir_create")
+        opts.maildir_create = bool(
+            maildir_api_config.getboolean("maildir_create", fallback=False)
+        )

    if "log_analytics" in config.sections():
        log_analytics_config = config["log_analytics"]
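Taken together, the new [syslog] options cover TLS transport and retry tuning. A hedged sketch of a configuration exercising them; hostnames and paths are placeholders, and the "tls" protocol value is an assumption based on the certificate options (the code above only guarantees "udp" as the default):

    from configparser import ConfigParser

    config = ConfigParser()
    config.read_string(
        "[syslog]\n"
        "server = syslog.example.com\n"
        "port = 6514\n"
        "protocol = tls\n"
        "cafile_path = /etc/ssl/certs/ca.pem\n"
        "certfile_path = /etc/ssl/certs/client.pem\n"
        "keyfile_path = /etc/ssl/private/client.key\n"
        "timeout = 5.0\n"
        "retry_attempts = 3\n"
        "retry_delay = 5\n"
    )
    syslog_config = config["syslog"]

    # The same fallbacks the CLI applies when an option is absent.
    protocol = syslog_config.get("protocol", "udp")
    timeout = float(syslog_config.get("timeout", "5.0"))
    retry_attempts = int(syslog_config.get("retry_attempts", "3"))
    retry_delay = int(syslog_config.get("retry_delay", "5"))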
@@ -1269,14 +1388,19 @@ def _main():
            es_aggregate_index = "{0}{1}".format(prefix, es_aggregate_index)
            es_forensic_index = "{0}{1}".format(prefix, es_forensic_index)
            es_smtp_tls_index = "{0}{1}".format(prefix, es_smtp_tls_index)
+        elastic_timeout_value = (
+            float(opts.elasticsearch_timeout)
+            if opts.elasticsearch_timeout is not None
+            else 60.0
+        )
        elastic.set_hosts(
            opts.elasticsearch_hosts,
-            opts.elasticsearch_ssl,
-            opts.elasticsearch_ssl_cert_path,
-            opts.elasticsearch_username,
-            opts.elasticsearch_password,
-            opts.elasticsearch_apiKey,
-            timeout=opts.elasticsearch_timeout,
+            use_ssl=opts.elasticsearch_ssl,
+            ssl_cert_path=opts.elasticsearch_ssl_cert_path,
+            username=opts.elasticsearch_username,
+            password=opts.elasticsearch_password,
+            api_key=opts.elasticsearch_api_key,
+            timeout=elastic_timeout_value,
        )
        elastic.migrate_indexes(
            aggregate_indexes=[es_aggregate_index],
@@ -1301,14 +1425,19 @@ def _main():
            os_aggregate_index = "{0}{1}".format(prefix, os_aggregate_index)
            os_forensic_index = "{0}{1}".format(prefix, os_forensic_index)
            os_smtp_tls_index = "{0}{1}".format(prefix, os_smtp_tls_index)
+        opensearch_timeout_value = (
+            float(opts.opensearch_timeout)
+            if opts.opensearch_timeout is not None
+            else 60.0
+        )
        opensearch.set_hosts(
            opts.opensearch_hosts,
-            opts.opensearch_ssl,
-            opts.opensearch_ssl_cert_path,
-            opts.opensearch_username,
-            opts.opensearch_password,
-            opts.opensearch_apiKey,
-            timeout=opts.opensearch_timeout,
+            use_ssl=opts.opensearch_ssl,
+            ssl_cert_path=opts.opensearch_ssl_cert_path,
+            username=opts.opensearch_username,
+            password=opts.opensearch_password,
+            api_key=opts.opensearch_api_key,
+            timeout=opensearch_timeout_value,
        )
        opensearch.migrate_indexes(
            aggregate_indexes=[os_aggregate_index],
@@ -1336,6 +1465,17 @@ def _main():
            syslog_client = syslog.SyslogClient(
                server_name=opts.syslog_server,
                server_port=int(opts.syslog_port),
+                protocol=opts.syslog_protocol or "udp",
+                cafile_path=opts.syslog_cafile_path,
+                certfile_path=opts.syslog_certfile_path,
+                keyfile_path=opts.syslog_keyfile_path,
+                timeout=opts.syslog_timeout if opts.syslog_timeout is not None else 5.0,
+                retry_attempts=opts.syslog_retry_attempts
+                if opts.syslog_retry_attempts is not None
+                else 3,
+                retry_delay=opts.syslog_retry_delay
+                if opts.syslog_retry_delay is not None
+                else 5,
            )
        except Exception as error_:
            logger.error("Syslog Error: {0}".format(error_.__str__()))
@@ -1417,16 +1557,23 @@ def _main():

        results = []

+        pbar = None
        if sys.stdout.isatty():
            pbar = tqdm(total=len(file_paths))

-        for batch_index in range(math.ceil(len(file_paths) / opts.n_procs)):
+        n_procs = int(opts.n_procs or 1)
+        if n_procs < 1:
+            n_procs = 1
+
+        # Capture the current log level to pass to child processes
+        current_log_level = logger.level
+        current_log_file = opts.log_file
+
+        for batch_index in range((len(file_paths) + n_procs - 1) // n_procs):
            processes = []
            connections = []

-            for proc_index in range(
-                opts.n_procs * batch_index, opts.n_procs * (batch_index + 1)
-            ):
+            for proc_index in range(n_procs * batch_index, n_procs * (batch_index + 1)):
                if proc_index >= len(file_paths):
                    break

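The rewritten loop header uses integer ceiling division, (total + n - 1) // n, which agrees with math.ceil(total / n) for non-negative totals and positive divisors while avoiding the float round-trip. A quick check:

    import math

    for total in range(100):
        for n in (1, 2, 3, 7, 16):
            assert (total + n - 1) // n == math.ceil(total / n)
    print("integer ceiling division matches math.ceil")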
@@ -1445,7 +1592,10 @@ def _main():
                        opts.always_use_local_files,
                        opts.reverse_dns_map_path,
                        opts.reverse_dns_map_url,
+                        opts.normalize_timespan_threshold_hours,
                        child_conn,
+                        current_log_level,
+                        current_log_file,
                    ),
                )
                processes.append(process)
@@ -1458,12 +1608,15 @@ def _main():

            for proc in processes:
                proc.join()
-                if sys.stdout.isatty():
+                if pbar is not None:
                    counter += 1
-                    pbar.update(counter - pbar.n)
+                    pbar.update(1)

+        if pbar is not None:
+            pbar.close()
+
        for result in results:
-            if type(result[0]) is ParserError:
+            if isinstance(result[0], ParserError) or result[0] is None:
                logger.error("Failed to parse {0} - {1}".format(result[1], result[0]))
            else:
                if result[0]["report_type"] == "aggregate":
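The tightened check matters in two ways: isinstance() also matches subclasses of ParserError, and the added None test catches workers that sent back nothing. A tiny illustration (the subclass here is hypothetical):

    class ParserError(ValueError):
        pass

    class InvalidAggregateReport(ParserError):
        pass

    result = (InvalidAggregateReport("bad xml"), "report.xml")
    print(type(result[0]) is ParserError)      # False - type() misses the subclass
    print(isinstance(result[0], ParserError))  # True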
@@ -1484,6 +1637,11 @@ def _main():
                    smtp_tls_reports.append(result[0]["report"])

    for mbox_path in mbox_paths:
+        normalize_timespan_threshold_hours_value = (
+            float(opts.normalize_timespan_threshold_hours)
+            if opts.normalize_timespan_threshold_hours is not None
+            else 24.0
+        )
        strip = opts.strip_attachment_payloads
        reports = get_dmarc_reports_from_mbox(
            mbox_path,
@@ -1495,12 +1653,17 @@ def _main():
            reverse_dns_map_path=opts.reverse_dns_map_path,
            reverse_dns_map_url=opts.reverse_dns_map_url,
            offline=opts.offline,
+            normalize_timespan_threshold_hours=normalize_timespan_threshold_hours_value,
        )
        aggregate_reports += reports["aggregate_reports"]
        forensic_reports += reports["forensic_reports"]
        smtp_tls_reports += reports["smtp_tls_reports"]

    mailbox_connection = None
+    mailbox_batch_size_value = 10
+    mailbox_check_timeout_value = 30
+    normalize_timespan_threshold_hours_value = 24.0
+
    if opts.imap_host:
        try:
            if opts.imap_user is None or opts.imap_password is None:
@@ -1513,16 +1676,23 @@ def _main():
            if opts.imap_skip_certificate_verification:
                logger.debug("Skipping IMAP certificate verification")
                verify = False
-            if opts.imap_ssl is False:
+            if not opts.imap_ssl:
                ssl = False

+            imap_timeout = (
+                int(opts.imap_timeout) if opts.imap_timeout is not None else 30
+            )
+            imap_max_retries = (
+                int(opts.imap_max_retries) if opts.imap_max_retries is not None else 4
+            )
+            imap_port_value = int(opts.imap_port) if opts.imap_port is not None else 993
            mailbox_connection = IMAPConnection(
                host=opts.imap_host,
-                port=opts.imap_port,
+                port=imap_port_value,
                ssl=ssl,
                verify=verify,
-                timeout=opts.imap_timeout,
-                max_retries=opts.imap_max_retries,
+                timeout=imap_timeout,
+                max_retries=imap_max_retries,
                user=opts.imap_user,
                password=opts.imap_password,
            )
@@ -1543,7 +1713,7 @@ def _main():
                username=opts.graph_user,
                password=opts.graph_password,
                token_file=opts.graph_token_file,
-                allow_unencrypted_storage=opts.graph_allow_unencrypted_storage,
+                allow_unencrypted_storage=bool(opts.graph_allow_unencrypted_storage),
                graph_url=opts.graph_url,
            )

@@ -1588,11 +1758,24 @@ def _main():
            exit(1)

    if mailbox_connection:
+        mailbox_batch_size_value = (
+            int(opts.mailbox_batch_size) if opts.mailbox_batch_size is not None else 10
+        )
+        mailbox_check_timeout_value = (
+            int(opts.mailbox_check_timeout)
+            if opts.mailbox_check_timeout is not None
+            else 30
+        )
+        normalize_timespan_threshold_hours_value = (
+            float(opts.normalize_timespan_threshold_hours)
+            if opts.normalize_timespan_threshold_hours is not None
+            else 24.0
+        )
        try:
            reports = get_dmarc_reports_from_mailbox(
                connection=mailbox_connection,
                delete=opts.mailbox_delete,
-                batch_size=opts.mailbox_batch_size,
+                batch_size=mailbox_batch_size_value,
                reports_folder=opts.mailbox_reports_folder,
                archive_folder=opts.mailbox_archive_folder,
                ip_db_path=opts.ip_db_path,
@@ -1604,6 +1787,7 @@ def _main():
                test=opts.mailbox_test,
                strip_attachment_payloads=opts.strip_attachment_payloads,
                since=opts.mailbox_since,
+                normalize_timespan_threshold_hours=normalize_timespan_threshold_hours_value,
            )

            aggregate_reports += reports["aggregate_reports"]
@@ -1614,27 +1798,31 @@ def _main():
            logger.exception("Mailbox Error")
            exit(1)

-    results = OrderedDict(
-        [
-            ("aggregate_reports", aggregate_reports),
-            ("forensic_reports", forensic_reports),
-            ("smtp_tls_reports", smtp_tls_reports),
-        ]
-    )
+    parsing_results: ParsingResults = {
+        "aggregate_reports": aggregate_reports,
+        "forensic_reports": forensic_reports,
+        "smtp_tls_reports": smtp_tls_reports,
+    }

-    process_reports(results)
+    process_reports(parsing_results)

    if opts.smtp_host:
        try:
            verify = True
            if opts.smtp_skip_certificate_verification:
                verify = False
+            smtp_port_value = int(opts.smtp_port) if opts.smtp_port is not None else 25
+            smtp_to_value = (
+                list(opts.smtp_to)
+                if isinstance(opts.smtp_to, list)
+                else _str_to_list(str(opts.smtp_to))
+            )
            email_results(
-                results,
+                parsing_results,
                opts.smtp_host,
                opts.smtp_from,
-                opts.smtp_to,
-                port=opts.smtp_port,
+                smtp_to_value,
+                port=smtp_port_value,
                verify=verify,
                username=opts.smtp_user,
                password=opts.smtp_password,
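ParsingResults is referenced here but defined elsewhere in the package; judging from how parsing_results is built, a TypedDict along these lines would fit (this definition is a guess for illustration, not the one shipped in parsedmarc):

    from typing import Any, TypedDict

    class ParsingResults(TypedDict):
        aggregate_reports: list[dict[str, Any]]
        forensic_reports: list[dict[str, Any]]
        smtp_tls_reports: list[dict[str, Any]]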
@@ -1656,16 +1844,17 @@ def _main():
                archive_folder=opts.mailbox_archive_folder,
                delete=opts.mailbox_delete,
                test=opts.mailbox_test,
-                check_timeout=opts.mailbox_check_timeout,
+                check_timeout=mailbox_check_timeout_value,
                nameservers=opts.nameservers,
                dns_timeout=opts.dns_timeout,
                strip_attachment_payloads=opts.strip_attachment_payloads,
-                batch_size=opts.mailbox_batch_size,
+                batch_size=mailbox_batch_size_value,
                ip_db_path=opts.ip_db_path,
                always_use_local_files=opts.always_use_local_files,
                reverse_dns_map_path=opts.reverse_dns_map_path,
                reverse_dns_map_url=opts.reverse_dns_map_url,
                offline=opts.offline,
+                normalize_timespan_threshold_hours=normalize_timespan_threshold_hours_value,
            )
        except FileExistsError as error:
            logger.error("{0}".format(error.__str__()))

@@ -1,2 +1,3 @@
-__version__ = "8.19.1"
+__version__ = "9.1.0"
+
USER_AGENT = f"parsedmarc/{__version__}"

@@ -1,27 +1,29 @@
# -*- coding: utf-8 -*-

-from collections import OrderedDict
+from __future__ import annotations

-from elasticsearch_dsl.search import Q
+from typing import Any, Optional, Union

+from elasticsearch.helpers import reindex
from elasticsearch_dsl import (
-    connections,
-    Object,
+    Boolean,
+    Date,
    Document,
    Index,
-    Nested,
    InnerDoc,
    Integer,
-    Text,
-    Boolean,
    Ip,
-    Date,
+    Nested,
+    Object,
    Search,
+    Text,
+    connections,
)
-from elasticsearch.helpers import reindex
+from elasticsearch_dsl.search import Q

+from parsedmarc import InvalidForensicReport
from parsedmarc.log import logger
from parsedmarc.utils import human_timestamp_to_datetime
-from parsedmarc import InvalidForensicReport


class ElasticsearchError(Exception):
@@ -67,6 +69,8 @@ class _AggregateReportDoc(Document):
    date_range = Date()
    date_begin = Date()
    date_end = Date()
+    normalized_timespan = Boolean()
+    original_timespan_seconds = Integer()
    errors = Text()
    published_policy = Object(_PublishedPolicy)
    source_ip_address = Ip()
@@ -87,18 +91,18 @@ class _AggregateReportDoc(Document):
    dkim_results = Nested(_DKIMResult)
    spf_results = Nested(_SPFResult)

-    def add_policy_override(self, type_, comment):
-        self.policy_overrides.append(_PolicyOverride(type=type_, comment=comment))
+    def add_policy_override(self, type_: str, comment: str):
+        self.policy_overrides.append(_PolicyOverride(type=type_, comment=comment))  # pyright: ignore[reportCallIssue]

-    def add_dkim_result(self, domain, selector, result):
+    def add_dkim_result(self, domain: str, selector: str, result: _DKIMResult):
        self.dkim_results.append(
            _DKIMResult(domain=domain, selector=selector, result=result)
-        )
+        )  # pyright: ignore[reportCallIssue]

-    def add_spf_result(self, domain, scope, result):
-        self.spf_results.append(_SPFResult(domain=domain, scope=scope, result=result))
+    def add_spf_result(self, domain: str, scope: str, result: _SPFResult):
+        self.spf_results.append(_SPFResult(domain=domain, scope=scope, result=result))  # pyright: ignore[reportCallIssue]

-    def save(self, **kwargs):
+    def save(self, **kwargs):  # pyright: ignore[reportIncompatibleMethodOverride]
        self.passed_dmarc = False
        self.passed_dmarc = self.spf_aligned or self.dkim_aligned

@@ -131,26 +135,26 @@ class _ForensicSampleDoc(InnerDoc):
    body = Text()
    attachments = Nested(_EmailAttachmentDoc)

-    def add_to(self, display_name, address):
-        self.to.append(_EmailAddressDoc(display_name=display_name, address=address))
+    def add_to(self, display_name: str, address: str):
+        self.to.append(_EmailAddressDoc(display_name=display_name, address=address))  # pyright: ignore[reportCallIssue]

-    def add_reply_to(self, display_name, address):
+    def add_reply_to(self, display_name: str, address: str):
        self.reply_to.append(
            _EmailAddressDoc(display_name=display_name, address=address)
-        )
+        )  # pyright: ignore[reportCallIssue]

-    def add_cc(self, display_name, address):
-        self.cc.append(_EmailAddressDoc(display_name=display_name, address=address))
+    def add_cc(self, display_name: str, address: str):
+        self.cc.append(_EmailAddressDoc(display_name=display_name, address=address))  # pyright: ignore[reportCallIssue]

-    def add_bcc(self, display_name, address):
-        self.bcc.append(_EmailAddressDoc(display_name=display_name, address=address))
+    def add_bcc(self, display_name: str, address: str):
+        self.bcc.append(_EmailAddressDoc(display_name=display_name, address=address))  # pyright: ignore[reportCallIssue]

-    def add_attachment(self, filename, content_type, sha256):
+    def add_attachment(self, filename: str, content_type: str, sha256: str):
        self.attachments.append(
            _EmailAttachmentDoc(
                filename=filename, content_type=content_type, sha256=sha256
            )
-        )
+        )  # pyright: ignore[reportCallIssue]


class _ForensicReportDoc(Document):
@@ -197,15 +201,15 @@ class _SMTPTLSPolicyDoc(InnerDoc):

    def add_failure_details(
        self,
-        result_type,
-        ip_address,
-        receiving_ip,
-        receiving_mx_helo,
-        failed_session_count,
-        sending_mta_ip=None,
-        receiving_mx_hostname=None,
-        additional_information_uri=None,
-        failure_reason_code=None,
+        result_type: Optional[str] = None,
+        ip_address: Optional[str] = None,
+        receiving_ip: Optional[str] = None,
+        receiving_mx_helo: Optional[str] = None,
+        failed_session_count: Optional[int] = None,
+        sending_mta_ip: Optional[str] = None,
+        receiving_mx_hostname: Optional[str] = None,
+        additional_information_uri: Optional[str] = None,
+        failure_reason_code: Union[str, int, None] = None,
    ):
        _details = _SMTPTLSFailureDetailsDoc(
            result_type=result_type,
@@ -218,7 +222,7 @@ class _SMTPTLSPolicyDoc(InnerDoc):
            additional_information=additional_information_uri,
            failure_reason_code=failure_reason_code,
        )
-        self.failure_details.append(_details)
+        self.failure_details.append(_details)  # pyright: ignore[reportCallIssue]


class _SMTPTLSReportDoc(Document):
@@ -235,13 +239,14 @@ class _SMTPTLSReportDoc(Document):

    def add_policy(
        self,
-        policy_type,
-        policy_domain,
-        successful_session_count,
-        failed_session_count,
-        policy_string=None,
-        mx_host_patterns=None,
-        failure_details=None,
+        policy_type: str,
+        policy_domain: str,
+        successful_session_count: int,
+        failed_session_count: int,
+        *,
+        policy_string: Optional[str] = None,
+        mx_host_patterns: Optional[list[str]] = None,
+        failure_details: Optional[str] = None,
    ):
        self.policies.append(
            policy_type=policy_type,
@@ -251,7 +256,7 @@ class _SMTPTLSReportDoc(Document):
            policy_string=policy_string,
            mx_host_patterns=mx_host_patterns,
            failure_details=failure_details,
-        )
+        )  # pyright: ignore[reportCallIssue]


class AlreadySaved(ValueError):
@@ -259,24 +264,25 @@ class AlreadySaved(ValueError):


def set_hosts(
-    hosts,
-    use_ssl=False,
-    ssl_cert_path=None,
-    username=None,
-    password=None,
-    apiKey=None,
-    timeout=60.0,
+    hosts: Union[str, list[str]],
+    *,
+    use_ssl: bool = False,
+    ssl_cert_path: Optional[str] = None,
+    username: Optional[str] = None,
+    password: Optional[str] = None,
+    api_key: Optional[str] = None,
+    timeout: float = 60.0,
):
    """
    Sets the Elasticsearch hosts to use

    Args:
-        hosts (str): A single hostname or URL, or list of hostnames or URLs
-        use_ssl (bool): Use a HTTPS connection to the server
+        hosts (str | list[str]): A single hostname or URL, or list of hostnames or URLs
+        use_ssl (bool): Use an HTTPS connection to the server
        ssl_cert_path (str): Path to the certificate chain
        username (str): The username to use for authentication
        password (str): The password to use for authentication
-        apiKey (str): The Base64 encoded API key to use for authentication
+        api_key (str): The Base64 encoded API key to use for authentication
        timeout (float): Timeout in seconds
    """
    if not isinstance(hosts, list):
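With everything after hosts keyword-only, call sites read unambiguously, which is exactly how the updated cli.py invokes it above. A minimal usage sketch (host and key are placeholders):

    from parsedmarc import elastic

    elastic.set_hosts(
        "https://elasticsearch.example.com:9200",
        use_ssl=True,
        ssl_cert_path="/etc/ssl/certs/ca.pem",
        api_key="bXktYmFzZTY0LWtleQ==",  # replaces the old apiKey parameter
        timeout=60.0,
    )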
@@ -289,14 +295,14 @@ def set_hosts(
        conn_params["ca_certs"] = ssl_cert_path
    else:
        conn_params["verify_certs"] = False
-    if username:
+    if username and password:
        conn_params["http_auth"] = username + ":" + password
-    if apiKey:
-        conn_params["api_key"] = apiKey
+    if api_key:
+        conn_params["api_key"] = api_key
    connections.create_connection(**conn_params)


-def create_indexes(names, settings=None):
+def create_indexes(names: list[str], settings: Optional[dict[str, Any]] = None):
    """
    Create Elasticsearch indexes

@@ -319,7 +325,10 @@ def create_indexes(names, settings=None):
        raise ElasticsearchError("Elasticsearch error: {0}".format(e.__str__()))


-def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
+def migrate_indexes(
+    aggregate_indexes: Optional[list[str]] = None,
+    forensic_indexes: Optional[list[str]] = None,
+):
    """
    Updates index mappings

@@ -358,7 +367,7 @@ def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
        }
        Index(new_index_name).create()
        Index(new_index_name).put_mapping(doc_type=doc, body=body)
-        reindex(connections.get_connection(), aggregate_index_name, new_index_name)
+        reindex(connections.get_connection(), aggregate_index_name, new_index_name)  # pyright: ignore[reportArgumentType]
        Index(aggregate_index_name).delete()

    for forensic_index in forensic_indexes:
@@ -366,18 +375,18 @@ def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):


def save_aggregate_report_to_elasticsearch(
-    aggregate_report,
-    index_suffix=None,
-    index_prefix=None,
-    monthly_indexes=False,
-    number_of_shards=1,
-    number_of_replicas=0,
+    aggregate_report: dict[str, Any],
+    index_suffix: Optional[str] = None,
+    index_prefix: Optional[str] = None,
+    monthly_indexes: Optional[bool] = False,
+    number_of_shards: int = 1,
+    number_of_replicas: int = 0,
):
    """
    Saves a parsed DMARC aggregate report to Elasticsearch

    Args:
-        aggregate_report (OrderedDict): A parsed forensic report
+        aggregate_report (dict): A parsed aggregate report
        index_suffix (str): The suffix of the name of the index to save to
        index_prefix (str): The prefix of the name of the index to save to
        monthly_indexes (bool): Use monthly indexes instead of daily indexes
@@ -395,21 +404,17 @@ def save_aggregate_report_to_elasticsearch(
    domain = aggregate_report["policy_published"]["domain"]
    begin_date = human_timestamp_to_datetime(metadata["begin_date"], to_utc=True)
    end_date = human_timestamp_to_datetime(metadata["end_date"], to_utc=True)
-    begin_date_human = begin_date.strftime("%Y-%m-%d %H:%M:%SZ")
-    end_date_human = end_date.strftime("%Y-%m-%d %H:%M:%SZ")
    if monthly_indexes:
        index_date = begin_date.strftime("%Y-%m")
    else:
        index_date = begin_date.strftime("%Y-%m-%d")
-    aggregate_report["begin_date"] = begin_date
-    aggregate_report["end_date"] = end_date
-    date_range = [aggregate_report["begin_date"], aggregate_report["end_date"]]

-    org_name_query = Q(dict(match_phrase=dict(org_name=org_name)))
-    report_id_query = Q(dict(match_phrase=dict(report_id=report_id)))
-    domain_query = Q(dict(match_phrase={"published_policy.domain": domain}))
-    begin_date_query = Q(dict(match=dict(date_begin=begin_date)))
-    end_date_query = Q(dict(match=dict(date_end=end_date)))
+    org_name_query = Q(dict(match_phrase=dict(org_name=org_name)))  # type: ignore
+    report_id_query = Q(dict(match_phrase=dict(report_id=report_id)))  # pyright: ignore[reportArgumentType]
+    domain_query = Q(dict(match_phrase={"published_policy.domain": domain}))  # pyright: ignore[reportArgumentType]
+    begin_date_query = Q(dict(match=dict(date_begin=begin_date)))  # pyright: ignore[reportArgumentType]
+    end_date_query = Q(dict(match=dict(date_end=end_date)))  # pyright: ignore[reportArgumentType]

    if index_suffix is not None:
        search_index = "dmarc_aggregate_{0}*".format(index_suffix)
@@ -421,6 +426,8 @@ def save_aggregate_report_to_elasticsearch(
    query = org_name_query & report_id_query & domain_query
    query = query & begin_date_query & end_date_query
    search.query = query
+    begin_date_human = begin_date.strftime("%Y-%m-%d %H:%M:%SZ")
+    end_date_human = end_date.strftime("%Y-%m-%d %H:%M:%SZ")

    try:
        existing = search.execute()
@@ -450,6 +457,17 @@ def save_aggregate_report_to_elasticsearch(
    )

    for record in aggregate_report["records"]:
+        begin_date = human_timestamp_to_datetime(record["interval_begin"], to_utc=True)
+        end_date = human_timestamp_to_datetime(record["interval_end"], to_utc=True)
+        normalized_timespan = record["normalized_timespan"]
+
+        if monthly_indexes:
+            index_date = begin_date.strftime("%Y-%m")
+        else:
+            index_date = begin_date.strftime("%Y-%m-%d")
+        aggregate_report["begin_date"] = begin_date
+        aggregate_report["end_date"] = end_date
+        date_range = [aggregate_report["begin_date"], aggregate_report["end_date"]]
        agg_doc = _AggregateReportDoc(
            xml_schema=aggregate_report["xml_schema"],
            org_name=metadata["org_name"],
@@ -457,8 +475,9 @@ def save_aggregate_report_to_elasticsearch(
            org_extra_contact_info=metadata["org_extra_contact_info"],
            report_id=metadata["report_id"],
            date_range=date_range,
-            date_begin=aggregate_report["begin_date"],
-            date_end=aggregate_report["end_date"],
+            date_begin=begin_date,
+            date_end=end_date,
+            normalized_timespan=normalized_timespan,
            errors=metadata["errors"],
            published_policy=published_policy,
            source_ip_address=record["source"]["ip_address"],
@@ -508,7 +527,7 @@ def save_aggregate_report_to_elasticsearch(
        number_of_shards=number_of_shards, number_of_replicas=number_of_replicas
    )
    create_indexes([index], index_settings)
-    agg_doc.meta.index = index
+    agg_doc.meta.index = index  # pyright: ignore[reportOptionalMemberAccess, reportAttributeAccessIssue]

    try:
        agg_doc.save()
@@ -517,18 +536,18 @@ def save_aggregate_report_to_elasticsearch(


def save_forensic_report_to_elasticsearch(
-    forensic_report,
-    index_suffix=None,
-    index_prefix=None,
-    monthly_indexes=False,
-    number_of_shards=1,
-    number_of_replicas=0,
+    forensic_report: dict[str, Any],
+    index_suffix: Optional[Any] = None,
+    index_prefix: Optional[str] = None,
+    monthly_indexes: Optional[bool] = False,
+    number_of_shards: int = 1,
+    number_of_replicas: int = 0,
):
    """
    Saves a parsed DMARC forensic report to Elasticsearch

    Args:
-        forensic_report (OrderedDict): A parsed forensic report
+        forensic_report (dict): A parsed forensic report
        index_suffix (str): The suffix of the name of the index to save to
        index_prefix (str): The prefix of the name of the index to save to
        monthly_indexes (bool): Use monthly indexes instead of daily
@@ -548,7 +567,7 @@ def save_forensic_report_to_elasticsearch(
        sample_date = forensic_report["parsed_sample"]["date"]
        sample_date = human_timestamp_to_datetime(sample_date)
    original_headers = forensic_report["parsed_sample"]["headers"]
-    headers = OrderedDict()
+    headers: dict[str, Any] = {}
    for original_header in original_headers:
        headers[original_header.lower()] = original_headers[original_header]

@@ -562,7 +581,7 @@ def save_forensic_report_to_elasticsearch(
    if index_prefix is not None:
        search_index = "{0}{1}".format(index_prefix, search_index)
    search = Search(index=search_index)
-    q = Q(dict(match=dict(arrival_date=arrival_date_epoch_milliseconds)))
+    q = Q(dict(match=dict(arrival_date=arrival_date_epoch_milliseconds)))  # pyright: ignore[reportArgumentType]

    from_ = None
    to_ = None
@@ -577,7 +596,7 @@ def save_forensic_report_to_elasticsearch(

        from_ = dict()
        from_["sample.headers.from"] = headers["from"]
-        from_query = Q(dict(match_phrase=from_))
+        from_query = Q(dict(match_phrase=from_))  # pyright: ignore[reportArgumentType]
        q = q & from_query
    if "to" in headers:
        # We convert the TO header from a string list to a flat string.
@@ -589,12 +608,12 @@ def save_forensic_report_to_elasticsearch(

        to_ = dict()
        to_["sample.headers.to"] = headers["to"]
-        to_query = Q(dict(match_phrase=to_))
+        to_query = Q(dict(match_phrase=to_))  # pyright: ignore[reportArgumentType]
        q = q & to_query
    if "subject" in headers:
        subject = headers["subject"]
        subject_query = {"match_phrase": {"sample.headers.subject": subject}}
-        q = q & Q(subject_query)
+        q = q & Q(subject_query)  # pyright: ignore[reportArgumentType]

    search.query = q
    existing = search.execute()
@@ -672,7 +691,7 @@ def save_forensic_report_to_elasticsearch(
        number_of_shards=number_of_shards, number_of_replicas=number_of_replicas
    )
    create_indexes([index], index_settings)
-    forensic_doc.meta.index = index
+    forensic_doc.meta.index = index  # pyright: ignore[reportAttributeAccessIssue, reportOptionalMemberAccess]
    try:
        forensic_doc.save()
    except Exception as e:
@@ -684,18 +703,18 @@ def save_forensic_report_to_elasticsearch(


def save_smtp_tls_report_to_elasticsearch(
-    report,
-    index_suffix=None,
-    index_prefix=None,
-    monthly_indexes=False,
-    number_of_shards=1,
-    number_of_replicas=0,
+    report: dict[str, Any],
+    index_suffix: Optional[str] = None,
+    index_prefix: Optional[str] = None,
+    monthly_indexes: bool = False,
+    number_of_shards: int = 1,
+    number_of_replicas: int = 0,
):
    """
    Saves a parsed SMTP TLS report to Elasticsearch

    Args:
-        report (OrderedDict): A parsed SMTP TLS report
+        report (dict): A parsed SMTP TLS report
        index_suffix (str): The suffix of the name of the index to save to
        index_prefix (str): The prefix of the name of the index to save to
        monthly_indexes (bool): Use monthly indexes instead of daily indexes
@@ -719,10 +738,10 @@ def save_smtp_tls_report_to_elasticsearch(
    report["begin_date"] = begin_date
    report["end_date"] = end_date

-    org_name_query = Q(dict(match_phrase=dict(org_name=org_name)))
-    report_id_query = Q(dict(match_phrase=dict(report_id=report_id)))
-    begin_date_query = Q(dict(match=dict(date_begin=begin_date)))
-    end_date_query = Q(dict(match=dict(date_end=end_date)))
+    org_name_query = Q(dict(match_phrase=dict(org_name=org_name)))  # pyright: ignore[reportArgumentType]
+    report_id_query = Q(dict(match_phrase=dict(report_id=report_id)))  # pyright: ignore[reportArgumentType]
+    begin_date_query = Q(dict(match=dict(date_begin=begin_date)))  # pyright: ignore[reportArgumentType]
+    end_date_query = Q(dict(match=dict(date_end=end_date)))  # pyright: ignore[reportArgumentType]

    if index_suffix is not None:
        search_index = "smtp_tls_{0}*".format(index_suffix)
@@ -781,7 +800,7 @@ def save_smtp_tls_report_to_elasticsearch(
        policy_doc = _SMTPTLSPolicyDoc(
            policy_domain=policy["policy_domain"],
            policy_type=policy["policy_type"],
-            succesful_session_count=policy["successful_session_count"],
+            successful_session_count=policy["successful_session_count"],
            failed_session_count=policy["failed_session_count"],
            policy_string=policy_strings,
            mx_host_patterns=mx_host_patterns,
@@ -823,10 +842,10 @@ def save_smtp_tls_report_to_elasticsearch(
            additional_information_uri=additional_information_uri,
            failure_reason_code=failure_reason_code,
        )
-        smtp_tls_doc.policies.append(policy_doc)
+        smtp_tls_doc.policies.append(policy_doc)  # pyright: ignore[reportCallIssue]

    create_indexes([index], index_settings)
-    smtp_tls_doc.meta.index = index
+    smtp_tls_doc.meta.index = index  # pyright: ignore[reportOptionalMemberAccess, reportAttributeAccessIssue]

    try:
        smtp_tls_doc.save()

@@ -1,17 +1,19 @@
# -*- coding: utf-8 -*-

+from __future__ import annotations
+
import logging
import logging.handlers
-import json
import threading
+from typing import Any

+from pygelf import GelfTcpHandler, GelfTlsHandler, GelfUdpHandler
+
from parsedmarc import (
    parsed_aggregate_reports_to_csv_rows,
    parsed_forensic_reports_to_csv_rows,
    parsed_smtp_tls_reports_to_csv_rows,
)
-from pygelf import GelfTcpHandler, GelfUdpHandler, GelfTlsHandler


log_context_data = threading.local()

@@ -48,7 +50,7 @@ class GelfClient(object):
        )
        self.logger.addHandler(self.handler)

-    def save_aggregate_report_to_gelf(self, aggregate_reports):
+    def save_aggregate_report_to_gelf(self, aggregate_reports: list[dict[str, Any]]):
        rows = parsed_aggregate_reports_to_csv_rows(aggregate_reports)
        for row in rows:
            log_context_data.parsedmarc = row
@@ -56,12 +58,14 @@ class GelfClient(object):

        log_context_data.parsedmarc = None

-    def save_forensic_report_to_gelf(self, forensic_reports):
+    def save_forensic_report_to_gelf(self, forensic_reports: list[dict[str, Any]]):
        rows = parsed_forensic_reports_to_csv_rows(forensic_reports)
        for row in rows:
-            self.logger.info(json.dumps(row))
+            log_context_data.parsedmarc = row
+            self.logger.info("parsedmarc forensic report")

-    def save_smtp_tls_report_to_gelf(self, smtp_tls_reports):
+    def save_smtp_tls_report_to_gelf(self, smtp_tls_reports: dict[str, Any]):
        rows = parsed_smtp_tls_reports_to_csv_rows(smtp_tls_reports)
        for row in rows:
-            self.logger.info(json.dumps(row))
+            log_context_data.parsedmarc = row
+            self.logger.info("parsedmarc smtptls report")
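All three save methods now stash the CSV row on the thread-local log_context_data and log a fixed message instead of serializing the row into the message body. That implies a logging filter (or pygelf additional-fields hook) elsewhere attaches log_context_data.parsedmarc to each record; a hedged sketch of such a filter, assuming the module-level log_context_data shown above, and not necessarily the exact wiring parsedmarc uses:

    import logging

    class ParsedmarcContextFilter(logging.Filter):
        """Copy the thread-local row onto the record so the GELF handler
        can emit it as structured additional fields."""

        def filter(self, record: logging.LogRecord) -> bool:
            record.parsedmarc = getattr(log_context_data, "parsedmarc", None)
            return True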

@@ -1,15 +1,17 @@
# -*- coding: utf-8 -*-

+from __future__ import annotations
+
import json
-from ssl import create_default_context
+from ssl import SSLContext, create_default_context
+from typing import Any, Optional, Union

from kafka import KafkaProducer
from kafka.errors import NoBrokersAvailable, UnknownTopicOrPartitionError
-from collections import OrderedDict
-from parsedmarc.utils import human_timestamp_to_datetime

from parsedmarc import __version__
from parsedmarc.log import logger
+from parsedmarc.utils import human_timestamp_to_datetime


class KafkaError(RuntimeError):
@@ -18,7 +20,13 @@ class KafkaError(RuntimeError):

class KafkaClient(object):
    def __init__(
-        self, kafka_hosts, ssl=False, username=None, password=None, ssl_context=None
+        self,
+        kafka_hosts: list[str],
+        *,
+        ssl: Optional[bool] = False,
+        username: Optional[str] = None,
+        password: Optional[str] = None,
+        ssl_context: Optional[SSLContext] = None,
    ):
        """
        Initializes the Kafka client
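After this change only the host list may be positional; a usage sketch with a placeholder broker and Azure Event Hub-style credentials (the $ConnectionString convention comes from the Notes below):

    client = KafkaClient(
        ["kafka.example.com:9093"],
        ssl=True,
        username="$ConnectionString",
        password="Endpoint=sb://example.servicebus.windows.net/;...",  # placeholder
    )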
@@ -28,7 +36,7 @@ class KafkaClient(object):
            ssl (bool): Use a SSL/TLS connection
            username (str): An optional username
            password (str): An optional password
-            ssl_context: SSL context options
+            ssl_context (SSLContext): SSL context options

        Notes:
            ``use_ssl=True`` is implied when a username or password are
@@ -38,7 +46,7 @@ class KafkaClient(object):
            ``$ConnectionString``, and the password is the
            Azure Event Hub connection string.
        """
-        config = dict(
+        config: dict[str, Any] = dict(
            value_serializer=lambda v: json.dumps(v).encode("utf-8"),
            bootstrap_servers=kafka_hosts,
            client_id="parsedmarc-{0}".format(__version__),
@@ -55,7 +63,7 @@ class KafkaClient(object):
            raise KafkaError("No Kafka brokers available")

    @staticmethod
-    def strip_metadata(report):
+    def strip_metadata(report: dict[str, Any]):
        """
        Duplicates org_name, org_email and report_id into JSON root
        and removes report_metadata key to bring it more inline
@@ -69,7 +77,7 @@ class KafkaClient(object):
        return report

    @staticmethod
-    def generate_daterange(report):
+    def generate_date_range(report: dict[str, Any]):
        """
        Creates a date_range timestamp with format YYYY-MM-DD-T-HH:MM:SS
        based on begin and end dates for easier parsing in Kibana.
@@ -86,7 +94,11 @@ class KafkaClient(object):
        logger.debug("date_range is {}".format(date_range))
        return date_range

-    def save_aggregate_reports_to_kafka(self, aggregate_reports, aggregate_topic):
+    def save_aggregate_reports_to_kafka(
+        self,
+        aggregate_reports: Union[dict[str, Any], list[dict[str, Any]]],
+        aggregate_topic: str,
+    ):
        """
        Saves aggregate DMARC reports to Kafka

@@ -96,16 +108,14 @@ class KafkaClient(object):
            aggregate_topic (str): The name of the Kafka topic

        """
-        if isinstance(aggregate_reports, dict) or isinstance(
-            aggregate_reports, OrderedDict
-        ):
+        if isinstance(aggregate_reports, dict):
            aggregate_reports = [aggregate_reports]

        if len(aggregate_reports) < 1:
            return

        for report in aggregate_reports:
-            report["date_range"] = self.generate_daterange(report)
+            report["date_range"] = self.generate_date_range(report)
            report = self.strip_metadata(report)

            for slice in report["records"]:
@@ -129,7 +139,11 @@ class KafkaClient(object):
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise KafkaError("Kafka error: {0}".format(e.__str__()))
|
raise KafkaError("Kafka error: {0}".format(e.__str__()))
|
||||||
|
|
||||||
def save_forensic_reports_to_kafka(self, forensic_reports, forensic_topic):
|
def save_forensic_reports_to_kafka(
|
||||||
|
self,
|
||||||
|
forensic_reports: Union[dict[str, Any], list[dict[str, Any]]],
|
||||||
|
forensic_topic: str,
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
Saves forensic DMARC reports to Kafka, sends individual
|
Saves forensic DMARC reports to Kafka, sends individual
|
||||||
records (slices) since Kafka requires messages to be <= 1MB
|
records (slices) since Kafka requires messages to be <= 1MB
|
||||||
@@ -159,7 +173,11 @@ class KafkaClient(object):
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise KafkaError("Kafka error: {0}".format(e.__str__()))
|
raise KafkaError("Kafka error: {0}".format(e.__str__()))
|
||||||
|
|
||||||
def save_smtp_tls_reports_to_kafka(self, smtp_tls_reports, smtp_tls_topic):
|
def save_smtp_tls_reports_to_kafka(
|
||||||
|
self,
|
||||||
|
smtp_tls_reports: Union[list[dict[str, Any]], dict[str, Any]],
|
||||||
|
smtp_tls_topic: str,
|
||||||
|
):
|
||||||
"""
|
"""
|
||||||
Saves SMTP TLS reports to Kafka, sends individual
|
Saves SMTP TLS reports to Kafka, sends individual
|
||||||
records (slices) since Kafka requires messages to be <= 1MB
|
records (slices) since Kafka requires messages to be <= 1MB
|
||||||
|
|||||||
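The simplified isinstance check in the KafkaClient changes above is safe because collections.OrderedDict has always been a dict subclass, so the removed second test was redundant; a quick confirmation:

from collections import OrderedDict

# OrderedDict subclasses dict, so isinstance(x, dict) already covers it
assert isinstance(OrderedDict(), dict)
assert issubclass(OrderedDict, dict)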
@@ -1,9 +1,15 @@
 # -*- coding: utf-8 -*-
-from parsedmarc.log import logger
+
+from __future__ import annotations
+
+from typing import Any
+
 from azure.core.exceptions import HttpResponseError
 from azure.identity import ClientSecretCredential
 from azure.monitor.ingestion import LogsIngestionClient
+
+from parsedmarc.log import logger


 class LogAnalyticsException(Exception):
     """Raised when an Elasticsearch error occurs"""
@@ -102,7 +108,12 @@ class LogAnalyticsClient(object):
             "Invalid configuration. " + "One or more required settings are missing."
         )

-    def publish_json(self, results, logs_client: LogsIngestionClient, dcr_stream: str):
+    def publish_json(
+        self,
+        results,
+        logs_client: LogsIngestionClient,
+        dcr_stream: str,
+    ):
         """
         Background function to publish given
         DMARC report to specific Data Collection Rule.
@@ -121,7 +132,11 @@ class LogAnalyticsClient(object):
             raise LogAnalyticsException("Upload failed: {error}".format(error=e))

     def publish_results(
-        self, results, save_aggregate: bool, save_forensic: bool, save_smtp_tls: bool
+        self,
+        results: dict[str, Any],
+        save_aggregate: bool,
+        save_forensic: bool,
+        save_smtp_tls: bool,
     ):
         """
         Function to publish DMARC and/or SMTP TLS reports to Log Analytics
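The `from __future__ import annotations` import added across these modules postpones annotation evaluation, which is what lets builtin generics such as dict[str, Any] appear in annotations even on Python versions that predate PEP 585; a minimal illustration:

from __future__ import annotations

from typing import Any


# The annotation below is stored as a string and never evaluated at runtime,
# so this parses and runs on Python 3.7+ even though dict[str, Any] as a
# runtime expression requires 3.9+.
def publish(results: dict[str, Any]) -> None:
    print(sorted(results))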
@@ -1,3 +1,7 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
 from base64 import urlsafe_b64decode
 from functools import lru_cache
 from pathlib import Path
@@ -112,14 +116,14 @@ class GmailConnection(MailboxConnection):
         else:
             return [id for id in self._fetch_all_message_ids(reports_label_id)]

-    def fetch_message(self, message_id):
+    def fetch_message(self, message_id) -> str:
         msg = (
             self.service.users()
             .messages()
             .get(userId="me", id=message_id, format="raw")
             .execute()
         )
-        return urlsafe_b64decode(msg["raw"])
+        return urlsafe_b64decode(msg["raw"]).decode(errors="replace")

     def delete_message(self, message_id: str):
         self.service.users().messages().delete(userId="me", id=message_id)
@@ -152,3 +156,4 @@ class GmailConnection(MailboxConnection):
         for label in labels:
             if label_name == label["id"] or label_name == label["name"]:
                 return label["id"]
+        return ""
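GmailConnection.fetch_message now honors the base class's -> str contract: urlsafe_b64decode returns bytes, and decode(errors="replace") turns any undecodable bytes into U+FFFD instead of raising; a small demonstration:

from base64 import urlsafe_b64decode

raw = urlsafe_b64decode("aGVsbG8gd29ybGQ=")  # b"hello world" (bytes)
text = raw.decode(errors="replace")  # str; invalid UTF-8 becomes U+FFFD
assert text == "hello world"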
@@ -1,8 +1,12 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
 from enum import Enum
 from functools import lru_cache
 from pathlib import Path
 from time import sleep
-from typing import List, Optional
+from typing import Any, List, Optional, Union

 from azure.identity import (
     UsernamePasswordCredential,
@@ -24,7 +28,7 @@ class AuthMethod(Enum):


 def _get_cache_args(token_path: Path, allow_unencrypted_storage):
-    cache_args = {
+    cache_args: dict[str, Any] = {
         "cache_persistence_options": TokenCachePersistenceOptions(
             name="parsedmarc", allow_unencrypted_storage=allow_unencrypted_storage
         )
@@ -147,9 +151,9 @@ class MSGraphConnection(MailboxConnection):
         else:
             logger.warning(f"Unknown response {resp.status_code} {resp.json()}")

-    def fetch_messages(self, folder_name: str, **kwargs) -> List[str]:
+    def fetch_messages(self, reports_folder: str, **kwargs) -> List[str]:
         """Returns a list of message UIDs in the specified folder"""
-        folder_id = self._find_folder_id_from_folder_path(folder_name)
+        folder_id = self._find_folder_id_from_folder_path(reports_folder)
         url = f"/users/{self.mailbox_name}/mailFolders/{folder_id}/messages"
         since = kwargs.get("since")
         if not since:
@@ -162,7 +166,7 @@ class MSGraphConnection(MailboxConnection):

     def _get_all_messages(self, url, batch_size, since):
         messages: list
-        params = {"$select": "id"}
+        params: dict[str, Union[str, int]] = {"$select": "id"}
         if since:
             params["$filter"] = f"receivedDateTime ge {since}"
         if batch_size and batch_size > 0:
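_get_all_messages builds standard Microsoft Graph OData query options; a sketch of the resulting params dict (the $top paging key is an assumption about the branch elided above, the rest mirrors the diff):

from typing import Union

since = "2025-01-01T00:00:00Z"  # hypothetical ISO 8601 timestamp
batch_size = 50

params: dict[str, Union[str, int]] = {"$select": "id"}
if since:
    params["$filter"] = f"receivedDateTime ge {since}"
if batch_size and batch_size > 0:
    params["$top"] = batch_size  # assumption: the truncated branch caps page size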
@@ -1,3 +1,9 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
+from typing import cast
+
 from time import sleep

 from imapclient.exceptions import IMAPClientError
@@ -11,14 +17,14 @@ from parsedmarc.mail.mailbox_connection import MailboxConnection
 class IMAPConnection(MailboxConnection):
     def __init__(
         self,
-        host=None,
-        user=None,
-        password=None,
-        port=None,
-        ssl=True,
-        verify=True,
-        timeout=30,
-        max_retries=4,
+        host: str,
+        user: str,
+        password: str,
+        port: int = 993,
+        ssl: bool = True,
+        verify: bool = True,
+        timeout: int = 30,
+        max_retries: int = 4,
     ):
         self._username = user
         self._password = password
@@ -40,18 +46,18 @@ class IMAPConnection(MailboxConnection):
     def fetch_messages(self, reports_folder: str, **kwargs):
         self._client.select_folder(reports_folder)
         since = kwargs.get("since")
-        if since:
-            return self._client.search(["SINCE", since])
+        if since is not None:
+            return self._client.search(f"SINCE {since}")
         else:
             return self._client.search()

-    def fetch_message(self, message_id):
-        return self._client.fetch_message(message_id, parse=False)
+    def fetch_message(self, message_id: int):
+        return cast(str, self._client.fetch_message(message_id, parse=False))

-    def delete_message(self, message_id: str):
+    def delete_message(self, message_id: int):
         self._client.delete_messages([message_id])

-    def move_message(self, message_id: str, folder_name: str):
+    def move_message(self, message_id: int, folder_name: str):
         self._client.move_messages([message_id], folder_name)

     def keepalive(self):
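IMAPClient.search accepts criteria either as a list of tokens or as a raw string, so the switch to f"SINCE {since}" passes the already-formatted date through verbatim instead of letting the library re-encode the list form; a usage sketch with hypothetical connection details:

from imapclient import IMAPClient

# Hypothetical server and credentials, for illustration only
with IMAPClient("imap.example.com", ssl=True) as client:
    client.login("user@example.com", "hunter2")
    client.select_folder("INBOX")
    uids_list_form = client.search(["SINCE", "01-Jan-2025"])  # old style
    uids_raw_form = client.search("SINCE 01-Jan-2025")  # new style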
@@ -1,5 +1,8 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
 from abc import ABC
-from typing import List


 class MailboxConnection(ABC):
@@ -10,16 +13,16 @@ class MailboxConnection(ABC):
     def create_folder(self, folder_name: str):
         raise NotImplementedError

-    def fetch_messages(self, reports_folder: str, **kwargs) -> List[str]:
+    def fetch_messages(self, reports_folder: str, **kwargs):
         raise NotImplementedError

     def fetch_message(self, message_id) -> str:
         raise NotImplementedError

-    def delete_message(self, message_id: str):
+    def delete_message(self, message_id):
         raise NotImplementedError

-    def move_message(self, message_id: str, folder_name: str):
+    def move_message(self, message_id, folder_name: str):
         raise NotImplementedError

     def keepalive(self):
@@ -1,16 +1,21 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
+import mailbox
+import os
 from time import sleep
+from typing import Dict

 from parsedmarc.log import logger
 from parsedmarc.mail.mailbox_connection import MailboxConnection
-import mailbox
-import os


 class MaildirConnection(MailboxConnection):
     def __init__(
         self,
-        maildir_path=None,
-        maildir_create=False,
+        maildir_path: str,
+        maildir_create: bool = False,
     ):
         self._maildir_path = maildir_path
         self._maildir_create = maildir_create
@@ -27,27 +32,31 @@ class MaildirConnection(MailboxConnection):
             )
             raise Exception(ex)
         self._client = mailbox.Maildir(maildir_path, create=maildir_create)
-        self._subfolder_client = {}
+        self._subfolder_client: Dict[str, mailbox.Maildir] = {}

     def create_folder(self, folder_name: str):
         self._subfolder_client[folder_name] = self._client.add_folder(folder_name)
-        self._client.add_folder(folder_name)

     def fetch_messages(self, reports_folder: str, **kwargs):
         return self._client.keys()

-    def fetch_message(self, message_id):
-        return self._client.get(message_id).as_string()
+    def fetch_message(self, message_id: str) -> str:
+        msg = self._client.get(message_id)
+        if msg is not None:
+            msg = msg.as_string()
+            if msg is not None:
+                return msg
+        return ""

     def delete_message(self, message_id: str):
         self._client.remove(message_id)

     def move_message(self, message_id: str, folder_name: str):
         message_data = self._client.get(message_id)
-        if folder_name not in self._subfolder_client.keys():
-            self._subfolder_client = mailbox.Maildir(
-                os.join(self.maildir_path, folder_name), create=self.maildir_create
-            )
+        if message_data is None:
+            return
+        if folder_name not in self._subfolder_client:
+            self._subfolder_client[folder_name] = self._client.add_folder(folder_name)
         self._subfolder_client[folder_name].add(message_data)
         self._client.remove(message_id)
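Both new guards in MaildirConnection rely on mailbox.Mailbox.get(), which, like dict.get(), returns None for a missing key rather than raising KeyError; for example:

import mailbox

md = mailbox.Maildir("/tmp/example_maildir", create=True)  # hypothetical path
msg = md.get("no-such-key")  # returns None instead of raising KeyError
if msg is None:
    print("message not found; nothing to move")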
@@ -1,27 +1,29 @@
 # -*- coding: utf-8 -*-

-from collections import OrderedDict
+from __future__ import annotations
+
+from typing import Any, Optional, Union

 from opensearchpy import (
-    Q,
-    connections,
-    Object,
+    Boolean,
+    Date,
     Document,
     Index,
-    Nested,
     InnerDoc,
     Integer,
-    Text,
-    Boolean,
     Ip,
-    Date,
+    Nested,
+    Object,
+    Q,
     Search,
+    Text,
+    connections,
 )
 from opensearchpy.helpers import reindex

+from parsedmarc import InvalidForensicReport
 from parsedmarc.log import logger
 from parsedmarc.utils import human_timestamp_to_datetime
-from parsedmarc import InvalidForensicReport


 class OpenSearchError(Exception):
@@ -67,6 +69,8 @@ class _AggregateReportDoc(Document):
     date_range = Date()
     date_begin = Date()
     date_end = Date()
+    normalized_timespan = Boolean()
+    original_timespan_seconds = Integer
     errors = Text()
     published_policy = Object(_PublishedPolicy)
     source_ip_address = Ip()
@@ -87,18 +91,18 @@ class _AggregateReportDoc(Document):
     dkim_results = Nested(_DKIMResult)
     spf_results = Nested(_SPFResult)

-    def add_policy_override(self, type_, comment):
+    def add_policy_override(self, type_: str, comment: str):
         self.policy_overrides.append(_PolicyOverride(type=type_, comment=comment))

-    def add_dkim_result(self, domain, selector, result):
+    def add_dkim_result(self, domain: str, selector: str, result: _DKIMResult):
         self.dkim_results.append(
             _DKIMResult(domain=domain, selector=selector, result=result)
         )

-    def add_spf_result(self, domain, scope, result):
+    def add_spf_result(self, domain: str, scope: str, result: _SPFResult):
         self.spf_results.append(_SPFResult(domain=domain, scope=scope, result=result))

-    def save(self, **kwargs):
+    def save(self, **kwargs):  # pyright: ignore[reportIncompatibleMethodOverride]
         self.passed_dmarc = False
         self.passed_dmarc = self.spf_aligned or self.dkim_aligned

@@ -131,21 +135,21 @@ class _ForensicSampleDoc(InnerDoc):
     body = Text()
     attachments = Nested(_EmailAttachmentDoc)

-    def add_to(self, display_name, address):
+    def add_to(self, display_name: str, address: str):
         self.to.append(_EmailAddressDoc(display_name=display_name, address=address))

-    def add_reply_to(self, display_name, address):
+    def add_reply_to(self, display_name: str, address: str):
         self.reply_to.append(
             _EmailAddressDoc(display_name=display_name, address=address)
         )

-    def add_cc(self, display_name, address):
+    def add_cc(self, display_name: str, address: str):
         self.cc.append(_EmailAddressDoc(display_name=display_name, address=address))

-    def add_bcc(self, display_name, address):
+    def add_bcc(self, display_name: str, address: str):
         self.bcc.append(_EmailAddressDoc(display_name=display_name, address=address))

-    def add_attachment(self, filename, content_type, sha256):
+    def add_attachment(self, filename: str, content_type: str, sha256: str):
         self.attachments.append(
             _EmailAttachmentDoc(
                 filename=filename, content_type=content_type, sha256=sha256
@@ -197,15 +201,15 @@ class _SMTPTLSPolicyDoc(InnerDoc):

     def add_failure_details(
         self,
-        result_type,
-        ip_address,
-        receiving_ip,
-        receiving_mx_helo,
-        failed_session_count,
-        sending_mta_ip=None,
-        receiving_mx_hostname=None,
-        additional_information_uri=None,
-        failure_reason_code=None,
+        result_type: Optional[str] = None,
+        ip_address: Optional[str] = None,
+        receiving_ip: Optional[str] = None,
+        receiving_mx_helo: Optional[str] = None,
+        failed_session_count: Optional[int] = None,
+        sending_mta_ip: Optional[str] = None,
+        receiving_mx_hostname: Optional[str] = None,
+        additional_information_uri: Optional[str] = None,
+        failure_reason_code: Union[str, int, None] = None,
     ):
         _details = _SMTPTLSFailureDetailsDoc(
             result_type=result_type,
@@ -235,13 +239,14 @@ class _SMTPTLSReportDoc(Document):

     def add_policy(
         self,
-        policy_type,
-        policy_domain,
-        successful_session_count,
-        failed_session_count,
-        policy_string=None,
-        mx_host_patterns=None,
-        failure_details=None,
+        policy_type: str,
+        policy_domain: str,
+        successful_session_count: int,
+        failed_session_count: int,
+        *,
+        policy_string: Optional[str] = None,
+        mx_host_patterns: Optional[list[str]] = None,
+        failure_details: Optional[str] = None,
     ):
         self.policies.append(
             policy_type=policy_type,
@@ -259,24 +264,25 @@ class AlreadySaved(ValueError):


 def set_hosts(
-    hosts,
-    use_ssl=False,
-    ssl_cert_path=None,
-    username=None,
-    password=None,
-    apiKey=None,
-    timeout=60.0,
+    hosts: Union[str, list[str]],
+    *,
+    use_ssl: Optional[bool] = False,
+    ssl_cert_path: Optional[str] = None,
+    username: Optional[str] = None,
+    password: Optional[str] = None,
+    api_key: Optional[str] = None,
+    timeout: Optional[float] = 60.0,
 ):
     """
     Sets the OpenSearch hosts to use

     Args:
-        hosts (str|list): A hostname or URL, or list of hostnames or URLs
+        hosts (str|list[str]): A single hostname or URL, or list of hostnames or URLs
         use_ssl (bool): Use an HTTPS connection to the server
         ssl_cert_path (str): Path to the certificate chain
         username (str): The username to use for authentication
        password (str): The password to use for authentication
-        apiKey (str): The Base64 encoded API key to use for authentication
+        api_key (str): The Base64 encoded API key to use for authentication
         timeout (float): Timeout in seconds
     """
     if not isinstance(hosts, list):
@@ -289,14 +295,14 @@ def set_hosts(
         conn_params["ca_certs"] = ssl_cert_path
     else:
         conn_params["verify_certs"] = False
-    if username:
+    if username and password:
         conn_params["http_auth"] = username + ":" + password
-    if apiKey:
-        conn_params["api_key"] = apiKey
+    if api_key:
+        conn_params["api_key"] = api_key
     connections.create_connection(**conn_params)


-def create_indexes(names, settings=None):
+def create_indexes(names: list[str], settings: Optional[dict[str, Any]] = None):
     """
     Create OpenSearch indexes

@@ -319,7 +325,10 @@ def create_indexes(names, settings=None):
         raise OpenSearchError("OpenSearch error: {0}".format(e.__str__()))


-def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
+def migrate_indexes(
+    aggregate_indexes: Optional[list[str]] = None,
+    forensic_indexes: Optional[list[str]] = None,
+):
     """
     Updates index mappings

@@ -366,18 +375,18 @@ def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):


 def save_aggregate_report_to_opensearch(
-    aggregate_report,
-    index_suffix=None,
-    index_prefix=None,
-    monthly_indexes=False,
-    number_of_shards=1,
-    number_of_replicas=0,
+    aggregate_report: dict[str, Any],
+    index_suffix: Optional[str] = None,
+    index_prefix: Optional[str] = None,
+    monthly_indexes: bool = False,
+    number_of_shards: int = 1,
+    number_of_replicas: int = 0,
 ):
     """
     Saves a parsed DMARC aggregate report to OpenSearch

     Args:
-        aggregate_report (OrderedDict): A parsed forensic report
+        aggregate_report (dict): A parsed forensic report
         index_suffix (str): The suffix of the name of the index to save to
         index_prefix (str): The prefix of the name of the index to save to
         monthly_indexes (bool): Use monthly indexes instead of daily indexes
@@ -395,15 +404,11 @@ def save_aggregate_report_to_opensearch(
     domain = aggregate_report["policy_published"]["domain"]
     begin_date = human_timestamp_to_datetime(metadata["begin_date"], to_utc=True)
     end_date = human_timestamp_to_datetime(metadata["end_date"], to_utc=True)
-    begin_date_human = begin_date.strftime("%Y-%m-%d %H:%M:%SZ")
-    end_date_human = end_date.strftime("%Y-%m-%d %H:%M:%SZ")
     if monthly_indexes:
         index_date = begin_date.strftime("%Y-%m")
     else:
         index_date = begin_date.strftime("%Y-%m-%d")
-    aggregate_report["begin_date"] = begin_date
-    aggregate_report["end_date"] = end_date
-    date_range = [aggregate_report["begin_date"], aggregate_report["end_date"]]

     org_name_query = Q(dict(match_phrase=dict(org_name=org_name)))
     report_id_query = Q(dict(match_phrase=dict(report_id=report_id)))
@@ -421,6 +426,8 @@ def save_aggregate_report_to_opensearch(
     query = org_name_query & report_id_query & domain_query
     query = query & begin_date_query & end_date_query
     search.query = query
+    begin_date_human = begin_date.strftime("%Y-%m-%d %H:%M:%SZ")
+    end_date_human = end_date.strftime("%Y-%m-%d %H:%M:%SZ")

     try:
         existing = search.execute()
@@ -450,6 +457,17 @@ def save_aggregate_report_to_opensearch(
         )

     for record in aggregate_report["records"]:
+        begin_date = human_timestamp_to_datetime(record["interval_begin"], to_utc=True)
+        end_date = human_timestamp_to_datetime(record["interval_end"], to_utc=True)
+        normalized_timespan = record["normalized_timespan"]
+
+        if monthly_indexes:
+            index_date = begin_date.strftime("%Y-%m")
+        else:
+            index_date = begin_date.strftime("%Y-%m-%d")
+        aggregate_report["begin_date"] = begin_date
+        aggregate_report["end_date"] = end_date
+        date_range = [aggregate_report["begin_date"], aggregate_report["end_date"]]
         agg_doc = _AggregateReportDoc(
             xml_schema=aggregate_report["xml_schema"],
             org_name=metadata["org_name"],
@@ -457,8 +475,9 @@ def save_aggregate_report_to_opensearch(
             org_extra_contact_info=metadata["org_extra_contact_info"],
             report_id=metadata["report_id"],
             date_range=date_range,
-            date_begin=aggregate_report["begin_date"],
-            date_end=aggregate_report["end_date"],
+            date_begin=begin_date,
+            date_end=end_date,
+            normalized_timespan=normalized_timespan,
             errors=metadata["errors"],
             published_policy=published_policy,
             source_ip_address=record["source"]["ip_address"],
@@ -517,18 +536,18 @@ def save_aggregate_report_to_opensearch(


 def save_forensic_report_to_opensearch(
-    forensic_report,
-    index_suffix=None,
-    index_prefix=None,
-    monthly_indexes=False,
-    number_of_shards=1,
-    number_of_replicas=0,
+    forensic_report: dict[str, Any],
+    index_suffix: Optional[str] = None,
+    index_prefix: Optional[str] = None,
+    monthly_indexes: bool = False,
+    number_of_shards: int = 1,
+    number_of_replicas: int = 0,
 ):
     """
     Saves a parsed DMARC forensic report to OpenSearch

     Args:
-        forensic_report (OrderedDict): A parsed forensic report
+        forensic_report (dict): A parsed forensic report
         index_suffix (str): The suffix of the name of the index to save to
         index_prefix (str): The prefix of the name of the index to save to
         monthly_indexes (bool): Use monthly indexes instead of daily
@@ -548,7 +567,7 @@ def save_forensic_report_to_opensearch(
     sample_date = forensic_report["parsed_sample"]["date"]
     sample_date = human_timestamp_to_datetime(sample_date)
     original_headers = forensic_report["parsed_sample"]["headers"]
-    headers = OrderedDict()
+    headers: dict[str, Any] = {}
     for original_header in original_headers:
         headers[original_header.lower()] = original_headers[original_header]

@@ -684,18 +703,18 @@ def save_forensic_report_to_opensearch(


 def save_smtp_tls_report_to_opensearch(
-    report,
-    index_suffix=None,
-    index_prefix=None,
-    monthly_indexes=False,
-    number_of_shards=1,
-    number_of_replicas=0,
+    report: dict[str, Any],
+    index_suffix: Optional[str] = None,
+    index_prefix: Optional[str] = None,
+    monthly_indexes: bool = False,
+    number_of_shards: int = 1,
+    number_of_replicas: int = 0,
 ):
     """
     Saves a parsed SMTP TLS report to OpenSearch

     Args:
-        report (OrderedDict): A parsed SMTP TLS report
+        report (dict): A parsed SMTP TLS report
         index_suffix (str): The suffix of the name of the index to save to
         index_prefix (str): The prefix of the name of the index to save to
         monthly_indexes (bool): Use monthly indexes instead of daily indexes
@@ -705,7 +724,7 @@ def save_smtp_tls_report_to_opensearch(
     Raises:
         AlreadySaved
     """
-    logger.info("Saving aggregate report to OpenSearch")
+    logger.info("Saving SMTP TLS report to OpenSearch")
     org_name = report["organization_name"]
     report_id = report["report_id"]
     begin_date = human_timestamp_to_datetime(report["begin_date"], to_utc=True)
@@ -781,7 +800,7 @@ def save_smtp_tls_report_to_opensearch(
     policy_doc = _SMTPTLSPolicyDoc(
         policy_domain=policy["policy_domain"],
         policy_type=policy["policy_type"],
-        succesful_session_count=policy["successful_session_count"],
+        successful_session_count=policy["successful_session_count"],
         failed_session_count=policy["failed_session_count"],
         policy_string=policy_strings,
         mx_host_patterns=mx_host_patterns,
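The aggregate-report writer now derives dates per record (interval_begin and interval_end, plus the new normalized_timespan flag) instead of once per report, so each record can land in the index that matches its own interval; a sketch of that per-record calculation, using an illustrative record:

from parsedmarc.utils import human_timestamp_to_datetime

# Illustrative record following the fields referenced in the diff above
record = {
    "interval_begin": "2025-01-02 00:00:00",
    "interval_end": "2025-01-03 00:00:00",
    "normalized_timespan": True,
}

begin_date = human_timestamp_to_datetime(record["interval_begin"], to_utc=True)
monthly_index_date = begin_date.strftime("%Y-%m")  # e.g. "2025-01"
daily_index_date = begin_date.strftime("%Y-%m-%d")  # e.g. "2025-01-02"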
@@ -132,6 +132,7 @@ asu-vei.ru,ASU-VEI,Industrial
 atextelecom.com.br,ATEX Telecom,ISP
 atmailcloud.com,atmail,Email Provider
 ats.ca,ATS Healthcare,Healthcare
+att.net,AT&T,ISP
 atw.ne.jp,ATW,Web Host
 au-net.ne.jp,KDDI,ISP
 au.com,au,ISP
@@ -242,6 +243,7 @@ carandainet.com.br,CN Internet,ISP
 cardhealth.com,Cardinal Health,Healthcare
 cardinal.com,Cardinal Health,Healthcare
 cardinalhealth.com,Cardinal Health,Healthcare
+cardinalscriptnet.com,Cardinal Health,Healthcare
 carecentrix.com,CareCentrix,Healthcare
 carleton.edu,Carlton College,Education
 carrierzone.com,carrierzone,Email Security
@@ -697,6 +699,7 @@ hdsupply-email.com,HD Supply,Retail
 healthall.com,UC Health,Healthcare
 healthcaresupplypros.com,Healthcare Supply Pros,Healthcare
 healthproductsforyou.com,Health Products For You,Healthcare
+healthtouch.com,Cardinal Health,Healthcare
 helloserver6.com,1st Source Web,Marketing
 helpforcb.com,InterServer,Web Host
 helpscout.net,Help Scout,SaaS
@@ -753,6 +756,8 @@ hostwindsdns.com,Hostwinds,Web Host
 hotnet.net.il,Hot Net Internet Services,ISP
 hp.com,HP,Technology
 hringdu.is,Hringdu,ISP
+hslda.net,Home School Legal Defense Association (HSLDA),Education
+hslda.org,Home School Legal Defense Association (HSLDA),Education
 hspherefilter.com,"DynamicNet, Inc. (DNI)",Web Host
 htc.net,HTC,ISP
 htmlservices.it,HTMLServices.it,MSP
@@ -763,6 +768,7 @@ hughston.com,Hughston Clinic,Healthcare
 hvvc.us,Hivelocity,Web Host
 i2ts.ne.jp,i2ts,Web Host
 i4i.com,i4i,Technology
+ibindley.com,Cardinal Health,Healthcare
 ice.co.cr,Grupo ICE,Industrial
 icehosting.nl,IceHosting,Web Host
 icewarpcloud.in,IceWrap,Email Provider
@@ -832,6 +838,7 @@ ip-5-196-151.eu,OVH,Web Host
 ip-51-161-36.net,OVH,Web Host
 ip-51-195-53.eu,OVH,Web Host
 ip-51-254-53.eu,OVH,Web Host
+ip-51-38-67.eu,OVH,Web Host
 ip-51-77-42.eu,OVH,Web Host
 ip-51-83-140.eu,OVH,Web Host
 ip-51-89-240.eu,OVH,Web Host
@@ -1217,6 +1224,7 @@ nettoday.co.th,Net Today,Web Host
 netventure.pl,Netventure,MSP
 netvigator.com,HKT,ISP
 netvision.net.il,013 Netvision,ISP
+network-tech.com,Network Technologies International (NTI),SaaS
 network.kz,network.kz,ISP
 network80.com,Network80,Web Host
 neubox.net,Neubox,Web Host

@@ -13,8 +13,6 @@ def _main():

     csv_headers = ["source_name", "message_count"]

-    output_rows = []
-
     known_unknown_domains = []
     psl_overrides = []
     known_domains = []
@@ -1,6 +1,10 @@
 # -*- coding: utf-8 -*-

+from __future__ import annotations
+
 import json
+from typing import Any

 import boto3

 from parsedmarc.log import logger
@@ -8,16 +12,16 @@ from parsedmarc.utils import human_timestamp_to_datetime


 class S3Client(object):
-    """A client for a Amazon S3"""
+    """A client for interacting with Amazon S3"""

     def __init__(
         self,
-        bucket_name,
-        bucket_path,
-        region_name,
-        endpoint_url,
-        access_key_id,
-        secret_access_key,
+        bucket_name: str,
+        bucket_path: str,
+        region_name: str,
+        endpoint_url: str,
+        access_key_id: str,
+        secret_access_key: str,
     ):
         """
         Initializes the S3Client
@@ -47,18 +51,18 @@ class S3Client(object):
             aws_access_key_id=access_key_id,
             aws_secret_access_key=secret_access_key,
         )
-        self.bucket = self.s3.Bucket(self.bucket_name)
+        self.bucket = self.s3.Bucket(self.bucket_name)  # type: ignore

-    def save_aggregate_report_to_s3(self, report):
+    def save_aggregate_report_to_s3(self, report: dict[str, Any]):
         self.save_report_to_s3(report, "aggregate")

-    def save_forensic_report_to_s3(self, report):
+    def save_forensic_report_to_s3(self, report: dict[str, Any]):
         self.save_report_to_s3(report, "forensic")

-    def save_smtp_tls_report_to_s3(self, report):
+    def save_smtp_tls_report_to_s3(self, report: dict[str, Any]):
         self.save_report_to_s3(report, "smtp_tls")

-    def save_report_to_s3(self, report, report_type):
+    def save_report_to_s3(self, report: dict[str, Any], report_type: str):
         if report_type == "smtp_tls":
             report_date = report["begin_date"]
             report_id = report["report_id"]
@@ -1,9 +1,14 @@
-from urllib.parse import urlparse
-import socket
-import json
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
+import json
+import socket
+from typing import Any, Union
+from urllib.parse import urlparse

-import urllib3
 import requests
+import urllib3

 from parsedmarc.constants import USER_AGENT
 from parsedmarc.log import logger
@@ -23,7 +28,13 @@ class HECClient(object):
     # http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector

     def __init__(
-        self, url, access_token, index, source="parsedmarc", verify=True, timeout=60
+        self,
+        url: str,
+        access_token: str,
+        index: str,
+        source: str = "parsedmarc",
+        verify=True,
+        timeout=60,
     ):
         """
         Initializes the HECClient
@@ -37,9 +48,9 @@ class HECClient(object):
         timeout (float): Number of seconds to wait for the server to send
             data before giving up
         """
-        url = urlparse(url)
+        parsed_url = urlparse(url)
         self.url = "{0}://{1}/services/collector/event/1.0".format(
-            url.scheme, url.netloc
+            parsed_url.scheme, parsed_url.netloc
         )
         self.access_token = access_token.lstrip("Splunk ")
         self.index = index
@@ -48,14 +59,19 @@ class HECClient(object):
         self.session = requests.Session()
         self.timeout = timeout
         self.session.verify = verify
-        self._common_data = dict(host=self.host, source=self.source, index=self.index)
+        self._common_data: dict[str, Union[str, int, float, dict]] = dict(
+            host=self.host, source=self.source, index=self.index
+        )
+
         self.session.headers = {
             "User-Agent": USER_AGENT,
             "Authorization": "Splunk {0}".format(self.access_token),
         }

-    def save_aggregate_reports_to_splunk(self, aggregate_reports):
+    def save_aggregate_reports_to_splunk(
+        self,
+        aggregate_reports: Union[list[dict[str, Any]], dict[str, Any]],
+    ):
         """
         Saves aggregate DMARC reports to Splunk

@@ -75,9 +91,12 @@ class HECClient(object):
         json_str = ""
         for report in aggregate_reports:
             for record in report["records"]:
-                new_report = dict()
+                new_report: dict[str, Union[str, int, float, dict]] = dict()
                 for metadata in report["report_metadata"]:
                     new_report[metadata] = report["report_metadata"][metadata]
+                new_report["interval_begin"] = record["interval_begin"]
+                new_report["interval_end"] = record["interval_end"]
+                new_report["normalized_timespan"] = record["normalized_timespan"]
                 new_report["published_policy"] = report["policy_published"]
                 new_report["source_ip_address"] = record["source"]["ip_address"]
                 new_report["source_country"] = record["source"]["country"]
@@ -98,7 +117,9 @@ class HECClient(object):
                 new_report["spf_results"] = record["auth_results"]["spf"]

             data["sourcetype"] = "dmarc:aggregate"
-            timestamp = human_timestamp_to_unix_timestamp(new_report["begin_date"])
+            timestamp = human_timestamp_to_unix_timestamp(
+                new_report["interval_begin"]
+            )
             data["time"] = timestamp
             data["event"] = new_report.copy()
             json_str += "{0}\n".format(json.dumps(data))
@@ -113,7 +134,10 @@ class HECClient(object):
         if response["code"] != 0:
             raise SplunkError(response["text"])

-    def save_forensic_reports_to_splunk(self, forensic_reports):
+    def save_forensic_reports_to_splunk(
+        self,
+        forensic_reports: Union[list[dict[str, Any]], dict[str, Any]],
+    ):
         """
         Saves forensic DMARC reports to Splunk

@@ -147,7 +171,9 @@ class HECClient(object):
         if response["code"] != 0:
             raise SplunkError(response["text"])

-    def save_smtp_tls_reports_to_splunk(self, reports):
+    def save_smtp_tls_reports_to_splunk(
+        self, reports: Union[list[dict[str, Any]], dict[str, Any]]
+    ):
         """
         Saves aggregate DMARC reports to Splunk
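Each aggregate record becomes one newline-delimited HEC event whose time now comes from the record's interval_begin rather than the report-level begin_date; the assembled payload looks roughly like this (illustrative values):

import json

data = {
    "host": "mx.example.com",  # illustrative
    "source": "parsedmarc",
    "index": "email",
    "sourcetype": "dmarc:aggregate",
    "time": 1735776000,  # unix timestamp of the record's interval_begin
    "event": {
        "interval_begin": "2025-01-02 00:00:00",
        "interval_end": "2025-01-03 00:00:00",
        "normalized_timespan": True,
    },
}
json_str = "{0}\n".format(json.dumps(data))  # one event per line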
@@ -1,8 +1,15 @@
 # -*- coding: utf-8 -*-

+from __future__ import annotations
+
+import json
 import logging
 import logging.handlers
-import json
+import socket
+import ssl
+import time
+from typing import Any, Optional

 from parsedmarc import (
     parsed_aggregate_reports_to_csv_rows,
@@ -14,31 +21,161 @@ from parsedmarc import (
 class SyslogClient(object):
     """A client for Syslog"""

-    def __init__(self, server_name, server_port):
+    def __init__(
+        self,
+        server_name: str,
+        server_port: int,
+        protocol: str = "udp",
+        cafile_path: Optional[str] = None,
+        certfile_path: Optional[str] = None,
+        keyfile_path: Optional[str] = None,
+        timeout: float = 5.0,
+        retry_attempts: int = 3,
+        retry_delay: int = 5,
+    ):
         """
         Initializes the SyslogClient
         Args:
             server_name (str): The Syslog server
-            server_port (int): The Syslog UDP port
+            server_port (int): The Syslog port
+            protocol (str): The protocol to use: "udp", "tcp", or "tls" (Default: "udp")
+            cafile_path (str): Path to CA certificate file for TLS server verification (Optional)
+            certfile_path (str): Path to client certificate file for TLS authentication (Optional)
+            keyfile_path (str): Path to client private key file for TLS authentication (Optional)
+            timeout (float): Connection timeout in seconds for TCP/TLS (Default: 5.0)
+            retry_attempts (int): Number of retry attempts for failed connections (Default: 3)
+            retry_delay (int): Delay in seconds between retry attempts (Default: 5)
         """
         self.server_name = server_name
         self.server_port = server_port
+        self.protocol = protocol.lower()
+        self.timeout = timeout
+        self.retry_attempts = retry_attempts
+        self.retry_delay = retry_delay
+
         self.logger = logging.getLogger("parsedmarc_syslog")
         self.logger.setLevel(logging.INFO)
-        log_handler = logging.handlers.SysLogHandler(address=(server_name, server_port))
+
+        # Create the appropriate syslog handler based on protocol
+        log_handler = self._create_syslog_handler(
+            server_name,
+            server_port,
+            self.protocol,
+            cafile_path,
+            certfile_path,
+            keyfile_path,
+            timeout,
+            retry_attempts,
+            retry_delay,
+        )
+
         self.logger.addHandler(log_handler)

-    def save_aggregate_report_to_syslog(self, aggregate_reports):
+    def _create_syslog_handler(
+        self,
+        server_name: str,
+        server_port: int,
+        protocol: str,
+        cafile_path: Optional[str],
+        certfile_path: Optional[str],
+        keyfile_path: Optional[str],
+        timeout: float,
+        retry_attempts: int,
+        retry_delay: int,
+    ) -> logging.handlers.SysLogHandler:
+        """
+        Creates a SysLogHandler with the specified protocol and TLS settings
+        """
+        if protocol == "udp":
+            # UDP protocol (default, backward compatible)
+            return logging.handlers.SysLogHandler(
+                address=(server_name, server_port),
+                socktype=socket.SOCK_DGRAM,
+            )
+        elif protocol in ["tcp", "tls"]:
+            # TCP or TLS protocol with retry logic
+            for attempt in range(1, retry_attempts + 1):
+                try:
+                    if protocol == "tcp":
+                        # TCP without TLS
+                        handler = logging.handlers.SysLogHandler(
+                            address=(server_name, server_port),
+                            socktype=socket.SOCK_STREAM,
+                        )
+                        # Set timeout on the socket
+                        if hasattr(handler, "socket") and handler.socket:
+                            handler.socket.settimeout(timeout)
+                        return handler
+                    else:
+                        # TLS protocol
+                        # Create SSL context with secure defaults
+                        ssl_context = ssl.create_default_context()
+
+                        # Explicitly set minimum TLS version to 1.2 for security
+                        ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
+
+                        # Configure server certificate verification
+                        if cafile_path:
+                            ssl_context.load_verify_locations(cafile=cafile_path)
+
+                        # Configure client certificate authentication
+                        if certfile_path and keyfile_path:
+                            ssl_context.load_cert_chain(
+                                certfile=certfile_path,
+                                keyfile=keyfile_path,
+                            )
+                        elif certfile_path or keyfile_path:
+                            # Warn if only one of the two required parameters is provided
+                            self.logger.warning(
+                                "Both certfile_path and keyfile_path are required for "
+                                "client certificate authentication. Client authentication "
+                                "will not be used."
+                            )
+
+                        # Create TCP handler first
+                        handler = logging.handlers.SysLogHandler(
+                            address=(server_name, server_port),
+                            socktype=socket.SOCK_STREAM,
+                        )
+
+                        # Wrap socket with TLS
+                        if hasattr(handler, "socket") and handler.socket:
+                            handler.socket = ssl_context.wrap_socket(
+                                handler.socket,
+                                server_hostname=server_name,
+                            )
+                            handler.socket.settimeout(timeout)
+
+                        return handler
+
+                except Exception as e:
+                    if attempt < retry_attempts:
+                        self.logger.warning(
+                            f"Syslog connection attempt {attempt}/{retry_attempts} failed: {e}. "
+                            f"Retrying in {retry_delay} seconds..."
+                        )
+                        time.sleep(retry_delay)
+                    else:
+                        self.logger.error(
+                            f"Syslog connection failed after {retry_attempts} attempts: {e}"
+                        )
+                        raise
+        else:
+            raise ValueError(
+                f"Invalid protocol '{protocol}'. Must be 'udp', 'tcp', or 'tls'."
+            )
+
+    def save_aggregate_report_to_syslog(self, aggregate_reports: list[dict[str, Any]]):
         rows = parsed_aggregate_reports_to_csv_rows(aggregate_reports)
         for row in rows:
             self.logger.info(json.dumps(row))

-    def save_forensic_report_to_syslog(self, forensic_reports):
+    def save_forensic_report_to_syslog(self, forensic_reports: list[dict[str, Any]]):
         rows = parsed_forensic_reports_to_csv_rows(forensic_reports)
         for row in rows:
             self.logger.info(json.dumps(row))

-    def save_smtp_tls_report_to_syslog(self, smtp_tls_reports):
+    def save_smtp_tls_report_to_syslog(self, smtp_tls_reports: list[dict[str, Any]]):
         rows = parsed_smtp_tls_reports_to_csv_rows(smtp_tls_reports)
         for row in rows:
             self.logger.info(json.dumps(row))
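A minimal usage sketch of the extended SyslogClient over TLS, assuming the class is importable from parsedmarc.syslog; the host, port, and certificate path are placeholders:

from parsedmarc.syslog import SyslogClient

client = SyslogClient(
    server_name="syslog.example.com",  # placeholder
    server_port=6514,  # common syslog-over-TLS port
    protocol="tls",
    cafile_path="/etc/ssl/certs/ca.pem",  # placeholder
    timeout=5.0,
    retry_attempts=3,
    retry_delay=5,
)
client.save_aggregate_report_to_syslog([])  # pass parsed aggregate reports here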
parsedmarc/types.py (new file, 220 lines)
@@ -0,0 +1,220 @@
+from __future__ import annotations
+
+from typing import Any, Dict, List, Literal, Optional, TypedDict, Union
+
+# NOTE: This module is intentionally Python 3.10 compatible.
+# - No PEP 604 unions (A | B)
+# - No typing.NotRequired / Required (3.11+) to avoid an extra dependency.
+# For optional keys, use total=False TypedDicts.
+
+
+ReportType = Literal["aggregate", "forensic", "smtp_tls"]
+
+
+class AggregateReportMetadata(TypedDict):
+    org_name: str
+    org_email: str
+    org_extra_contact_info: Optional[str]
+    report_id: str
+    begin_date: str
+    end_date: str
+    timespan_requires_normalization: bool
+    original_timespan_seconds: int
+    errors: List[str]
+
+
+class AggregatePolicyPublished(TypedDict):
+    domain: str
+    adkim: str
+    aspf: str
+    p: str
+    sp: str
+    pct: str
+    fo: str
+
+
+class IPSourceInfo(TypedDict):
+    ip_address: str
+    country: Optional[str]
+    reverse_dns: Optional[str]
+    base_domain: Optional[str]
+    name: Optional[str]
+    type: Optional[str]
+
+
+class AggregateAlignment(TypedDict):
+    spf: bool
+    dkim: bool
+    dmarc: bool
+
+
+class AggregateIdentifiers(TypedDict):
+    header_from: str
+    envelope_from: Optional[str]
+    envelope_to: Optional[str]
+
+
+class AggregatePolicyOverrideReason(TypedDict):
+    type: Optional[str]
+    comment: Optional[str]
+
+
+class AggregateAuthResultDKIM(TypedDict):
+    domain: str
+    result: str
+    selector: str
+
+
+class AggregateAuthResultSPF(TypedDict):
+    domain: str
+    result: str
+    scope: str
+
+
+class AggregateAuthResults(TypedDict):
+    dkim: List[AggregateAuthResultDKIM]
+    spf: List[AggregateAuthResultSPF]
+
+
+class AggregatePolicyEvaluated(TypedDict):
+    disposition: str
+    dkim: str
+    spf: str
+    policy_override_reasons: List[AggregatePolicyOverrideReason]
+
+
+class AggregateRecord(TypedDict):
+    interval_begin: str
+    interval_end: str
+    source: IPSourceInfo
+    count: int
+    alignment: AggregateAlignment
+    policy_evaluated: AggregatePolicyEvaluated
+    disposition: str
+    identifiers: AggregateIdentifiers
+    auth_results: AggregateAuthResults
+
+
+class AggregateReport(TypedDict):
+    xml_schema: str
+    report_metadata: AggregateReportMetadata
+    policy_published: AggregatePolicyPublished
+    records: List[AggregateRecord]
+
+
+class EmailAddress(TypedDict):
+    display_name: Optional[str]
+    address: str
+    local: Optional[str]
+    domain: Optional[str]
+
+
+class EmailAttachment(TypedDict, total=False):
+    filename: Optional[str]
+    mail_content_type: Optional[str]
+    sha256: Optional[str]
+
+
+ParsedEmail = TypedDict(
+    "ParsedEmail",
+    {
+        # This is a lightly-specified version of mailsuite/mailparser JSON.
+        # It focuses on the fields parsedmarc uses in forensic handling.
+        "headers": Dict[str, Any],
+        "subject": Optional[str],
+        "filename_safe_subject": Optional[str],
+        "date": Optional[str],
+        "from": EmailAddress,
+        "to": List[EmailAddress],
+        "cc": List[EmailAddress],
+        "bcc": List[EmailAddress],
+        "attachments": List[EmailAttachment],
+        "body": Optional[str],
+        "has_defects": bool,
+        "defects": Any,
+        "defects_categories": Any,
+    },
+    total=False,
+)
+
+
+class ForensicReport(TypedDict):
+    feedback_type: Optional[str]
|
user_agent: Optional[str]
|
||||||
|
version: Optional[str]
|
||||||
|
original_envelope_id: Optional[str]
|
||||||
|
original_mail_from: Optional[str]
|
||||||
|
original_rcpt_to: Optional[str]
|
||||||
|
arrival_date: str
|
||||||
|
arrival_date_utc: str
|
||||||
|
authentication_results: Optional[str]
|
||||||
|
delivery_result: Optional[str]
|
||||||
|
auth_failure: List[str]
|
||||||
|
authentication_mechanisms: List[str]
|
||||||
|
dkim_domain: Optional[str]
|
||||||
|
reported_domain: str
|
||||||
|
sample_headers_only: bool
|
||||||
|
source: IPSourceInfo
|
||||||
|
sample: str
|
||||||
|
parsed_sample: ParsedEmail
|
||||||
|
|
||||||
|
|
||||||
|
class SMTPTLSFailureDetails(TypedDict):
|
||||||
|
result_type: str
|
||||||
|
failed_session_count: int
|
||||||
|
|
||||||
|
|
||||||
|
class SMTPTLSFailureDetailsOptional(SMTPTLSFailureDetails, total=False):
|
||||||
|
sending_mta_ip: str
|
||||||
|
receiving_ip: str
|
||||||
|
receiving_mx_hostname: str
|
||||||
|
receiving_mx_helo: str
|
||||||
|
additional_info_uri: str
|
||||||
|
failure_reason_code: str
|
||||||
|
ip_address: str
|
||||||
|
|
||||||
|
|
||||||
|
class SMTPTLSPolicySummary(TypedDict):
|
||||||
|
policy_domain: str
|
||||||
|
policy_type: str
|
||||||
|
successful_session_count: int
|
||||||
|
failed_session_count: int
|
||||||
|
|
||||||
|
|
||||||
|
class SMTPTLSPolicy(SMTPTLSPolicySummary, total=False):
|
||||||
|
policy_strings: List[str]
|
||||||
|
mx_host_patterns: List[str]
|
||||||
|
failure_details: List[SMTPTLSFailureDetailsOptional]
|
||||||
|
|
||||||
|
|
||||||
|
class SMTPTLSReport(TypedDict):
|
||||||
|
organization_name: str
|
||||||
|
begin_date: str
|
||||||
|
end_date: str
|
||||||
|
contact_info: Union[str, List[str]]
|
||||||
|
report_id: str
|
||||||
|
policies: List[SMTPTLSPolicy]
|
||||||
|
|
||||||
|
|
||||||
|
class AggregateParsedReport(TypedDict):
|
||||||
|
report_type: Literal["aggregate"]
|
||||||
|
report: AggregateReport
|
||||||
|
|
||||||
|
|
||||||
|
class ForensicParsedReport(TypedDict):
|
||||||
|
report_type: Literal["forensic"]
|
||||||
|
report: ForensicReport
|
||||||
|
|
||||||
|
|
||||||
|
class SMTPTLSParsedReport(TypedDict):
|
||||||
|
report_type: Literal["smtp_tls"]
|
||||||
|
report: SMTPTLSReport
|
||||||
|
|
||||||
|
|
||||||
|
ParsedReport = Union[AggregateParsedReport, ForensicParsedReport, SMTPTLSParsedReport]
|
||||||
|
|
||||||
|
|
||||||
|
class ParsingResults(TypedDict):
|
||||||
|
aggregate_reports: List[AggregateReport]
|
||||||
|
forensic_reports: List[ForensicReport]
|
||||||
|
smtp_tls_reports: List[SMTPTLSReport]
|
||||||
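The new module gives parsedmarc's plain-dict report structures a schema that static type checkers can enforce. A small hypothetical consumer, checkable with mypy or pyright:

    from parsedmarc.types import AggregateReport

    def count_unaligned_messages(report: AggregateReport) -> int:
        # Key access is validated against the TypedDict schema, so a typo
        # such as record["alignmnet"] is a type error rather than a KeyError.
        return sum(
            record["count"]
            for record in report["records"]
            if not record["alignment"]["dmarc"]
        )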
parsedmarc/utils.py
@@ -1,22 +1,26 @@
+# -*- coding: utf-8 -*-
 
 """Utility functions that might be useful for other projects"""
 
-import logging
-import os
-from datetime import datetime
-from datetime import timezone
-from datetime import timedelta
-from collections import OrderedDict
-import tempfile
-import subprocess
-import shutil
-import mailparser
-import json
-import hashlib
+from __future__ import annotations
+
 import base64
-import mailbox
-import re
 import csv
+import hashlib
 import io
+import json
+import logging
+import mailbox
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+from datetime import datetime, timedelta, timezone
+from typing import Optional, TypedDict, Union, cast
+
+import mailparser
+from expiringdict import ExpiringDict
+
 try:
     from importlib.resources import files
@@ -25,19 +29,19 @@ except ImportError:
     from importlib.resources import files
 
 
-from dateutil.parser import parse as parse_date
-import dns.reversename
-import dns.resolver
 import dns.exception
+import dns.resolver
+import dns.reversename
 import geoip2.database
 import geoip2.errors
 import publicsuffixlist
 import requests
+from dateutil.parser import parse as parse_date
 
-from parsedmarc.log import logger
 import parsedmarc.resources.dbip
 import parsedmarc.resources.maps
 from parsedmarc.constants import USER_AGENT
+from parsedmarc.log import logger
 
 parenthesis_regex = re.compile(r"\s*\(.*\)\s*")
 
@@ -60,25 +64,42 @@ class DownloadError(RuntimeError):
     """Raised when an error occurs when downloading a file"""
 
 
-def decode_base64(data):
+class ReverseDNSService(TypedDict):
+    name: str
+    type: Optional[str]
+
+
+ReverseDNSMap = dict[str, ReverseDNSService]
+
+
+class IPAddressInfo(TypedDict):
+    ip_address: str
+    reverse_dns: Optional[str]
+    country: Optional[str]
+    base_domain: Optional[str]
+    name: Optional[str]
+    type: Optional[str]
+
+
+def decode_base64(data: str) -> bytes:
     """
     Decodes a base64 string, with padding being optional
 
     Args:
-        data: A base64 encoded string
+        data (str): A base64 encoded string
 
     Returns:
         bytes: The decoded bytes
 
     """
-    data = bytes(data, encoding="ascii")
-    missing_padding = len(data) % 4
+    data_bytes = bytes(data, encoding="ascii")
+    missing_padding = len(data_bytes) % 4
     if missing_padding != 0:
-        data += b"=" * (4 - missing_padding)
-    return base64.b64decode(data)
+        data_bytes += b"=" * (4 - missing_padding)
+    return base64.b64decode(data_bytes)
 
 
-def get_base_domain(domain):
+def get_base_domain(domain: str) -> Optional[str]:
     """
     Gets the base domain name for the given domain
 
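decode_base64()'s padding trick is worth seeing in isolation: base64 input must be a multiple of four characters, and encoded payloads in reports often arrive with the trailing "=" stripped. A quick illustration:

    import base64

    data = "cGFyc2VkbWFyYw"  # "parsedmarc" with its "==" padding stripped
    data_bytes = data.encode("ascii")
    missing_padding = len(data_bytes) % 4
    if missing_padding != 0:
        data_bytes += b"=" * (4 - missing_padding)
    print(base64.b64decode(data_bytes))  # b'parsedmarc'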
@@ -102,7 +123,14 @@ def get_base_domain(domain):
     return publicsuffix
 
 
-def query_dns(domain, record_type, cache=None, nameservers=None, timeout=2.0):
+def query_dns(
+    domain: str,
+    record_type: str,
+    *,
+    cache: Optional[ExpiringDict] = None,
+    nameservers: Optional[list[str]] = None,
+    timeout: float = 2.0,
+) -> list[str]:
     """
     Queries DNS
 
@@ -121,9 +149,9 @@ def query_dns(domain, record_type, cache=None, nameservers=None, timeout=2.0):
     record_type = record_type.upper()
     cache_key = "{0}_{1}".format(domain, record_type)
     if cache:
-        records = cache.get(cache_key, None)
-        if records:
-            return records
+        cached_records = cache.get(cache_key, None)
+        if isinstance(cached_records, list):
+            return cast(list[str], cached_records)
 
     resolver = dns.resolver.Resolver()
     timeout = float(timeout)
@@ -137,33 +165,25 @@ def query_dns(domain, record_type, cache=None, nameservers=None, timeout=2.0):
     resolver.nameservers = nameservers
     resolver.timeout = timeout
     resolver.lifetime = timeout
-    if record_type == "TXT":
-        resource_records = list(
-            map(
-                lambda r: r.strings,
-                resolver.resolve(domain, record_type, lifetime=timeout),
-            )
-        )
-        _resource_record = [
-            resource_record[0][:0].join(resource_record)
-            for resource_record in resource_records
-            if resource_record
-        ]
-        records = [r.decode() for r in _resource_record]
-    else:
-        records = list(
-            map(
-                lambda r: r.to_text().replace('"', "").rstrip("."),
-                resolver.resolve(domain, record_type, lifetime=timeout),
-            )
-        )
+    records = list(
+        map(
+            lambda r: r.to_text().replace('"', "").rstrip("."),
+            resolver.resolve(domain, record_type, lifetime=timeout),
+        )
+    )
     if cache:
         cache[cache_key] = records
 
     return records
 
 
-def get_reverse_dns(ip_address, cache=None, nameservers=None, timeout=2.0):
+def get_reverse_dns(
+    ip_address,
+    *,
+    cache: Optional[ExpiringDict] = None,
+    nameservers: Optional[list[str]] = None,
+    timeout: float = 2.0,
+) -> Optional[str]:
     """
     Resolves an IP address to a hostname using a reverse DNS query
 
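The rewrite drops the TXT-specific branch and converts every record through its dnspython presentation form. One nuance worth flagging: for a multi-part TXT record, to_text() renders the parts as separate quoted strings, so stripping quotes leaves a space where the old byte-join concatenated them directly. A minimal sketch of the unified path, assuming network access:

    import dns.resolver

    resolver = dns.resolver.Resolver()
    answers = resolver.resolve("example.com", "TXT", lifetime=2.0)
    # to_text() yields the presentation form, e.g. '"v=spf1 -all"';
    # stripping quotes and trailing dots gives comparable plain strings.
    records = [r.to_text().replace('"', "").rstrip(".") for r in answers]
    print(records)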
@@ -181,7 +201,7 @@ def get_reverse_dns(ip_address, cache=None, nameservers=None, timeout=2.0):
     try:
         address = dns.reversename.from_address(ip_address)
         hostname = query_dns(
-            address, "PTR", cache=cache, nameservers=nameservers, timeout=timeout
+            str(address), "PTR", cache=cache, nameservers=nameservers, timeout=timeout
         )[0]
 
     except dns.exception.DNSException as e:
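The str() call is what the new query_dns(domain: str, ...) annotation demands: dns.reversename.from_address() returns a dns.name.Name, not a string. The lookup it feeds can be sketched directly, assuming network access:

    import dns.resolver
    import dns.reversename

    address = dns.reversename.from_address("8.8.8.8")
    print(str(address))  # 8.8.8.8.in-addr.arpa.
    answer = dns.resolver.resolve(str(address), "PTR", lifetime=2.0)
    print(answer[0].to_text().rstrip("."))  # e.g. dns.google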
@@ -191,7 +211,7 @@ def get_reverse_dns(ip_address, cache=None, nameservers=None, timeout=2.0):
     return hostname
 
 
-def timestamp_to_datetime(timestamp):
+def timestamp_to_datetime(timestamp: int) -> datetime:
     """
     Converts a UNIX/DMARC timestamp to a Python ``datetime`` object
 
@@ -204,7 +224,7 @@ def timestamp_to_datetime(timestamp):
     return datetime.fromtimestamp(int(timestamp))
 
 
-def timestamp_to_human(timestamp):
+def timestamp_to_human(timestamp: int) -> str:
     """
     Converts a UNIX/DMARC timestamp to a human-readable string
 
@@ -217,7 +237,9 @@ def timestamp_to_human(timestamp):
     return timestamp_to_datetime(timestamp).strftime("%Y-%m-%d %H:%M:%S")
 
 
-def human_timestamp_to_datetime(human_timestamp, to_utc=False):
+def human_timestamp_to_datetime(
+    human_timestamp: str, *, to_utc: bool = False
+) -> datetime:
     """
     Converts a human-readable timestamp into a Python ``datetime`` object
 
@@ -236,7 +258,7 @@ def human_timestamp_to_datetime(human_timestamp, to_utc=False):
     return dt.astimezone(timezone.utc) if to_utc else dt
 
 
-def human_timestamp_to_unix_timestamp(human_timestamp):
+def human_timestamp_to_unix_timestamp(human_timestamp: str) -> int:
     """
     Converts a human-readable timestamp into a UNIX timestamp
 
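Taken together, these helpers round-trip between the two timestamp representations DMARC reports use: epoch seconds in the XML metadata and "%Y-%m-%d %H:%M:%S" strings in parsedmarc output. A quick sanity check of the round trip (same local time zone assumed; the new -> int annotation matches the int() truncation in the hunk below):

    from datetime import datetime

    ts = 1700000000
    human = datetime.fromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")
    back = int(datetime.strptime(human, "%Y-%m-%d %H:%M:%S").timestamp())
    print(human, back == ts)  # e.g. "2023-11-14 22:13:20 True"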
@@ -247,10 +269,12 @@ def human_timestamp_to_unix_timestamp(human_timestamp):
         float: The converted timestamp
     """
     human_timestamp = human_timestamp.replace("T", " ")
-    return human_timestamp_to_datetime(human_timestamp).timestamp()
+    return int(human_timestamp_to_datetime(human_timestamp).timestamp())
 
 
-def get_ip_address_country(ip_address, db_path=None):
+def get_ip_address_country(
+    ip_address: str, *, db_path: Optional[str] = None
+) -> Optional[str]:
     """
     Returns the ISO code for the country associated
     with the given IPv4 or IPv6 address
@@ -277,7 +301,7 @@ def get_ip_address_country(ip_address, db_path=None):
     ]
 
     if db_path is not None:
-        if os.path.isfile(db_path) is False:
+        if not os.path.isfile(db_path):
             db_path = None
             logger.warning(
                 f"No file exists at {db_path}. Falling back to an "
@@ -314,12 +338,13 @@ def get_ip_address_country(ip_address, db_path=None):
 
 def get_service_from_reverse_dns_base_domain(
     base_domain,
-    always_use_local_file=False,
-    local_file_path=None,
-    url=None,
-    offline=False,
-    reverse_dns_map=None,
-):
+    *,
+    always_use_local_file: bool = False,
+    local_file_path: Optional[str] = None,
+    url: Optional[str] = None,
+    offline: bool = False,
+    reverse_dns_map: Optional[ReverseDNSMap] = None,
+) -> ReverseDNSService:
     """
     Returns the service name of a given base domain name from reverse DNS.
 
@@ -336,12 +361,6 @@ def get_service_from_reverse_dns_base_domain(
     the supplied reverse_dns_base_domain and the type will be None
     """
 
-    def load_csv(_csv_file):
-        reader = csv.DictReader(_csv_file)
-        for row in reader:
-            key = row["base_reverse_dns"].lower().strip()
-            reverse_dns_map[key] = dict(name=row["name"], type=row["type"])
-
     base_domain = base_domain.lower().strip()
     if url is None:
         url = (
@@ -349,11 +368,24 @@ def get_service_from_reverse_dns_base_domain(
             "/parsedmarc/master/parsedmarc/"
             "resources/maps/base_reverse_dns_map.csv"
         )
+    reverse_dns_map_value: ReverseDNSMap
     if reverse_dns_map is None:
-        reverse_dns_map = dict()
+        reverse_dns_map_value = {}
+    else:
+        reverse_dns_map_value = reverse_dns_map
+
+    def load_csv(_csv_file):
+        reader = csv.DictReader(_csv_file)
+        for row in reader:
+            key = row["base_reverse_dns"].lower().strip()
+            reverse_dns_map_value[key] = {
+                "name": row["name"],
+                "type": row["type"],
+            }
+
     csv_file = io.StringIO()
 
-    if not (offline or always_use_local_file) and len(reverse_dns_map) == 0:
+    if not (offline or always_use_local_file) and len(reverse_dns_map_value) == 0:
         try:
             logger.debug(f"Trying to fetch reverse DNS map from {url}...")
             headers = {"User-Agent": USER_AGENT}
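The load_csv() closure now writes into reverse_dns_map_value, a name the type checker can pin to ReverseDNSMap regardless of whether a caller supplied a map. Its parsing logic in isolation, with a hypothetical inline row in the same column layout as resources/maps/base_reverse_dns_map.csv:

    import csv
    import io

    sample = "base_reverse_dns,name,type\ngoogle.com,Google,Email Provider\n"
    reverse_dns_map = {}
    for row in csv.DictReader(io.StringIO(sample)):
        key = row["base_reverse_dns"].lower().strip()
        reverse_dns_map[key] = {"name": row["name"], "type": row["type"]}
    # Unknown domains fall back to {"name": base_domain, "type": None}
    print(reverse_dns_map.get("google.com"))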
@@ -370,7 +402,7 @@ def get_service_from_reverse_dns_base_domain(
             logging.debug("Response body:")
             logger.debug(csv_file.read())
 
-    if len(reverse_dns_map) == 0:
+    if len(reverse_dns_map_value) == 0:
         logger.info("Loading included reverse DNS map...")
         path = str(
             files(parsedmarc.resources.maps).joinpath("base_reverse_dns_map.csv")
@@ -379,26 +411,28 @@
         path = local_file_path
     with open(path) as csv_file:
         load_csv(csv_file)
+    service: ReverseDNSService
     try:
-        service = reverse_dns_map[base_domain]
+        service = reverse_dns_map_value[base_domain]
     except KeyError:
-        service = dict(name=base_domain, type=None)
+        service = {"name": base_domain, "type": None}
 
     return service
 
 
 def get_ip_address_info(
     ip_address,
-    ip_db_path=None,
-    reverse_dns_map_path=None,
-    always_use_local_files=False,
-    reverse_dns_map_url=None,
-    cache=None,
-    reverse_dns_map=None,
-    offline=False,
-    nameservers=None,
-    timeout=2.0,
-):
+    *,
+    ip_db_path: Optional[str] = None,
+    reverse_dns_map_path: Optional[str] = None,
+    always_use_local_files: bool = False,
+    reverse_dns_map_url: Optional[str] = None,
+    cache: Optional[ExpiringDict] = None,
+    reverse_dns_map: Optional[ReverseDNSMap] = None,
+    offline: bool = False,
+    nameservers: Optional[list[str]] = None,
+    timeout: float = 2.0,
+) -> IPAddressInfo:
     """
     Returns reverse DNS and country information for the given IP address
 
@@ -416,17 +450,27 @@ def get_ip_address_info(
         timeout (float): Sets the DNS timeout in seconds
 
     Returns:
-        OrderedDict: ``ip_address``, ``reverse_dns``
+        dict: ``ip_address``, ``reverse_dns``, ``country``
 
     """
     ip_address = ip_address.lower()
     if cache is not None:
-        info = cache.get(ip_address, None)
-        if info:
+        cached_info = cache.get(ip_address, None)
+        if (
+            cached_info
+            and isinstance(cached_info, dict)
+            and "ip_address" in cached_info
+        ):
             logger.debug(f"IP address {ip_address} was found in cache")
-            return info
-    info = OrderedDict()
-    info["ip_address"] = ip_address
+            return cast(IPAddressInfo, cached_info)
+    info: IPAddressInfo = {
+        "ip_address": ip_address,
+        "reverse_dns": None,
+        "country": None,
+        "base_domain": None,
+        "name": None,
+        "type": None,
+    }
     if offline:
         reverse_dns = None
     else:
@@ -436,9 +480,6 @@ def get_ip_address_info(
     country = get_ip_address_country(ip_address, db_path=ip_db_path)
     info["country"] = country
     info["reverse_dns"] = reverse_dns
-    info["base_domain"] = None
-    info["name"] = None
-    info["type"] = None
     if reverse_dns is not None:
         base_domain = get_base_domain(reverse_dns)
         if base_domain is not None:
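Pre-seeding every IPAddressInfo key up front is what lets the later hunk drop the None assignments: the dict is born schema-complete, and lookups only overwrite fields. A hypothetical offline call, which skips the reverse DNS lookup and leaves the seeded value in place:

    from parsedmarc.utils import get_ip_address_info

    info = get_ip_address_info("192.0.2.1", offline=True)
    print(info["ip_address"])   # "192.0.2.1"
    print(info["reverse_dns"])  # None (no DNS lookup in offline mode)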
@@ -463,7 +504,7 @@
     return info
 
 
-def parse_email_address(original_address):
+def parse_email_address(original_address: str) -> dict[str, Optional[str]]:
     if original_address[0] == "":
         display_name = None
     else:
@@ -476,17 +517,15 @@
         local = address_parts[0].lower()
         domain = address_parts[-1].lower()
 
-    return OrderedDict(
-        [
-            ("display_name", display_name),
-            ("address", address),
-            ("local", local),
-            ("domain", domain),
-        ]
-    )
+    return {
+        "display_name": display_name,
+        "address": address,
+        "local": local,
+        "domain": domain,
+    }
 
 
-def get_filename_safe_string(string):
+def get_filename_safe_string(string: str) -> str:
     """
     Converts a string to a string that is safe for a filename
 
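Swapping OrderedDict for a plain dict literal is safe here because dicts have preserved insertion order since Python 3.7, and this codebase now requires 3.10; the JSON serialization comes out identical:

    import json
    from collections import OrderedDict

    as_ordered = OrderedDict([("local", "user"), ("domain", "example.com")])
    as_plain = {"local": "user", "domain": "example.com"}
    print(json.dumps(as_ordered) == json.dumps(as_plain))  # True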
@@ -508,7 +547,7 @@
     return string
 
 
-def is_mbox(path):
+def is_mbox(path: str) -> bool:
     """
     Checks if the given content is an MBOX mailbox file
 
@@ -529,7 +568,7 @@
     return _is_mbox
 
 
-def is_outlook_msg(content):
+def is_outlook_msg(content) -> bool:
     """
     Checks if the given content is an Outlook msg OLE/MSG file
 
@@ -544,7 +583,7 @@
     )
 
 
-def convert_outlook_msg(msg_bytes):
+def convert_outlook_msg(msg_bytes: bytes) -> bytes:
     """
     Uses the ``msgconvert`` Perl utility to convert an Outlook MS file to
     standard RFC 822 format
 
@@ -553,7 +592,7 @@
         msg_bytes (bytes): the content of the .msg file
 
     Returns:
-        A RFC 822 string
+        A RFC 822 bytes payload
     """
     if not is_outlook_msg(msg_bytes):
         raise ValueError("The supplied bytes are not an Outlook MSG file")
@@ -580,7 +619,9 @@
     return rfc822
 
 
-def parse_email(data, strip_attachment_payloads=False):
+def parse_email(
+    data: Union[bytes, str], *, strip_attachment_payloads: bool = False
+) -> dict:
     """
     A simplified email parser
 
parsedmarc/webhook.py
@@ -1,3 +1,9 @@
+# -*- coding: utf-8 -*-
+
+from __future__ import annotations
+
+from typing import Any, Optional, Union
+
 import requests
 
 from parsedmarc import logger
@@ -7,7 +13,13 @@ from parsedmarc.constants import USER_AGENT
 class WebhookClient(object):
     """A client for webhooks"""
 
-    def __init__(self, aggregate_url, forensic_url, smtp_tls_url, timeout=60):
+    def __init__(
+        self,
+        aggregate_url: str,
+        forensic_url: str,
+        smtp_tls_url: str,
+        timeout: Optional[int] = 60,
+    ):
         """
         Initializes the WebhookClient
         Args:
@@ -26,25 +38,27 @@ class WebhookClient(object):
             "Content-Type": "application/json",
         }
 
-    def save_forensic_report_to_webhook(self, report):
+    def save_forensic_report_to_webhook(self, report: str):
         try:
             self._send_to_webhook(self.forensic_url, report)
         except Exception as error_:
             logger.error("Webhook Error: {0}".format(error_.__str__()))
 
-    def save_smtp_tls_report_to_webhook(self, report):
+    def save_smtp_tls_report_to_webhook(self, report: str):
         try:
             self._send_to_webhook(self.smtp_tls_url, report)
         except Exception as error_:
             logger.error("Webhook Error: {0}".format(error_.__str__()))
 
-    def save_aggregate_report_to_webhook(self, report):
+    def save_aggregate_report_to_webhook(self, report: str):
         try:
             self._send_to_webhook(self.aggregate_url, report)
         except Exception as error_:
             logger.error("Webhook Error: {0}".format(error_.__str__()))
 
-    def _send_to_webhook(self, webhook_url, payload):
+    def _send_to_webhook(
+        self, webhook_url: str, payload: Union[bytes, str, dict[str, Any]]
+    ):
         try:
             self.session.post(webhook_url, data=payload, timeout=self.timeout)
         except Exception as error_:
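The payload annotation documents what requests.Session.post() accepts for data=: a pre-serialized str or bytes body, or a dict (which requests would form-encode, so in practice callers pass JSON strings). A hypothetical end-to-end call, assuming the module path parsedmarc.webhook and placeholder endpoints:

    from parsedmarc.webhook import WebhookClient

    client = WebhookClient(
        aggregate_url="https://example.com/hooks/aggregate",  # placeholder
        forensic_url="https://example.com/hooks/forensic",
        smtp_tls_url="https://example.com/hooks/smtp-tls",
        timeout=60,
    )
    client.save_aggregate_report_to_webhook('{"report_id": "abc123"}')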
pyproject.toml
@@ -2,6 +2,7 @@
 requires = [
     "hatchling>=1.27.0",
 ]
+requires_python = ">=3.10,<3.14"
 build-backend = "hatchling.build"
 
 [project]
@@ -28,6 +29,7 @@ classifiers = [
     "Operating System :: OS Independent",
     "Programming Language :: Python :: 3"
 ]
+requires-python = ">=3.10"
 dependencies = [
     "azure-identity>=1.8.0",
     "azure-monitor-ingestion>=1.0.0",
@@ -46,7 +48,7 @@ dependencies = [
     "imapclient>=2.1.0",
     "kafka-python-ng>=2.2.2",
     "lxml>=4.4.0",
-    "mailsuite>=1.9.18",
+    "mailsuite>=1.11.2",
     "msgraph-core==0.2.2",
     "opensearch-py>=2.4.2,<=3.0.0",
     "publicsuffixlist>=0.10.0",
@@ -86,11 +88,11 @@ include = [
 
 [tool.hatch.build]
 exclude = [
     "base_reverse_dns.csv",
     "find_bad_utf8.py",
     "find_unknown_base_reverse_dns.py",
     "unknown_base_reverse_dns.csv",
     "sortmaps.py",
     "README.md",
     "*.bak"
 ]
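The new keys pin the supported interpreter range where installers look: requires-python under [project] is the standard installer gate, and the requires_python line added under [build-system] appears intended to mirror it for builds. For scripts that import parsedmarc directly, the runtime equivalent of that gate would be a guard like:

    import sys

    # Equivalent runtime guard to requires-python = ">=3.10":
    if sys.version_info < (3, 10):
        raise RuntimeError("parsedmarc requires Python 3.10 or newer")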
24  tests.py  (mode changed: Normal file → Executable file)
@@ -1,3 +1,6 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
 from __future__ import absolute_import, print_function, unicode_literals
 
 import os
@@ -9,6 +12,9 @@ from lxml import etree
 import parsedmarc
 import parsedmarc.utils
 
+# Detect if running in GitHub Actions to skip DNS lookups
+OFFLINE_MODE = os.environ.get("GITHUB_ACTIONS", "false").lower() == "true"
+
 
 def minify_xml(xml_string):
     parser = etree.XMLParser(remove_blank_text=True)
@@ -74,7 +80,7 @@ class Test(unittest.TestCase):
         print()
         file = "samples/extract_report/nice-input.xml"
         print("Testing {0}: ".format(file), end="")
-        xmlout = parsedmarc.extract_report(file)
+        xmlout = parsedmarc.extract_report_from_file_path(file)
         xmlin_file = open("samples/extract_report/nice-input.xml")
         xmlin = xmlin_file.read()
         xmlin_file.close()
@@ -118,7 +124,7 @@ class Test(unittest.TestCase):
                 continue
             print("Testing {0}: ".format(sample_path), end="")
            parsed_report = parsedmarc.parse_report_file(
-                sample_path, always_use_local_files=True
+                sample_path, always_use_local_files=True, offline=OFFLINE_MODE
             )["report"]
             parsedmarc.parsed_aggregate_reports_to_csv(parsed_report)
             print("Passed!")
@@ -126,7 +132,7 @@ class Test(unittest.TestCase):
     def testEmptySample(self):
         """Test empty/unparasable report"""
         with self.assertRaises(parsedmarc.ParserError):
-            parsedmarc.parse_report_file("samples/empty.xml")
+            parsedmarc.parse_report_file("samples/empty.xml", offline=OFFLINE_MODE)
 
     def testForensicSamples(self):
         """Test sample forensic/ruf/failure DMARC reports"""
@@ -136,8 +142,12 @@ class Test(unittest.TestCase):
             print("Testing {0}: ".format(sample_path), end="")
             with open(sample_path) as sample_file:
                 sample_content = sample_file.read()
-            parsed_report = parsedmarc.parse_report_email(sample_content)["report"]
-            parsed_report = parsedmarc.parse_report_file(sample_path)["report"]
+            parsed_report = parsedmarc.parse_report_email(
+                sample_content, offline=OFFLINE_MODE
+            )["report"]
+            parsed_report = parsedmarc.parse_report_file(
+                sample_path, offline=OFFLINE_MODE
+            )["report"]
             parsedmarc.parsed_forensic_reports_to_csv(parsed_report)
             print("Passed!")
 
@@ -149,7 +159,9 @@ class Test(unittest.TestCase):
             if os.path.isdir(sample_path):
                 continue
             print("Testing {0}: ".format(sample_path), end="")
-            parsed_report = parsedmarc.parse_report_file(sample_path)["report"]
+            parsed_report = parsedmarc.parse_report_file(
+                sample_path, offline=OFFLINE_MODE
+            )["report"]
             parsedmarc.parsed_smtp_tls_reports_to_csv(parsed_report)
             print("Passed!")
 
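OFFLINE_MODE is evaluated once at import time, so the environment variable has to be set before tests.py is imported. A hypothetical local run that reproduces the CI behavior:

    import os

    os.environ["GITHUB_ACTIONS"] = "true"  # must precede the import below
    import tests  # assumes tests.py is on sys.path
    import unittest

    unittest.main(module=tests, argv=["tests"], exit=False)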