Mirror of https://github.com/domainaware/parsedmarc.git (synced 2026-02-18 15:36:24 +00:00)

Compare commits (163 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | a0e0070dd0 |  |
|  | cf3b7f2c29 |  |
|  | d312522ab7 |  |
|  | 888d717476 |  |
|  | 1127f65fbb |  |
|  | d017dfcddf |  |
|  | 5fae99aacc |  |
|  | ba57368ac3 |  |
|  | dc6ee5de98 |  |
|  | 158d63d205 |  |
|  | f1933b906c |  |
|  | 4b98d795ff |  |
|  | b1356f7dfc |  |
|  | 1969196e1a |  |
|  | 553f15f6a9 |  |
|  | 1fc9f638e2 |  |
|  | 48bff504b4 |  |
|  | 681b7cbf85 |  |
|  | 0922d6e83a |  |
|  | baf3f95fb1 |  |
|  | a51f945305 |  |
|  | 55dbf8e3db |  |
|  | 00267c9847 |  |
|  | 51356175e1 |  |
|  | 3be10d30dd |  |
|  | 98342ecac6 |  |
|  | 38a3d4eaae |  |
|  | a05c230152 |  |
|  | 17bdc3a134 |  |
|  | 858be00f22 |  |
|  | 597ca64f9f |  |
|  | c5dbe2c4dc |  |
|  | 082b3d355f |  |
|  | 2a7ce47bb1 |  |
|  | 9882405d96 |  |
|  | fce84763b9 |  |
|  | 8a299b8600 |  |
|  | b4c2b21547 |  |
|  | 865c249437 |  |
|  | 013859f10e |  |
|  | 6d4a31a120 |  |
|  | 45d3dc3b2e |  |
|  | 4bbd97dbaa |  |
|  | 5df152d469 |  |
|  | d990bef342 |  |
|  | caf77ca6d4 |  |
|  | 4b3d32c5a6 |  |
|  | 5df5c10f80 |  |
|  | 308d4657ab |  |
|  | 0f74e33094 |  |
|  | 9f339e11f5 |  |
|  | 391e84b717 |  |
|  | 8bf06ce5af |  |
|  | 2b7ae50a27 |  |
|  | 3feb478793 |  |
|  | 01630bb61c |  |
|  | 39347cb244 |  |
|  | ed25526d59 |  |
|  | 880d7110fe |  |
|  | d62001f5a4 |  |
|  | 0720bffcb6 |  |
|  | fecd55a97d |  |
|  | a121306eed |  |
|  | 980c9c7904 |  |
|  | 963f5d796f |  |
|  | 6532f3571b |  |
|  | ea878443a8 |  |
|  | 9f6de41958 |  |
|  | 119192701c |  |
|  | 1d650be48a |  |
|  | a85553fb18 |  |
|  | 5975d8eb21 |  |
|  | 87ae6175f2 |  |
|  | 68b93ed580 |  |
|  | 55508b513b |  |
|  | 71511c0cfc |  |
|  | 7c45812284 |  |
|  | 607a091a5f |  |
|  | c308bf938c |  |
|  | 918501ccb5 |  |
|  | 036c372ea3 |  |
|  | a969d83137 |  |
|  | e299f7d161 |  |
|  | 4c04418dae |  |
|  | 2ca9373ed0 |  |
|  | 961ef6d804 |  |
|  | 573ba1e3e9 |  |
|  | 1d8af3ccff |  |
|  | 8426daa26b |  |
|  | d1531b86f2 |  |
|  | 8bb046798c |  |
|  | d64e12548a |  |
|  | 380479cbf1 |  |
|  | ace21c8084 |  |
|  | 1a1aef21ad |  |
|  | 532dbbdb7e |  |
|  | 45738ae688 |  |
|  | 9d77bd64bc |  |
|  | 140290221d |  |
|  | 187d61b770 |  |
|  | 0443b7365e |  |
|  | d7b887a835 |  |
|  | a805733221 |  |
|  | 9552c3ac92 |  |
|  | 5273948be0 |  |
|  | b51756b8bd |  |
|  | 7fa7c24cb8 |  |
|  | 972237ae7e |  |
|  | 6e5333a342 |  |
|  | 47b074c80b |  |
|  | a1cfeb3081 |  |
|  | c7c451b1b1 |  |
|  | 669deb9755 |  |
|  | 446c018920 |  |
|  | 38c6f86973 |  |
|  | 62ccc11925 |  |
|  | c32ca3cae3 |  |
|  | 010f1f84a7 |  |
|  | 7da57c6382 |  |
|  | d08e29a306 |  |
|  | e1e53ad4cb |  |
|  | 4670e9687d |  |
|  | 7f8a2c08cd |  |
|  | e9c05dd0bf |  |
|  | 9348a474dd |  |
|  | e0decaba8c |  |
|  | 26a651cded |  |
|  | bcfcd93fc6 |  |
|  | 54d5ed3543 |  |
|  | 1efbc87e0e |  |
|  | e78e7f64af |  |
|  | ad9de65b99 |  |
|  | b9df12700b |  |
|  | 20843b920f |  |
|  | e5ae89fedf |  |
|  | f148cff11c |  |
|  | 4583769e04 |  |
|  | 0ecb80b27c |  |
|  | b8e62e6d3b |  |
|  | c67953a2c5 |  |
|  | 27dff4298c |  |
|  | f2133aacd4 |  |
|  | 31917e58a9 |  |
|  | bffb98d217 |  |
|  | 1f93b3a7ea |  |
|  | 88debb9729 |  |
|  | a8a5564780 |  |
|  | 1e26f95b7b |  |
|  | 82b48e4d01 |  |
|  | 617b7c5b4a |  |
|  | 989bfd8f07 |  |
|  | 908cc2918c |  |
|  | bd5774d71d |  |
|  | 8e9112bad3 |  |
|  | 40e041a8af |  |
|  | 7ba433cddb |  |
|  | 6d467c93f9 |  |
|  | be38e83761 |  |
|  | ef4e1ac8dc |  |
|  | 39e4c22ecc |  |
|  | 88ff3a2c23 |  |
|  | d8aee569f7 |  |
|  | f618f69c6c |  |
.github/workflows/docker.yml (vendored, 10 lines changed)

@@ -24,11 +24,11 @@ jobs:
  steps:
  - name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v5

  - name: Docker meta
  id: meta
- uses: docker/metadata-action@v3
+ uses: docker/metadata-action@v5
  with:
  images: |
  ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
@@ -40,16 +40,14 @@ jobs:
  type=semver,pattern={{major}}.{{minor}}

  - name: Log in to the Container registry
- # https://github.com/docker/login-action/releases/tag/v2.0.0
- uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b
+ uses: docker/login-action@v3
  with:
  registry: ${{ env.REGISTRY }}
  username: ${{ github.actor }}
  password: ${{ secrets.GITHUB_TOKEN }}

  - name: Build and push Docker image
- # https://github.com/docker/build-push-action/releases/tag/v3.0.0
- uses: docker/build-push-action@e551b19e49efd4e98792db7592c17c09b89db8d8
+ uses: docker/build-push-action@v6
  with:
  context: .
  push: ${{ github.event_name == 'release' }}
.github/workflows/python-tests.yml (vendored, 34 lines changed)

@@ -11,31 +11,37 @@ on:
  jobs:
  build:

  runs-on: ubuntu-latest

+ services:
+ elasticsearch:
+ image: elasticsearch:8.19.7
+ env:
+ discovery.type: single-node
+ cluster.name: parsedmarc-cluster
+ discovery.seed_hosts: elasticsearch
+ bootstrap.memory_lock: true
+ xpack.security.enabled: false
+ xpack.license.self_generated.type: basic
+ ports:
+ - 9200:9200
+ - 9300:9300

  strategy:
  fail-fast: false
  matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+ python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]

  steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
  - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
  with:
  python-version: ${{ matrix.python-version }}
  - name: Install system dependencies
  run: |
- sudo apt-get update
- sudo apt-get install -y libemail-outlook-message-perl
- wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | sudo gpg --dearmor -o /usr/share/keyrings/elasticsearch-keyring.gpg
- sudo apt-get install apt-transport-https
- echo "deb [signed-by=/usr/share/keyrings/elasticsearch-keyring.gpg] https://artifacts.elastic.co/packages/8.x/apt stable main" | sudo tee /etc/apt/sources.list.d/elastic-8.x.list
- sudo apt-get update && sudo apt-get install elasticsearch
- sudo sed -i 's/xpack.security.enabled: true/xpack.security.enabled: false/' /etc/elasticsearch/elasticsearch.yml
- sudo systemctl restart elasticsearch
- sudo systemctl --no-pager status elasticsearch
+ sudo apt-get -q update
+ sudo apt-get -qy install libemail-outlook-message-perl
  - name: Install Python dependencies
  run: |
  python -m pip install --upgrade pip
@@ -59,6 +65,6 @@ jobs:
  run: |
  hatch build
  - name: Upload coverage to Codecov
- uses: codecov/codecov-action@v4
+ uses: codecov/codecov-action@v5
  with:
  token: ${{ secrets.CODECOV_TOKEN }}
.gitignore (vendored, 11 lines changed)

@@ -106,7 +106,7 @@ ENV/
  .idea/

  # VS Code launch config
- .vscode/launch.json
+ #.vscode/launch.json

  # Visual Studio Code settings
  #.vscode/
@@ -136,3 +136,12 @@ samples/private

  *.html
  *.sqlite-journal
+
+ parsedmarc.ini
+ scratch.py
+
+ parsedmarc/resources/maps/base_reverse_dns.csv
+ parsedmarc/resources/maps/unknown_base_reverse_dns.csv
+ parsedmarc/resources/maps/sus_domains.csv
+ parsedmarc/resources/maps/unknown_domains.txt
+ *.bak
.vscode/launch.json (new file, vendored, 45 lines)

@@ -0,0 +1,45 @@
+ {
+ // Use IntelliSense to learn about possible attributes.
+ // Hover to view descriptions of existing attributes.
+ // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+ "version": "0.2.0",
+ "configurations": [
+ {
+ "name": "Python Debugger: Current File",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "${file}",
+ "console": "integratedTerminal"
+ },
+ {
+ "name": "tests.py",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "tests.py",
+ "console": "integratedTerminal"
+ },
+ {
+ "name": "sample",
+ "type": "debugpy",
+ "request": "launch",
+ "module": "parsedmarc.cli",
+ "args": ["samples/private/sample"]
+ },
+ {
+ "name": "sortlists.py",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "sortlists.py",
+ "cwd": "${workspaceFolder}/parsedmarc/resources/maps",
+ "console": "integratedTerminal"
+ },
+ {
+ "name": "find_unknown_base_reverse_dns.py",
+ "type": "debugpy",
+ "request": "launch",
+ "program": "find_unknown_base_reverse_dns.py",
+ "cwd": "${workspaceFolder}/parsedmarc/resources/maps",
+ "console": "integratedTerminal"
+ }
+ ]
+ }
.vscode/settings.json (vendored, 19 lines changed)

@@ -13,6 +13,7 @@
  "automodule",
  "backported",
  "bellsouth",
  "boto",
  "brakhane",
  "Brightmail",
  "CEST",
@@ -35,7 +36,9 @@
  "exampleuser",
  "expiringdict",
  "fieldlist",
  "GELF",
  "genindex",
  "geoip",
  "geoipupdate",
  "Geolite",
  "geolocation",
@@ -44,7 +47,10 @@
  "hostnames",
  "htpasswd",
  "httpasswd",
  "httplib",
  "IMAP",
  "imapclient",
  "infile",
  "Interaktive",
  "IPDB",
  "journalctl",
@@ -60,16 +66,20 @@
  "mailrelay",
  "mailsuite",
  "maxdepth",
  "MAXHEADERS",
  "maxmind",
  "mbox",
  "mfrom",
  "michaeldavie",
  "mikesiegel",
  "Mimecast",
  "mitigations",
  "MMDB",
  "modindex",
  "msgconvert",
  "msgraph",
  "MSSP",
  "multiprocess",
  "Munge",
  "ndjson",
  "newkey",
@@ -79,14 +89,19 @@
  "nosecureimap",
  "nosniff",
  "nwettbewerb",
  "opensearch",
  "opensearchpy",
  "parsedmarc",
  "passsword",
  "Postorius",
  "premade",
  "procs",
  "publicsuffix",
  "publicsuffixlist",
  "publixsuffix",
  "pygelf",
  "pypy",
  "pytest",
  "quickstart",
  "Reindex",
  "replyto",
@@ -94,10 +109,13 @@
  "Rollup",
  "Rpdm",
  "SAMEORIGIN",
  "sdist",
  "Servernameone",
  "setuptools",
  "smartquotes",
  "SMTPTLS",
  "sortlists",
  "sortmaps",
  "sourcetype",
  "STARTTLS",
  "tasklist",
@@ -110,6 +128,7 @@
  "truststore",
  "Übersicht",
  "uids",
  "Uncategorized",
  "unparasable",
  "uper",
  "urllib",
CHANGELOG.md (627 lines changed; diff suppressed because it is too large)
@@ -1,4 +1,4 @@
- ARG BASE_IMAGE=python:3.9-slim
+ ARG BASE_IMAGE=python:3.13-slim
  ARG USERNAME=parsedmarc
  ARG USER_UID=1000
  ARG USER_GID=$USER_UID
README.md (41 lines changed)

@@ -9,7 +9,7 @@ Package](https://img.shields.io/pypi/v/parsedmarc.svg)](https://pypi.org/project
  [](https://pypistats.org/packages/parsedmarc)

  <p align="center">
- <img src="https://github.com/domainaware/parsedmarc/raw/master/docs/source/_static/screenshots/dmarc-summary-charts.png?raw=true" alt="A screenshot of DMARC summary charts in Kibana"/>
+ <img src="https://raw.githubusercontent.com/domainaware/parsedmarc/refs/heads/master/docs/source/_static/screenshots/dmarc-summary-charts.png?raw=true" alt="A screenshot of DMARC summary charts in Kibana"/>
  </p>

  `parsedmarc` is a Python module and CLI utility for parsing DMARC
@@ -23,25 +23,42 @@ ProofPoint Email Fraud Defense, and Valimail.

  ## Help Wanted

- This project is maintained by one developer. Please consider
- reviewing the open
- [issues](https://github.com/domainaware/parsedmarc/issues) to see how
- you can contribute code, documentation, or user support. Assistance on
- the pinned issues would be particularly helpful.
+ This project is maintained by one developer. Please consider reviewing the open
+ [issues](https://github.com/domainaware/parsedmarc/issues) to see how you can
+ contribute code, documentation, or user support. Assistance on the pinned
+ issues would be particularly helpful.

  Thanks to all
  [contributors](https://github.com/domainaware/parsedmarc/graphs/contributors)!

  ## Features

- - Parses draft and 1.0 standard aggregate/rua reports
- - Parses forensic/failure/ruf reports
- - Can parse reports from an inbox over IMAP, Microsoft Graph, or Gmail
-   API
+ - Parses draft and 1.0 standard aggregate/rua DMARC reports
+ - Parses forensic/failure/ruf DMARC reports
+ - Parses reports from SMTP TLS Reporting
+ - Can parse reports from an inbox over IMAP, Microsoft Graph, or Gmail API
  - Transparently handles gzip or zip compressed reports
  - Consistent data structures
  - Simple JSON and/or CSV output
  - Optionally email the results
- - Optionally send the results to Elasticsearch and/or Splunk, for use
-   with premade dashboards
+ - Optionally send the results to Elasticsearch, Opensearch, and/or Splunk, for
+   use with premade dashboards
  - Optionally send reports to Apache Kafka

+ ## Python Compatibility
+
+ This project supports the following Python versions, which are either actively maintained or are the default versions
+ for RHEL or Debian.
+
+ | Version | Supported | Reason |
+ |---------|-----------|------------------------------------------------------------|
+ | < 3.6 | ❌ | End of Life (EOL) |
+ | 3.6 | ❌ | Used in RHHEL 8, but not supported by project dependencies |
+ | 3.7 | ❌ | End of Life (EOL) |
+ | 3.8 | ❌ | End of Life (EOL) |
+ | 3.9 | ✅ | Supported until August 2026 (Debian 11); May 2032 (RHEL 9) |
+ | 3.10 | ✅ | Actively maintained |
+ | 3.11 | ✅ | Actively maintained; supported until June 2028 (Debian 12) |
+ | 3.12 | ✅ | Actively maintained; supported until May 2035 (RHEL 10) |
+ | 3.13 | ✅ | Actively maintained; supported until June 2030 (Debian 13) |
+ | 3.14 | ✅ | Actively maintained |
build.sh (11 lines changed)

@@ -14,8 +14,15 @@ cd docs
  make clean
  make html
  touch build/html/.nojekyll
- cp -rf build/html/* ../../parsedmarc-docs/
+ if [ -d "../../parsedmarc-docs" ]; then
+   cp -rf build/html/* ../../parsedmarc-docs/
+ fi
  cd ..
+ cd parsedmarc/resources/maps
+ python3 sortlists.py
+ echo "Checking for invalid UTF-8 bytes in base_reverse_dns_map.csv"
+ python3 find_bad_utf8.py base_reverse_dns_map.csv
+ cd ../../..
  python3 tests.py
  rm -rf dist/ build/
  hatch build
@@ -1,8 +1,6 @@
- version: '3.7'
-
  services:
  elasticsearch:
- image: docker.elastic.co/elasticsearch/elasticsearch:8.3.1
+ image: docker.elastic.co/elasticsearch/elasticsearch:8.19.7
  environment:
  - network.host=127.0.0.1
  - http.host=0.0.0.0
@@ -14,7 +12,7 @@ services:
  - xpack.security.enabled=false
  - xpack.license.self_generated.type=basic
  ports:
- - 127.0.0.1:9200:9200
+ - "127.0.0.1:9200:9200"
  ulimits:
  memlock:
  soft: -1
@@ -28,3 +26,30 @@ services:
  interval: 10s
  timeout: 10s
  retries: 24
+
+ opensearch:
+ image: opensearchproject/opensearch:2
+ environment:
+ - network.host=127.0.0.1
+ - http.host=0.0.0.0
+ - node.name=opensearch
+ - discovery.type=single-node
+ - cluster.name=parsedmarc-cluster
+ - discovery.seed_hosts=opensearch
+ - bootstrap.memory_lock=true
+ - OPENSEARCH_INITIAL_ADMIN_PASSWORD=${OPENSEARCH_INITIAL_ADMIN_PASSWORD}
+ ports:
+ - "127.0.0.1:9201:9200"
+ ulimits:
+ memlock:
+ soft: -1
+ hard: -1
+ healthcheck:
+ test:
+ [
+ "CMD-SHELL",
+ "curl -s -XGET http://localhost:9201/_cluster/health?pretty | grep status | grep -q '\\(green\\|yellow\\)'"
+ ]
+ interval: 10s
+ timeout: 10s
+ retries: 24
@@ -21,7 +21,6 @@
  :members:
  ```

-
  ## parsedmarc.splunk

  ```{eval-rst}
@@ -20,7 +20,7 @@ from parsedmarc import __version__
  # -- Project information -----------------------------------------------------

  project = "parsedmarc"
- copyright = "2018 - 2023, Sean Whalen and contributors"
+ copyright = "2018 - 2025, Sean Whalen and contributors"
  author = "Sean Whalen and contributors"

  # The version info for the project you're documenting, acts as replacement for
@@ -33,17 +33,37 @@ and Valimail.

  ## Features

- - Parses draft and 1.0 standard aggregate/rua reports
- - Parses forensic/failure/ruf reports
+ - Parses draft and 1.0 standard aggregate/rua DMARC reports
+ - Parses forensic/failure/ruf DMARC reports
+ - Parses reports from SMTP TLS Reporting
  - Can parse reports from an inbox over IMAP, Microsoft Graph, or Gmail API
  - Transparently handles gzip or zip compressed reports
  - Consistent data structures
  - Simple JSON and/or CSV output
  - Optionally email the results
- - Optionally send the results to Elasticsearch/OpenSearch and/or Splunk, for use with
-   premade dashboards
+ - Optionally send the results to Elasticsearch, Opensearch, and/or Splunk, for use
+   with premade dashboards
  - Optionally send reports to Apache Kafka

+ ## Python Compatibility
+
+ This project supports the following Python versions, which are either actively maintained or are the default versions
+ for RHEL or Debian.
+
+ | Version | Supported | Reason |
+ |---------|-----------|------------------------------------------------------------|
+ | < 3.6 | ❌ | End of Life (EOL) |
+ | 3.6 | ❌ | Used in RHHEL 8, but not supported by project dependencies |
+ | 3.7 | ❌ | End of Life (EOL) |
+ | 3.8 | ❌ | End of Life (EOL) |
+ | 3.9 | ✅ | Supported until August 2026 (Debian 11); May 2032 (RHEL 9) |
+ | 3.10 | ✅ | Actively maintained |
+ | 3.11 | ✅ | Actively maintained; supported until June 2028 (Debian 12) |
+ | 3.12 | ✅ | Actively maintained; supported until May 2035 (RHEL 10) |
+ | 3.13 | ✅ | Actively maintained; supported until June 2030 (Debian 13) |
+ | 3.14 | ✅ | Actively maintained |

  ```{toctree}
  :caption: 'Contents'
  :maxdepth: 2
@@ -199,7 +199,7 @@ sudo apt-get install libemail-outlook-message-perl
  [geoipupdate releases page on github]: https://github.com/maxmind/geoipupdate/releases
  [ip to country lite database]: https://db-ip.com/db/download/ip-to-country-lite
  [license keys]: https://www.maxmind.com/en/accounts/current/license-key
- [maxmind geoipupdate page]: https://dev.maxmind.com/geoip/geoipupdate/
+ [maxmind geoipupdate page]: https://dev.maxmind.com/geoip/updating-databases/
  [maxmind geolite2 country database]: https://dev.maxmind.com/geoip/geolite2-free-geolocation-data
  [registering for a free geolite2 account]: https://www.maxmind.com/en/geolite2/signup
  [to comply with various privacy regulations]: https://blog.maxmind.com/2019/12/18/significant-changes-to-accessing-and-using-geolite2-databases/
@@ -23,6 +23,8 @@ of the report schema.
  "report_id": "9391651994964116463",
  "begin_date": "2012-04-27 20:00:00",
  "end_date": "2012-04-28 19:59:59",
+ "timespan_requires_normalization": false,
+ "original_timespan_seconds": 86399,
  "errors": []
  },
  "policy_published": {
@@ -39,8 +41,10 @@ of the report schema.
  "source": {
  "ip_address": "72.150.241.94",
  "country": "US",
- "reverse_dns": "adsl-72-150-241-94.shv.bellsouth.net",
- "base_domain": "bellsouth.net"
+ "reverse_dns": null,
+ "base_domain": null,
+ "name": null,
+ "type": null
  },
  "count": 2,
  "alignment": {
@@ -74,7 +78,10 @@ of the report schema.
  "result": "pass"
  }
  ]
- }
+ },
+ "normalized_timespan": false,
+ "interval_begin": "2012-04-28 00:00:00",
+ "interval_end": "2012-04-28 23:59:59"
  }
  ]
  }
@@ -83,8 +90,10 @@ of the report schema.
  ### CSV aggregate report

  ```text
- xml_schema,org_name,org_email,org_extra_contact_info,report_id,begin_date,end_date,errors,domain,adkim,aspf,p,sp,pct,fo,source_ip_address,source_country,source_reverse_dns,source_base_domain,count,spf_aligned,dkim_aligned,dmarc_aligned,disposition,policy_override_reasons,policy_override_comments,envelope_from,header_from,envelope_to,dkim_domains,dkim_selectors,dkim_results,spf_domains,spf_scopes,spf_results
- draft,acme.com,noreply-dmarc-support@acme.com,http://acme.com/dmarc/support,9391651994964116463,2012-04-27 20:00:00,2012-04-28 19:59:59,,example.com,r,r,none,none,100,0,72.150.241.94,US,adsl-72-150-241-94.shv.bellsouth.net,bellsouth.net,2,True,False,True,none,,,example.com,example.com,,example.com,none,fail,example.com,mfrom,pass
+ xml_schema,org_name,org_email,org_extra_contact_info,report_id,begin_date,end_date,normalized_timespan,errors,domain,adkim,aspf,p,sp,pct,fo,source_ip_address,source_country,source_reverse_dns,source_base_domain,source_name,source_type,count,spf_aligned,dkim_aligned,dmarc_aligned,disposition,policy_override_reasons,policy_override_comments,envelope_from,header_from,envelope_to,dkim_domains,dkim_selectors,dkim_results,spf_domains,spf_scopes,spf_results
+ draft,acme.com,noreply-dmarc-support@acme.com,http://acme.com/dmarc/support,9391651994964116463,2012-04-28 00:00:00,2012-04-28 23:59:59,False,,example.com,r,r,none,none,100,0,72.150.241.94,US,,,,,2,True,False,True,none,,,example.com,example.com,,example.com,none,fail,example.com,mfrom,pass
+ draft,acme.com,noreply-dmarc-support@acme.com,http://acme.com/dmarc/support,9391651994964116463,2012-04-28 00:00:00,2012-04-28 23:59:59,False,,example.com,r,r,none,none,100,0,72.150.241.94,US,,,,,2,True,False,True,none,,,example.com,example.com,,example.com,none,fail,example.com,mfrom,pass

  ```

  ## Sample forensic report output
@@ -4,47 +4,50 @@

  ```text
  usage: parsedmarc [-h] [-c CONFIG_FILE] [--strip-attachment-payloads] [-o OUTPUT]
- [--aggregate-json-filename AGGREGATE_JSON_FILENAME]
- [--forensic-json-filename FORENSIC_JSON_FILENAME]
- [--aggregate-csv-filename AGGREGATE_CSV_FILENAME]
- [--forensic-csv-filename FORENSIC_CSV_FILENAME]
- [-n NAMESERVERS [NAMESERVERS ...]] [-t DNS_TIMEOUT] [--offline]
- [-s] [--verbose] [--debug] [--log-file LOG_FILE] [-v]
- [file_path ...]
+ [--aggregate-json-filename AGGREGATE_JSON_FILENAME] [--forensic-json-filename FORENSIC_JSON_FILENAME]
+ [--smtp-tls-json-filename SMTP_TLS_JSON_FILENAME] [--aggregate-csv-filename AGGREGATE_CSV_FILENAME]
+ [--forensic-csv-filename FORENSIC_CSV_FILENAME] [--smtp-tls-csv-filename SMTP_TLS_CSV_FILENAME]
+ [-n NAMESERVERS [NAMESERVERS ...]] [-t DNS_TIMEOUT] [--offline] [-s] [-w] [--verbose] [--debug]
+ [--log-file LOG_FILE] [--no-prettify-json] [-v]
+ [file_path ...]

- Parses DMARC reports
+ Parses DMARC reports

- positional arguments:
- file_path one or more paths to aggregate or forensic report
- files, emails, or mbox files'
+ positional arguments:
+ file_path one or more paths to aggregate or forensic report files, emails, or mbox files'

- optional arguments:
- -h, --help show this help message and exit
- -c CONFIG_FILE, --config-file CONFIG_FILE
- a path to a configuration file (--silent implied)
- --strip-attachment-payloads
- remove attachment payloads from forensic report output
- -o OUTPUT, --output OUTPUT
- write output files to the given directory
- --aggregate-json-filename AGGREGATE_JSON_FILENAME
- filename for the aggregate JSON output file
- --forensic-json-filename FORENSIC_JSON_FILENAME
- filename for the forensic JSON output file
- --aggregate-csv-filename AGGREGATE_CSV_FILENAME
- filename for the aggregate CSV output file
- --forensic-csv-filename FORENSIC_CSV_FILENAME
- filename for the forensic CSV output file
- -n NAMESERVERS [NAMESERVERS ...], --nameservers NAMESERVERS [NAMESERVERS ...]
- nameservers to query
- -t DNS_TIMEOUT, --dns_timeout DNS_TIMEOUT
- number of seconds to wait for an answer from DNS
- (default: 2.0)
- --offline do not make online queries for geolocation or DNS
- -s, --silent only print errors and warnings
- --verbose more verbose output
- --debug print debugging information
- --log-file LOG_FILE output logging to a file
- -v, --version show program's version number and exit
+ options:
+ -h, --help show this help message and exit
+ -c CONFIG_FILE, --config-file CONFIG_FILE
+ a path to a configuration file (--silent implied)
+ --strip-attachment-payloads
+ remove attachment payloads from forensic report output
+ -o OUTPUT, --output OUTPUT
+ write output files to the given directory
+ --aggregate-json-filename AGGREGATE_JSON_FILENAME
+ filename for the aggregate JSON output file
+ --forensic-json-filename FORENSIC_JSON_FILENAME
+ filename for the forensic JSON output file
+ --smtp-tls-json-filename SMTP_TLS_JSON_FILENAME
+ filename for the SMTP TLS JSON output file
+ --aggregate-csv-filename AGGREGATE_CSV_FILENAME
+ filename for the aggregate CSV output file
+ --forensic-csv-filename FORENSIC_CSV_FILENAME
+ filename for the forensic CSV output file
+ --smtp-tls-csv-filename SMTP_TLS_CSV_FILENAME
+ filename for the SMTP TLS CSV output file
+ -n NAMESERVERS [NAMESERVERS ...], --nameservers NAMESERVERS [NAMESERVERS ...]
+ nameservers to query
+ -t DNS_TIMEOUT, --dns_timeout DNS_TIMEOUT
+ number of seconds to wait for an answer from DNS (default: 2.0)
+ --offline do not make online queries for geolocation or DNS
+ -s, --silent only print errors
+ -w, --warnings print warnings in addition to errors
+ --verbose more verbose output
+ --debug print debugging information
+ --log-file LOG_FILE output logging to a file
+ --no-prettify-json output JSON in a single line without indentation
+ -v, --version show program's version number and exit
  ```

  :::{note}
@@ -120,8 +123,10 @@ The full set of configuration options are:
  Elasticsearch, Splunk and/or S3
  - `save_smtp_tls` - bool: Save SMTP-STS report data to
  Elasticsearch, Splunk and/or S3
+ - `index_prefix_domain_map` - bool: A path mapping of Opensearch/Elasticsearch index prefixes to domain names
  - `strip_attachment_payloads` - bool: Remove attachment
  payloads from results
  - `silent` - bool: Set this to `False` to output results to STDOUT
  - `output` - str: Directory to place JSON and CSV files in. This is required if you set either of the JSON output file options.
  - `aggregate_json_filename` - str: filename for the aggregate
  JSON output file
@@ -166,6 +171,9 @@ The full set of configuration options are:
  - `check_timeout` - int: Number of seconds to wait for a IMAP
  IDLE response or the number of seconds until the next
  mail check (Default: `30`)
+ - `since` - str: Search for messages since certain time. (Examples: `5m|3h|2d|1w`)
+   Acceptable units - {"m":"minutes", "h":"hours", "d":"days", "w":"weeks"}.
+   Defaults to `1d` if incorrect value is provided.
  - `imap`
  - `host` - str: The IMAP server hostname or IP address
  - `port` - int: The IMAP server port (Default: `993`)
@@ -205,6 +213,8 @@ The full set of configuration options are:
  - `mailbox` - str: The mailbox name. This defaults to the
  current user if using the UsernamePassword auth method, but
  could be a shared mailbox if the user has access to the mailbox
+ - `graph_url` - str: Microsoft Graph URL. Allows for use of National Clouds (ex Azure Gov)
+   (Default: https://graph.microsoft.com)
  - `token_file` - str: Path to save the token file
  (Default: `.token`)
  - `allow_unencrypted_storage` - bool: Allows the Azure Identity
@@ -247,7 +257,7 @@ The full set of configuration options are:
  :::
  - `user` - str: Basic auth username
  - `password` - str: Basic auth password
- - `apiKey` - str: API key
+ - `api_key` - str: API key
  - `ssl` - bool: Use an encrypted SSL/TLS connection
  (Default: `True`)
  - `timeout` - float: Timeout in seconds (Default: 60)
@@ -270,7 +280,7 @@ The full set of configuration options are:
  :::
  - `user` - str: Basic auth username
  - `password` - str: Basic auth password
- - `apiKey` - str: API key
+ - `api_key` - str: API key
  - `ssl` - bool: Use an encrypted SSL/TLS connection
  (Default: `True`)
  - `timeout` - float: Timeout in seconds (Default: 60)
@@ -364,7 +374,7 @@ The full set of configuration options are:
  - `mode` - str: The GELF transport type to use. Valid modes: `tcp`, `udp`, `tls`

  - `maildir`
- - `reports_folder` - str: Full path for mailbox maidir location (Default: `INBOX`)
+ - `maildir_path` - str: Full path for mailbox maidir location (Default: `INBOX`)
  - `maildir_create` - bool: Create maildir if not present (Default: False)

  - `webhook` - Post the individual reports to a webhook url with the report as the JSON body
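The sketch below is a rough illustration of what the `webhook` output described in the option above amounts to: posting a parsed report as a JSON body to a URL. The function name, request handling, and 60-second default are assumptions made for the example, not parsedmarc's actual webhook client.

```python
# Illustration only (not parsedmarc's implementation): POST a parsed report
# dict as a JSON body to a webhook URL, as the `webhook` option describes.
import json
import urllib.request


def post_report_to_webhook(url: str, report: dict, timeout: int = 60) -> int:
    body = json.dumps(report, ensure_ascii=False).encode("utf-8")
    request = urllib.request.Request(
        url,
        data=body,
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with urllib.request.urlopen(request, timeout=timeout) as response:
        return response.status  # e.g. 200 when the receiver accepts the report
```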
@@ -440,6 +450,28 @@ PUT _cluster/settings
  Increasing this value increases resource usage.
  :::

+ ## Multi-tenant support
+
+ Starting in `8.19.0`, ParseDMARC provides multi-tenant support by placing data into separate OpenSearch or Elasticsearch index prefixes. To set this up, create a YAML file that is formatted where each key is a tenant name, and the value is a list of domains related to that tenant, not including subdomains, like this:
+
+ ```yaml
+ example:
+ - example.com
+ - example.net
+ - example.org
+
+ whalensolutions:
+ - whalensolutions.com
+ ```
+
+ Save it to disk where the user running ParseDMARC can read it, then set `index_prefix_domain_map` to that filepath in the `[general]` section of the ParseDMARC configuration file and do not set an `index_prefix` option in the `[elasticsearch]` or `[opensearch]` sections.
+
+ When configured correctly, if ParseDMARC finds that a report is related to a domain in the mapping, the report will be saved in an index name that has the tenant name prefixed to it with a trailing underscore. Then, you can use the security features of Opensearch or the ELK stack to only grant users access to the indexes that they need.
+
+ :::{note}
+ A domain cannot be used in multiple tenant lists. Only the first prefix list that contains the matching domain is used.
+ :::
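As a rough sketch of the lookup the section above describes (assuming the YAML map format shown there; the `tenants.yml` path and function name are hypothetical, and this is not parsedmarc's own code):

```python
# Sketch: resolve a report's base domain to a tenant index prefix using the
# YAML map described above. Assumes PyYAML is installed; not parsedmarc's
# actual implementation.
from typing import Optional

import yaml


def tenant_index_prefix(map_path: str, base_domain: str) -> Optional[str]:
    with open(map_path) as f:
        prefix_domain_map = yaml.safe_load(f)  # {tenant_name: [domains]}
    for tenant, domains in prefix_domain_map.items():
        if base_domain in domains:
            # The first matching tenant wins, as the note above states
            tenant = tenant.lower().strip().replace(" ", "_").replace("-", "_")
            return f"{tenant}_"
    return None  # no match: the default index prefix applies


# Example: tenant_index_prefix("tenants.yml", "example.net") returns "example_",
# so reports for example.net land in index names prefixed with "example_".
```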
  ## Running parsedmarc as a systemd service

  Use systemd to run `parsedmarc` as a service and process reports as
grafana/Grafana-DMARC_Reports.json-new_panel.json (new file, 5901 lines; diff suppressed because it is too large)

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large
@@ -9,11 +9,13 @@ from configparser import ConfigParser
  from glob import glob
  import logging
  import math
+ import yaml
  from collections import OrderedDict
  import json
  from ssl import CERT_NONE, create_default_context
  from multiprocessing import Pipe, Process
  import sys
+ import http.client
  from tqdm import tqdm

  from parsedmarc import (
@@ -45,7 +47,10 @@ from parsedmarc.mail import (
  from parsedmarc.mail.graph import AuthMethod

  from parsedmarc.log import logger
- from parsedmarc.utils import is_mbox, get_reverse_dns
+ from parsedmarc.utils import is_mbox, get_reverse_dns, get_base_domain
+ from parsedmarc import SEEN_AGGREGATE_REPORT_IDS

+ http.client._MAXHEADERS = 200  # pylint:disable=protected-access
+
  formatter = logging.Formatter(
  fmt="%(levelname)8s:%(filename)s:%(lineno)d:%(message)s",
@@ -72,6 +77,7 @@ def cli_parse(
  always_use_local_files,
  reverse_dns_map_path,
  reverse_dns_map_url,
+ normalize_timespan_threshold_hours,
  conn,
  ):
  """Separated this function for multiprocessing"""
@@ -86,6 +92,7 @@ def cli_parse(
  nameservers=nameservers,
  dns_timeout=dns_timeout,
  strip_attachment_payloads=sa,
+ normalize_timespan_threshold_hours=normalize_timespan_threshold_hours,
  )
  conn.send([file_results, file_path])
  except ParserError as error:
@@ -97,8 +104,35 @@ def cli_parse(
  def _main():
  """Called when the module is executed"""

+ def get_index_prefix(report):
+ if index_prefix_domain_map is None:
+ return None
+ if "policy_published" in report:
+ domain = report["policy_published"]["domain"]
+ elif "reported_domain" in report:
+ domain = report("reported_domain")
+ elif "policies" in report:
+ domain = report["policies"][0]["domain"]
+ if domain:
+ domain = get_base_domain(domain)
+ for prefix in index_prefix_domain_map:
+ if domain in index_prefix_domain_map[prefix]:
+ prefix = (
+ prefix.lower()
+ .strip()
+ .strip("_")
+ .replace(" ", "_")
+ .replace("-", "_")
+ )
+ prefix = f"{prefix}_"
+ return prefix
+ return None
+
  def process_reports(reports_):
- output_str = "{0}\n".format(json.dumps(reports_, ensure_ascii=False, indent=2))
+ indent_value = 2 if opts.prettify_json else None
+ output_str = "{0}\n".format(
+ json.dumps(reports_, ensure_ascii=False, indent=indent_value)
+ )

  if not opts.silent:
  print(output_str)
@@ -122,7 +156,8 @@ def _main():
  elastic.save_aggregate_report_to_elasticsearch(
  report,
  index_suffix=opts.elasticsearch_index_suffix,
- index_prefix=opts.elasticsearch_index_prefix,
+ index_prefix=opts.elasticsearch_index_prefix
+ or get_index_prefix(report),
  monthly_indexes=opts.elasticsearch_monthly_indexes,
  number_of_shards=shards,
  number_of_replicas=replicas,
@@ -143,7 +178,8 @@ def _main():
  opensearch.save_aggregate_report_to_opensearch(
  report,
  index_suffix=opts.opensearch_index_suffix,
- index_prefix=opts.opensearch_index_prefix,
+ index_prefix=opts.opensearch_index_prefix
+ or get_index_prefix(report),
  monthly_indexes=opts.opensearch_monthly_indexes,
  number_of_shards=shards,
  number_of_replicas=replicas,
@@ -185,8 +221,9 @@ def _main():

  try:
  if opts.webhook_aggregate_url:
+ indent_value = 2 if opts.prettify_json else None
  webhook_client.save_aggregate_report_to_webhook(
- json.dumps(report, ensure_ascii=False, indent=2)
+ json.dumps(report, ensure_ascii=False, indent=indent_value)
  )
  except Exception as error_:
  logger.error("Webhook Error: {0}".format(error_.__str__()))
@@ -208,7 +245,8 @@ def _main():
  elastic.save_forensic_report_to_elasticsearch(
  report,
  index_suffix=opts.elasticsearch_index_suffix,
- index_prefix=opts.elasticsearch_index_prefix,
+ index_prefix=opts.elasticsearch_index_prefix
+ or get_index_prefix(report),
  monthly_indexes=opts.elasticsearch_monthly_indexes,
  number_of_shards=shards,
  number_of_replicas=replicas,
@@ -227,7 +265,8 @@ def _main():
  opensearch.save_forensic_report_to_opensearch(
  report,
  index_suffix=opts.opensearch_index_suffix,
- index_prefix=opts.opensearch_index_prefix,
+ index_prefix=opts.opensearch_index_prefix
+ or get_index_prefix(report),
  monthly_indexes=opts.opensearch_monthly_indexes,
  number_of_shards=shards,
  number_of_replicas=replicas,
@@ -267,8 +306,9 @@ def _main():

  try:
  if opts.webhook_forensic_url:
+ indent_value = 2 if opts.prettify_json else None
  webhook_client.save_forensic_report_to_webhook(
- json.dumps(report, ensure_ascii=False, indent=2)
+ json.dumps(report, ensure_ascii=False, indent=indent_value)
  )
  except Exception as error_:
  logger.error("Webhook Error: {0}".format(error_.__str__()))
@@ -290,7 +330,8 @@ def _main():
  elastic.save_smtp_tls_report_to_elasticsearch(
  report,
  index_suffix=opts.elasticsearch_index_suffix,
- index_prefix=opts.elasticsearch_index_prefix,
+ index_prefix=opts.elasticsearch_index_prefix
+ or get_index_prefix(report),
  monthly_indexes=opts.elasticsearch_monthly_indexes,
  number_of_shards=shards,
  number_of_replicas=replicas,
@@ -309,7 +350,8 @@ def _main():
  opensearch.save_smtp_tls_report_to_opensearch(
  report,
  index_suffix=opts.opensearch_index_suffix,
- index_prefix=opts.opensearch_index_prefix,
+ index_prefix=opts.opensearch_index_prefix
+ or get_index_prefix(report),
  monthly_indexes=opts.opensearch_monthly_indexes,
  number_of_shards=shards,
  number_of_replicas=replicas,
@@ -349,8 +391,9 @@ def _main():

  try:
  if opts.webhook_smtp_tls_url:
+ indent_value = 2 if opts.prettify_json else None
  webhook_client.save_smtp_tls_report_to_webhook(
- json.dumps(report, ensure_ascii=False, indent=2)
+ json.dumps(report, ensure_ascii=False, indent=indent_value)
  )
  except Exception as error_:
  logger.error("Webhook Error: {0}".format(error_.__str__()))
@@ -395,7 +438,7 @@ def _main():
  arg_parser.add_argument(
  "-c",
  "--config-file",
- help="a path to a configuration file " "(--silent implied)",
+ help="a path to a configuration file (--silent implied)",
  )
  arg_parser.add_argument(
  "file_path",
@@ -403,7 +446,7 @@ def _main():
  help="one or more paths to aggregate or forensic "
  "report files, emails, or mbox files'",
  )
- strip_attachment_help = "remove attachment payloads from forensic " "report output"
+ strip_attachment_help = "remove attachment payloads from forensic report output"
  arg_parser.add_argument(
  "--strip-attachment-payloads", help=strip_attachment_help, action="store_true"
  )
@@ -446,14 +489,14 @@ def _main():
  arg_parser.add_argument(
  "-t",
  "--dns_timeout",
- help="number of seconds to wait for an answer " "from DNS (default: 2.0)",
+ help="number of seconds to wait for an answer from DNS (default: 2.0)",
  type=float,
  default=2.0,
  )
  arg_parser.add_argument(
  "--offline",
  action="store_true",
- help="do not make online queries for geolocation " " or DNS",
+ help="do not make online queries for geolocation or DNS",
  )
  arg_parser.add_argument(
  "-s", "--silent", action="store_true", help="only print errors"
@@ -471,6 +514,12 @@ def _main():
  "--debug", action="store_true", help="print debugging information"
  )
  arg_parser.add_argument("--log-file", default=None, help="output logging to a file")
+ arg_parser.add_argument(
+ "--no-prettify-json",
+ action="store_false",
+ dest="prettify_json",
+ help="output JSON in a single line without indentation",
+ )
  arg_parser.add_argument("-v", "--version", action="version", version=__version__)

  aggregate_reports = []
@@ -500,6 +549,7 @@ def _main():
  dns_timeout=args.dns_timeout,
  debug=args.debug,
  verbose=args.verbose,
+ prettify_json=args.prettify_json,
  save_aggregate=False,
  save_forensic=False,
  save_smtp_tls=False,
@@ -510,6 +560,7 @@ def _main():
  mailbox_test=False,
  mailbox_batch_size=10,
  mailbox_check_timeout=30,
+ mailbox_since=None,
  imap_host=None,
  imap_skip_certificate_verification=False,
  imap_ssl=True,
@@ -526,6 +577,7 @@ def _main():
  graph_tenant_id=None,
  graph_mailbox=None,
  graph_allow_unencrypted_storage=False,
+ graph_url="https://graph.microsoft.com",
  hec=None,
  hec_token=None,
  hec_index=None,
@@ -541,7 +593,7 @@ def _main():
  elasticsearch_monthly_indexes=False,
  elasticsearch_username=None,
  elasticsearch_password=None,
- elasticsearch_apiKey=None,
+ elasticsearch_api_key=None,
  opensearch_hosts=None,
  opensearch_timeout=60,
  opensearch_number_of_shards=1,
@@ -553,7 +605,7 @@ def _main():
  opensearch_monthly_indexes=False,
  opensearch_username=None,
  opensearch_password=None,
- opensearch_apiKey=None,
+ opensearch_api_key=None,
  kafka_hosts=None,
  kafka_username=None,
  kafka_password=None,
@@ -609,6 +661,7 @@ def _main():
  webhook_forensic_url=None,
  webhook_smtp_tls_url=None,
  webhook_timeout=60,
+ normalize_timespan_threshold_hours=24.0,
  )
  args = arg_parser.parse_args()
@@ -619,9 +672,19 @@ def _main():
  exit(-1)
  opts.silent = True
  config = ConfigParser()
+ index_prefix_domain_map = None
  config.read(args.config_file)
  if "general" in config.sections():
  general_config = config["general"]
  if "silent" in general_config:
  opts.silent = general_config.getboolean("silent")
+ if "normalize_timespan_threshold_hours" in general_config:
+ opts.normalize_timespan_threshold_hours = general_config.getfloat(
+ "normalize_timespan_threshold_hours"
+ )
+ if "index_prefix_domain_map" in general_config:
+ with open(general_config["index_prefix_domain_map"]) as f:
+ index_prefix_domain_map = yaml.safe_load(f)
  if "offline" in general_config:
  opts.offline = general_config.getboolean("offline")
  if "strip_attachment_payloads" in general_config:
@@ -666,11 +729,11 @@ def _main():
  )
  exit(-1)
  if "save_aggregate" in general_config:
- opts.save_aggregate = general_config["save_aggregate"]
+ opts.save_aggregate = general_config.getboolean("save_aggregate")
  if "save_forensic" in general_config:
- opts.save_forensic = general_config["save_forensic"]
+ opts.save_forensic = general_config.getboolean("save_forensic")
  if "save_smtp_tls" in general_config:
- opts.save_smtp_tls = general_config["save_smtp_tls"]
+ opts.save_smtp_tls = general_config.getboolean("save_smtp_tls")
  if "debug" in general_config:
  opts.debug = general_config.getboolean("debug")
  if "verbose" in general_config:
@@ -695,6 +758,8 @@ def _main():
  opts.reverse_dns_map_path = general_config["reverse_dns_path"]
  if "reverse_dns_map_url" in general_config:
  opts.reverse_dns_map_url = general_config["reverse_dns_url"]
+ if "prettify_json" in general_config:
+ opts.prettify_json = general_config.getboolean("prettify_json")

  if "mailbox" in config.sections():
  mailbox_config = config["mailbox"]
@@ -714,6 +779,8 @@ def _main():
  opts.mailbox_batch_size = mailbox_config.getint("batch_size")
  if "check_timeout" in mailbox_config:
  opts.mailbox_check_timeout = mailbox_config.getint("check_timeout")
+ if "since" in mailbox_config:
+ opts.mailbox_since = mailbox_config["since"]

  if "imap" in config.sections():
  imap_config = config["imap"]
@@ -726,7 +793,7 @@ def _main():
  if "host" in imap_config:
  opts.imap_host = imap_config["host"]
  else:
- logger.error("host setting missing from the " "imap config section")
+ logger.error("host setting missing from the imap config section")
  exit(-1)
  if "port" in imap_config:
  opts.imap_port = imap_config.getint("port")
@@ -737,19 +804,18 @@ def _main():
  if "ssl" in imap_config:
  opts.imap_ssl = imap_config.getboolean("ssl")
  if "skip_certificate_verification" in imap_config:
- imap_verify = imap_config.getboolean("skip_certificate_verification")
- opts.imap_skip_certificate_verification = imap_verify
+ opts.imap_skip_certificate_verification = imap_config.getboolean(
+ "skip_certificate_verification"
+ )
  if "user" in imap_config:
  opts.imap_user = imap_config["user"]
  else:
- logger.critical("user setting missing from the " "imap config section")
+ logger.critical("user setting missing from the imap config section")
  exit(-1)
  if "password" in imap_config:
  opts.imap_password = imap_config["password"]
  else:
- logger.critical(
- "password setting missing from the " "imap config section"
- )
+ logger.critical("password setting missing from the imap config section")
  exit(-1)
  if "reports_folder" in imap_config:
  opts.mailbox_reports_folder = imap_config["reports_folder"]
@@ -818,21 +884,20 @@ def _main():
  opts.graph_user = graph_config["user"]
  else:
  logger.critical(
- "user setting missing from the " "msgraph config section"
+ "user setting missing from the msgraph config section"
  )
  exit(-1)
  if "password" in graph_config:
  opts.graph_password = graph_config["password"]
  else:
  logger.critical(
- "password setting missing from the " "msgraph config section"
+ "password setting missing from the msgraph config section"
  )
  if "client_secret" in graph_config:
  opts.graph_client_secret = graph_config["client_secret"]
  else:
  logger.critical(
- "client_secret setting missing from the "
- "msgraph config section"
+ "client_secret setting missing from the msgraph config section"
  )
  exit(-1)

@@ -845,7 +910,7 @@ def _main():
  opts.graph_tenant_id = graph_config["tenant_id"]
  else:
  logger.critical(
- "tenant_id setting missing from the " "msgraph config section"
+ "tenant_id setting missing from the msgraph config section"
  )
  exit(-1)

@@ -854,8 +919,7 @@ def _main():
  opts.graph_client_secret = graph_config["client_secret"]
  else:
  logger.critical(
- "client_secret setting missing from the "
- "msgraph config section"
+ "client_secret setting missing from the msgraph config section"
  )
  exit(-1)

@@ -863,7 +927,7 @@ def _main():
  opts.graph_client_id = graph_config["client_id"]
  else:
  logger.critical(
- "client_id setting missing from the " "msgraph config section"
+ "client_id setting missing from the msgraph config section"
  )
  exit(-1)

@@ -871,10 +935,13 @@ def _main():
  opts.graph_mailbox = graph_config["mailbox"]
  elif opts.graph_auth_method != AuthMethod.UsernamePassword.name:
  logger.critical(
- "mailbox setting missing from the " "msgraph config section"
+ "mailbox setting missing from the msgraph config section"
  )
  exit(-1)

+ if "graph_url" in graph_config:
+ opts.graph_url = graph_config["graph_url"]
+
  if "allow_unencrypted_storage" in graph_config:
  opts.graph_allow_unencrypted_storage = graph_config.getboolean(
  "allow_unencrypted_storage"
@@ -886,7 +953,7 @@ def _main():
  opts.elasticsearch_hosts = _str_to_list(elasticsearch_config["hosts"])
  else:
  logger.critical(
- "hosts setting missing from the " "elasticsearch config section"
+ "hosts setting missing from the elasticsearch config section"
  )
  exit(-1)
  if "timeout" in elasticsearch_config:
@@ -915,8 +982,12 @@ def _main():
  opts.elasticsearch_username = elasticsearch_config["user"]
  if "password" in elasticsearch_config:
  opts.elasticsearch_password = elasticsearch_config["password"]
+ # Until 8.20
  if "apiKey" in elasticsearch_config:
  opts.elasticsearch_apiKey = elasticsearch_config["apiKey"]
+ # Since 8.20
+ if "api_key" in elasticsearch_config:
+ opts.elasticsearch_apiKey = elasticsearch_config["api_key"]

  if "opensearch" in config:
  opensearch_config = config["opensearch"]
@@ -924,7 +995,7 @@ def _main():
  opts.opensearch_hosts = _str_to_list(opensearch_config["hosts"])
  else:
  logger.critical(
- "hosts setting missing from the " "opensearch config section"
+ "hosts setting missing from the opensearch config section"
  )
  exit(-1)
  if "timeout" in opensearch_config:
@@ -951,8 +1022,12 @@ def _main():
  opts.opensearch_username = opensearch_config["user"]
  if "password" in opensearch_config:
  opts.opensearch_password = opensearch_config["password"]
+ # Until 8.20
  if "apiKey" in opensearch_config:
  opts.opensearch_apiKey = opensearch_config["apiKey"]
+ # Since 8.20
+ if "api_key" in opensearch_config:
+ opts.opensearch_apiKey = opensearch_config["api_key"]

  if "splunk_hec" in config.sections():
  hec_config = config["splunk_hec"]
@@ -960,21 +1035,21 @@ def _main():
  opts.hec = hec_config["url"]
  else:
  logger.critical(
- "url setting missing from the " "splunk_hec config section"
+ "url setting missing from the splunk_hec config section"
  )
  exit(-1)
  if "token" in hec_config:
  opts.hec_token = hec_config["token"]
  else:
  logger.critical(
- "token setting missing from the " "splunk_hec config section"
+ "token setting missing from the splunk_hec config section"
  )
  exit(-1)
  if "index" in hec_config:
  opts.hec_index = hec_config["index"]
  else:
  logger.critical(
- "index setting missing from the " "splunk_hec config section"
+ "index setting missing from the splunk_hec config section"
  )
  exit(-1)
  if "skip_certificate_verification" in hec_config:
@@ -987,9 +1062,7 @@ def _main():
  if "hosts" in kafka_config:
  opts.kafka_hosts = _str_to_list(kafka_config["hosts"])
  else:
- logger.critical(
- "hosts setting missing from the " "kafka config section"
- )
+ logger.critical("hosts setting missing from the kafka config section")
  exit(-1)
  if "user" in kafka_config:
  opts.kafka_username = kafka_config["user"]
@@ -1004,21 +1077,20 @@ def _main():
  opts.kafka_aggregate_topic = kafka_config["aggregate_topic"]
  else:
  logger.critical(
- "aggregate_topic setting missing from the " "kafka config section"
+ "aggregate_topic setting missing from the kafka config section"
  )
  exit(-1)
  if "forensic_topic" in kafka_config:
  opts.kafka_forensic_topic = kafka_config["forensic_topic"]
  else:
  logger.critical(
- "forensic_topic setting missing from the " "kafka config section"
+ "forensic_topic setting missing from the kafka config section"
  )
  if "smtp_tls_topic" in kafka_config:
  opts.kafka_smtp_tls_topic = kafka_config["smtp_tls_topic"]
  else:
  logger.critical(
- "forensic_topic setting missing from the "
- "splunk_hec config section"
+ "forensic_topic setting missing from the splunk_hec config section"
  )

  if "smtp" in config.sections():
@@ -1026,7 +1098,7 @@ def _main():
  if "host" in smtp_config:
  opts.smtp_host = smtp_config["host"]
  else:
- logger.critical("host setting missing from the " "smtp config section")
+ logger.critical("host setting missing from the smtp config section")
  exit(-1)
  if "port" in smtp_config:
  opts.smtp_port = smtp_config.getint("port")
@@ -1038,23 +1110,21 @@ def _main():
  if "user" in smtp_config:
  opts.smtp_user = smtp_config["user"]
  else:
- logger.critical("user setting missing from the " "smtp config section")
+ logger.critical("user setting missing from the smtp config section")
  exit(-1)
  if "password" in smtp_config:
  opts.smtp_password = smtp_config["password"]
  else:
- logger.critical(
- "password setting missing from the " "smtp config section"
- )
+ logger.critical("password setting missing from the smtp config section")
  exit(-1)
  if "from" in smtp_config:
  opts.smtp_from = smtp_config["from"]
  else:
- logger.critical("from setting missing from the " "smtp config section")
+ logger.critical("from setting missing from the smtp config section")
  if "to" in smtp_config:
  opts.smtp_to = _str_to_list(smtp_config["to"])
  else:
- logger.critical("to setting missing from the " "smtp config section")
+ logger.critical("to setting missing from the smtp config section")
  if "subject" in smtp_config:
  opts.smtp_subject = smtp_config["subject"]
  if "attachment" in smtp_config:
@@ -1067,7 +1137,7 @@ def _main():
  if "bucket" in s3_config:
  opts.s3_bucket = s3_config["bucket"]
  else:
- logger.critical("bucket setting missing from the " "s3 config section")
+ logger.critical("bucket setting missing from the s3 config section")
  exit(-1)
  if "path" in s3_config:
  opts.s3_path = s3_config["path"]
@@ -1092,9 +1162,7 @@ def _main():
  if "server" in syslog_config:
  opts.syslog_server = syslog_config["server"]
  else:
- logger.critical(
- "server setting missing from the " "syslog config section"
- )
+ logger.critical("server setting missing from the syslog config section")
  exit(-1)
  if "port" in syslog_config:
  opts.syslog_port = syslog_config["port"]
@@ -1116,7 +1184,9 @@ def _main():
  )
  opts.gmail_api_scopes = _str_to_list(opts.gmail_api_scopes)
  if "oauth2_port" in gmail_api_config:
- opts.gmail_api_oauth2_port = gmail_api_config.get("oauth2_port", 8080)
+ opts.gmail_api_oauth2_port = gmail_api_config.getint(
+ "oauth2_port", 8080
+ )

  if "maildir" in config.sections():
  maildir_api_config = config["maildir"]
@@ -1145,17 +1215,17 @@ def _main():
  if "host" in gelf_config:
  opts.gelf_host = gelf_config["host"]
  else:
- logger.critical("host setting missing from the " "gelf config section")
+ logger.critical("host setting missing from the gelf config section")
  exit(-1)
  if "port" in gelf_config:
  opts.gelf_port = gelf_config["port"]
  else:
- logger.critical("port setting missing from the " "gelf config section")
+ logger.critical("port setting missing from the gelf config section")
  exit(-1)
  if "mode" in gelf_config:
  opts.gelf_mode = gelf_config["mode"]
  else:
- logger.critical("mode setting missing from the " "gelf config section")
+ logger.critical("mode setting missing from the gelf config section")
  exit(-1)

  if "webhook" in config.sections():
@@ -1167,7 +1237,7 @@ def _main():
  if "smtp_tls_url" in webhook_config:
  opts.webhook_smtp_tls_url = webhook_config["smtp_tls_url"]
  if "timeout" in webhook_config:
- opts.webhook_timeout = webhook_config["timeout"]
+ opts.webhook_timeout = webhook_config.getint("timeout")

  logger.setLevel(logging.ERROR)

@@ -1181,8 +1251,7 @@ def _main():
  try:
  fh = logging.FileHandler(opts.log_file, "a")
  formatter = logging.Formatter(
- "%(asctime)s - "
- "%(levelname)s - [%(filename)s:%(lineno)d] - %(message)s"
+ "%(asctime)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(message)s"
  )
  fh.setFormatter(formatter)
  logger.addHandler(fh)
@@ -1219,11 +1288,11 @@ def _main():
  es_smtp_tls_index = "{0}{1}".format(prefix, es_smtp_tls_index)
  elastic.set_hosts(
  opts.elasticsearch_hosts,
- opts.elasticsearch_ssl,
- opts.elasticsearch_ssl_cert_path,
- opts.elasticsearch_username,
- opts.elasticsearch_password,
- opts.elasticsearch_apiKey,
+ use_ssl=opts.elasticsearch_ssl,
+ ssl_cert_path=opts.elasticsearch_ssl_cert_path,
+ username=opts.elasticsearch_username,
+ password=opts.elasticsearch_password,
+ api_key=opts.elasticsearch_api_key,
+ timeout=opts.elasticsearch_timeout,
  )
  elastic.migrate_indexes(
@@ -1251,11 +1320,11 @@ def _main():
  os_smtp_tls_index = "{0}{1}".format(prefix, os_smtp_tls_index)
  opensearch.set_hosts(
  opts.opensearch_hosts,
- opts.opensearch_ssl,
- opts.opensearch_ssl_cert_path,
- opts.opensearch_username,
- opts.opensearch_password,
- opts.opensearch_apiKey,
+ use_ssl=opts.opensearch_ssl,
+ ssl_cert_path=opts.opensearch_ssl_cert_path,
+ username=opts.opensearch_username,
+ password=opts.opensearch_password,
+ api_key=opts.opensearch_api_key,
+ timeout=opts.opensearch_timeout,
  )
  opensearch.migrate_indexes(
@@ -1290,7 +1359,7 @@ def _main():

  if opts.hec:
  if opts.hec_token is None or opts.hec_index is None:
- logger.error("HEC token and HEC index are required when " "using HEC URL")
+ logger.error("HEC token and HEC index are required when using HEC URL")
  exit(1)

  verify = True
@@ -1393,6 +1462,7 @@ def _main():
  opts.always_use_local_files,
  opts.reverse_dns_map_path,
  opts.reverse_dns_map_url,
+ opts.normalize_timespan_threshold_hours,
  child_conn,
  ),
  )
@@ -1415,7 +1485,17 @@ def _main():
|
||||
logger.error("Failed to parse {0} - {1}".format(result[1], result[0]))
|
||||
else:
|
||||
if result[0]["report_type"] == "aggregate":
|
||||
aggregate_reports.append(result[0]["report"])
|
||||
report_org = result[0]["report"]["report_metadata"]["org_name"]
|
||||
report_id = result[0]["report"]["report_metadata"]["report_id"]
|
||||
report_key = f"{report_org}_{report_id}"
|
||||
if report_key not in SEEN_AGGREGATE_REPORT_IDS:
|
||||
SEEN_AGGREGATE_REPORT_IDS[report_key] = True
|
||||
aggregate_reports.append(result[0]["report"])
|
||||
else:
|
||||
logger.debug(
|
||||
"Skipping duplicate aggregate report "
|
||||
f"from {report_org} with ID: {report_id}"
|
||||
)
|
||||
elif result[0]["report_type"] == "forensic":
|
||||
forensic_reports.append(result[0]["report"])
|
||||
elif result[0]["report_type"] == "smtp_tls":
|
||||
@@ -1433,6 +1513,7 @@ def _main():
|
||||
reverse_dns_map_path=opts.reverse_dns_map_path,
|
||||
reverse_dns_map_url=opts.reverse_dns_map_url,
|
||||
offline=opts.offline,
|
||||
normalize_timespan_threshold_hours=opts.normalize_timespan_threshold_hours,
|
||||
)
|
||||
aggregate_reports += reports["aggregate_reports"]
|
||||
forensic_reports += reports["forensic_reports"]
|
||||
@@ -1443,7 +1524,7 @@ def _main():
|
||||
try:
|
||||
if opts.imap_user is None or opts.imap_password is None:
|
||||
logger.error(
|
||||
"IMAP user and password must be specified if" "host is specified"
|
||||
"IMAP user and password must be specified ifhost is specified"
|
||||
)
|
||||
|
||||
ssl = True
|
||||
@@ -1451,7 +1532,7 @@ def _main():
|
||||
if opts.imap_skip_certificate_verification:
|
||||
logger.debug("Skipping IMAP certificate verification")
|
||||
verify = False
|
||||
if opts.imap_ssl is False:
|
||||
if not opts.imap_ssl:
|
||||
ssl = False
|
||||
|
||||
mailbox_connection = IMAPConnection(
|
||||
@@ -1482,6 +1563,7 @@ def _main():
|
||||
password=opts.graph_password,
|
||||
token_file=opts.graph_token_file,
|
||||
allow_unencrypted_storage=opts.graph_allow_unencrypted_storage,
|
||||
graph_url=opts.graph_url,
|
||||
)
|
||||
|
||||
except Exception:
|
||||
@@ -1540,6 +1622,8 @@ def _main():
|
||||
nameservers=opts.nameservers,
|
||||
test=opts.mailbox_test,
|
||||
strip_attachment_payloads=opts.strip_attachment_payloads,
|
||||
since=opts.mailbox_since,
|
||||
normalize_timespan_threshold_hours=opts.normalize_timespan_threshold_hours,
|
||||
)
|
||||
|
||||
aggregate_reports += reports["aggregate_reports"]
|
||||
@@ -1575,6 +1659,7 @@ def _main():
|
||||
username=opts.smtp_user,
|
||||
password=opts.smtp_password,
|
||||
subject=opts.smtp_subject,
|
||||
require_encryption=opts.smtp_ssl,
|
||||
)
|
||||
except Exception:
|
||||
logger.exception("Failed to email results")
|
||||
@@ -1601,6 +1686,7 @@ def _main():
|
||||
reverse_dns_map_path=opts.reverse_dns_map_path,
|
||||
reverse_dns_map_url=opts.reverse_dns_map_url,
|
||||
offline=opts.offline,
|
||||
normalize_timespan_threshold_hours=opts.normalize_timespan_threshold_hours,
|
||||
)
|
||||
except FileExistsError as error:
|
||||
logger.error("{0}".format(error.__str__()))
|
||||
|
||||
parsedmarc/constants.py (new file, 2 changes)
@@ -0,0 +1,2 @@
__version__ = "9.0.2"
USER_AGENT = f"parsedmarc/{__version__}"
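The new constants module gives the rest of the code a single place to read the version string. A minimal sketch of how another module might consume it; attaching it as an HTTP User-Agent header is an illustrative assumption, not something this file does by itself:

```python
from parsedmarc.constants import USER_AGENT

# e.g. attach the shared User-Agent string to outgoing HTTP requests
headers = {"User-Agent": USER_AGENT}  # "parsedmarc/9.0.2"
```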
@@ -1,5 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional, Union, Any
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from elasticsearch_dsl.search import Q
|
||||
@@ -67,6 +71,8 @@ class _AggregateReportDoc(Document):
|
||||
date_range = Date()
|
||||
date_begin = Date()
|
||||
date_end = Date()
|
||||
normalized_timespan = Boolean()
|
||||
original_timespan_seconds = Integer()
|
||||
errors = Text()
|
||||
published_policy = Object(_PublishedPolicy)
|
||||
source_ip_address = Ip()
|
||||
@@ -87,15 +93,15 @@ class _AggregateReportDoc(Document):
|
||||
dkim_results = Nested(_DKIMResult)
|
||||
spf_results = Nested(_SPFResult)
|
||||
|
||||
def add_policy_override(self, type_, comment):
|
||||
def add_policy_override(self, type_: str, comment: str):
|
||||
self.policy_overrides.append(_PolicyOverride(type=type_, comment=comment))
|
||||
|
||||
def add_dkim_result(self, domain, selector, result):
|
||||
def add_dkim_result(self, domain: str, selector: str, result: _DKIMResult):
|
||||
self.dkim_results.append(
|
||||
_DKIMResult(domain=domain, selector=selector, result=result)
|
||||
)
|
||||
|
||||
def add_spf_result(self, domain, scope, result):
|
||||
def add_spf_result(self, domain: str, scope: str, result: _SPFResult):
|
||||
self.spf_results.append(_SPFResult(domain=domain, scope=scope, result=result))
|
||||
|
||||
def save(self, **kwargs):
|
||||
@@ -131,21 +137,21 @@ class _ForensicSampleDoc(InnerDoc):
|
||||
body = Text()
|
||||
attachments = Nested(_EmailAttachmentDoc)
|
||||
|
||||
def add_to(self, display_name, address):
|
||||
def add_to(self, display_name: str, address: str):
|
||||
self.to.append(_EmailAddressDoc(display_name=display_name, address=address))
|
||||
|
||||
def add_reply_to(self, display_name, address):
|
||||
def add_reply_to(self, display_name: str, address: str):
|
||||
self.reply_to.append(
|
||||
_EmailAddressDoc(display_name=display_name, address=address)
|
||||
)
|
||||
|
||||
def add_cc(self, display_name, address):
|
||||
def add_cc(self, display_name: str, address: str):
|
||||
self.cc.append(_EmailAddressDoc(display_name=display_name, address=address))
|
||||
|
||||
def add_bcc(self, display_name, address):
|
||||
def add_bcc(self, display_name: str, address: str):
|
||||
self.bcc.append(_EmailAddressDoc(display_name=display_name, address=address))
|
||||
|
||||
def add_attachment(self, filename, content_type, sha256):
|
||||
def add_attachment(self, filename: str, content_type: str, sha256: str):
|
||||
self.attachments.append(
|
||||
_EmailAttachmentDoc(
|
||||
filename=filename, content_type=content_type, sha256=sha256
|
||||
@@ -197,15 +203,15 @@ class _SMTPTLSPolicyDoc(InnerDoc):
|
||||
|
||||
def add_failure_details(
|
||||
self,
|
||||
result_type,
|
||||
ip_address,
|
||||
receiving_ip,
|
||||
receiving_mx_helo,
|
||||
failed_session_count,
|
||||
sending_mta_ip=None,
|
||||
receiving_mx_hostname=None,
|
||||
additional_information_uri=None,
|
||||
failure_reason_code=None,
|
||||
result_type: str,
|
||||
ip_address: str,
|
||||
receiving_ip: str,
|
||||
receiving_mx_helo: str,
|
||||
failed_session_count: int,
|
||||
sending_mta_ip: Optional[str] = None,
|
||||
receiving_mx_hostname: Optional[str] = None,
|
||||
additional_information_uri: Optional[str] = None,
|
||||
failure_reason_code: Union[str, int, None] = None,
|
||||
):
|
||||
_details = _SMTPTLSFailureDetailsDoc(
|
||||
result_type=result_type,
|
||||
@@ -235,13 +241,14 @@ class _SMTPTLSReportDoc(Document):
|
||||
|
||||
def add_policy(
|
||||
self,
|
||||
policy_type,
|
||||
policy_domain,
|
||||
successful_session_count,
|
||||
failed_session_count,
|
||||
policy_string=None,
|
||||
mx_host_patterns=None,
|
||||
failure_details=None,
|
||||
policy_type: str,
|
||||
policy_domain: str,
|
||||
successful_session_count: int,
|
||||
failed_session_count: int,
|
||||
*,
|
||||
policy_string: Optional[str] = None,
|
||||
mx_host_patterns: Optional[list[str]] = None,
|
||||
failure_details: Optional[str] = None,
|
||||
):
|
||||
self.policies.append(
|
||||
policy_type=policy_type,
|
||||
@@ -259,24 +266,25 @@ class AlreadySaved(ValueError):
|
||||
|
||||
|
||||
def set_hosts(
|
||||
hosts,
|
||||
use_ssl=False,
|
||||
ssl_cert_path=None,
|
||||
username=None,
|
||||
password=None,
|
||||
apiKey=None,
|
||||
timeout=60.0,
|
||||
hosts: Union[str, list[str]],
|
||||
*,
|
||||
use_ssl: Optional[bool] = False,
|
||||
ssl_cert_path: Optional[str] = None,
|
||||
username: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
api_key: Optional[str] = None,
|
||||
timeout: Optional[float] = 60.0,
|
||||
):
|
||||
"""
|
||||
Sets the Elasticsearch hosts to use
|
||||
|
||||
Args:
|
||||
hosts (str): A single hostname or URL, or list of hostnames or URLs
|
||||
use_ssl (bool): Use a HTTPS connection to the server
|
||||
hosts (str | list[str]): A single hostname or URL, or list of hostnames or URLs
|
||||
use_ssl (bool): Use an HTTPS connection to the server
|
||||
ssl_cert_path (str): Path to the certificate chain
|
||||
username (str): The username to use for authentication
|
||||
password (str): The password to use for authentication
|
||||
apiKey (str): The Base64 encoded API key to use for authentication
|
||||
api_key (str): The Base64 encoded API key to use for authentication
|
||||
timeout (float): Timeout in seconds
|
||||
"""
|
||||
if not isinstance(hosts, list):
|
||||
@@ -291,12 +299,12 @@ def set_hosts(
|
||||
conn_params["verify_certs"] = False
|
||||
if username:
|
||||
conn_params["http_auth"] = username + ":" + password
|
||||
if apiKey:
|
||||
conn_params["api_key"] = apiKey
|
||||
if api_key:
|
||||
conn_params["api_key"] = api_key
|
||||
connections.create_connection(**conn_params)
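For callers, the reworked set_hosts() keeps hosts positional, makes everything else keyword-only, and renames apiKey to api_key. A minimal usage sketch against the new signature, assuming parsedmarc's elastic module; the cluster URL and credentials below are placeholders:

```python
from parsedmarc import elastic

elastic.set_hosts(
    "https://elasticsearch.example.com:9200",
    use_ssl=True,
    ssl_cert_path="/etc/ssl/certs/ca.pem",
    username="parsedmarc",
    password="example-password",
    timeout=60.0,
)
```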
|
||||
|
||||
|
||||
def create_indexes(names, settings=None):
|
||||
def create_indexes(names: list[str], settings: Optional[dict[str, Any]] = None):
|
||||
"""
|
||||
Create Elasticsearch indexes
|
||||
|
||||
@@ -319,7 +327,10 @@ def create_indexes(names, settings=None):
|
||||
raise ElasticsearchError("Elasticsearch error: {0}".format(e.__str__()))
|
||||
|
||||
|
||||
def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
|
||||
def migrate_indexes(
|
||||
aggregate_indexes: Optional[list[str]] = None,
|
||||
forensic_indexes: Optional[list[str]] = None,
|
||||
):
|
||||
"""
|
||||
Updates index mappings
|
||||
|
||||
@@ -366,12 +377,12 @@ def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
|
||||
|
||||
|
||||
def save_aggregate_report_to_elasticsearch(
|
||||
aggregate_report,
|
||||
index_suffix=None,
|
||||
index_prefix=None,
|
||||
monthly_indexes=False,
|
||||
number_of_shards=1,
|
||||
number_of_replicas=0,
|
||||
aggregate_report: OrderedDict[str, Any],
|
||||
index_suffix: Optional[str] = None,
|
||||
index_prefix: Optional[str] = None,
|
||||
monthly_indexes: Optional[bool] = False,
|
||||
number_of_shards: Optional[int] = 1,
|
||||
number_of_replicas: Optional[int] = 0,
|
||||
):
|
||||
"""
|
||||
Saves a parsed DMARC aggregate report to Elasticsearch
|
||||
@@ -395,15 +406,11 @@ def save_aggregate_report_to_elasticsearch(
|
||||
domain = aggregate_report["policy_published"]["domain"]
|
||||
begin_date = human_timestamp_to_datetime(metadata["begin_date"], to_utc=True)
|
||||
end_date = human_timestamp_to_datetime(metadata["end_date"], to_utc=True)
|
||||
begin_date_human = begin_date.strftime("%Y-%m-%d %H:%M:%SZ")
|
||||
end_date_human = end_date.strftime("%Y-%m-%d %H:%M:%SZ")
|
||||
|
||||
if monthly_indexes:
|
||||
index_date = begin_date.strftime("%Y-%m")
|
||||
else:
|
||||
index_date = begin_date.strftime("%Y-%m-%d")
|
||||
aggregate_report["begin_date"] = begin_date
|
||||
aggregate_report["end_date"] = end_date
|
||||
date_range = [aggregate_report["begin_date"], aggregate_report["end_date"]]
|
||||
|
||||
org_name_query = Q(dict(match_phrase=dict(org_name=org_name)))
|
||||
report_id_query = Q(dict(match_phrase=dict(report_id=report_id)))
|
||||
@@ -425,6 +432,9 @@ def save_aggregate_report_to_elasticsearch(
|
||||
try:
|
||||
existing = search.execute()
|
||||
except Exception as error_:
|
||||
begin_date_human = begin_date.strftime("%Y-%m-%d %H:%M:%SZ")
|
||||
end_date_human = end_date.strftime("%Y-%m-%d %H:%M:%SZ")
|
||||
|
||||
raise ElasticsearchError(
|
||||
"Elasticsearch's search for existing report \
|
||||
error: {}".format(error_.__str__())
|
||||
@@ -450,6 +460,17 @@ def save_aggregate_report_to_elasticsearch(
|
||||
)
|
||||
|
||||
for record in aggregate_report["records"]:
|
||||
begin_date = human_timestamp_to_datetime(record["interval_begin"], to_utc=True)
|
||||
end_date = human_timestamp_to_datetime(record["interval_end"], to_utc=True)
|
||||
normalized_timespan = record["normalized_timespan"]
|
||||
|
||||
if monthly_indexes:
|
||||
index_date = begin_date.strftime("%Y-%m")
|
||||
else:
|
||||
index_date = begin_date.strftime("%Y-%m-%d")
|
||||
aggregate_report["begin_date"] = begin_date
|
||||
aggregate_report["end_date"] = end_date
|
||||
date_range = [aggregate_report["begin_date"], aggregate_report["end_date"]]
|
||||
agg_doc = _AggregateReportDoc(
|
||||
xml_schema=aggregate_report["xml_schema"],
|
||||
org_name=metadata["org_name"],
|
||||
@@ -457,8 +478,9 @@ def save_aggregate_report_to_elasticsearch(
|
||||
org_extra_contact_info=metadata["org_extra_contact_info"],
|
||||
report_id=metadata["report_id"],
|
||||
date_range=date_range,
|
||||
date_begin=aggregate_report["begin_date"],
|
||||
date_end=aggregate_report["end_date"],
|
||||
date_begin=begin_date,
|
||||
date_end=end_date,
|
||||
normalized_timespan=normalized_timespan,
|
||||
errors=metadata["errors"],
|
||||
published_policy=published_policy,
|
||||
source_ip_address=record["source"]["ip_address"],
|
||||
@@ -517,12 +539,12 @@ def save_aggregate_report_to_elasticsearch(
|
||||
|
||||
|
||||
def save_forensic_report_to_elasticsearch(
|
||||
forensic_report,
|
||||
index_suffix=None,
|
||||
index_prefix=None,
|
||||
monthly_indexes=False,
|
||||
number_of_shards=1,
|
||||
number_of_replicas=0,
|
||||
forensic_report: OrderedDict[str, Any],
|
||||
index_suffix: Optional[str] = None,
|
||||
index_prefix: Optional[str] = None,
|
||||
monthly_indexes: Optional[bool] = False,
|
||||
number_of_shards: int = 1,
|
||||
number_of_replicas: int = 0,
|
||||
):
|
||||
"""
|
||||
Saves a parsed DMARC forensic report to Elasticsearch
|
||||
@@ -552,8 +574,8 @@ def save_forensic_report_to_elasticsearch(
|
||||
for original_header in original_headers:
|
||||
headers[original_header.lower()] = original_headers[original_header]
|
||||
|
||||
arrival_date_human = forensic_report["arrival_date_utc"]
|
||||
arrival_date = human_timestamp_to_datetime(arrival_date_human)
|
||||
arrival_date = human_timestamp_to_datetime(forensic_report["arrival_date_utc"])
|
||||
arrival_date_epoch_milliseconds = int(arrival_date.timestamp() * 1000)
|
||||
|
||||
if index_suffix is not None:
|
||||
search_index = "dmarc_forensic_{0}*".format(index_suffix)
|
||||
@@ -562,20 +584,35 @@ def save_forensic_report_to_elasticsearch(
|
||||
if index_prefix is not None:
|
||||
search_index = "{0}{1}".format(index_prefix, search_index)
|
||||
search = Search(index=search_index)
|
||||
arrival_query = {"match": {"arrival_date": arrival_date}}
|
||||
q = Q(arrival_query)
|
||||
q = Q(dict(match=dict(arrival_date=arrival_date_epoch_milliseconds)))
|
||||
|
||||
from_ = None
|
||||
to_ = None
|
||||
subject = None
|
||||
if "from" in headers:
|
||||
from_ = headers["from"]
|
||||
from_query = {"match_phrase": {"sample.headers.from": from_}}
|
||||
q = q & Q(from_query)
|
||||
# We convert the FROM header from a string list to a flat string.
|
||||
headers["from"] = headers["from"][0]
|
||||
if headers["from"][0] == "":
|
||||
headers["from"] = headers["from"][1]
|
||||
else:
|
||||
headers["from"] = " <".join(headers["from"]) + ">"
|
||||
|
||||
from_ = dict()
|
||||
from_["sample.headers.from"] = headers["from"]
|
||||
from_query = Q(dict(match_phrase=from_))
|
||||
q = q & from_query
|
||||
if "to" in headers:
|
||||
to_ = headers["to"]
|
||||
to_query = {"match_phrase": {"sample.headers.to": to_}}
|
||||
q = q & Q(to_query)
|
||||
# We convert the TO header from a string list to a flat string.
|
||||
headers["to"] = headers["to"][0]
|
||||
if headers["to"][0] == "":
|
||||
headers["to"] = headers["to"][1]
|
||||
else:
|
||||
headers["to"] = " <".join(headers["to"]) + ">"
|
||||
|
||||
to_ = dict()
|
||||
to_["sample.headers.to"] = headers["to"]
|
||||
to_query = Q(dict(match_phrase=to_))
|
||||
q = q & to_query
|
||||
if "subject" in headers:
|
||||
subject = headers["subject"]
|
||||
subject_query = {"match_phrase": {"sample.headers.subject": subject}}
|
||||
@@ -589,7 +626,9 @@ def save_forensic_report_to_elasticsearch(
|
||||
"A forensic sample to {0} from {1} "
|
||||
"with a subject of {2} and arrival date of {3} "
|
||||
"already exists in "
|
||||
"Elasticsearch".format(to_, from_, subject, arrival_date_human)
|
||||
"Elasticsearch".format(
|
||||
to_, from_, subject, forensic_report["arrival_date_utc"]
|
||||
)
|
||||
)
|
||||
|
||||
parsed_sample = forensic_report["parsed_sample"]
|
||||
@@ -625,7 +664,7 @@ def save_forensic_report_to_elasticsearch(
|
||||
user_agent=forensic_report["user_agent"],
|
||||
version=forensic_report["version"],
|
||||
original_mail_from=forensic_report["original_mail_from"],
|
||||
arrival_date=arrival_date,
|
||||
arrival_date=arrival_date_epoch_milliseconds,
|
||||
domain=forensic_report["reported_domain"],
|
||||
original_envelope_id=forensic_report["original_envelope_id"],
|
||||
authentication_results=forensic_report["authentication_results"],
|
||||
@@ -667,12 +706,12 @@ def save_forensic_report_to_elasticsearch(
|
||||
|
||||
|
||||
def save_smtp_tls_report_to_elasticsearch(
|
||||
report,
|
||||
index_suffix=None,
|
||||
index_prefix=None,
|
||||
monthly_indexes=False,
|
||||
number_of_shards=1,
|
||||
number_of_replicas=0,
|
||||
report: OrderedDict[str, Any],
|
||||
index_suffix: str = None,
|
||||
index_prefix: str = None,
|
||||
monthly_indexes: Optional[bool] = False,
|
||||
number_of_shards: Optional[int] = 1,
|
||||
number_of_replicas: Optional[int] = 0,
|
||||
):
|
||||
"""
|
||||
Saves a parsed SMTP TLS report to Elasticsearch
|
||||
@@ -764,7 +803,7 @@ def save_smtp_tls_report_to_elasticsearch(
|
||||
policy_doc = _SMTPTLSPolicyDoc(
|
||||
policy_domain=policy["policy_domain"],
|
||||
policy_type=policy["policy_type"],
|
||||
succesful_session_count=policy["successful_session_count"],
|
||||
successful_session_count=policy["successful_session_count"],
|
||||
failed_session_count=policy["failed_session_count"],
|
||||
policy_string=policy_strings,
|
||||
mx_host_patterns=mx_host_patterns,
|
||||
|
||||
@@ -1,9 +1,14 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import logging
|
||||
import logging.handlers
|
||||
import json
|
||||
import threading
|
||||
from collections import OrderedDict
|
||||
|
||||
from parsedmarc import (
|
||||
parsed_aggregate_reports_to_csv_rows,
|
||||
@@ -48,7 +53,7 @@ class GelfClient(object):
|
||||
)
|
||||
self.logger.addHandler(self.handler)
|
||||
|
||||
def save_aggregate_report_to_gelf(self, aggregate_reports):
|
||||
def save_aggregate_report_to_gelf(self, aggregate_reports: OrderedDict[str, Any]):
|
||||
rows = parsed_aggregate_reports_to_csv_rows(aggregate_reports)
|
||||
for row in rows:
|
||||
log_context_data.parsedmarc = row
|
||||
@@ -56,12 +61,12 @@ class GelfClient(object):
|
||||
|
||||
log_context_data.parsedmarc = None
|
||||
|
||||
def save_forensic_report_to_gelf(self, forensic_reports):
|
||||
def save_forensic_report_to_gelf(self, forensic_reports: OrderedDict[str, Any]):
|
||||
rows = parsed_forensic_reports_to_csv_rows(forensic_reports)
|
||||
for row in rows:
|
||||
self.logger.info(json.dumps(row))
|
||||
|
||||
def save_smtp_tls_report_to_gelf(self, smtp_tls_reports):
|
||||
def save_smtp_tls_report_to_gelf(self, smtp_tls_reports: OrderedDict[str, Any]):
|
||||
rows = parsed_smtp_tls_reports_to_csv_rows(smtp_tls_reports)
|
||||
for row in rows:
|
||||
self.logger.info(json.dumps(row))
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any, Optional
|
||||
from ssl import SSLContext
|
||||
|
||||
import json
|
||||
from ssl import create_default_context
|
||||
|
||||
@@ -18,7 +23,13 @@ class KafkaError(RuntimeError):
|
||||
|
||||
class KafkaClient(object):
|
||||
def __init__(
|
||||
self, kafka_hosts, ssl=False, username=None, password=None, ssl_context=None
|
||||
self,
|
||||
kafka_hosts: list[str],
|
||||
*,
|
||||
ssl: Optional[bool] = False,
|
||||
username: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
ssl_context: Optional[SSLContext] = None,
|
||||
):
|
||||
"""
|
||||
Initializes the Kafka client
|
||||
@@ -28,7 +39,7 @@ class KafkaClient(object):
|
||||
ssl (bool): Use a SSL/TLS connection
|
||||
username (str): An optional username
|
||||
password (str): An optional password
|
||||
ssl_context: SSL context options
|
||||
ssl_context (SSLContext): SSL context options
|
||||
|
||||
Notes:
|
||||
``use_ssl=True`` is implied when a username or password are
|
||||
@@ -55,7 +66,7 @@ class KafkaClient(object):
|
||||
raise KafkaError("No Kafka brokers available")
|
||||
|
||||
@staticmethod
|
||||
def strip_metadata(report):
|
||||
def strip_metadata(report: OrderedDict[str, Any]):
|
||||
"""
|
||||
Duplicates org_name, org_email and report_id into JSON root
|
||||
and removes report_metadata key to bring it more inline
|
||||
@@ -69,7 +80,7 @@ class KafkaClient(object):
|
||||
return report
|
||||
|
||||
@staticmethod
|
||||
def generate_daterange(report):
|
||||
def generate_date_range(report: OrderedDict[str, Any]):
|
||||
"""
|
||||
Creates a date_range timestamp with format YYYY-MM-DD-T-HH:MM:SS
|
||||
based on begin and end dates for easier parsing in Kibana.
|
||||
@@ -86,7 +97,9 @@ class KafkaClient(object):
|
||||
logger.debug("date_range is {}".format(date_range))
|
||||
return date_range
|
||||
|
||||
def save_aggregate_reports_to_kafka(self, aggregate_reports, aggregate_topic):
|
||||
def save_aggregate_reports_to_kafka(
|
||||
self, aggregate_reports: list[OrderedDict[str, Any]], aggregate_topic: str
|
||||
):
|
||||
"""
|
||||
Saves aggregate DMARC reports to Kafka
|
||||
|
||||
@@ -105,7 +118,7 @@ class KafkaClient(object):
|
||||
return
|
||||
|
||||
for report in aggregate_reports:
|
||||
report["date_range"] = self.generate_daterange(report)
|
||||
report["date_range"] = self.generate_date_range(report)
|
||||
report = self.strip_metadata(report)
|
||||
|
||||
for slice in report["records"]:
|
||||
@@ -129,7 +142,9 @@ class KafkaClient(object):
|
||||
except Exception as e:
|
||||
raise KafkaError("Kafka error: {0}".format(e.__str__()))
|
||||
|
||||
def save_forensic_reports_to_kafka(self, forensic_reports, forensic_topic):
|
||||
def save_forensic_reports_to_kafka(
|
||||
self, forensic_reports: OrderedDict[str, Any], forensic_topic: str
|
||||
):
|
||||
"""
|
||||
Saves forensic DMARC reports to Kafka, sends individual
|
||||
records (slices) since Kafka requires messages to be <= 1MB
|
||||
@@ -159,7 +174,9 @@ class KafkaClient(object):
|
||||
except Exception as e:
|
||||
raise KafkaError("Kafka error: {0}".format(e.__str__()))
|
||||
|
||||
def save_smtp_tls_reports_to_kafka(self, smtp_tls_reports, smtp_tls_topic):
|
||||
def save_smtp_tls_reports_to_kafka(
|
||||
self, smtp_tls_reports: list[OrderedDict[str, Any]], smtp_tls_topic: str
|
||||
):
|
||||
"""
|
||||
Saves SMTP TLS reports to Kafka, sends individual
|
||||
records (slices) since Kafka requires messages to be <= 1MB
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
from collections import OrderedDict
|
||||
|
||||
from parsedmarc.log import logger
|
||||
from azure.core.exceptions import HttpResponseError
|
||||
from azure.identity import ClientSecretCredential
|
||||
@@ -102,7 +108,12 @@ class LogAnalyticsClient(object):
|
||||
"Invalid configuration. " + "One or more required settings are missing."
|
||||
)
|
||||
|
||||
def publish_json(self, results, logs_client: LogsIngestionClient, dcr_stream: str):
|
||||
def publish_json(
|
||||
self,
|
||||
results: OrderedDict[str, OrderedDict[str, Any]],
|
||||
logs_client: LogsIngestionClient,
|
||||
dcr_stream: str,
|
||||
):
|
||||
"""
|
||||
Background function to publish given
|
||||
DMARC report to specific Data Collection Rule.
|
||||
@@ -121,7 +132,11 @@ class LogAnalyticsClient(object):
|
||||
raise LogAnalyticsException("Upload failed: {error}".format(error=e))
|
||||
|
||||
def publish_results(
|
||||
self, results, save_aggregate: bool, save_forensic: bool, save_smtp_tls: bool
|
||||
self,
|
||||
results: OrderedDict[str, OrderedDict[str, Any]],
|
||||
save_aggregate: bool,
|
||||
save_forensic: bool,
|
||||
save_smtp_tls: bool,
|
||||
):
|
||||
"""
|
||||
Function to publish DMARC and/or SMTP TLS reports to Log Analytics
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from base64 import urlsafe_b64decode
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
@@ -63,24 +67,36 @@ class GmailConnection(MailboxConnection):
|
||||
).execute()
|
||||
except HttpError as e:
|
||||
if e.status_code == 409:
|
||||
logger.debug(
|
||||
f"Folder {folder_name} already exists, " f"skipping creation"
|
||||
)
|
||||
logger.debug(f"Folder {folder_name} already exists, skipping creation")
|
||||
else:
|
||||
raise e
|
||||
|
||||
def _fetch_all_message_ids(self, reports_label_id, page_token=None):
|
||||
results = (
|
||||
self.service.users()
|
||||
.messages()
|
||||
.list(
|
||||
userId="me",
|
||||
includeSpamTrash=self.include_spam_trash,
|
||||
labelIds=[reports_label_id],
|
||||
pageToken=page_token,
|
||||
def _fetch_all_message_ids(self, reports_label_id, page_token=None, since=None):
|
||||
if since:
|
||||
results = (
|
||||
self.service.users()
|
||||
.messages()
|
||||
.list(
|
||||
userId="me",
|
||||
includeSpamTrash=self.include_spam_trash,
|
||||
labelIds=[reports_label_id],
|
||||
pageToken=page_token,
|
||||
q=f"after:{since}",
|
||||
)
|
||||
.execute()
|
||||
)
|
||||
else:
|
||||
results = (
|
||||
self.service.users()
|
||||
.messages()
|
||||
.list(
|
||||
userId="me",
|
||||
includeSpamTrash=self.include_spam_trash,
|
||||
labelIds=[reports_label_id],
|
||||
pageToken=page_token,
|
||||
)
|
||||
.execute()
|
||||
)
|
||||
.execute()
|
||||
)
|
||||
messages = results.get("messages", [])
|
||||
for message in messages:
|
||||
yield message["id"]
|
||||
@@ -92,7 +108,13 @@ class GmailConnection(MailboxConnection):
|
||||
|
||||
def fetch_messages(self, reports_folder: str, **kwargs) -> List[str]:
|
||||
reports_label_id = self._find_label_id_for_label(reports_folder)
|
||||
return [id for id in self._fetch_all_message_ids(reports_label_id)]
|
||||
since = kwargs.get("since")
|
||||
if since:
|
||||
return [
|
||||
id for id in self._fetch_all_message_ids(reports_label_id, since=since)
|
||||
]
|
||||
else:
|
||||
return [id for id in self._fetch_all_message_ids(reports_label_id)]
|
||||
|
||||
def fetch_message(self, message_id):
|
||||
msg = (
|
||||
@@ -134,3 +156,4 @@ class GmailConnection(MailboxConnection):
|
||||
for label in labels:
|
||||
if label_name == label["id"] or label_name == label["name"]:
|
||||
return label["id"]
|
||||
return ""
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import Enum
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
@@ -89,6 +93,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
self,
|
||||
auth_method: str,
|
||||
mailbox: str,
|
||||
graph_url: str,
|
||||
client_id: str,
|
||||
client_secret: str,
|
||||
username: str,
|
||||
@@ -108,7 +113,10 @@ class MSGraphConnection(MailboxConnection):
|
||||
token_path=token_path,
|
||||
allow_unencrypted_storage=allow_unencrypted_storage,
|
||||
)
|
||||
client_params = {"credential": credential}
|
||||
client_params = {
|
||||
"credential": credential,
|
||||
"cloud": graph_url,
|
||||
}
|
||||
if not isinstance(credential, ClientSecretCredential):
|
||||
scopes = ["Mail.ReadWrite"]
|
||||
# Detect if mailbox is shared
|
||||
@@ -137,25 +145,30 @@ class MSGraphConnection(MailboxConnection):
|
||||
request_url = f"/users/{self.mailbox_name}/mailFolders{sub_url}"
|
||||
resp = self._client.post(request_url, json=request_body)
|
||||
if resp.status_code == 409:
|
||||
logger.debug(f"Folder {folder_name} already exists, " f"skipping creation")
|
||||
logger.debug(f"Folder {folder_name} already exists, skipping creation")
|
||||
elif resp.status_code == 201:
|
||||
logger.debug(f"Created folder {folder_name}")
|
||||
else:
|
||||
logger.warning(f"Unknown response " f"{resp.status_code} {resp.json()}")
|
||||
logger.warning(f"Unknown response {resp.status_code} {resp.json()}")
|
||||
|
||||
def fetch_messages(self, folder_name: str, **kwargs) -> List[str]:
|
||||
"""Returns a list of message UIDs in the specified folder"""
|
||||
folder_id = self._find_folder_id_from_folder_path(folder_name)
|
||||
url = f"/users/{self.mailbox_name}/mailFolders/" f"{folder_id}/messages"
|
||||
url = f"/users/{self.mailbox_name}/mailFolders/{folder_id}/messages"
|
||||
since = kwargs.get("since")
|
||||
if not since:
|
||||
since = None
|
||||
batch_size = kwargs.get("batch_size")
|
||||
if not batch_size:
|
||||
batch_size = 0
|
||||
emails = self._get_all_messages(url, batch_size)
|
||||
emails = self._get_all_messages(url, batch_size, since)
|
||||
return [email["id"] for email in emails]
|
||||
|
||||
def _get_all_messages(self, url, batch_size):
|
||||
def _get_all_messages(self, url, batch_size, since):
|
||||
messages: list
|
||||
params = {"$select": "id"}
|
||||
if since:
|
||||
params["$filter"] = f"receivedDateTime ge {since}"
|
||||
if batch_size and batch_size > 0:
|
||||
params["$top"] = batch_size
|
||||
else:
|
||||
@@ -166,7 +179,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
messages = result.json()["value"]
|
||||
# Loop if next page is present and not obtained message limit.
|
||||
while "@odata.nextLink" in result.json() and (
|
||||
batch_size == 0 or batch_size - len(messages) > 0
|
||||
since is not None or (batch_size == 0 or batch_size - len(messages) > 0)
|
||||
):
|
||||
result = self._client.get(result.json()["@odata.nextLink"])
|
||||
if result.status_code != 200:
|
||||
@@ -180,17 +193,19 @@ class MSGraphConnection(MailboxConnection):
|
||||
resp = self._client.patch(url, json={"isRead": "true"})
|
||||
if resp.status_code != 200:
|
||||
raise RuntimeWarning(
|
||||
f"Failed to mark message read" f"{resp.status_code}: {resp.json()}"
|
||||
f"Failed to mark message read{resp.status_code}: {resp.json()}"
|
||||
)
|
||||
|
||||
def fetch_message(self, message_id: str):
|
||||
def fetch_message(self, message_id: str, **kwargs):
|
||||
url = f"/users/{self.mailbox_name}/messages/{message_id}/$value"
|
||||
result = self._client.get(url)
|
||||
if result.status_code != 200:
|
||||
raise RuntimeWarning(
|
||||
f"Failed to fetch message" f"{result.status_code}: {result.json()}"
|
||||
f"Failed to fetch message{result.status_code}: {result.json()}"
|
||||
)
|
||||
self.mark_message_read(message_id)
|
||||
mark_read = kwargs.get("mark_read")
|
||||
if mark_read:
|
||||
self.mark_message_read(message_id)
|
||||
return result.text
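fetch_message() on the Graph connection now marks a message as read only when the caller opts in via mark_read. A minimal sketch of the new call pattern, assuming connection is an MSGraphConnection instance and using a made-up message ID:

```python
# Leave the message unread (new default behavior)
raw_mime = connection.fetch_message("AAMkADExample", mark_read=False)

# Opt back in to the old mark-as-read behavior
raw_mime = connection.fetch_message("AAMkADExample", mark_read=True)
```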
|
||||
|
||||
def delete_message(self, message_id: str):
|
||||
@@ -198,7 +213,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
resp = self._client.delete(url)
|
||||
if resp.status_code != 204:
|
||||
raise RuntimeWarning(
|
||||
f"Failed to delete message " f"{resp.status_code}: {resp.json()}"
|
||||
f"Failed to delete message {resp.status_code}: {resp.json()}"
|
||||
)
|
||||
|
||||
def move_message(self, message_id: str, folder_name: str):
|
||||
@@ -208,7 +223,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
resp = self._client.post(url, json=request_body)
|
||||
if resp.status_code != 201:
|
||||
raise RuntimeWarning(
|
||||
f"Failed to move message " f"{resp.status_code}: {resp.json()}"
|
||||
f"Failed to move message {resp.status_code}: {resp.json()}"
|
||||
)
|
||||
|
||||
def keepalive(self):
|
||||
@@ -243,7 +258,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
filter = f"?$filter=displayName eq '{folder_name}'"
|
||||
folders_resp = self._client.get(url + filter)
|
||||
if folders_resp.status_code != 200:
|
||||
raise RuntimeWarning(f"Failed to list folders." f"{folders_resp.json()}")
|
||||
raise RuntimeWarning(f"Failed to list folders.{folders_resp.json()}")
|
||||
folders: list = folders_resp.json()["value"]
|
||||
matched_folders = [
|
||||
folder for folder in folders if folder["displayName"] == folder_name
|
||||
|
||||
@@ -1,3 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from time import sleep
|
||||
|
||||
from imapclient.exceptions import IMAPClientError
|
||||
@@ -11,14 +17,15 @@ from parsedmarc.mail.mailbox_connection import MailboxConnection
|
||||
class IMAPConnection(MailboxConnection):
|
||||
def __init__(
|
||||
self,
|
||||
host=None,
|
||||
user=None,
|
||||
password=None,
|
||||
port=None,
|
||||
ssl=True,
|
||||
verify=True,
|
||||
timeout=30,
|
||||
max_retries=4,
|
||||
host: Optional[str] = None,
|
||||
*,
|
||||
user: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
port: Optional[str] = None,
|
||||
ssl: Optional[bool] = True,
|
||||
verify: Optional[bool] = True,
|
||||
timeout: Optional[int] = 30,
|
||||
max_retries: Optional[int] = 4,
|
||||
):
|
||||
self._username = user
|
||||
self._password = password
|
||||
@@ -39,15 +46,19 @@ class IMAPConnection(MailboxConnection):
|
||||
|
||||
def fetch_messages(self, reports_folder: str, **kwargs):
|
||||
self._client.select_folder(reports_folder)
|
||||
return self._client.search()
|
||||
since = kwargs.get("since")
|
||||
if since:
|
||||
return self._client.search(["SINCE", since])
|
||||
else:
|
||||
return self._client.search()
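fetch_messages() on the IMAP connection now forwards since straight into an IMAP SEARCH. A minimal sketch of the equivalent imapclient calls; the server, credentials, folder, and date below are placeholders:

```python
import imapclient

client = imapclient.IMAPClient("imap.example.com", ssl=True)
client.login("user@example.com", "example-password")
client.select_folder("INBOX")

# Equivalent to IMAPConnection.fetch_messages("INBOX", since="01-Jan-2024")
message_ids = client.search(["SINCE", "01-Jan-2024"])
```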
|
||||
|
||||
def fetch_message(self, message_id):
|
||||
def fetch_message(self, message_id: int):
|
||||
return self._client.fetch_message(message_id, parse=False)
|
||||
|
||||
def delete_message(self, message_id: str):
|
||||
def delete_message(self, message_id: int):
|
||||
self._client.delete_messages([message_id])
|
||||
|
||||
def move_message(self, message_id: str, folder_name: str):
|
||||
def move_message(self, message_id: int, folder_name: str):
|
||||
self._client.move_messages([message_id], folder_name)
|
||||
|
||||
def keepalive(self):
|
||||
@@ -81,7 +92,5 @@ class IMAPConnection(MailboxConnection):
|
||||
logger.warning("IMAP connection timeout. Reconnecting...")
|
||||
sleep(check_timeout)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"IMAP connection error. {0}. " "Reconnecting...".format(e)
|
||||
)
|
||||
logger.warning("IMAP connection error. {0}. Reconnecting...".format(e))
|
||||
sleep(check_timeout)
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC
|
||||
from typing import List
|
||||
|
||||
|
||||
class MailboxConnection(ABC):
|
||||
@@ -10,7 +13,7 @@ class MailboxConnection(ABC):
|
||||
def create_folder(self, folder_name: str):
|
||||
raise NotImplementedError
|
||||
|
||||
def fetch_messages(self, reports_folder: str, **kwargs) -> List[str]:
|
||||
def fetch_messages(self, reports_folder: str, **kwargs) -> list[str]:
|
||||
raise NotImplementedError
|
||||
|
||||
def fetch_message(self, message_id) -> str:
|
||||
|
||||
@@ -1,3 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from time import sleep
|
||||
|
||||
from parsedmarc.log import logger
|
||||
@@ -9,8 +15,8 @@ import os
|
||||
class MaildirConnection(MailboxConnection):
|
||||
def __init__(
|
||||
self,
|
||||
maildir_path=None,
|
||||
maildir_create=False,
|
||||
maildir_path: Optional[bool] = None,
|
||||
maildir_create: Optional[bool] = False,
|
||||
):
|
||||
self._maildir_path = maildir_path
|
||||
self._maildir_create = maildir_create
|
||||
@@ -36,7 +42,7 @@ class MaildirConnection(MailboxConnection):
|
||||
def fetch_messages(self, reports_folder: str, **kwargs):
|
||||
return self._client.keys()
|
||||
|
||||
def fetch_message(self, message_id):
|
||||
def fetch_message(self, message_id: str):
|
||||
return self._client.get(message_id).as_string()
|
||||
|
||||
def delete_message(self, message_id: str):
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional, Union, Any
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from opensearchpy import (
|
||||
@@ -67,6 +71,8 @@ class _AggregateReportDoc(Document):
|
||||
date_range = Date()
|
||||
date_begin = Date()
|
||||
date_end = Date()
|
||||
normalized_timespan = Boolean()
|
||||
original_timespan_seconds = Integer()
|
||||
errors = Text()
|
||||
published_policy = Object(_PublishedPolicy)
|
||||
source_ip_address = Ip()
|
||||
@@ -87,15 +93,15 @@ class _AggregateReportDoc(Document):
|
||||
dkim_results = Nested(_DKIMResult)
|
||||
spf_results = Nested(_SPFResult)
|
||||
|
||||
def add_policy_override(self, type_, comment):
|
||||
def add_policy_override(self, type_: str, comment: str):
|
||||
self.policy_overrides.append(_PolicyOverride(type=type_, comment=comment))
|
||||
|
||||
def add_dkim_result(self, domain, selector, result):
|
||||
def add_dkim_result(self, domain: str, selector: str, result: _DKIMResult):
|
||||
self.dkim_results.append(
|
||||
_DKIMResult(domain=domain, selector=selector, result=result)
|
||||
)
|
||||
|
||||
def add_spf_result(self, domain, scope, result):
|
||||
def add_spf_result(self, domain: str, scope: str, result: _SPFResult):
|
||||
self.spf_results.append(_SPFResult(domain=domain, scope=scope, result=result))
|
||||
|
||||
def save(self, **kwargs):
|
||||
@@ -131,21 +137,21 @@ class _ForensicSampleDoc(InnerDoc):
|
||||
body = Text()
|
||||
attachments = Nested(_EmailAttachmentDoc)
|
||||
|
||||
def add_to(self, display_name, address):
|
||||
def add_to(self, display_name: str, address: str):
|
||||
self.to.append(_EmailAddressDoc(display_name=display_name, address=address))
|
||||
|
||||
def add_reply_to(self, display_name, address):
|
||||
def add_reply_to(self, display_name: str, address: str):
|
||||
self.reply_to.append(
|
||||
_EmailAddressDoc(display_name=display_name, address=address)
|
||||
)
|
||||
|
||||
def add_cc(self, display_name, address):
|
||||
def add_cc(self, display_name: str, address: str):
|
||||
self.cc.append(_EmailAddressDoc(display_name=display_name, address=address))
|
||||
|
||||
def add_bcc(self, display_name, address):
|
||||
def add_bcc(self, display_name: str, address: str):
|
||||
self.bcc.append(_EmailAddressDoc(display_name=display_name, address=address))
|
||||
|
||||
def add_attachment(self, filename, content_type, sha256):
|
||||
def add_attachment(self, filename: str, content_type: str, sha256: str):
|
||||
self.attachments.append(
|
||||
_EmailAttachmentDoc(
|
||||
filename=filename, content_type=content_type, sha256=sha256
|
||||
@@ -197,18 +203,20 @@ class _SMTPTLSPolicyDoc(InnerDoc):
|
||||
|
||||
def add_failure_details(
|
||||
self,
|
||||
result_type,
|
||||
ip_address,
|
||||
receiving_ip,
|
||||
receiving_mx_helo,
|
||||
failed_session_count,
|
||||
receiving_mx_hostname=None,
|
||||
additional_information_uri=None,
|
||||
failure_reason_code=None,
|
||||
result_type: str,
|
||||
ip_address: str,
|
||||
receiving_ip: str,
|
||||
receiving_mx_helo: str,
|
||||
failed_session_count: int,
|
||||
sending_mta_ip: Optional[str] = None,
|
||||
receiving_mx_hostname: Optional[str] = None,
|
||||
additional_information_uri: Optional[str] = None,
|
||||
failure_reason_code: Union[str, int, None] = None,
|
||||
):
|
||||
self.failure_details.append(
|
||||
_details = _SMTPTLSFailureDetailsDoc(
|
||||
result_type=result_type,
|
||||
ip_address=ip_address,
|
||||
sending_mta_ip=sending_mta_ip,
|
||||
receiving_mx_hostname=receiving_mx_hostname,
|
||||
receiving_mx_helo=receiving_mx_helo,
|
||||
receiving_ip=receiving_ip,
|
||||
@@ -216,9 +224,10 @@ class _SMTPTLSPolicyDoc(InnerDoc):
|
||||
additional_information=additional_information_uri,
|
||||
failure_reason_code=failure_reason_code,
|
||||
)
|
||||
self.failure_details.append(_details)
|
||||
|
||||
|
||||
class _SMTPTLSFailureReportDoc(Document):
|
||||
class _SMTPTLSReportDoc(Document):
|
||||
class Index:
|
||||
name = "smtp_tls"
|
||||
|
||||
@@ -232,13 +241,14 @@ class _SMTPTLSFailureReportDoc(Document):
|
||||
|
||||
def add_policy(
|
||||
self,
|
||||
policy_type,
|
||||
policy_domain,
|
||||
successful_session_count,
|
||||
failed_session_count,
|
||||
policy_string=None,
|
||||
mx_host_patterns=None,
|
||||
failure_details=None,
|
||||
policy_type: str,
|
||||
policy_domain: str,
|
||||
successful_session_count: int,
|
||||
failed_session_count: int,
|
||||
*,
|
||||
policy_string: Optional[str] = None,
|
||||
mx_host_patterns: Optional[list[str]] = None,
|
||||
failure_details: Optional[str] = None,
|
||||
):
|
||||
self.policies.append(
|
||||
policy_type=policy_type,
|
||||
@@ -256,24 +266,25 @@ class AlreadySaved(ValueError):
|
||||
|
||||
|
||||
def set_hosts(
|
||||
hosts,
|
||||
use_ssl=False,
|
||||
ssl_cert_path=None,
|
||||
username=None,
|
||||
password=None,
|
||||
apiKey=None,
|
||||
timeout=60.0,
|
||||
hosts: Union[str, list[str]],
|
||||
*,
|
||||
use_ssl: Optional[bool] = False,
|
||||
ssl_cert_path: Optional[str] = None,
|
||||
username: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
api_key: Optional[str] = None,
|
||||
timeout: Optional[float] = 60.0,
|
||||
):
|
||||
"""
|
||||
Sets the OpenSearch hosts to use
|
||||
|
||||
Args:
|
||||
hosts (str|list): A hostname or URL, or list of hostnames or URLs
|
||||
hosts (str|list[str]): A single hostname or URL, or list of hostnames or URLs
|
||||
use_ssl (bool): Use an HTTPS connection to the server
|
||||
ssl_cert_path (str): Path to the certificate chain
|
||||
username (str): The username to use for authentication
|
||||
password (str): The password to use for authentication
|
||||
apiKey (str): The Base64 encoded API key to use for authentication
|
||||
api_key (str): The Base64 encoded API key to use for authentication
|
||||
timeout (float): Timeout in seconds
|
||||
"""
|
||||
if not isinstance(hosts, list):
|
||||
@@ -288,12 +299,12 @@ def set_hosts(
|
||||
conn_params["verify_certs"] = False
|
||||
if username:
|
||||
conn_params["http_auth"] = username + ":" + password
|
||||
if apiKey:
|
||||
conn_params["api_key"] = apiKey
|
||||
if api_key:
|
||||
conn_params["api_key"] = api_key
|
||||
connections.create_connection(**conn_params)
|
||||
|
||||
|
||||
def create_indexes(names, settings=None):
|
||||
def create_indexes(names: list[str], settings: Optional[dict[str, Any]] = None):
|
||||
"""
|
||||
Create OpenSearch indexes
|
||||
|
||||
@@ -316,7 +327,10 @@ def create_indexes(names, settings=None):
|
||||
raise OpenSearchError("OpenSearch error: {0}".format(e.__str__()))
|
||||
|
||||
|
||||
def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
|
||||
def migrate_indexes(
|
||||
aggregate_indexes: Optional[list[str]] = None,
|
||||
forensic_indexes: Optional[list[str]] = None,
|
||||
):
|
||||
"""
|
||||
Updates index mappings
|
||||
|
||||
@@ -362,13 +376,13 @@ def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
|
||||
pass
|
||||
|
||||
|
||||
def save_aggregate_report_to_opensearch(
|
||||
aggregate_report,
|
||||
index_suffix=None,
|
||||
index_prefix=None,
|
||||
monthly_indexes=False,
|
||||
number_of_shards=1,
|
||||
number_of_replicas=0,
|
||||
def save_aggregate_report_to_opensearch(
|
||||
aggregate_report: OrderedDict[str, Any],
|
||||
index_suffix: Optional[str] = None,
|
||||
index_prefix: Optional[str] = None,
|
||||
monthly_indexes: Optional[bool] = False,
|
||||
number_of_shards: Optional[int] = 1,
|
||||
number_of_replicas: Optional[int] = 0,
|
||||
):
|
||||
"""
|
||||
Saves a parsed DMARC aggregate report to OpenSearch
|
||||
@@ -392,15 +406,11 @@ def save_aggregate_report_to_opensearch(
|
||||
domain = aggregate_report["policy_published"]["domain"]
|
||||
begin_date = human_timestamp_to_datetime(metadata["begin_date"], to_utc=True)
|
||||
end_date = human_timestamp_to_datetime(metadata["end_date"], to_utc=True)
|
||||
begin_date_human = begin_date.strftime("%Y-%m-%d %H:%M:%SZ")
|
||||
end_date_human = end_date.strftime("%Y-%m-%d %H:%M:%SZ")
|
||||
|
||||
if monthly_indexes:
|
||||
index_date = begin_date.strftime("%Y-%m")
|
||||
else:
|
||||
index_date = begin_date.strftime("%Y-%m-%d")
|
||||
aggregate_report["begin_date"] = begin_date
|
||||
aggregate_report["end_date"] = end_date
|
||||
date_range = [aggregate_report["begin_date"], aggregate_report["end_date"]]
|
||||
|
||||
org_name_query = Q(dict(match_phrase=dict(org_name=org_name)))
|
||||
report_id_query = Q(dict(match_phrase=dict(report_id=report_id)))
|
||||
@@ -422,6 +432,9 @@ def save_aggregate_report_to_opensearch(
|
||||
try:
|
||||
existing = search.execute()
|
||||
except Exception as error_:
|
||||
begin_date_human = begin_date.strftime("%Y-%m-%d %H:%M:%SZ")
|
||||
end_date_human = end_date.strftime("%Y-%m-%d %H:%M:%SZ")
|
||||
|
||||
raise OpenSearchError(
|
||||
"OpenSearch's search for existing report \
|
||||
error: {}".format(error_.__str__())
|
||||
@@ -447,6 +460,17 @@ def save_aggregate_report_to_opensearch(
|
||||
)
|
||||
|
||||
for record in aggregate_report["records"]:
|
||||
begin_date = human_timestamp_to_datetime(record["interval_begin"], to_utc=True)
|
||||
end_date = human_timestamp_to_datetime(record["interval_end"], to_utc=True)
|
||||
normalized_timespan = record["normalized_timespan"]
|
||||
|
||||
if monthly_indexes:
|
||||
index_date = begin_date.strftime("%Y-%m")
|
||||
else:
|
||||
index_date = begin_date.strftime("%Y-%m-%d")
|
||||
aggregate_report["begin_date"] = begin_date
|
||||
aggregate_report["end_date"] = end_date
|
||||
date_range = [aggregate_report["begin_date"], aggregate_report["end_date"]]
|
||||
agg_doc = _AggregateReportDoc(
|
||||
xml_schema=aggregate_report["xml_schema"],
|
||||
org_name=metadata["org_name"],
|
||||
@@ -454,8 +478,9 @@ def save_aggregate_report_to_opensearch(
|
||||
org_extra_contact_info=metadata["org_extra_contact_info"],
|
||||
report_id=metadata["report_id"],
|
||||
date_range=date_range,
|
||||
date_begin=aggregate_report["begin_date"],
|
||||
date_end=aggregate_report["end_date"],
|
||||
date_begin=begin_date,
|
||||
date_end=end_date,
|
||||
normalized_timespan=normalized_timespan,
|
||||
errors=metadata["errors"],
|
||||
published_policy=published_policy,
|
||||
source_ip_address=record["source"]["ip_address"],
|
||||
@@ -499,6 +524,7 @@ def save_aggregate_report_to_opensearch(
|
||||
index = "{0}_{1}".format(index, index_suffix)
|
||||
if index_prefix:
|
||||
index = "{0}{1}".format(index_prefix, index)
|
||||
|
||||
index = "{0}-{1}".format(index, index_date)
|
||||
index_settings = dict(
|
||||
number_of_shards=number_of_shards, number_of_replicas=number_of_replicas
|
||||
@@ -512,13 +538,13 @@ def save_aggregate_report_to_opensearch(
|
||||
raise OpenSearchError("OpenSearch error: {0}".format(e.__str__()))
|
||||
|
||||
|
||||
def save_forensic_report_to_opensearch(
|
||||
forensic_report,
|
||||
index_suffix=None,
|
||||
index_prefix=None,
|
||||
monthly_indexes=False,
|
||||
number_of_shards=1,
|
||||
number_of_replicas=0,
|
||||
def save_forensic_report_to_opensearch(
|
||||
forensic_report: OrderedDict[str, Any],
|
||||
index_suffix: Optional[str] = None,
|
||||
index_prefix: Optional[str] = None,
|
||||
monthly_indexes: Optional[bool] = False,
|
||||
number_of_shards: int = 1,
|
||||
number_of_replicas: int = 0,
|
||||
):
|
||||
"""
|
||||
Saves a parsed DMARC forensic report to OpenSearch
|
||||
@@ -548,8 +574,8 @@ def save_forensic_report_to_opensearch(
|
||||
for original_header in original_headers:
|
||||
headers[original_header.lower()] = original_headers[original_header]
|
||||
|
||||
arrival_date_human = forensic_report["arrival_date_utc"]
|
||||
arrival_date = human_timestamp_to_datetime(arrival_date_human)
|
||||
arrival_date = human_timestamp_to_datetime(forensic_report["arrival_date_utc"])
|
||||
arrival_date_epoch_milliseconds = int(arrival_date.timestamp() * 1000)
|
||||
|
||||
if index_suffix is not None:
|
||||
search_index = "dmarc_forensic_{0}*".format(index_suffix)
|
||||
@@ -558,20 +584,35 @@ def save_forensic_report_to_opensearch(
|
||||
if index_prefix is not None:
|
||||
search_index = "{0}{1}".format(index_prefix, search_index)
|
||||
search = Search(index=search_index)
|
||||
arrival_query = {"match": {"arrival_date": arrival_date}}
|
||||
q = Q(arrival_query)
|
||||
q = Q(dict(match=dict(arrival_date=arrival_date_epoch_milliseconds)))
|
||||
|
||||
from_ = None
|
||||
to_ = None
|
||||
subject = None
|
||||
if "from" in headers:
|
||||
from_ = headers["from"]
|
||||
from_query = {"match_phrase": {"sample.headers.from": from_}}
|
||||
q = q & Q(from_query)
|
||||
# We convert the FROM header from a string list to a flat string.
|
||||
headers["from"] = headers["from"][0]
|
||||
if headers["from"][0] == "":
|
||||
headers["from"] = headers["from"][1]
|
||||
else:
|
||||
headers["from"] = " <".join(headers["from"]) + ">"
|
||||
|
||||
from_ = dict()
|
||||
from_["sample.headers.from"] = headers["from"]
|
||||
from_query = Q(dict(match_phrase=from_))
|
||||
q = q & from_query
|
||||
if "to" in headers:
|
||||
to_ = headers["to"]
|
||||
to_query = {"match_phrase": {"sample.headers.to": to_}}
|
||||
q = q & Q(to_query)
|
||||
# We convert the TO header from a string list to a flat string.
|
||||
headers["to"] = headers["to"][0]
|
||||
if headers["to"][0] == "":
|
||||
headers["to"] = headers["to"][1]
|
||||
else:
|
||||
headers["to"] = " <".join(headers["to"]) + ">"
|
||||
|
||||
to_ = dict()
|
||||
to_["sample.headers.to"] = headers["to"]
|
||||
to_query = Q(dict(match_phrase=to_))
|
||||
q = q & to_query
|
||||
if "subject" in headers:
|
||||
subject = headers["subject"]
|
||||
subject_query = {"match_phrase": {"sample.headers.subject": subject}}
|
||||
@@ -585,7 +626,9 @@ def save_forensic_report_to_opensearch(
|
||||
"A forensic sample to {0} from {1} "
|
||||
"with a subject of {2} and arrival date of {3} "
|
||||
"already exists in "
|
||||
"OpenSearch".format(to_, from_, subject, arrival_date_human)
|
||||
"OpenSearch".format(
|
||||
to_, from_, subject, forensic_report["arrival_date_utc"]
|
||||
)
|
||||
)
|
||||
|
||||
parsed_sample = forensic_report["parsed_sample"]
|
||||
@@ -621,7 +664,7 @@ def save_forensic_report_to_opensearch(
|
||||
user_agent=forensic_report["user_agent"],
|
||||
version=forensic_report["version"],
|
||||
original_mail_from=forensic_report["original_mail_from"],
|
||||
arrival_date=arrival_date,
|
||||
arrival_date=arrival_date_epoch_milliseconds,
|
||||
domain=forensic_report["reported_domain"],
|
||||
original_envelope_id=forensic_report["original_envelope_id"],
|
||||
authentication_results=forensic_report["authentication_results"],
|
||||
@@ -662,13 +705,13 @@ def save_forensic_report_to_opensearch(
|
||||
)
|
||||
|
||||
|
||||
def save_smtp_tls_report_to_opensearch(
|
||||
report,
|
||||
index_suffix=None,
|
||||
index_prefix=None,
|
||||
monthly_indexes=False,
|
||||
number_of_shards=1,
|
||||
number_of_replicas=0,
|
||||
def save_smtp_tls_report_to_opensearch(
|
||||
report: OrderedDict[str, Any],
|
||||
index_suffix: str = None,
|
||||
index_prefix: str = None,
|
||||
monthly_indexes: Optional[bool] = False,
|
||||
number_of_shards: Optional[int] = 1,
|
||||
number_of_replicas: Optional[int] = 0,
|
||||
):
|
||||
"""
|
||||
Saves a parsed SMTP TLS report to OpenSearch
|
||||
@@ -684,8 +727,8 @@ def save_smtp_tls_report_to_opensearch(
|
||||
Raises:
|
||||
AlreadySaved
|
||||
"""
|
||||
logger.info("Saving aggregate report to OpenSearch")
|
||||
org_name = report["org_name"]
|
||||
logger.info("Saving SMTP TLS report to OpenSearch")
|
||||
org_name = report["organization_name"]
|
||||
report_id = report["report_id"]
|
||||
begin_date = human_timestamp_to_datetime(report["begin_date"], to_utc=True)
|
||||
end_date = human_timestamp_to_datetime(report["end_date"], to_utc=True)
|
||||
@@ -741,11 +784,11 @@ def save_smtp_tls_report_to_opensearch(
|
||||
number_of_shards=number_of_shards, number_of_replicas=number_of_replicas
|
||||
)
|
||||
|
||||
smtp_tls_doc = _SMTPTLSFailureReportDoc(
|
||||
organization_name=report["organization_name"],
|
||||
date_range=[report["date_begin"], report["date_end"]],
|
||||
date_begin=report["date_begin"],
|
||||
date_end=report["date_end"],
|
||||
smtp_tls_doc = _SMTPTLSReportDoc(
|
||||
org_name=report["organization_name"],
|
||||
date_range=[report["begin_date"], report["end_date"]],
|
||||
date_begin=report["begin_date"],
|
||||
date_end=report["end_date"],
|
||||
contact_info=report["contact_info"],
|
||||
report_id=report["report_id"],
|
||||
)
|
||||
@@ -760,32 +803,48 @@ def save_smtp_tls_report_to_opensearch(
|
||||
policy_doc = _SMTPTLSPolicyDoc(
|
||||
policy_domain=policy["policy_domain"],
|
||||
policy_type=policy["policy_type"],
|
||||
successful_session_count=policy["successful_session_count"],
|
||||
failed_session_count=policy["failed_session_count"],
|
||||
policy_string=policy_strings,
|
||||
mx_host_patterns=mx_host_patterns,
|
||||
)
|
||||
if "failure_details" in policy:
|
||||
failure_details = policy["failure_details"]
|
||||
receiving_mx_hostname = None
|
||||
additional_information_uri = None
|
||||
failure_reason_code = None
|
||||
if "receiving_mx_hostname" in failure_details:
|
||||
receiving_mx_hostname = failure_details["receiving_mx_hostname"]
|
||||
if "additional_information_uri" in failure_details:
|
||||
additional_information_uri = failure_details[
|
||||
"additional_information_uri"
|
||||
]
|
||||
if "failure_reason_code" in failure_details:
|
||||
failure_reason_code = failure_details["failure_reason_code"]
|
||||
policy_doc.add_failure_details(
|
||||
result_type=failure_details["result_type"],
|
||||
ip_address=failure_details["ip_address"],
|
||||
receiving_ip=failure_details["receiving_ip"],
|
||||
receiving_mx_helo=failure_details["receiving_mx_helo"],
|
||||
failed_session_count=failure_details["failed_session_count"],
|
||||
receiving_mx_hostname=receiving_mx_hostname,
|
||||
additional_information_uri=additional_information_uri,
|
||||
failure_reason_code=failure_reason_code,
|
||||
)
|
||||
for failure_detail in policy["failure_details"]:
|
||||
receiving_mx_hostname = None
|
||||
additional_information_uri = None
|
||||
failure_reason_code = None
|
||||
ip_address = None
|
||||
receiving_ip = None
|
||||
receiving_mx_helo = None
|
||||
sending_mta_ip = None
|
||||
|
||||
if "receiving_mx_hostname" in failure_detail:
|
||||
receiving_mx_hostname = failure_detail["receiving_mx_hostname"]
|
||||
if "additional_information_uri" in failure_detail:
|
||||
additional_information_uri = failure_detail[
|
||||
"additional_information_uri"
|
||||
]
|
||||
if "failure_reason_code" in failure_detail:
|
||||
failure_reason_code = failure_detail["failure_reason_code"]
|
||||
if "ip_address" in failure_detail:
|
||||
ip_address = failure_detail["ip_address"]
|
||||
if "receiving_ip" in failure_detail:
|
||||
receiving_ip = failure_detail["receiving_ip"]
|
||||
if "receiving_mx_helo" in failure_detail:
|
||||
receiving_mx_helo = failure_detail["receiving_mx_helo"]
|
||||
if "sending_mta_ip" in failure_detail:
|
||||
sending_mta_ip = failure_detail["sending_mta_ip"]
|
||||
policy_doc.add_failure_details(
|
||||
result_type=failure_detail["result_type"],
|
||||
ip_address=ip_address,
|
||||
receiving_ip=receiving_ip,
|
||||
receiving_mx_helo=receiving_mx_helo,
|
||||
failed_session_count=failure_detail["failed_session_count"],
|
||||
sending_mta_ip=sending_mta_ip,
|
||||
receiving_mx_hostname=receiving_mx_hostname,
|
||||
additional_information_uri=additional_information_uri,
|
||||
failure_reason_code=failure_reason_code,
|
||||
)
|
||||
smtp_tls_doc.policies.append(policy_doc)
|
||||
|
||||
create_indexes([index], index_settings)
|
||||
|
||||
@@ -1,7 +1,7 @@
# About

`dbip-country-lite.mmdb` is provided by [dbip][dbip] under a
[ Creative Commons Attribution 4.0 International License][cc].
[Creative Commons Attribution 4.0 International License][cc].

[dbip]: https://db-ip.com/db/lite.php
[dbip]: https://db-ip.com/db/download/ip-to-country-lite
[cc]: http://creativecommons.org/licenses/by/4.0/

Binary file not shown.
@@ -3,6 +3,8 @@
A mapping is meant to make it easier to identify who or what a sending source is. Please consider contributing
additional mappings in a GitHub Pull Request.

Do not open these CSV files in Excel. It will replace Unicode characters with question marks. Use LibreOffice Calc instead.

## base_reverse_dns_map.csv

A CSV file with three fields: `base_reverse_dns`, `name`, and `type`.
@@ -19,33 +21,72 @@ The `service_type` is based on the following rule precedence:
3. All telecommunications providers that offer internet access are identified as `ISP`, even if they also offer other services, such as web hosting or email hosting.
4. All web hosting providers are identified as `Web Hosting`, even if the service also offers email hosting.
5. All email account providers are identified as `Email Provider`, no matter how or where they are hosted
6. All legitimate platforms offering their Software as a Service SaaS) are identified as `SaaS`, regardless of industry. This helps simplify metrics.
6. All legitimate platforms offering their Software as a Service (SaaS) are identified as `SaaS`, regardless of industry. This helps simplify metrics.
7. All other senders that use their own domain as a Reverse DNS base domain should be identified based on their industry

- Agriculture
- Automotive
- Beauty
- Conglomerate
- Construction
- Consulting
- Defense
- Education
- Email Provider
- Email Security
- Education
- Entertainment
- Event Planning
- Finance
- Food
- Government
- Government Media
- Healthcare
- IaaS
- Industrial
- ISP
- Legal
- Logistics
- Manufacturing
- Marketing
- MSP
- MSSP
- News
- Nonprofit
- PaaS
- Photography
- Physical Security
- Print
- Publishing
- Real Estate
- Retail
- SaaS
- Science
- Search Engine
- Social Media
- Sports
- Staffing
- Technology
- Travel
- Web Host

The file currently contains over 600 mappings from a wide variety of email sending services, including large email
providers, SaaS platforms, small web hosts, and healthcare companies. Ideally this mapping will continuously grow to
include many other services and industries.
The file currently contains over 1,400 mappings from a wide variety of email sending sources.
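
A minimal sketch of how this map can be consumed, assuming the header fields described above; the file path and lookup value are illustrative:

```python
import csv

# Load base_reverse_dns_map.csv into a dict keyed by base reverse DNS domain.
# Field names follow the description above; adjust them if the header differs.
mapping = {}
with open("base_reverse_dns_map.csv", newline="", encoding="utf-8") as f:
    for row in csv.DictReader(f):
        mapping[row["base_reverse_dns"].strip().lower()] = (row["name"], row["type"])

# Illustrative lookup of a sending source by its base reverse DNS domain.
name, service_type = mapping.get("example-sender.com", ("Unknown", "Unknown"))
print(name, service_type)
```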

## known_unknown_base_reverse_dns.txt

A list of reverse DNS base domains that could not be identified as belonging to a particular organization, service, or industry.

## base_reverse_dns.csv

A CSV with the fields `source_name` and optionally `message_count`. This CSV can be generated by exporting the base DNS data from the Kibana or Splunk dashboards provided by parsedmarc. This file is not tracked by Git.

## unknown_base_reverse_dns.csv

A CSV file with the fields `source_name` and `message_count`. This file is not tracked by Git.

## find_bad_utf8.py

Locates invalid UTF-8 bytes in files and optionally tries to correct them. Generated by GPT-5. Helped me find where I had introduced invalid bytes in `base_reverse_dns_map.csv`.

## find_unknown_base_reverse_dns.py

This is a Python script that reads the domains in `base_reverse_dns.csv` and writes the domains that are not in `base_reverse_dns_map.csv` or `known_unknown_base_reverse_dns.txt` to `unknown_base_reverse_dns.csv`. This is useful for identifying potential additional domains to contribute to `base_reverse_dns_map.csv` and `known_unknown_base_reverse_dns.txt`.

File diff suppressed because it is too large
44
parsedmarc/resources/maps/base_reverse_dns_types.txt
Normal file
@@ -0,0 +1,44 @@
|
||||
Agriculture
|
||||
Automotive
|
||||
Beauty
|
||||
Conglomerate
|
||||
Construction
|
||||
Consulting
|
||||
Defense
|
||||
Education
|
||||
Email Provider
|
||||
Email Security
|
||||
Entertainment
|
||||
Event Planning
|
||||
Finance
|
||||
Food
|
||||
Government
|
||||
Government Media
|
||||
Healthcare
|
||||
ISP
|
||||
IaaS
|
||||
Industrial
|
||||
Legal
|
||||
Logistics
|
||||
MSP
|
||||
MSSP
|
||||
Manufacturing
|
||||
Marketing
|
||||
News
|
||||
Nonprofit
|
||||
PaaS
|
||||
Photography
|
||||
Physical Security
|
||||
Print
|
||||
Publishing
|
||||
Real Estate
|
||||
Retail
|
||||
SaaS
|
||||
Science
|
||||
Search Engine
|
||||
Social Media
|
||||
Sports
|
||||
Staffing
|
||||
Technology
|
||||
Travel
|
||||
Web Host
|
||||
488
parsedmarc/resources/maps/find_bad_utf8.py
Executable file
@@ -0,0 +1,488 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
|
||||
import argparse
|
||||
import codecs
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
from typing import List, Tuple
|
||||
|
||||
"""
|
||||
Locates and optionally corrects bad UTF-8 bytes in a file.
|
||||
Generated by GPT-5. Use at your own risk.
|
||||
"""
|
||||
|
||||
# -------------------------
|
||||
# UTF-8 scanning
|
||||
# -------------------------
|
||||
|
||||
|
||||
def scan_line_for_utf8_errors(
|
||||
line_bytes: bytes, line_no: int, base_offset: int, context: int
|
||||
):
|
||||
"""
|
||||
Scan one line of raw bytes for UTF-8 decoding errors.
|
||||
Returns a list of dicts describing each error.
|
||||
"""
|
||||
pos = 0
|
||||
results = []
|
||||
while pos < len(line_bytes):
|
||||
dec = codecs.getincrementaldecoder("utf-8")("strict")
|
||||
try:
|
||||
dec.decode(line_bytes[pos:], final=True)
|
||||
break
|
||||
except UnicodeDecodeError as e:
|
||||
rel_index = e.start
|
||||
abs_index_in_line = pos + rel_index
|
||||
abs_offset = base_offset + abs_index_in_line
|
||||
|
||||
start_ctx = max(0, abs_index_in_line - context)
|
||||
end_ctx = min(len(line_bytes), abs_index_in_line + 1 + context)
|
||||
ctx_bytes = line_bytes[start_ctx:end_ctx]
|
||||
bad_byte = line_bytes[abs_index_in_line : abs_index_in_line + 1]
|
||||
col = abs_index_in_line + 1 # 1-based byte column
|
||||
|
||||
results.append(
|
||||
{
|
||||
"line": line_no,
|
||||
"column": col,
|
||||
"abs_offset": abs_offset,
|
||||
"bad_byte_hex": bad_byte.hex(),
|
||||
"context_hex": ctx_bytes.hex(),
|
||||
"context_preview": ctx_bytes.decode("utf-8", errors="replace"),
|
||||
}
|
||||
)
|
||||
# Move past the offending byte and continue
|
||||
pos = abs_index_in_line + 1
|
||||
return results
|
||||
|
||||
|
||||
def scan_file_for_utf8_errors(path: str, context: int, limit: int):
|
||||
errors_found = 0
|
||||
limit_val = limit if limit != 0 else float("inf")
|
||||
|
||||
with open(path, "rb") as f:
|
||||
total_offset = 0
|
||||
line_no = 0
|
||||
while True:
|
||||
line = f.readline()
|
||||
if not line:
|
||||
break
|
||||
line_no += 1
|
||||
results = scan_line_for_utf8_errors(line, line_no, total_offset, context)
|
||||
for r in results:
|
||||
errors_found += 1
|
||||
print(
|
||||
f"[ERROR {errors_found}] Line {r['line']}, Column {r['column']}, "
|
||||
f"Absolute byte offset {r['abs_offset']}"
|
||||
)
|
||||
print(f" Bad byte: 0x{r['bad_byte_hex']}")
|
||||
print(f" Context (hex): {r['context_hex']}")
|
||||
print(f" Context (preview): {r['context_preview']}")
|
||||
print()
|
||||
if errors_found >= limit_val:
|
||||
print(f"Reached limit of {limit} errors. Stopping.")
|
||||
return errors_found
|
||||
total_offset += len(line)
|
||||
|
||||
if errors_found == 0:
|
||||
print("No invalid UTF-8 bytes found. 🎉")
|
||||
else:
|
||||
print(f"Found {errors_found} invalid UTF-8 byte(s).")
|
||||
return errors_found
|
||||
|
||||
|
||||
# -------------------------
|
||||
# Whole-file conversion
|
||||
# -------------------------
|
||||
|
||||
|
||||
def detect_encoding_text(path: str) -> Tuple[str, str]:
|
||||
"""
|
||||
Use charset-normalizer to detect file encoding.
|
||||
Return (encoding_name, decoded_text). Falls back to cp1252 if needed.
|
||||
"""
|
||||
try:
|
||||
from charset_normalizer import from_path
|
||||
except ImportError:
|
||||
print(
|
||||
"Please install charset-normalizer: pip install charset-normalizer",
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(4)
|
||||
|
||||
matches = from_path(path)
|
||||
match = matches.best()
|
||||
if match is None or match.encoding is None:
|
||||
# Fallback heuristic for Western single-byte text
|
||||
with open(path, "rb") as fb:
|
||||
data = fb.read()
|
||||
try:
|
||||
return "cp1252", data.decode("cp1252", errors="strict")
|
||||
except UnicodeDecodeError:
|
||||
print("Unable to detect encoding reliably.", file=sys.stderr)
|
||||
sys.exit(5)
|
||||
|
||||
return match.encoding, str(match)
|
||||
|
||||
|
||||
def convert_to_utf8(src_path: str, out_path: str, src_encoding: str = None) -> str:
|
||||
"""
|
||||
Convert an entire file to UTF-8 (re-decoding everything).
|
||||
If src_encoding is provided, use it; else auto-detect.
|
||||
Returns the encoding actually used.
|
||||
"""
|
||||
if src_encoding:
|
||||
with open(src_path, "rb") as fb:
|
||||
data = fb.read()
|
||||
try:
|
||||
text = data.decode(src_encoding, errors="strict")
|
||||
except LookupError:
|
||||
print(f"Unknown encoding: {src_encoding}", file=sys.stderr)
|
||||
sys.exit(6)
|
||||
except UnicodeDecodeError as e:
|
||||
print(f"Decoding failed with {src_encoding}: {e}", file=sys.stderr)
|
||||
sys.exit(7)
|
||||
used = src_encoding
|
||||
else:
|
||||
used, text = detect_encoding_text(src_path)
|
||||
|
||||
with open(out_path, "w", encoding="utf-8", newline="") as fw:
|
||||
fw.write(text)
|
||||
return used
|
||||
|
||||
|
||||
def verify_utf8_file(path: str) -> Tuple[bool, str]:
|
||||
try:
|
||||
with open(path, "rb") as fb:
|
||||
fb.read().decode("utf-8", errors="strict")
|
||||
return True, ""
|
||||
except UnicodeDecodeError as e:
|
||||
return False, str(e)
|
||||
|
||||
|
||||
# -------------------------
|
||||
# Targeted single-byte fixer
|
||||
# -------------------------
|
||||
|
||||
|
||||
def iter_lines_with_offsets(b: bytes):
|
||||
"""
|
||||
Yield (line_bytes, line_start_abs_offset). Preserves LF/CRLF/CR in bytes.
|
||||
"""
|
||||
start = 0
|
||||
for i, byte in enumerate(b):
|
||||
if byte == 0x0A: # LF
|
||||
yield b[start : i + 1], start
|
||||
start = i + 1
|
||||
if start < len(b):
|
||||
yield b[start:], start
|
||||
|
||||
|
||||
def detect_probable_fallbacks() -> List[str]:
|
||||
# Good defaults for Western/Portuguese text
|
||||
return ["cp1252", "iso-8859-1", "iso-8859-15"]
|
||||
|
||||
|
||||
def repair_mixed_utf8_line(line: bytes, base_offset: int, fallback_chain: List[str]):
|
||||
"""
|
||||
Strictly validate UTF-8 and fix *only* the exact offending byte when an error occurs.
|
||||
This avoids touching adjacent valid UTF-8 (prevents mojibake like 'é').
|
||||
"""
|
||||
out_fragments: List[str] = []
|
||||
fixes = []
|
||||
pos = 0
|
||||
n = len(line)
|
||||
|
||||
while pos < n:
|
||||
dec = codecs.getincrementaldecoder("utf-8")("strict")
|
||||
try:
|
||||
s = dec.decode(line[pos:], final=True)
|
||||
out_fragments.append(s)
|
||||
break
|
||||
except UnicodeDecodeError as e:
|
||||
# Append the valid prefix before the error
|
||||
if e.start > 0:
|
||||
out_fragments.append(
|
||||
line[pos : pos + e.start].decode("utf-8", errors="strict")
|
||||
)
|
||||
|
||||
bad_index = pos + e.start # absolute index in 'line'
|
||||
bad_slice = line[bad_index : bad_index + 1] # FIX EXACTLY ONE BYTE
|
||||
|
||||
# Decode that single byte using the first working fallback
|
||||
decoded = None
|
||||
used_enc = None
|
||||
for enc in fallback_chain:
|
||||
try:
|
||||
decoded = bad_slice.decode(enc, errors="strict")
|
||||
used_enc = enc
|
||||
break
|
||||
except Exception:
|
||||
continue
|
||||
if decoded is None:
|
||||
# latin-1 always succeeds (byte->same code point)
|
||||
decoded = bad_slice.decode("latin-1")
|
||||
used_enc = "latin-1 (fallback)"
|
||||
|
||||
out_fragments.append(decoded)
|
||||
|
||||
# Log the fix
|
||||
col_1based = bad_index + 1 # byte-based column
|
||||
fixes.append(
|
||||
{
|
||||
"line_base_offset": base_offset,
|
||||
"line": None, # caller fills line number
|
||||
"column": col_1based,
|
||||
"abs_offset": base_offset + bad_index,
|
||||
"bad_bytes_hex": bad_slice.hex(),
|
||||
"used_encoding": used_enc,
|
||||
"replacement_preview": decoded,
|
||||
}
|
||||
)
|
||||
|
||||
# Advance exactly one byte past the offending byte and continue
|
||||
pos = bad_index + 1
|
||||
|
||||
return "".join(out_fragments), fixes
|
||||
|
||||
|
||||
def targeted_fix_to_utf8(
|
||||
src_path: str,
|
||||
out_path: str,
|
||||
fallback_chain: List[str],
|
||||
dry_run: bool,
|
||||
max_fixes: int,
|
||||
):
|
||||
with open(src_path, "rb") as fb:
|
||||
data = fb.read()
|
||||
|
||||
total_fixes = 0
|
||||
repaired_lines: List[str] = []
|
||||
line_no = 0
|
||||
max_val = max_fixes if max_fixes != 0 else float("inf")
|
||||
|
||||
for line_bytes, base_offset in iter_lines_with_offsets(data):
|
||||
line_no += 1
|
||||
# Fast path: keep lines that are already valid UTF-8
|
||||
try:
|
||||
repaired_lines.append(line_bytes.decode("utf-8", errors="strict"))
|
||||
continue
|
||||
except UnicodeDecodeError:
|
||||
pass
|
||||
|
||||
fixed_text, fixes = repair_mixed_utf8_line(
|
||||
line_bytes, base_offset, fallback_chain=fallback_chain
|
||||
)
|
||||
for f in fixes:
|
||||
f["line"] = line_no
|
||||
|
||||
repaired_lines.append(fixed_text)
|
||||
|
||||
# Log fixes
|
||||
for f in fixes:
|
||||
total_fixes += 1
|
||||
print(
|
||||
f"[FIX {total_fixes}] Line {f['line']}, Column {f['column']}, Abs offset {f['abs_offset']}"
|
||||
)
|
||||
print(f" Bad bytes: 0x{f['bad_bytes_hex']}")
|
||||
print(f" Used encoding: {f['used_encoding']}")
|
||||
preview = f["replacement_preview"].replace("\r", "\\r").replace("\n", "\\n")
|
||||
if len(preview) > 40:
|
||||
preview = preview[:40] + "…"
|
||||
print(f" Replacement preview: {preview}")
|
||||
print()
|
||||
if total_fixes >= max_val:
|
||||
print(f"Reached max fixes limit ({max_fixes}). Stopping scan.")
|
||||
break
|
||||
if total_fixes >= max_val:
|
||||
break
|
||||
|
||||
if dry_run:
|
||||
print(f"Dry run complete. Detected {total_fixes} fix(es). No file written.")
|
||||
return total_fixes
|
||||
|
||||
# Join and verify result can be encoded to UTF-8
|
||||
repaired_text = "".join(repaired_lines)
|
||||
try:
|
||||
repaired_text.encode("utf-8", errors="strict")
|
||||
except UnicodeEncodeError as e:
|
||||
print(f"Internal error: repaired text not valid UTF-8: {e}", file=sys.stderr)
|
||||
sys.exit(3)
|
||||
|
||||
with open(out_path, "w", encoding="utf-8", newline="") as fw:
|
||||
fw.write(repaired_text)
|
||||
|
||||
print(f"Fixed file written to: {out_path}")
|
||||
print(f"Total fixes applied: {total_fixes}")
|
||||
return total_fixes
|
||||
|
||||
|
||||
# -------------------------
|
||||
# CLI
|
||||
# -------------------------
|
||||
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser(
|
||||
description=(
|
||||
"Scan for invalid UTF-8; optionally convert whole file or fix only invalid bytes.\n\n"
|
||||
"By default, --convert and --fix **edit the input file in place** and create a backup "
|
||||
"named '<input>.bak' before writing. If you pass --output, the original file is left "
|
||||
"unchanged and no backup is created. Use --dry-run to preview fixes without writing."
|
||||
),
|
||||
formatter_class=argparse.RawTextHelpFormatter,
|
||||
)
|
||||
ap.add_argument("path", help="Path to the CSV/text file")
|
||||
ap.add_argument(
|
||||
"--context",
|
||||
type=int,
|
||||
default=20,
|
||||
help="Bytes of context to show around errors (default: 20)",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--limit",
|
||||
type=int,
|
||||
default=100,
|
||||
help="Max errors to report during scan (0 = unlimited)",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--skip-scan", action="store_true", help="Skip initial scan for speed"
|
||||
)
|
||||
|
||||
# Whole-file convert
|
||||
ap.add_argument(
|
||||
"--convert",
|
||||
action="store_true",
|
||||
help="Convert entire file to UTF-8 using auto/forced encoding "
|
||||
"(in-place by default; creates '<input>.bak').",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--encoding",
|
||||
help="Force source encoding for --convert or first fallback for --fix",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--output",
|
||||
help="Write to this path instead of in-place (no .bak is created in that case)",
|
||||
)
|
||||
|
||||
# Targeted fix
|
||||
ap.add_argument(
|
||||
"--fix",
|
||||
action="store_true",
|
||||
help="Fix only invalid byte(s) via fallback encodings "
|
||||
"(in-place by default; creates '<input>.bak').",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--fallbacks",
|
||||
help="Comma-separated fallback encodings (default: cp1252,iso-8859-1,iso-8859-15)",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="(fix) Print fixes but do not write or create a .bak",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--max-fixes",
|
||||
type=int,
|
||||
default=0,
|
||||
help="(fix) Stop after N fixes (0 = unlimited)",
|
||||
)
|
||||
|
||||
args = ap.parse_args()
|
||||
path = args.path
|
||||
|
||||
if not os.path.isfile(path):
|
||||
print(f"File not found: {path}", file=sys.stderr)
|
||||
sys.exit(2)
|
||||
|
||||
# Optional scan first
|
||||
if not args.skip_scan:
|
||||
scan_file_for_utf8_errors(path, context=args.context, limit=args.limit)
|
||||
|
||||
# Mode selection guards
|
||||
if args.convert and args.fix:
|
||||
print("Choose either --convert or --fix (not both).", file=sys.stderr)
|
||||
sys.exit(9)
|
||||
if not args.convert and not args.fix and args.skip_scan:
|
||||
print("No action selected (use --convert or --fix).")
|
||||
return
|
||||
if not args.convert and not args.fix:
|
||||
# User only wanted a scan
|
||||
return
|
||||
|
||||
# Determine output path and backup behavior
|
||||
# In-place by default: create '<input>.bak' before overwriting.
|
||||
if args.output:
|
||||
out_path = args.output
|
||||
in_place = False
|
||||
else:
|
||||
out_path = path
|
||||
in_place = True
|
||||
|
||||
# CONVERT mode
|
||||
if args.convert:
|
||||
print("\n[CONVERT MODE] Converting file to UTF-8...")
|
||||
if in_place:
|
||||
# Create backup before overwriting original
|
||||
backup_path = path + ".bak"
|
||||
shutil.copy2(path, backup_path)
|
||||
print(f"Backup created: {backup_path}")
|
||||
used = convert_to_utf8(path, out_path, src_encoding=args.encoding)
|
||||
print(f"Source encoding used: {used}")
|
||||
print(f"Saved UTF-8 file as: {out_path}")
|
||||
ok, err = verify_utf8_file(out_path)
|
||||
if ok:
|
||||
print("Verification: output is valid UTF-8 ✅")
|
||||
else:
|
||||
print(f"Verification failed: {err}")
|
||||
sys.exit(8)
|
||||
return
|
||||
|
||||
# FIX mode (targeted, single-byte)
|
||||
if args.fix:
|
||||
print("\n[FIX MODE] Fixing only invalid bytes to UTF-8...")
|
||||
if args.dry_run:
|
||||
# Dry-run: never write or create backup
|
||||
out_path_effective = os.devnull
|
||||
in_place_effective = False
|
||||
else:
|
||||
out_path_effective = out_path
|
||||
in_place_effective = in_place
|
||||
|
||||
# Build fallback chain (if --encoding provided, try it first)
|
||||
if args.fallbacks:
|
||||
fallback_chain = [e.strip() for e in args.fallbacks.split(",") if e.strip()]
|
||||
else:
|
||||
fallback_chain = detect_probable_fallbacks()
|
||||
if args.encoding and args.encoding not in fallback_chain:
|
||||
fallback_chain = [args.encoding] + fallback_chain
|
||||
|
||||
if in_place_effective:
|
||||
# Create backup before overwriting original (only when actually writing)
|
||||
backup_path = path + ".bak"
|
||||
shutil.copy2(path, backup_path)
|
||||
print(f"Backup created: {backup_path}")
|
||||
|
||||
fix_count = targeted_fix_to_utf8(
|
||||
path,
|
||||
out_path_effective,
|
||||
fallback_chain=fallback_chain,
|
||||
dry_run=args.dry_run,
|
||||
max_fixes=args.max_fixes,
|
||||
)
|
||||
|
||||
if not args.dry_run:
|
||||
ok, err = verify_utf8_file(out_path_effective)
|
||||
if ok:
|
||||
print("Verification: output is valid UTF-8 ✅")
|
||||
print(f"Fix mode completed — {fix_count} byte(s) corrected.")
|
||||
else:
|
||||
print(f"Verification failed: {err}")
|
||||
sys.exit(8)
|
||||
return
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
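A minimal sketch of driving the scanner and targeted fixer above from Python instead of the CLI; it assumes it runs in this maps directory, and the file name and fallback chain are illustrative:

```python
from find_bad_utf8 import scan_file_for_utf8_errors, targeted_fix_to_utf8

# Report every invalid UTF-8 byte with 20 bytes of context, no error limit.
scan_file_for_utf8_errors("base_reverse_dns_map.csv", context=20, limit=0)

# Preview single-byte repairs using Western single-byte fallbacks; with
# dry_run=True nothing is written and no backup is created.
targeted_fix_to_utf8(
    "base_reverse_dns_map.csv",
    "base_reverse_dns_map.utf8.csv",
    fallback_chain=["cp1252", "iso-8859-1"],
    dry_run=True,
    max_fixes=0,
)
```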
78
parsedmarc/resources/maps/find_unknown_base_reverse_dns.py
Executable file
@@ -0,0 +1,78 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import csv
|
||||
|
||||
|
||||
def _main():
|
||||
input_csv_file_path = "base_reverse_dns.csv"
|
||||
base_reverse_dns_map_file_path = "base_reverse_dns_map.csv"
|
||||
known_unknown_list_file_path = "known_unknown_base_reverse_dns.txt"
|
||||
psl_overrides_file_path = "psl_overrides.txt"
|
||||
output_csv_file_path = "unknown_base_reverse_dns.csv"
|
||||
|
||||
csv_headers = ["source_name", "message_count"]
|
||||
|
||||
known_unknown_domains = []
|
||||
psl_overrides = []
|
||||
known_domains = []
|
||||
output_rows = []
|
||||
|
||||
def load_list(file_path, list_var):
|
||||
if not os.path.exists(file_path):
|
||||
print(f"Error: {file_path} does not exist")
|
||||
print(f"Loading {file_path}")
|
||||
with open(file_path) as f:
|
||||
for line in f.readlines():
|
||||
domain = line.lower().strip()
|
||||
if domain in list_var:
|
||||
print(f"Error: {domain} is in {file_path} multiple times")
|
||||
exit(1)
|
||||
elif domain != "":
|
||||
list_var.append(domain)
|
||||
|
||||
load_list(known_unknown_list_file_path, known_unknown_domains)
|
||||
load_list(psl_overrides_file_path, psl_overrides)
|
||||
if not os.path.exists(base_reverse_dns_map_file_path):
|
||||
print(f"Error: {base_reverse_dns_map_file_path} does not exist")
|
||||
print(f"Loading {base_reverse_dns_map_file_path}")
|
||||
with open(base_reverse_dns_map_file_path) as f:
|
||||
for row in csv.DictReader(f):
|
||||
domain = row["base_reverse_dns"].lower().strip()
|
||||
if domain in known_domains:
|
||||
print(
|
||||
f"Error: {domain} is in {base_reverse_dns_map_file_path} multiple times"
|
||||
)
|
||||
exit()
|
||||
else:
|
||||
known_domains.append(domain)
|
||||
if domain in known_unknown_domains and known_domains:
|
||||
print(
|
||||
f"Error:{domain} is in {known_unknown_list_file_path} and \
|
||||
{base_reverse_dns_map_file_path}"
|
||||
)
|
||||
exit(1)
|
||||
if not os.path.exists(input_csv_file_path):
|
||||
print(f"Error: {base_reverse_dns_map_file_path} does not exist")
|
||||
exit(1)
|
||||
with open(input_csv_file_path) as f:
|
||||
for row in csv.DictReader(f):
|
||||
domain = row["source_name"].lower().strip()
|
||||
if domain == "":
|
||||
continue
|
||||
for psl_domain in psl_overrides:
|
||||
if domain.endswith(psl_domain):
|
||||
domain = psl_domain.strip(".").strip("-")
|
||||
break
|
||||
if domain not in known_domains and domain not in known_unknown_domains:
|
||||
print(f"New unknown domain found: {domain}")
|
||||
output_rows.append(row)
|
||||
print(f"Writing {output_csv_file_path}")
|
||||
with open(output_csv_file_path, "w") as f:
|
||||
writer = csv.DictWriter(f, fieldnames=csv_headers)
|
||||
writer.writeheader()
|
||||
writer.writerows(output_rows)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
_main()
|
||||
601
parsedmarc/resources/maps/known_unknown_base_reverse_dns.txt
Normal file
@@ -0,0 +1,601 @@
|
||||
1jli.site
|
||||
26.107
|
||||
444qcuhilla.com
|
||||
4xr1.com
|
||||
9services.com
|
||||
a7e.ru
|
||||
a94434500-blog.com
|
||||
aams8.jp
|
||||
abv-10.top
|
||||
acemail.co.in
|
||||
activaicon.com
|
||||
adcritic.net
|
||||
adlucrumnewsletter.com
|
||||
admin.corpivensa.gob.ve
|
||||
advantageiq.com
|
||||
advrider.ro
|
||||
aerospacevitro.us.com
|
||||
agenturserver.de
|
||||
aghories.com
|
||||
ai270.net
|
||||
albagroup-eg.com
|
||||
alchemy.net
|
||||
alohabeachcamp.net
|
||||
alsiscad.com
|
||||
aluminumpipetubing.com
|
||||
americanstorageca.com
|
||||
amplusserver.info
|
||||
anchorfundhub.com
|
||||
anglishment.com
|
||||
anteldata.net.uy
|
||||
antis.edu
|
||||
antonaoll.com
|
||||
anviklass.org
|
||||
anwrgrp.lat
|
||||
aosau.net
|
||||
arandomserver.com
|
||||
aransk.ru
|
||||
ardcs.cn
|
||||
armninl.met
|
||||
as29550.net
|
||||
asahachimaru.com
|
||||
aserv.co.za
|
||||
asmecam.it
|
||||
ateky.net.br
|
||||
aurelienvos.com
|
||||
automatech.lat
|
||||
avistaadvantage.com
|
||||
b8sales.com
|
||||
bahjs.com
|
||||
baliaura.com
|
||||
banaras.co
|
||||
bearandbullmarketnews.com
|
||||
bestinvestingtime.com
|
||||
bhjui.com
|
||||
biocorp.com
|
||||
biosophy.net
|
||||
bitter-echo.com
|
||||
bizhostingservices.com
|
||||
blguss.com
|
||||
bluenet.ch
|
||||
bluhosting.com
|
||||
bnasg.com
|
||||
bodiax.pp.ua
|
||||
bost-law.com
|
||||
brainity.com
|
||||
brazalnde.net
|
||||
brellatransplc.shop
|
||||
brnonet.cz
|
||||
broadwaycover.com
|
||||
brushinglegal.de
|
||||
brw.net
|
||||
btes.tv
|
||||
budgeteasehub.com
|
||||
buoytoys.com
|
||||
buyjapanese.jp
|
||||
c53dw7m24rj.com
|
||||
cahtelrandom.org
|
||||
casadelmarsamara.com
|
||||
cashflowmasterypro.com
|
||||
cavabeen.com
|
||||
cbti.net
|
||||
centralmalaysia.com
|
||||
chauffeurplan.co.uk
|
||||
checkpox.fun
|
||||
chegouseuvlache.org
|
||||
chinaxingyu.xyz
|
||||
christus.mx
|
||||
churchills.market
|
||||
ci-xyz.fit
|
||||
cisumrecords.com
|
||||
ckaik.cn
|
||||
clcktoact.com
|
||||
cli-eurosignal.cz
|
||||
cloud-admin.it
|
||||
cloud-edm.com
|
||||
cloudflare-email.org
|
||||
cloudhosting.rs
|
||||
cloudlogin.co
|
||||
cloudplatformpro.com
|
||||
cnode.io
|
||||
cntcloud.com
|
||||
code-it.net
|
||||
codefriend.top
|
||||
colombiaceropapel.org
|
||||
commerceinsurance.com
|
||||
comsharempc.com
|
||||
conexiona.com
|
||||
coolblaze.com
|
||||
coowo.com
|
||||
corpemail.net
|
||||
cp2-myorderbox.com
|
||||
cps.com.ar
|
||||
crnagora.net
|
||||
cross-d-bar-troutranch.com
|
||||
ctla.co.kr
|
||||
cumbalikonakhotel.com
|
||||
currencyexconverter.com
|
||||
daakbabu.com
|
||||
daikinmae.com
|
||||
dairyvalley.com.my
|
||||
dastans.ru
|
||||
datahost36.de
|
||||
ddii.network
|
||||
deep-sek.shop
|
||||
deetownsounds.com
|
||||
descarca-counter-strike.net
|
||||
detrot.xyz
|
||||
dettlaffinc.com
|
||||
dextoolse.net
|
||||
digestivedaily.com
|
||||
digi.net.my
|
||||
dinofelis.cn
|
||||
diwkyncbi.top
|
||||
dkginternet.com
|
||||
dnexpress.info
|
||||
dns-oid.com
|
||||
dnsindia.net
|
||||
domainserver.ne.jp
|
||||
domconfig.com
|
||||
doorsrv.com
|
||||
dreampox.fun
|
||||
dreamtechmedia.com
|
||||
ds.network
|
||||
dss-group.net
|
||||
dvj.theworkpc.com
|
||||
dwlcka.com
|
||||
dynamic-wiretel.in
|
||||
dyntcorp.com
|
||||
easternkingspei.com
|
||||
economiceagles.com
|
||||
egosimail.com
|
||||
eliotporterphotos.us
|
||||
emailgids.net
|
||||
emailperegrine.com
|
||||
entendercopilot.com
|
||||
entretothom.net
|
||||
epaycontrol.com
|
||||
epicinvestmentsreview.co
|
||||
epicinvestmentsreview.com
|
||||
epik.com
|
||||
epsilon-group.com
|
||||
erestaff.com
|
||||
euro-trade-gmbh.com
|
||||
example.com
|
||||
exposervers.com-new
|
||||
extendcp.co.uk
|
||||
eyecandyhosting.xyz
|
||||
fastwebnet.it
|
||||
fd9ing7wfn.com
|
||||
feipnghardware.com
|
||||
fetscorp.shop
|
||||
fewo-usedom.net
|
||||
fin-crime.com
|
||||
financeaimpoint.com
|
||||
financeupward.com
|
||||
firmflat.com
|
||||
flex-video.bnr.la
|
||||
flourishfusionlife.com
|
||||
formicidaehunt.net
|
||||
fosterheap.com
|
||||
fredi.shop
|
||||
frontiernet.net
|
||||
ftifb7tk3c.com
|
||||
gamersprotectionvpn.online
|
||||
gendns.com
|
||||
getgreencardsfast.com
|
||||
getthatroi.com
|
||||
gibbshosting.com
|
||||
gigidea.net
|
||||
giize.com
|
||||
ginous.eu.com
|
||||
gis.net
|
||||
gist-th.com
|
||||
globalglennpartners.com
|
||||
goldsboroughplace.com
|
||||
gophermedia.com
|
||||
gqlists.us.com
|
||||
gratzl.de
|
||||
greatestworldnews.com
|
||||
greennutritioncare.com
|
||||
gsbb.com
|
||||
gumbolimbo.net
|
||||
h-serv.co.uk
|
||||
haedefpartners.com
|
||||
halcyon-aboveboard.com
|
||||
hanzubon.org
|
||||
healthfuljourneyjoy.com
|
||||
hgnbroken.us.com
|
||||
highwey-diesel.com
|
||||
hirofactory.com
|
||||
hjd.asso.fr
|
||||
hongchenggco.pro
|
||||
hongkongtaxi.co
|
||||
hopsinthehanger.com
|
||||
hosted-by-worldstream.net
|
||||
hostelsucre.com
|
||||
hosting1337.com
|
||||
hostinghane.com
|
||||
hostinglotus.cloud
|
||||
hostingmichigan.com
|
||||
hostiran.name
|
||||
hostmnl.com
|
||||
hostname.localhost
|
||||
hostnetwork.com
|
||||
hosts.net.nz
|
||||
hostserv.eu
|
||||
hostwhitelabel.com
|
||||
hpms1.jp
|
||||
hunariojmk.net
|
||||
hunriokinmuim.net
|
||||
hypericine.com
|
||||
i-mecca.net
|
||||
iaasdns.com
|
||||
iam.net.ma
|
||||
iconmarketingguy.com
|
||||
idcfcloud.net
|
||||
idealconcept.live
|
||||
igmohji.com
|
||||
igppevents.org.uk
|
||||
ihglobaldns.com
|
||||
ilmessicano.com
|
||||
imjtmn.cn
|
||||
immenzaces.com
|
||||
in-addr-arpa
|
||||
in-addr.arpa
|
||||
indsalelimited.com
|
||||
indulgent-holistic.com
|
||||
industechint.org
|
||||
inshaaegypt.com
|
||||
intal.uz
|
||||
interfarma.kz
|
||||
intocpanel.com
|
||||
ip-147-135-108.us
|
||||
ip-178-33-109.eu
|
||||
ip-ptr.tech
|
||||
iswhatpercent.com
|
||||
itsidc.com
|
||||
itwebs.com
|
||||
iuon.net
|
||||
ivol.co
|
||||
jalanet.co.id
|
||||
jimishare.com
|
||||
jlccptt.net.cn
|
||||
jlenterprises.co.uk
|
||||
jmontalto.com
|
||||
joyomokei.com
|
||||
jumanra.org
|
||||
justlongshirts.com
|
||||
kahlaa.com
|
||||
kaw.theworkpc.com
|
||||
kbronet.com.tw
|
||||
kdnursing.org
|
||||
kielnet.net
|
||||
kihy.theworkpc.com
|
||||
kingschurchwirral.org
|
||||
kitchenaildbd.com
|
||||
klaomi.shop
|
||||
knkconsult.net
|
||||
kohshikai.com
|
||||
krhfund.org
|
||||
krillaglass.com
|
||||
lancorhomes.com
|
||||
landpedia.org
|
||||
lanzatuseo.es
|
||||
layerdns.cloud
|
||||
learninglinked.com
|
||||
legenditds.com
|
||||
levertechcentre.com
|
||||
lhost.no
|
||||
lideri.net.br
|
||||
lighthouse-media.com
|
||||
lightpath.net
|
||||
limogesporcelainboxes.com
|
||||
lindsaywalt.net
|
||||
linuxsunucum.com
|
||||
listertermoformadoa.com
|
||||
llsend.com
|
||||
local.net
|
||||
lohkal.com
|
||||
londionrtim.net
|
||||
lonestarmm.net
|
||||
longmarquis.com
|
||||
longwoodmgmt.com
|
||||
lse.kz
|
||||
lunvoy.com
|
||||
luxarpro.ru
|
||||
lwl-puehringer.at
|
||||
lynx.net.lb
|
||||
lyse.net
|
||||
m-sender.com.ua
|
||||
maggiolicloud.it
|
||||
magnetmail.net
|
||||
magnumgo.uz
|
||||
maia11.com
|
||||
mail-fire.com
|
||||
mailsentinel.net
|
||||
mailset.cn
|
||||
malardino.net
|
||||
managed-vps.net
|
||||
manhattanbulletpoint.com
|
||||
manpowerservices.com
|
||||
marketmysterycode.com
|
||||
marketwizardspro.com
|
||||
masterclassjournal.com
|
||||
matroguel.cam
|
||||
maximpactipo.com
|
||||
mechanicalwalk.store
|
||||
mediavobis.com
|
||||
meqlobal.com
|
||||
mgts.by
|
||||
migrans.net
|
||||
miixta.com
|
||||
milleniumsrv.com
|
||||
mindworksunlimited.com
|
||||
mirth-gale.com
|
||||
misorpresa.com
|
||||
mitomobile.com
|
||||
mitsubachi-kibako.net
|
||||
mjinn.com
|
||||
mkegs.shop
|
||||
mobius.fr
|
||||
model-ac.ink
|
||||
moderntradingnews.com
|
||||
monnaiegroup.com
|
||||
monopolizeright.com
|
||||
moonjaws.com
|
||||
morningnewscatcher.com
|
||||
motion4ever.net
|
||||
mschosting.com
|
||||
msdp1.com
|
||||
mspnet.pro
|
||||
mts-nn.ru
|
||||
multifamilydesign.com
|
||||
mxserver.ro
|
||||
mxthunder.net
|
||||
my-ihor.ru
|
||||
mycloudmailbox.com
|
||||
myfriendforum.com
|
||||
myrewards.net
|
||||
mysagestore.com
|
||||
mysecurewebserver.com
|
||||
myshanet.net
|
||||
myvps.jp
|
||||
mywedsite.net
|
||||
mywic.eu
|
||||
name.tools
|
||||
nanshenqfurniture.com
|
||||
nask.pl
|
||||
navertise.net
|
||||
ncbb.kz
|
||||
ncport.ru
|
||||
ncsdi.ws
|
||||
nebdig.com
|
||||
neovet-base.ru
|
||||
netbri.com
|
||||
netcentertelecom.net.br
|
||||
neti.ee
|
||||
netkl.org
|
||||
newinvestingguide.com
|
||||
newwallstreetcode.com
|
||||
ngvcv.cn
|
||||
nic.name
|
||||
nidix.net
|
||||
nieuwedagnetwerk.net
|
||||
nlscanme.com
|
||||
nmeuh.cn
|
||||
noisndametal.com
|
||||
nucleusemail.com
|
||||
nutriboostlife.com
|
||||
nwo.giize.com
|
||||
nwwhalewatchers.org
|
||||
ny.adsl
|
||||
nyt1.com
|
||||
offerslatedeals.com
|
||||
office365.us
|
||||
ogicom.net
|
||||
olivettilexikon.co.uk
|
||||
omegabrasil.inf.br
|
||||
onnet21.com
|
||||
onumubunumu.com
|
||||
oppt-ac.fit
|
||||
orbitel.net.co
|
||||
orfsurface.com
|
||||
orientalspot.com
|
||||
outsidences.com
|
||||
ovaltinalization.co
|
||||
overta.ru
|
||||
ox28vgrurc.com
|
||||
pamulang.net
|
||||
panaltyspot.space
|
||||
panolacountysheriffms.com
|
||||
passionatesmiles.com
|
||||
paulinelam.com
|
||||
pdi-corp.com
|
||||
peloquinbeck.com
|
||||
perimetercenter.net
|
||||
permanentscreen.com
|
||||
permasteellisagroup.com
|
||||
perumkijhyu.net
|
||||
pesnia.com.ua
|
||||
ph8ltwdi12o.com
|
||||
pharmada.com.de
|
||||
phdns3.es
|
||||
pigelixval1.com
|
||||
pipefittingsindia.com
|
||||
planethoster.net
|
||||
playamedia.io
|
||||
plesk.page
|
||||
pmnhost.net
|
||||
pokiloandhu.net
|
||||
pokupki5.ru
|
||||
polandi.net
|
||||
popiup.com
|
||||
ports.net
|
||||
posolstvostilya.com
|
||||
potia.net
|
||||
prima.com.ar
|
||||
prima.net.ar
|
||||
profsol.co.uk
|
||||
prohealthmotion.com
|
||||
promooffermarket.site
|
||||
proudserver.com
|
||||
proxado.com
|
||||
psnm.ru
|
||||
pvcwindowsprices.live
|
||||
qontenciplc.autos
|
||||
quakeclick.com
|
||||
quasarstate.store
|
||||
quatthonggiotico.com
|
||||
qxyxab44njd.com
|
||||
radianthealthrenaissance.com
|
||||
rapidns.com
|
||||
raxa.host
|
||||
reberte.com
|
||||
reethvikintl.com
|
||||
regruhosting.ru
|
||||
reliablepanel.com
|
||||
rgb365.eu
|
||||
riddlecamera.net
|
||||
riddletrends.com
|
||||
roccopugliese.com
|
||||
runnin-rebels.com
|
||||
rupar.puglia.it
|
||||
rwdhosting.ca
|
||||
s500host.com
|
||||
sageevents.co.ke
|
||||
sahacker-2020.com
|
||||
samsales.site
|
||||
sante-lorraine.fr
|
||||
saransk.ru
|
||||
satirogluet.com
|
||||
scioncontacts.com
|
||||
sdcc.my
|
||||
seaspraymta3.net
|
||||
secorp.mx
|
||||
securen.net
|
||||
securerelay.in
|
||||
securev.net
|
||||
seductiveeyes.com
|
||||
seizethedayconsulting.com
|
||||
serroplast.shop
|
||||
server290.com
|
||||
server342.com
|
||||
server3559.cc
|
||||
servershost.biz
|
||||
sfek.kz
|
||||
sgnetway.net
|
||||
shopfox.ca
|
||||
silvestrejaguar.sbs
|
||||
silvestreonca.sbs
|
||||
simplediagnostics.org
|
||||
siriuscloud.jp
|
||||
sisglobalresearch.com
|
||||
sixpacklink.net
|
||||
sjestyle.com
|
||||
smallvillages.com
|
||||
smartape-vps.com
|
||||
solusoftware.com
|
||||
sourcedns.com
|
||||
southcoastwebhosting12.com
|
||||
specialtvvs.com
|
||||
spiritualtechnologies.io
|
||||
sprout.org
|
||||
srv.cat
|
||||
stableserver.net
|
||||
statlerfa.co.uk
|
||||
stock-smtp.top
|
||||
stockepictigers.com
|
||||
stockexchangejournal.com
|
||||
subterranean-concave.com
|
||||
suksangroup.com
|
||||
swissbluetopaz.com
|
||||
switer.shop
|
||||
sysop4.com
|
||||
system.eu.com
|
||||
szhongbing.com
|
||||
t-jon.com
|
||||
tacaindo.net
|
||||
tacom.tj
|
||||
tankertelz.co
|
||||
tataidc.com
|
||||
teamveiw.com
|
||||
tecnoxia.net
|
||||
tel-xyz.fit
|
||||
tenkids.net
|
||||
terminavalley.com
|
||||
thaicloudsolutions.com
|
||||
thaikinghost.com
|
||||
thaimonster.com
|
||||
thegermainetruth.net
|
||||
thehandmaderose.com
|
||||
thepushcase.com
|
||||
ticdns.com
|
||||
tigo.bo
|
||||
toledofibra.net.br
|
||||
topdns.com
|
||||
totaal.net
|
||||
totalplay.net
|
||||
tqh.ro
|
||||
traderlearningcenter.com
|
||||
tradeukraine.site
|
||||
traveleza.com
|
||||
trwww.com
|
||||
tsuzakij.com
|
||||
tullostrucking.com
|
||||
turbinetrends.com
|
||||
twincitiesdistinctivehomes.com
|
||||
tylerfordonline.com
|
||||
uiyum.com
|
||||
ultragate.com
|
||||
uneedacollie.com
|
||||
unified.services
|
||||
unite.services
|
||||
urawasl.com
|
||||
us.servername.us
|
||||
vagebond.net
|
||||
varvia.de
|
||||
vbcploo.com
|
||||
vdc.vn
|
||||
vendimetry.com
|
||||
vibrantwellnesscorp.com
|
||||
virtualine.org
|
||||
visit.docotor
|
||||
viviotech.us
|
||||
vlflgl.com
|
||||
volganet.ru
|
||||
vrns.net
|
||||
vulterdi.edu
|
||||
vvondertex.com
|
||||
wallstreetsgossip.com
|
||||
wamego.net
|
||||
wanekoohost.com
|
||||
wealthexpertisepro.com
|
||||
web-login.eu
|
||||
weblinkinternational.com
|
||||
webnox.io
|
||||
websale.net
|
||||
welllivinghive.com
|
||||
westparkcom.com
|
||||
wetransfer-eu.com
|
||||
wheelch.me
|
||||
whoflew.com
|
||||
whpservers.com
|
||||
wisdomhard.com
|
||||
wisewealthcircle.com
|
||||
wisvis.com
|
||||
wodeniowa.com
|
||||
wordpresshosting.xyz
|
||||
wsiph2.com
|
||||
xnt.mx
|
||||
xodiax.com
|
||||
xpnuf.cn
|
||||
xsfati.us.com
|
||||
xspmail.jp
|
||||
yourciviccompass.com
|
||||
yourinvestworkbook.com
|
||||
yoursitesecure.net
|
||||
zerowebhosting.net
|
||||
zmml.uk
|
||||
znlc.jp
|
||||
ztomy.com
|
||||
23
parsedmarc/resources/maps/psl_overrides.txt
Normal file
@@ -0,0 +1,23 @@
|
||||
-applefibernet.com
|
||||
-c3.net.pl
|
||||
-celsiainternet.com
|
||||
-clientes-izzi.mx
|
||||
-clientes-zap-izzi.mx
|
||||
-imnet.com.br
|
||||
-mcnbd.com
|
||||
-smile.com.bd
|
||||
-tataidc.co.in
|
||||
-veloxfiber.com.br
|
||||
-wconect.com.br
|
||||
.amazonaws.com
|
||||
.cloudaccess.net
|
||||
.ddnsgeek.com
|
||||
.fastvps-server.com
|
||||
.in-addr-arpa
|
||||
.in-addr.arpa
|
||||
.kasserver.com
|
||||
.kinghost.net
|
||||
.linode.com
|
||||
.linodeusercontent.com
|
||||
.na4u.ru
|
||||
.sakura.ne.jp
|
||||
184
parsedmarc/resources/maps/sortlists.py
Executable file
@@ -0,0 +1,184 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import csv
|
||||
from pathlib import Path
|
||||
from typing import Mapping, Iterable, Optional, Collection, Union, List, Dict
|
||||
|
||||
|
||||
class CSVValidationError(Exception):
|
||||
def __init__(self, errors: list[str]):
|
||||
super().__init__("\n".join(errors))
|
||||
self.errors = errors
|
||||
|
||||
|
||||
def sort_csv(
|
||||
filepath: Union[str, Path],
|
||||
field: str,
|
||||
*,
|
||||
sort_field_value_must_be_unique: bool = True,
|
||||
strip_whitespace: bool = True,
|
||||
fields_to_lowercase: Optional[Iterable[str]] = None,
|
||||
case_insensitive_sort: bool = False,
|
||||
required_fields: Optional[Iterable[str]] = None,
|
||||
allowed_values: Optional[Mapping[str, Collection[str]]] = None,
|
||||
) -> List[Dict[str, str]]:
|
||||
"""
|
||||
Read a CSV, optionally normalize rows (strip whitespace, lowercase certain fields),
|
||||
validate field values, and write the sorted CSV back to the same path.
|
||||
|
||||
- filepath: Path to the CSV to sort.
|
||||
- field: The field name to sort by.
|
||||
- fields_to_lowercase: Permanently lowercases these field(s) in the data.
|
||||
- strip_whitespace: Remove all whitespace at the beginning and end of field values.
|
||||
- case_insensitive_sort: Ignore case when sorting without changing values.
|
||||
- required_fields: A list of fields that must have data in all rows.
|
||||
- allowed_values: A mapping of allowed values for fields.
|
||||
"""
|
||||
path = Path(filepath)
|
||||
required_fields = set(required_fields or [])
|
||||
lower_set = set(fields_to_lowercase or [])
|
||||
allowed_sets = {k: set(v) for k, v in (allowed_values or {}).items()}
|
||||
if sort_field_value_must_be_unique:
|
||||
seen_sort_field_values = []
|
||||
|
||||
with path.open("r", newline="") as infile:
|
||||
reader = csv.DictReader(infile)
|
||||
fieldnames = reader.fieldnames or []
|
||||
if field not in fieldnames:
|
||||
raise CSVValidationError([f"Missing sort column: {field!r}"])
|
||||
missing_headers = required_fields - set(fieldnames)
|
||||
if missing_headers:
|
||||
raise CSVValidationError(
|
||||
[f"Missing required header(s): {sorted(missing_headers)}"]
|
||||
)
|
||||
rows = list(reader)
|
||||
|
||||
def normalize_row(row: Dict[str, str]) -> None:
|
||||
if strip_whitespace:
|
||||
for k, v in row.items():
|
||||
if isinstance(v, str):
|
||||
row[k] = v.strip()
|
||||
for fld in lower_set:
|
||||
if fld in row and isinstance(row[fld], str):
|
||||
row[fld] = row[fld].lower()
|
||||
|
||||
def validate_row(
|
||||
row: Dict[str, str], sort_field: str, line_no: int, errors: list[str]
|
||||
) -> None:
|
||||
if sort_field_value_must_be_unique:
|
||||
if row[sort_field] in seen_sort_field_values:
|
||||
errors.append(f"Line {line_no}: Duplicate row for '{row[sort_field]}'")
|
||||
else:
|
||||
seen_sort_field_values.append(row[sort_field])
|
||||
for rf in required_fields:
|
||||
val = row.get(rf)
|
||||
if val is None or val == "":
|
||||
errors.append(
|
||||
f"Line {line_no}: Missing value for required field '{rf}'"
|
||||
)
|
||||
for field, allowed_values in allowed_sets.items():
|
||||
if field in row:
|
||||
val = row[field]
|
||||
if val not in allowed_values:
|
||||
errors.append(
|
||||
f"Line {line_no}: '{val}' is not an allowed value for '{field}' "
|
||||
f"(allowed: {sorted(allowed_values)})"
|
||||
)
|
||||
|
||||
errors: list[str] = []
|
||||
for idx, row in enumerate(rows, start=2): # header is line 1
|
||||
normalize_row(row)
|
||||
validate_row(row, field, idx, errors)
|
||||
|
||||
if errors:
|
||||
raise CSVValidationError(errors)
|
||||
|
||||
def sort_key(r: Dict[str, str]):
|
||||
v = r.get(field, "")
|
||||
if isinstance(v, str) and case_insensitive_sort:
|
||||
return v.casefold()
|
||||
return v
|
||||
|
||||
rows.sort(key=sort_key)
|
||||
|
||||
with open(filepath, "w", newline="") as outfile:
|
||||
writer = csv.DictWriter(outfile, fieldnames=fieldnames)
|
||||
writer.writeheader()
|
||||
writer.writerows(rows)
|
||||
|
||||
|
||||
def sort_list_file(
|
||||
filepath: Union[str, Path],
|
||||
*,
|
||||
lowercase: bool = True,
|
||||
strip: bool = True,
|
||||
deduplicate: bool = True,
|
||||
remove_blank_lines: bool = True,
|
||||
ending_newline: bool = True,
|
||||
newline: Optional[str] = "\n",
|
||||
):
|
||||
"""Read a list from a file, sort it, optionally strip and deduplicate the values,
|
||||
then write that list back to the file.
|
||||
|
||||
- filepath: The path to the file.
- lowercase: Lowercase all values prior to sorting.
- remove_blank_lines: Remove any blank lines.
|
||||
- ending_newline: End the file with a newline, even if remove_blank_lines is true.
|
||||
- newline: The newline character to use.
|
||||
"""
|
||||
with open(filepath, mode="r", newline=newline) as infile:
|
||||
lines = infile.readlines()
|
||||
for i in range(len(lines)):
|
||||
if lowercase:
|
||||
lines[i] = lines[i].lower()
|
||||
if strip:
|
||||
lines[i] = lines[i].strip()
|
||||
if deduplicate:
|
||||
lines = list(set(lines))
|
||||
if remove_blank_lines:
|
||||
while "" in lines:
|
||||
lines.remove("")
|
||||
lines = sorted(lines)
|
||||
if ending_newline:
|
||||
if lines[-1] != "":
|
||||
lines.append("")
|
||||
with open(filepath, mode="w", newline=newline) as outfile:
|
||||
outfile.write("\n".join(lines))
|
||||
|
||||
|
||||
def _main():
|
||||
map_file = "base_reverse_dns_map.csv"
|
||||
map_key = "base_reverse_dns"
|
||||
list_files = ["known_unknown_base_reverse_dns.txt", "psl_overrides.txt"]
|
||||
types_file = "base_reverse_dns_types.txt"
|
||||
|
||||
with open(types_file) as f:
|
||||
types = f.readlines()
|
||||
while "" in types:
|
||||
types.remove("")
|
||||
|
||||
map_allowed_values = {"Type": types}
|
||||
|
||||
for list_file in list_files:
|
||||
if not os.path.exists(list_file):
|
||||
print(f"Error: {list_file} does not exist")
|
||||
exit(1)
|
||||
sort_list_file(list_file)
|
||||
if not os.path.exists(types_file):
|
||||
print(f"Error: {types_file} does not exist")
|
||||
exit(1)
|
||||
sort_list_file(types_file, lowercase=False)
|
||||
if not os.path.exists(map_file):
|
||||
print(f"Error: {map_file} does not exist")
|
||||
exit(1)
|
||||
try:
|
||||
sort_csv(map_file, map_key, allowed_values=map_allowed_values)
|
||||
except CSVValidationError as e:
|
||||
print(f"{map_file} did not validate: {e}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
_main()
|
||||
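A minimal usage sketch of the helpers above, assuming it runs from this maps directory where sortlists.py and the data files live; the required_fields values are illustrative:

```python
from sortlists import CSVValidationError, sort_csv, sort_list_file

# Sort and deduplicate the plain-text domain list in place.
sort_list_file("known_unknown_base_reverse_dns.txt")

# Sort the map by its key column, normalizing and validating each row in place.
try:
    sort_csv(
        "base_reverse_dns_map.csv",
        "base_reverse_dns",
        fields_to_lowercase=["base_reverse_dns"],
        required_fields=["base_reverse_dns", "name", "type"],
    )
except CSVValidationError as error:
    print(f"base_reverse_dns_map.csv did not validate:\n{error}")
```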
@@ -1,23 +1,29 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import json
|
||||
import boto3
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from parsedmarc.log import logger
|
||||
from parsedmarc.utils import human_timestamp_to_datetime
|
||||
|
||||
|
||||
class S3Client(object):
|
||||
"""A client for a Amazon S3"""
|
||||
"""A client for interacting with Amazon S3"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
bucket_name,
|
||||
bucket_path,
|
||||
region_name,
|
||||
endpoint_url,
|
||||
access_key_id,
|
||||
secret_access_key,
|
||||
bucket_name: str,
|
||||
bucket_path: str,
|
||||
region_name: str,
|
||||
endpoint_url: str,
|
||||
access_key_id: str,
|
||||
secret_access_key: str,
|
||||
):
|
||||
"""
|
||||
Initializes the S3Client
|
||||
@@ -49,16 +55,16 @@ class S3Client(object):
|
||||
)
|
||||
self.bucket = self.s3.Bucket(self.bucket_name)
|
||||
|
||||
def save_aggregate_report_to_s3(self, report):
|
||||
def save_aggregate_report_to_s3(self, report: OrderedDict[str, Any]):
|
||||
self.save_report_to_s3(report, "aggregate")
|
||||
|
||||
def save_forensic_report_to_s3(self, report):
|
||||
def save_forensic_report_to_s3(self, report: OrderedDict[str, Any]):
|
||||
self.save_report_to_s3(report, "forensic")
|
||||
|
||||
def save_smtp_tls_report_to_s3(self, report):
|
||||
def save_smtp_tls_report_to_s3(self, report: OrderedDict[str, Any]):
|
||||
self.save_report_to_s3(report, "smtp_tls")
|
||||
|
||||
def save_report_to_s3(self, report, report_type):
|
||||
def save_report_to_s3(self, report: OrderedDict[str, Any], report_type: str):
|
||||
if report_type == "smtp_tls":
|
||||
report_date = report["begin_date"]
|
||||
report_id = report["report_id"]
|
||||
|
||||
@@ -1,3 +1,11 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from urllib.parse import urlparse
|
||||
import socket
|
||||
import json
|
||||
@@ -5,7 +13,7 @@ import json
|
||||
import urllib3
|
||||
import requests
|
||||
|
||||
from parsedmarc import __version__
|
||||
from parsedmarc.constants import USER_AGENT
|
||||
from parsedmarc.log import logger
|
||||
from parsedmarc.utils import human_timestamp_to_unix_timestamp
|
||||
|
||||
@@ -23,7 +31,13 @@ class HECClient(object):
|
||||
# http://docs.splunk.com/Documentation/Splunk/latest/RESTREF/RESTinput#services.2Fcollector
|
||||
|
||||
def __init__(
|
||||
self, url, access_token, index, source="parsedmarc", verify=True, timeout=60
|
||||
self,
|
||||
url: str,
|
||||
access_token: str,
|
||||
index: str,
|
||||
source: str = "parsedmarc",
|
||||
verify=True,
|
||||
timeout=60,
|
||||
):
|
||||
"""
|
||||
Initializes the HECClient
|
||||
@@ -51,11 +65,13 @@ class HECClient(object):
|
||||
self._common_data = dict(host=self.host, source=self.source, index=self.index)
|
||||
|
||||
self.session.headers = {
|
||||
"User-Agent": "parsedmarc/{0}".format(__version__),
|
||||
"User-Agent": USER_AGENT,
|
||||
"Authorization": "Splunk {0}".format(self.access_token),
|
||||
}
|
||||
|
||||
def save_aggregate_reports_to_splunk(self, aggregate_reports):
|
||||
def save_aggregate_reports_to_splunk(
|
||||
self, aggregate_reports: list[OrderedDict[str, Any]]
|
||||
):
|
||||
"""
|
||||
Saves aggregate DMARC reports to Splunk
|
||||
|
||||
@@ -78,6 +94,9 @@ class HECClient(object):
|
||||
new_report = dict()
|
||||
for metadata in report["report_metadata"]:
|
||||
new_report[metadata] = report["report_metadata"][metadata]
|
||||
new_report["interval_begin"] = record["interval_begin"]
|
||||
new_report["interval_end"] = record["interval_end"]
|
||||
new_report["normalized_timespan"] = record["normalized_timespan"]
|
||||
new_report["published_policy"] = report["policy_published"]
|
||||
new_report["source_ip_address"] = record["source"]["ip_address"]
|
||||
new_report["source_country"] = record["source"]["country"]
|
||||
@@ -98,7 +117,9 @@ class HECClient(object):
|
||||
new_report["spf_results"] = record["auth_results"]["spf"]
|
||||
|
||||
data["sourcetype"] = "dmarc:aggregate"
|
||||
timestamp = human_timestamp_to_unix_timestamp(new_report["begin_date"])
|
||||
timestamp = human_timestamp_to_unix_timestamp(
|
||||
new_report["interval_begin"]
|
||||
)
|
||||
data["time"] = timestamp
|
||||
data["event"] = new_report.copy()
|
||||
json_str += "{0}\n".format(json.dumps(data))
|
||||
@@ -113,7 +134,9 @@ class HECClient(object):
|
||||
if response["code"] != 0:
|
||||
raise SplunkError(response["text"])
|
||||
|
||||
def save_forensic_reports_to_splunk(self, forensic_reports):
|
||||
def save_forensic_reports_to_splunk(
|
||||
self, forensic_reports: list[OrderedDict[str, Any]]
|
||||
):
|
||||
"""
|
||||
Saves forensic DMARC reports to Splunk
|
||||
|
||||
@@ -147,7 +170,7 @@ class HECClient(object):
|
||||
if response["code"] != 0:
|
||||
raise SplunkError(response["text"])
|
||||
|
||||
def save_smtp_tls_reports_to_splunk(self, reports):
|
||||
def save_smtp_tls_reports_to_splunk(self, reports: OrderedDict[str, Any]):
|
||||
"""
|
||||
Saves aggregate DMARC reports to Splunk
|
||||
|
||||
|
||||
@@ -1,7 +1,15 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import logging.handlers
|
||||
|
||||
from typing import Any
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
import json
|
||||
|
||||
from parsedmarc import (
|
||||
@@ -14,7 +22,7 @@ from parsedmarc import (
|
||||
class SyslogClient(object):
|
||||
"""A client for Syslog"""
|
||||
|
||||
def __init__(self, server_name, server_port):
|
||||
def __init__(self, server_name: str, server_port: int):
|
||||
"""
|
||||
Initializes the SyslogClient
|
||||
Args:
|
||||
@@ -28,17 +36,23 @@ class SyslogClient(object):
|
||||
log_handler = logging.handlers.SysLogHandler(address=(server_name, server_port))
|
||||
self.logger.addHandler(log_handler)
|
||||
|
||||
def save_aggregate_report_to_syslog(self, aggregate_reports):
|
||||
def save_aggregate_report_to_syslog(
|
||||
self, aggregate_reports: list[OrderedDict[str, Any]]
|
||||
):
|
||||
rows = parsed_aggregate_reports_to_csv_rows(aggregate_reports)
|
||||
for row in rows:
|
||||
self.logger.info(json.dumps(row))
|
||||
|
||||
def save_forensic_report_to_syslog(self, forensic_reports):
|
||||
def save_forensic_report_to_syslog(
|
||||
self, forensic_reports: list[OrderedDict[str, Any]]
|
||||
):
|
||||
rows = parsed_forensic_reports_to_csv_rows(forensic_reports)
|
||||
for row in rows:
|
||||
self.logger.info(json.dumps(row))
|
||||
|
||||
def save_smtp_tls_report_to_syslog(self, smtp_tls_reports):
|
||||
def save_smtp_tls_report_to_syslog(
|
||||
self, smtp_tls_reports: list[OrderedDict[str, Any]]
|
||||
):
|
||||
rows = parsed_smtp_tls_reports_to_csv_rows(smtp_tls_reports)
|
||||
for row in rows:
|
||||
self.logger.info(json.dumps(row))
|
||||
|
||||
@@ -1,11 +1,18 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Utility functions that might be useful for other projects"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Optional, Union
|
||||
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from datetime import timezone
|
||||
from datetime import timedelta
|
||||
from collections import OrderedDict
|
||||
from expiringdict import ExpiringDict
|
||||
import tempfile
|
||||
import subprocess
|
||||
import shutil
|
||||
@@ -19,10 +26,11 @@ import csv
|
||||
import io
|
||||
|
||||
try:
|
||||
import importlib.resources as pkg_resources
|
||||
from importlib.resources import files
|
||||
except ImportError:
|
||||
# Try backported to PY<37 `importlib_resources`
|
||||
import importlib_resources as pkg_resources
|
||||
# Try backported to PY<3 `importlib_resources`
|
||||
from importlib.resources import files
|
||||
|
||||
|
||||
from dateutil.parser import parse as parse_date
|
||||
import dns.reversename
|
||||
@@ -36,13 +44,19 @@ import requests
|
||||
from parsedmarc.log import logger
|
||||
import parsedmarc.resources.dbip
|
||||
import parsedmarc.resources.maps
|
||||
|
||||
from parsedmarc.constants import USER_AGENT
|
||||
|
||||
parenthesis_regex = re.compile(r"\s*\(.*\)\s*")
|
||||
|
||||
null_file = open(os.devnull, "w")
|
||||
mailparser_logger = logging.getLogger("mailparser")
|
||||
mailparser_logger.setLevel(logging.CRITICAL)
|
||||
psl = publicsuffixlist.PublicSuffixList()
|
||||
psl_overrides_path = str(files(parsedmarc.resources.maps).joinpath("psl_overrides.txt"))
|
||||
with open(psl_overrides_path) as f:
|
||||
psl_overrides = [line.rstrip() for line in f.readlines()]
|
||||
while "" in psl_overrides:
|
||||
psl_overrides.remove("")
|
||||
|
||||
|
||||
class EmailParserError(RuntimeError):
|
||||
@@ -53,12 +67,12 @@ class DownloadError(RuntimeError):
|
||||
"""Raised when an error occurs when downloading a file"""
|
||||
|
||||
|
||||
def decode_base64(data):
|
||||
def decode_base64(data: str) -> bytes:
|
||||
"""
|
||||
Decodes a base64 string, with padding being optional
|
||||
|
||||
Args:
|
||||
data: A base64 encoded string
|
||||
data (str): A base64 encoded string
|
||||
|
||||
Returns:
|
||||
bytes: The decoded bytes
|
||||
@@ -71,13 +85,14 @@ def decode_base64(data):
|
||||
return base64.b64decode(data)
|
||||
|
||||
|
||||
def get_base_domain(domain):
def get_base_domain(domain: str) -> str:
    """
    Gets the base domain name for the given domain

    .. note::
        Results are based on a list of public domain suffixes at
        https://publicsuffix.org/list/public_suffix_list.dat.
        https://publicsuffix.org/list/public_suffix_list.dat and overrides included in
        parsedmarc.resources.maps.psl_overrides.txt

    Args:
        domain (str): A domain or subdomain
@@ -86,11 +101,22 @@ def get_base_domain(domain):
        str: The base domain of the given domain

    """
    psl = publicsuffixlist.PublicSuffixList()
    return psl.privatesuffix(domain)
    domain = domain.lower()
    publicsuffix = psl.privatesuffix(domain)
    for override in psl_overrides:
        if domain.endswith(override):
            return override.strip(".").strip("-")
    return publicsuffix

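The new get_base_domain() lower-cases its input and checks the psl_overrides list loaded at import time before trusting the public-suffix-list result; any domain ending with an override entry collapses to that entry, with leading dots and dashes stripped. A hedged sketch (module path assumed; the override entry named in the comment is hypothetical):

from parsedmarc.utils import get_base_domain  # assumed module path

print(get_base_domain("Mail.Example.COM"))  # "example.com" via the public suffix list
# If psl_overrides.txt contained an entry such as ".mail.protection.outlook.com"
# (hypothetical), any host ending with it would return that entry instead of the
# PSL private suffix.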
def query_dns(domain, record_type, cache=None, nameservers=None, timeout=2.0):
def query_dns(
    domain: str,
    record_type: str,
    *,
    cache: Optional[ExpiringDict] = None,
    nameservers: list[str] = None,
    timeout: int = 2.0,
) -> list[str]:
    """
    Queries DNS

@@ -151,7 +177,13 @@ def query_dns(domain, record_type, cache=None, nameservers=None, timeout=2.0):
    return records


def get_reverse_dns(ip_address, cache=None, nameservers=None, timeout=2.0):
def get_reverse_dns(
    ip_address,
    *,
    cache: Optional[ExpiringDict] = None,
    nameservers: list[str] = None,
    timeout: int = 2.0,
) -> str:
    """
    Resolves an IP address to a hostname using a reverse DNS query

@@ -179,7 +211,7 @@ def get_reverse_dns(ip_address, cache=None, nameservers=None, timeout=2.0):
    return hostname

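Both DNS helpers keep their defaults, but everything after the positional arguments is now keyword-only (the bare *). A usage sketch with a shared ExpiringDict cache; the module path and resolver address are assumptions:

from expiringdict import ExpiringDict
from parsedmarc.utils import query_dns, get_reverse_dns  # assumed module path

dns_cache = ExpiringDict(max_len=10000, max_age_seconds=1800)

mx_records = query_dns(
    "example.com",
    "MX",
    cache=dns_cache,
    nameservers=["1.1.1.1"],  # hypothetical resolver
    timeout=2.0,
)
hostname = get_reverse_dns("1.1.1.1", cache=dns_cache, timeout=2.0)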
def timestamp_to_datetime(timestamp):
def timestamp_to_datetime(timestamp: int) -> datetime:
    """
    Converts a UNIX/DMARC timestamp to a Python ``datetime`` object

@@ -192,7 +224,7 @@ def timestamp_to_datetime(timestamp):
    return datetime.fromtimestamp(int(timestamp))


def timestamp_to_human(timestamp):
def timestamp_to_human(timestamp: int) -> str:
    """
    Converts a UNIX/DMARC timestamp to a human-readable string

@@ -205,7 +237,9 @@ def timestamp_to_human(timestamp):
    return timestamp_to_datetime(timestamp).strftime("%Y-%m-%d %H:%M:%S")


def human_timestamp_to_datetime(human_timestamp, to_utc=False):
def human_timestamp_to_datetime(
    human_timestamp: str, *, to_utc: Optional[bool] = False
) -> datetime:
    """
    Converts a human-readable timestamp into a Python ``datetime`` object

@@ -224,7 +258,7 @@ def human_timestamp_to_datetime(human_timestamp, to_utc=False):
    return dt.astimezone(timezone.utc) if to_utc else dt


def human_timestamp_to_unix_timestamp(human_timestamp):
def human_timestamp_to_unix_timestamp(human_timestamp: str) -> int:
    """
    Converts a human-readable timestamp into a UNIX timestamp

@@ -238,7 +272,7 @@ def human_timestamp_to_unix_timestamp(human_timestamp):
    return human_timestamp_to_datetime(human_timestamp).timestamp()

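The four timestamp helpers only gain annotations; together they convert between UNIX/DMARC epoch seconds, datetime objects, and the %Y-%m-%d %H:%M:%S string form. A short round trip, module path assumed (timestamp_to_datetime() uses fromtimestamp(), so the string is rendered in local time):

from parsedmarc.utils import (  # assumed module path
    timestamp_to_human,
    human_timestamp_to_datetime,
    human_timestamp_to_unix_timestamp,
)

human = timestamp_to_human(1700000000)  # 2023-11-14 22:13:20 in UTC; local output varies
dt = human_timestamp_to_datetime(human, to_utc=True)
epoch = human_timestamp_to_unix_timestamp(human)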
def get_ip_address_country(ip_address, db_path=None):
def get_ip_address_country(ip_address: str, *, db_path: Optional[str] = None) -> str:
    """
    Returns the ISO code for the country associated
    with the given IPv4 or IPv6 address
@@ -265,7 +299,7 @@ def get_ip_address_country(ip_address, db_path=None):
    ]

    if db_path is not None:
        if os.path.isfile(db_path) is False:
        if not os.path.isfile(db_path):
            db_path = None
            logger.warning(
                f"No file exists at {db_path}. Falling back to an "
@@ -280,14 +314,13 @@ def get_ip_address_country(ip_address, db_path=None):
            break

    if db_path is None:
        with pkg_resources.path(
            parsedmarc.resources.dbip, "dbip-country-lite.mmdb"
        ) as path:
            db_path = path
        db_path = str(
            files(parsedmarc.resources.dbip).joinpath("dbip-country-lite.mmdb")
        )

        db_age = datetime.now() - datetime.fromtimestamp(os.stat(db_path).st_mtime)
        if db_age > timedelta(days=30):
            logger.warning("IP database is more than a month old")
    db_age = datetime.now() - datetime.fromtimestamp(os.stat(db_path).st_mtime)
    if db_age > timedelta(days=30):
        logger.warning("IP database is more than a month old")

    db_reader = geoip2.database.Reader(db_path)

@@ -303,12 +336,13 @@ def get_ip_address_country(ip_address, db_path=None):

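get_ip_address_country() keeps its behaviour: db_path becomes keyword-only, the deprecated pkg_resources.path() call is replaced with importlib.resources.files(), and the database-age check is moved, apparently so the staleness warning also covers user-supplied databases. A call sketch; the module path and the alternate database path are assumptions:

from parsedmarc.utils import get_ip_address_country  # assumed module path

country = get_ip_address_country("8.8.8.8")  # bundled dbip-country-lite.mmdb; returns an ISO code such as "US"
country = get_ip_address_country(
    "8.8.8.8", db_path="/var/lib/GeoIP/GeoLite2-Country.mmdb"  # hypothetical path
)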
def get_service_from_reverse_dns_base_domain(
    base_domain,
    always_use_local_file=False,
    local_file_path=None,
    url=None,
    offline=False,
    reverse_dns_map=None,
):
    *,
    always_use_local_file: Optional[bool] = False,
    local_file_path: Optional[bool] = None,
    url: Optional[bool] = None,
    offline: Optional[bool] = False,
    reverse_dns_map: Optional[bool] = None,
) -> str:
    """
    Returns the service name of a given base domain name from reverse DNS.

@@ -344,21 +378,30 @@ def get_service_from_reverse_dns_base_domain(

    if not (offline or always_use_local_file) and len(reverse_dns_map) == 0:
        try:
            logger.debug(f"Trying to fetch " f"reverse DNS map from {url}...")
            csv_file.write(requests.get(url).text)
            logger.debug(f"Trying to fetch reverse DNS map from {url}...")
            headers = {"User-Agent": USER_AGENT}
            response = requests.get(url, headers=headers)
            response.raise_for_status()
            csv_file.write(response.text)
            csv_file.seek(0)
            load_csv(csv_file)
        except requests.exceptions.RequestException as e:
            logger.warning(f"Failed to fetch reverse DNS map: {e}")
        except Exception:
            logger.warning("Not a valid CSV file")
            csv_file.seek(0)
            logging.debug("Response body:")
            logger.debug(csv_file.read())

    if len(reverse_dns_map) == 0:
        logger.info("Loading included reverse DNS map...")
        with pkg_resources.path(
            parsedmarc.resources.maps, "base_reverse_dns_map.csv"
        ) as path:
            if local_file_path is not None:
                path = local_file_path
            with open(path) as csv_file:
                load_csv(csv_file)
        path = str(
            files(parsedmarc.resources.maps).joinpath("base_reverse_dns_map.csv")
        )
        if local_file_path is not None:
            path = local_file_path
        with open(path) as csv_file:
            load_csv(csv_file)
    try:
        service = reverse_dns_map[base_domain]
    except KeyError:
@@ -369,16 +412,17 @@ def get_service_from_reverse_dns_base_domain(

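The rewritten download block above is the substantive change in this hunk: the request now sends parsedmarc's USER_AGENT, calls raise_for_status(), and separates network failures from unparsable CSV responses. The same fetch-and-validate pattern in isolation, as a hedged standalone sketch (the function name, URL handling, and first-two-columns assumption are illustrative, not parsedmarc's API):

import csv
import io
import logging

import requests

logger = logging.getLogger(__name__)


def fetch_csv_map(url: str, user_agent: str) -> dict[str, str]:
    """Fetch a CSV over HTTP and map its first column to its second, mirroring
    the error handling used above. Names here are illustrative only."""
    mapping: dict[str, str] = {}
    try:
        response = requests.get(url, headers={"User-Agent": user_agent})
        response.raise_for_status()
        for row in csv.reader(io.StringIO(response.text)):
            if len(row) >= 2:
                mapping[row[0]] = row[1]
    except requests.exceptions.RequestException as err:
        logger.warning(f"Failed to fetch the map: {err}")
    except Exception:
        logger.warning("Not a valid CSV file")
    return mapping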
def get_ip_address_info(
    ip_address,
    ip_db_path=None,
    reverse_dns_map_path=None,
    always_use_local_files=False,
    reverse_dns_map_url=None,
    cache=None,
    reverse_dns_map=None,
    offline=False,
    nameservers=None,
    timeout=2.0,
):
    *,
    ip_db_path: Optional[str] = None,
    reverse_dns_map_path: Optional[str] = None,
    always_use_local_files: Optional[bool] = False,
    reverse_dns_map_url: Optional[bool] = None,
    cache: Optional[ExpiringDict] = None,
    reverse_dns_map: Optional[bool] = None,
    offline: Optional[bool] = False,
    nameservers: Optional[list[str]] = None,
    timeout: Optional[float] = 2.0,
) -> OrderedDict[str, str]:
    """
    Returns reverse DNS and country information for the given IP address

@@ -396,7 +440,7 @@ def get_ip_address_info(
        timeout (float): Sets the DNS timeout in seconds

    Returns:
        OrderedDict: ``ip_address``, ``reverse_dns``
        OrderedDict: ``ip_address``, ``reverse_dns``, ``country``

    """
    ip_address = ip_address.lower()
@@ -443,7 +487,7 @@ def get_ip_address_info(
    return info

|
||||
def parse_email_address(original_address: str) -> OrderedDict[str, str]:
|
||||
if original_address[0] == "":
|
||||
display_name = None
|
||||
else:
|
||||
@@ -466,7 +510,7 @@ def parse_email_address(original_address):
|
||||
)
|
||||
|
||||
|
||||
def get_filename_safe_string(string):
|
||||
def get_filename_safe_string(string: str) -> str:
|
||||
"""
|
||||
Converts a string to a string that is safe for a filename
|
||||
|
||||
@@ -488,7 +532,7 @@ def get_filename_safe_string(string):
|
||||
return string
|
||||
|
||||
|
||||
def is_mbox(path):
|
||||
def is_mbox(path: str) -> bool:
|
||||
"""
|
||||
Checks if the given content is an MBOX mailbox file
|
||||
|
||||
@@ -509,7 +553,7 @@ def is_mbox(path):
|
||||
return _is_mbox
|
||||
|
||||
|
||||
def is_outlook_msg(content):
|
||||
def is_outlook_msg(content) -> bool:
|
||||
"""
|
||||
Checks if the given content is an Outlook msg OLE/MSG file
|
||||
|
||||
@@ -524,7 +568,7 @@ def is_outlook_msg(content):
|
||||
)
|
||||
|
||||
|
||||
def convert_outlook_msg(msg_bytes):
|
||||
def convert_outlook_msg(msg_bytes: bytes) -> str:
|
||||
"""
|
||||
Uses the ``msgconvert`` Perl utility to convert an Outlook MS file to
|
||||
standard RFC 822 format
|
||||
@@ -560,7 +604,9 @@ def convert_outlook_msg(msg_bytes):
|
||||
return rfc822
|
||||
|
||||
|
||||
def parse_email(data, strip_attachment_payloads=False):
|
||||
def parse_email(
|
||||
data: Union[bytes, str], *, strip_attachment_payloads: Optional[bool] = False
|
||||
):
|
||||
"""
|
||||
A simplified email parser
|
||||
|
||||
|
||||
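parse_email() accepts bytes or str and, via the now keyword-only strip_attachment_payloads flag, drops attachment bodies from the simplified structure it returns. A minimal call, with the module path and sample file name assumed:

from parsedmarc.utils import parse_email  # assumed module path

with open("sample_forensic_report.eml", "rb") as eml:  # hypothetical file
    message = parse_email(eml.read(), strip_attachment_payloads=True)
# `message` is parsedmarc's simplified email dict; its exact keys are not assumed here.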
@@ -1,12 +1,27 @@
# -*- coding: utf-8 -*-

from __future__ import annotations

from typing import Any, Optional, Union

from collections import OrderedDict

import requests

from parsedmarc import logger
from parsedmarc.constants import USER_AGENT


class WebhookClient(object):
    """A client for webhooks"""

    def __init__(self, aggregate_url, forensic_url, smtp_tls_url, timeout=60):
    def __init__(
        self,
        aggregate_url: str,
        forensic_url: str,
        smtp_tls_url: str,
        timeout: Optional[int] = 60,
    ):
        """
        Initializes the WebhookClient
        Args:
@@ -21,29 +36,31 @@ class WebhookClient(object):
        self.timeout = timeout
        self.session = requests.Session()
        self.session.headers = {
            "User-Agent": "parsedmarc",
            "User-Agent": USER_AGENT,
            "Content-Type": "application/json",
        }

    def save_forensic_report_to_webhook(self, report):
    def save_forensic_report_to_webhook(self, report: OrderedDict[str, Any]):
        try:
            self._send_to_webhook(self.forensic_url, report)
        except Exception as error_:
            logger.error("Webhook Error: {0}".format(error_.__str__()))

    def save_smtp_tls_report_to_webhook(self, report):
    def save_smtp_tls_report_to_webhook(self, report: OrderedDict[str, Any]):
        try:
            self._send_to_webhook(self.smtp_tls_url, report)
        except Exception as error_:
            logger.error("Webhook Error: {0}".format(error_.__str__()))

    def save_aggregate_report_to_webhook(self, report):
    def save_aggregate_report_to_webhook(self, report: OrderedDict[str, Any]):
        try:
            self._send_to_webhook(self.aggregate_url, report)
        except Exception as error_:
            logger.error("Webhook Error: {0}".format(error_.__str__()))

    def _send_to_webhook(self, webhook_url, payload):
    def _send_to_webhook(
        self, webhook_url: str, payload: Union[bytes, str, dict[str, Any]]
    ):
        try:
            self.session.post(webhook_url, data=payload, timeout=self.timeout)
        except Exception as error_:

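The webhook client now identifies itself with the shared USER_AGENT constant and gains annotations, but it still posts the payload as-is under a Content-Type of application/json, so passing a pre-serialized JSON string keeps the body consistent with that header. A hedged usage sketch, assuming the class is importable from parsedmarc.webhook:

import json

from parsedmarc.webhook import WebhookClient  # assumed module path

client = WebhookClient(
    aggregate_url="https://hooks.example.com/dmarc/aggregate",  # hypothetical endpoints
    forensic_url="https://hooks.example.com/dmarc/forensic",
    smtp_tls_url="https://hooks.example.com/smtp-tls",
    timeout=30,
)

aggregate_report = {"report_metadata": {"org_name": "example.net"}}  # trimmed example
client.save_aggregate_report_to_webhook(json.dumps(aggregate_report))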
@@ -1,6 +1,6 @@
[build-system]
requires = [
    "hatchling>=1.8.1",
    "hatchling>=1.27.0",
]
build-backend = "hatchling.build"

@@ -28,6 +28,7 @@ classifiers = [
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3"
]
requires-python = ">= 3.9"
dependencies = [
    "azure-identity>=1.8.0",
    "azure-monitor-ingestion>=1.0.0",
@@ -55,11 +56,12 @@ dependencies = [
    "tqdm>=4.31.1",
    "urllib3>=1.25.7",
    "xmltodict>=0.12.0",
    "PyYAML>=6.0.3"
]

[project.optional-dependencies]
build = [
    "hatch",
    "hatch>=1.14.0",
    "myst-parser[linkify]",
    "nose",
    "pytest",
@@ -76,9 +78,20 @@ parsedmarc = "parsedmarc.cli:_main"
Homepage = "https://domainaware.github.io/parsedmarc"

[tool.hatch.version]
path = "parsedmarc/__init__.py"
path = "parsedmarc/constants.py"

[tool.hatch.build.targets.sdist]
include = [
    "/parsedmarc",
]

[tool.hatch.build]
exclude = [
    "base_reverse_dns.csv",
    "find_bad_utf8.py",
    "find_unknown_base_reverse_dns.py",
    "unknown_base_reverse_dns.csv",
    "sortmaps.py",
    "README.md",
    "*.bak"
]
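The packaging change points hatch's version source at parsedmarc/constants.py instead of parsedmarc/__init__.py and keeps helper scripts and scratch CSVs out of the built package. Hatchling's default version lookup reads a __version__ assignment from the referenced file, so constants.py is expected to contain something like the sketch below (the value is a placeholder; the file also defines USER_AGENT, as the imports elsewhere in this diff show):

# parsedmarc/constants.py (sketch only; placeholder value)
__version__ = "0.0.0"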
splunk/smtp_tls_dashboard.xml (new file, 107 lines)
@@ -0,0 +1,107 @@
<form version="1.1" theme="dark">
  <label>SMTP TLS Reporting</label>
  <fieldset submitButton="false" autoRun="true">
    <input type="time" token="time">
      <label></label>
      <default>
        <earliest>-7d@h</earliest>
        <latest>now</latest>
      </default>
    </input>
    <input type="text" token="organization_name" searchWhenChanged="true">
      <label>Organization name</label>
      <default>*</default>
      <initialValue>*</initialValue>
    </input>
    <input type="text" token="policy_domain">
      <label>Policy domain</label>
      <default>*</default>
      <initialValue>*</initialValue>
    </input>
    <input type="dropdown" token="policy_type" searchWhenChanged="true">
      <label>Policy type</label>
      <choice value="*">Any</choice>
      <choice value="tlsa">tlsa</choice>
      <choice value="sts">sts</choice>
      <choice value="no-policy-found">no-policy-found</choice>
      <default>*</default>
      <initialValue>*</initialValue>
    </input>
  </fieldset>
  <row>
    <panel>
      <title>Reporting organizations</title>
      <table>
        <search>
          <query>index=email sourcetype=smtp:tls organization_name=$organization_name$ policies{}.policy_domain=$policy_domain$
| rename policies{}.policy_domain as policy_domain
| rename policies{}.policy_type as policy_type
| rename policies{}.failed_session_count as failed_sessions
| rename policies{}.failure_details{}.failed_session_count as failed_sessions
| rename policies{}.successful_session_count as successful_sessions
| rename policies{}.failure_details{}.sending_mta_ip as sending_mta_ip
| rename policies{}.failure_details{}.receiving_ip as receiving_ip
| rename policies{}.failure_details{}.receiving_mx_hostname as receiving_mx_hostname
| rename policies{}.failure_details{}.result_type as failure_type
| fillnull value=0 failed_sessions
| stats sum(failed_sessions) as failed_sessions sum(successful_sessions) as successful_sessions by organization_name
| sort -successful_sessions 0</query>
          <earliest>$time.earliest$</earliest>
          <latest>$time.latest$</latest>
        </search>
        <option name="drilldown">none</option>
        <option name="refresh.display">progressbar</option>
      </table>
    </panel>
    <panel>
      <title>Domains</title>
      <table>
        <search>
          <query>index=email sourcetype=smtp:tls organization_name=$organization_name$ policies{}.policy_domain=$policy_domain$
| rename policies{}.policy_domain as policy_domain
| rename policies{}.policy_type as policy_type
| rename policies{}.failed_session_count as failed_sessions
| rename policies{}.failure_details{}.failed_session_count as failed_sessions
| rename policies{}.successful_session_count as successful_sessions
| rename policies{}.failure_details{}.sending_mta_ip as sending_mta_ip
| rename policies{}.failure_details{}.receiving_ip as receiving_ip
| rename policies{}.failure_details{}.receiving_mx_hostname as receiving_mx_hostname
| rename policies{}.failure_details{}.result_type as failure_type
| fillnull value=0 failed_sessions
| stats sum(failed_sessions) as failed_sessions sum(successful_sessions) as successful_sessions by policy_domain
| sort -successful_sessions 0</query>
          <earliest>$time.earliest$</earliest>
          <latest>$time.latest$</latest>
        </search>
        <option name="drilldown">none</option>
        <option name="refresh.display">progressbar</option>
      </table>
    </panel>
  </row>
  <row>
    <panel>
      <title>Failure details</title>
      <table>
        <search>
          <query>index=email sourcetype=smtp:tls organization_name=$organization_name$ policies{}.policy_domain=$policy_domain$ policies{}.failure_details{}.result_type=*
| rename policies{}.policy_domain as policy_domain
| rename policies{}.policy_type as policy_type
| rename policies{}.failed_session_count as failed_sessions
| rename policies{}.failure_details{}.failed_session_count as failed_sessions
| rename policies{}.successful_session_count as successful_sessions
| rename policies{}.failure_details{}.sending_mta_ip as sending_mta_ip
| rename policies{}.failure_details{}.receiving_ip as receiving_ip
| rename policies{}.failure_details{}.receiving_mx_hostname as receiving_mx_hostname
| fillnull value=0 failed_sessions
| rename policies{}.failure_details{}.result_type as failure_type
| table _time organization_name policy_domain policy_type failed_sessions successful_sessions sending_mta_ip receiving_ip receiving_mx_hostname failure_type
| sort by -_time 0</query>
          <earliest>$time.earliest$</earliest>
          <latest>$time.latest$</latest>
        </search>
        <option name="drilldown">none</option>
        <option name="refresh.display">progressbar</option>
      </table>
    </panel>
  </row>
</form>
tests.py (36 lines changed)
@@ -43,11 +43,12 @@ class Test(unittest.TestCase):

    def testExtractReportXMLComparator(self):
        """Test XML comparator function"""
        print()
        xmlnice = open("samples/extract_report/nice-input.xml").read()
        print(xmlnice)
        xmlchanged = minify_xml(open("samples/extract_report/changed-input.xml").read())
        print(xmlchanged)
        xmlnice_file = open("samples/extract_report/nice-input.xml")
        xmlnice = xmlnice_file.read()
        xmlnice_file.close()
        xmlchanged_file = open("samples/extract_report/changed-input.xml")
        xmlchanged = minify_xml(xmlchanged_file.read())
        xmlchanged_file.close()
        self.assertTrue(compare_xml(xmlnice, xmlnice))
        self.assertTrue(compare_xml(xmlchanged, xmlchanged))
        self.assertFalse(compare_xml(xmlnice, xmlchanged))
@@ -62,7 +63,9 @@ class Test(unittest.TestCase):
            data = f.read()
        print("Testing {0}: ".format(file), end="")
        xmlout = parsedmarc.extract_report(data)
        xmlin = open("samples/extract_report/nice-input.xml").read()
        xmlin_file = open("samples/extract_report/nice-input.xml")
        xmlin = xmlin_file.read()
        xmlin_file.close()
        self.assertTrue(compare_xml(xmlout, xmlin))
        print("Passed!")

@@ -71,8 +74,10 @@ class Test(unittest.TestCase):
        print()
        file = "samples/extract_report/nice-input.xml"
        print("Testing {0}: ".format(file), end="")
        xmlout = parsedmarc.extract_report(file)
        xmlin = open("samples/extract_report/nice-input.xml").read()
        xmlout = parsedmarc.extract_report_from_file_path(file)
        xmlin_file = open("samples/extract_report/nice-input.xml")
        xmlin = xmlin_file.read()
        xmlin_file.close()
        self.assertTrue(compare_xml(xmlout, xmlin))
        print("Passed!")

@@ -82,7 +87,9 @@ class Test(unittest.TestCase):
        file = "samples/extract_report/nice-input.xml.gz"
        print("Testing {0}: ".format(file), end="")
        xmlout = parsedmarc.extract_report_from_file_path(file)
        xmlin = open("samples/extract_report/nice-input.xml").read()
        xmlin_file = open("samples/extract_report/nice-input.xml")
        xmlin = xmlin_file.read()
        xmlin_file.close()
        self.assertTrue(compare_xml(xmlout, xmlin))
        print("Passed!")

@@ -92,12 +99,13 @@ class Test(unittest.TestCase):
        file = "samples/extract_report/nice-input.xml.zip"
        print("Testing {0}: ".format(file), end="")
        xmlout = parsedmarc.extract_report_from_file_path(file)
        print(xmlout)
        xmlin = minify_xml(open("samples/extract_report/nice-input.xml").read())
        print(xmlin)
        xmlin_file = open("samples/extract_report/nice-input.xml")
        xmlin = minify_xml(xmlin_file.read())
        xmlin_file.close()
        self.assertTrue(compare_xml(xmlout, xmlin))
        xmlin = minify_xml(open("samples/extract_report/changed-input.xml").read())
        print(xmlin)
        xmlin_file = open("samples/extract_report/changed-input.xml")
        xmlin = xmlin_file.read()
        xmlin_file.close()
        self.assertFalse(compare_xml(xmlout, xmlin))
        print("Passed!")

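The test changes replace bare open(...).read() expressions with explicit open/read/close so file handles are not left for the garbage collector to clean up. The same intent can be expressed a little more tightly with a context manager; this helper is only a sketch, not part of the test suite:

def read_text(path: str) -> str:
    # Equivalent to the explicit open/read/close above; the context manager
    # guarantees the handle is closed even if read() raises.
    with open(path) as handle:
        return handle.read()


xmlnice = read_text("samples/extract_report/nice-input.xml")
xmlchanged = read_text("samples/extract_report/changed-input.xml")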