Mirror of https://github.com/domainaware/parsedmarc.git, synced 2026-02-19 07:56:23 +00:00

Compare commits (35 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 62ccc11925 | |
| | c32ca3cae3 | |
| | 010f1f84a7 | |
| | 7da57c6382 | |
| | d08e29a306 | |
| | e1e53ad4cb | |
| | 4670e9687d | |
| | 7f8a2c08cd | |
| | e9c05dd0bf | |
| | 9348a474dd | |
| | e0decaba8c | |
| | 26a651cded | |
| | bcfcd93fc6 | |
| | 54d5ed3543 | |
| | 1efbc87e0e | |
| | e78e7f64af | |
| | ad9de65b99 | |
| | b9df12700b | |
| | 20843b920f | |
| | e5ae89fedf | |
| | f148cff11c | |
| | 4583769e04 | |
| | 0ecb80b27c | |
| | b8e62e6d3b | |
| | c67953a2c5 | |
| | 27dff4298c | |
| | f2133aacd4 | |
| | 31917e58a9 | |
| | bffb98d217 | |
| | 1f93b3a7ea | |
| | 88debb9729 | |
| | a8a5564780 | |
| | 1e26f95b7b | |
| | 82b48e4d01 | |
| | 617b7c5b4a | |
.gitignore (vendored): 3 changes

```diff
@@ -136,3 +136,6 @@ samples/private
 *.html
 *.sqlite-journal
+
+parsedmarc.ini
+scratch.py
 
```
.vscode/settings.json (vendored): 1 change

```diff
@@ -70,6 +70,7 @@
     "modindex",
     "msgconvert",
     "msgraph",
+    "MSSP",
     "Munge",
     "ndjson",
     "newkey",
```
CHANGELOG.md: 17 changes

```diff
@@ -1,6 +1,23 @@
 Changelog
 =========
 
+8.17.0
+------
+
+- Ignore duplicate aggregate DMARC reports with the same `org_name` and `report_id` seen within the same hour (Fixes [#539](https://github.com/domainaware/parsedmarc/issues/539))
+- Fix saving SMTP TLS reports to OpenSearch (PR #585 closed issue #576)
+- Add 303 entries to `base_reverse_dns_map.csv`
+
+8.16.1
+------
+
+- Failed attempt to ignore aggregate DMARC reports seen within a period of one hour (#535)
+
+8.16.0
+------
+
+- Add a `since` option to only search for emails since a certain time (PR #527)
+
 8.15.4
 ------
 
```
build.sh: 1 change

```diff
@@ -16,6 +16,7 @@ make html
 touch build/html/.nojekyll
 cp -rf build/html/* ../../parsedmarc-docs/
 cd ..
+./sortmaps.py
 python3 tests.py
 rm -rf dist/ build/
 hatch build
```
docker-compose.yml

```diff
@@ -28,3 +28,30 @@ services:
       interval: 10s
       timeout: 10s
       retries: 24
+
+  opensearch:
+    image: opensearchproject/opensearch:2.18.0
+    environment:
+      - network.host=127.0.0.1
+      - http.host=0.0.0.0
+      - node.name=opensearch
+      - discovery.type=single-node
+      - cluster.name=parsedmarc-cluster
+      - discovery.seed_hosts=opensearch
+      - bootstrap.memory_lock=true
+      - OPENSEARCH_INITIAL_ADMIN_PASSWORD=${OPENSEARCH_INITIAL_ADMIN_PASSWORD}
+    ports:
+      - 127.0.0.1:9201:9200
+    ulimits:
+      memlock:
+        soft: -1
+        hard: -1
+    healthcheck:
+      test:
+        [
+          "CMD-SHELL",
+          "curl -s -XGET http://localhost:9201/_cluster/health?pretty | grep status | grep -q '\\(green\\|yellow\\)'"
+        ]
+      interval: 10s
+      timeout: 10s
+      retries: 24
```
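The service above can be smoke-tested from the host once the stack is up. A minimal sketch, not part of the repository: it polls the mapped port (127.0.0.1:9201, per the `ports:` entry) for the same green-or-yellow cluster status the container healthcheck greps for. Note that the OpenSearch image's security plugin may enforce HTTPS and basic auth by default, in which case the URL and credentials would need adjusting.

```python
# Hedged sketch: poll the OpenSearch mapped port from the host until the
# cluster reports green or yellow, mirroring the compose healthcheck above.
import json
import time
import urllib.request


def wait_for_opensearch(url="http://127.0.0.1:9201/_cluster/health", timeout=240):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            with urllib.request.urlopen(url, timeout=5) as response:
                status = json.load(response).get("status")
                if status in ("green", "yellow"):
                    return status
        except OSError:
            pass  # container still starting, or the port is not bound yet
        time.sleep(10)  # matches the healthcheck interval above
    raise TimeoutError("OpenSearch did not become healthy in time")


if __name__ == "__main__":
    print(wait_for_opensearch())
```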
grafana/Grafana-DMARC_Reports.json-new_panel.json (new file): 5901 additions

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long
parsedmarc/__init__.py

```diff
@@ -39,7 +39,7 @@ from parsedmarc.utils import is_outlook_msg, convert_outlook_msg
 from parsedmarc.utils import parse_email
 from parsedmarc.utils import timestamp_to_human, human_timestamp_to_datetime
 
-__version__ = "8.15.4"
+__version__ = "8.17.0"
 
 logger.debug("parsedmarc v{0}".format(__version__))
 
```
```diff
@@ -54,6 +54,7 @@ MAGIC_XML = b"\x3c\x3f\x78\x6d\x6c\x20"
 MAGIC_JSON = b"\x7b"
 
 IP_ADDRESS_CACHE = ExpiringDict(max_len=10000, max_age_seconds=14400)
+SEEN_AGGREGATE_REPORT_IDS = ExpiringDict(max_len=100000000, max_age_seconds=3600)
 REVERSE_DNS_MAP = dict()
 
 
```
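`SEEN_AGGREGATE_REPORT_IDS` is what implements the changelog's one-hour deduplication window: `expiringdict` evicts entries after `max_age_seconds`, so a report key only counts as a duplicate if it reappears within 3600 seconds. Report IDs are chosen by the reporting organization, which is why the key combines `org_name` and `report_id`. A minimal sketch of the mechanism; the helper function is illustrative, not part of the codebase, which inlines the check at each call site:

```python
# Minimal sketch of the dedup cache above, using the same expiringdict
# package parsedmarc already uses for IP_ADDRESS_CACHE.
from expiringdict import ExpiringDict

SEEN_AGGREGATE_REPORT_IDS = ExpiringDict(max_len=100000000, max_age_seconds=3600)


def is_duplicate_aggregate_report(report: dict) -> bool:
    """Record this report's key and say whether it was already seen."""
    metadata = report["report_metadata"]
    report_key = f"{metadata['org_name']}_{metadata['report_id']}"
    if report_key in SEEN_AGGREGATE_REPORT_IDS:
        return True
    SEEN_AGGREGATE_REPORT_IDS[report_key] = True
    return False
```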
```diff
@@ -1470,7 +1471,17 @@ def get_dmarc_reports_from_mbox(
                     strip_attachment_payloads=sa,
                 )
                 if parsed_email["report_type"] == "aggregate":
-                    aggregate_reports.append(parsed_email["report"])
+                    report_org = parsed_email["report"]["report_metadata"]["org_name"]
+                    report_id = parsed_email["report"]["report_metadata"]["report_id"]
+                    report_key = f"{report_org}_{report_id}"
+                    if report_key not in SEEN_AGGREGATE_REPORT_IDS:
+                        SEEN_AGGREGATE_REPORT_IDS[report_key] = True
+                        aggregate_reports.append(parsed_email["report"])
+                    else:
+                        logger.debug(
+                            "Skipping duplicate aggregate report "
+                            f"from {report_org} with ID: {report_id}"
+                        )
                 elif parsed_email["report_type"] == "forensic":
                     forensic_reports.append(parsed_email["report"])
                 elif parsed_email["report_type"] == "smtp_tls":
```
```diff
@@ -1647,7 +1658,16 @@ def get_dmarc_reports_from_mailbox(
                     keep_alive=connection.keepalive,
                 )
                 if parsed_email["report_type"] == "aggregate":
-                    aggregate_reports.append(parsed_email["report"])
+                    report_org = parsed_email["report"]["report_metadata"]["org_name"]
+                    report_id = parsed_email["report"]["report_metadata"]["report_id"]
+                    report_key = f"{report_org}_{report_id}"
+                    if report_key not in SEEN_AGGREGATE_REPORT_IDS:
+                        SEEN_AGGREGATE_REPORT_IDS[report_key] = True
+                        aggregate_reports.append(parsed_email["report"])
+                    else:
+                        logger.debug(
+                            "Skipping duplicate aggregate report " f"with ID: {report_id}"
+                        )
                     aggregate_report_msg_uids.append(msg_uid)
                 elif parsed_email["report_type"] == "forensic":
                     forensic_reports.append(parsed_email["report"])
```
parsedmarc/cli.py

```diff
@@ -46,6 +46,7 @@ from parsedmarc.mail.graph import AuthMethod
 
 from parsedmarc.log import logger
 from parsedmarc.utils import is_mbox, get_reverse_dns
+from parsedmarc import SEEN_AGGREGATE_REPORT_IDS
 
 formatter = logging.Formatter(
     fmt="%(levelname)8s:%(filename)s:%(lineno)d:%(message)s",
```
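Because Python caches imported modules in `sys.modules`, this import hands the CLI the very same `ExpiringDict` instance created in `parsedmarc/__init__.py`, so reports deduplicated by the library functions and by `_main()` share one cache. A self-contained illustration of that module-singleton behavior, using generic names rather than parsedmarc's:

```python
# Self-contained illustration: every importer of a module sees the same
# module object (and thus the same top-level dict), because imports are
# resolved through the sys.modules cache.
import sys
import types

demo = types.ModuleType("shared_state_demo")
demo.SEEN = {}
sys.modules["shared_state_demo"] = demo

import shared_state_demo  # served from sys.modules, not loaded from disk

shared_state_demo.SEEN["example-org_report-1"] = True
print(demo.SEEN)  # {'example-org_report-1': True} -- one shared dict
```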
```diff
@@ -1418,7 +1419,17 @@ def _main():
                 logger.error("Failed to parse {0} - {1}".format(result[1], result[0]))
             else:
                 if result[0]["report_type"] == "aggregate":
-                    aggregate_reports.append(result[0]["report"])
+                    report_org = result[0]["report"]["report_metadata"]["org_name"]
+                    report_id = result[0]["report"]["report_metadata"]["report_id"]
+                    report_key = f"{report_org}_{report_id}"
+                    if report_key not in SEEN_AGGREGATE_REPORT_IDS:
+                        SEEN_AGGREGATE_REPORT_IDS[report_key] = True
+                        aggregate_reports.append(result[0]["report"])
+                    else:
+                        logger.debug(
+                            "Skipping duplicate aggregate report "
+                            f"from {report_org} with ID: {report_id}"
+                        )
                 elif result[0]["report_type"] == "forensic":
                     forensic_reports.append(result[0]["report"])
                 elif result[0]["report_type"] == "smtp_tls":
```
parsedmarc/opensearch.py

```diff
@@ -202,13 +202,15 @@ class _SMTPTLSPolicyDoc(InnerDoc):
         receiving_ip,
         receiving_mx_helo,
         failed_session_count,
+        sending_mta_ip=None,
         receiving_mx_hostname=None,
         additional_information_uri=None,
         failure_reason_code=None,
     ):
-        self.failure_details.append(
+        _details = _SMTPTLSFailureDetailsDoc(
             result_type=result_type,
             ip_address=ip_address,
+            sending_mta_ip=sending_mta_ip,
             receiving_mx_hostname=receiving_mx_hostname,
             receiving_mx_helo=receiving_mx_helo,
             receiving_ip=receiving_ip,
@@ -216,9 +218,10 @@ class _SMTPTLSPolicyDoc(InnerDoc):
             additional_information=additional_information_uri,
             failure_reason_code=failure_reason_code,
         )
+        self.failure_details.append(_details)
 
 
-class _SMTPTLSFailureReportDoc(Document):
+class _SMTPTLSReportDoc(Document):
     class Index:
         name = "smtp_tls"
```
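This is the crash behind "Fix saving SMTP TLS reports to OpenSearch": as reconstructed above, the old code passed the document fields as keyword arguments straight to `failure_details.append()`, and an `append()` accepts exactly one positional argument. A standalone repro of that failure mode, using a plain list and made-up values:

```python
# Standalone repro of the failure mode fixed above (values are made up).
failure_details = []
try:
    failure_details.append(result_type="certificate-expired")  # old pattern
except TypeError as exc:
    print(exc)  # append() takes no keyword arguments

# Fixed pattern: build the details object first, then append it.
_details = {"result_type": "certificate-expired"}
failure_details.append(_details)
print(failure_details)  # [{'result_type': 'certificate-expired'}]
```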
```diff
@@ -499,6 +502,7 @@ def save_aggregate_report_to_opensearch(
         index = "{0}_{1}".format(index, index_suffix)
     if index_prefix:
         index = "{0}{1}".format(index_prefix, index)
+
     index = "{0}-{1}".format(index, index_date)
     index_settings = dict(
         number_of_shards=number_of_shards, number_of_replicas=number_of_replicas
```
```diff
@@ -685,7 +689,7 @@ def save_smtp_tls_report_to_opensearch(
         AlreadySaved
     """
     logger.info("Saving aggregate report to OpenSearch")
-    org_name = report["org_name"]
+    org_name = report["organization_name"]
     report_id = report["report_id"]
     begin_date = human_timestamp_to_datetime(report["begin_date"], to_utc=True)
     end_date = human_timestamp_to_datetime(report["end_date"], to_utc=True)
```
```diff
@@ -741,11 +745,11 @@ def save_smtp_tls_report_to_opensearch(
         number_of_shards=number_of_shards, number_of_replicas=number_of_replicas
     )
 
-    smtp_tls_doc = _SMTPTLSFailureReportDoc(
-        organization_name=report["organization_name"],
-        date_range=[report["date_begin"], report["date_end"]],
-        date_begin=report["date_begin"],
-        date_end=report["date_end"],
+    smtp_tls_doc = _SMTPTLSReportDoc(
+        org_name=report["organization_name"],
+        date_range=[report["begin_date"], report["end_date"]],
+        date_begin=report["begin_date"],
+        date_end=report["end_date"],
         contact_info=report["contact_info"],
         report_id=report["report_id"],
     )
```
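The two hunks above line the OpenSearch writer up with the key names the SMTP TLS parser actually produces (`organization_name`, `begin_date`, `end_date`); the old lookups raised `KeyError` before anything reached the index. A small illustration with an invented report dict, using only key names referenced in the diff:

```python
# Invented report dict; key names come from the new code above.
report = {
    "organization_name": "Example Inc.",
    "begin_date": "2025-01-01 00:00:00",
    "end_date": "2025-01-01 23:59:59",
    "report_id": "2025-01-01-example",
    "contact_info": "smtp-tls-reports@example.com",
}

try:
    report["org_name"]  # the old lookup
except KeyError as exc:
    print("missing key:", exc)

print(report["organization_name"])  # the corrected lookup
```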
```diff
@@ -760,32 +764,48 @@ def save_smtp_tls_report_to_opensearch(
         policy_doc = _SMTPTLSPolicyDoc(
             policy_domain=policy["policy_domain"],
             policy_type=policy["policy_type"],
             succesful_session_count=policy["successful_session_count"],
             failed_session_count=policy["failed_session_count"],
             policy_string=policy_strings,
             mx_host_patterns=mx_host_patterns,
         )
         if "failure_details" in policy:
-            failure_details = policy["failure_details"]
-            receiving_mx_hostname = None
-            additional_information_uri = None
-            failure_reason_code = None
-            if "receiving_mx_hostname" in failure_details:
-                receiving_mx_hostname = failure_details["receiving_mx_hostname"]
-            if "additional_information_uri" in failure_details:
-                additional_information_uri = failure_details[
-                    "additional_information_uri"
-                ]
-            if "failure_reason_code" in failure_details:
-                failure_reason_code = failure_details["failure_reason_code"]
-            policy_doc.add_failure_details(
-                result_type=failure_details["result_type"],
-                ip_address=failure_details["ip_address"],
-                receiving_ip=failure_details["receiving_ip"],
-                receiving_mx_helo=failure_details["receiving_mx_helo"],
-                failed_session_count=failure_details["failed_session_count"],
-                receiving_mx_hostname=receiving_mx_hostname,
-                additional_information_uri=additional_information_uri,
-                failure_reason_code=failure_reason_code,
-            )
+            for failure_detail in policy["failure_details"]:
+                receiving_mx_hostname = None
+                additional_information_uri = None
+                failure_reason_code = None
+                ip_address = None
+                receiving_ip = None
+                receiving_mx_helo = None
+                sending_mta_ip = None
+
+                if "receiving_mx_hostname" in failure_detail:
+                    receiving_mx_hostname = failure_detail["receiving_mx_hostname"]
+                if "additional_information_uri" in failure_detail:
+                    additional_information_uri = failure_detail[
+                        "additional_information_uri"
+                    ]
+                if "failure_reason_code" in failure_detail:
+                    failure_reason_code = failure_detail["failure_reason_code"]
+                if "ip_address" in failure_detail:
+                    ip_address = failure_detail["ip_address"]
+                if "receiving_ip" in failure_detail:
+                    receiving_ip = failure_detail["receiving_ip"]
+                if "receiving_mx_helo" in failure_detail:
+                    receiving_mx_helo = failure_detail["receiving_mx_helo"]
+                if "sending_mta_ip" in failure_detail:
+                    sending_mta_ip = failure_detail["sending_mta_ip"]
+                policy_doc.add_failure_details(
+                    result_type=failure_detail["result_type"],
+                    ip_address=ip_address,
+                    receiving_ip=receiving_ip,
+                    receiving_mx_helo=receiving_mx_helo,
+                    failed_session_count=failure_detail["failed_session_count"],
+                    sending_mta_ip=sending_mta_ip,
+                    receiving_mx_hostname=receiving_mx_hostname,
+                    additional_information_uri=additional_information_uri,
+                    failure_reason_code=failure_reason_code,
+                )
         smtp_tls_doc.policies.append(policy_doc)
 
     create_indexes([index], index_settings)
```
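In the loop above, every field of a failure detail except `result_type` and `failed_session_count` is optional, and the chain of `if key in failure_detail` guards simply normalizes missing keys to `None`. An equivalent, more compact sketch using `dict.get()`; illustrative only, as the project keeps the explicit guards:

```python
# Illustrative only: normalize the optional failure-detail fields to None,
# equivalent to the chain of "if key in failure_detail" guards above.
OPTIONAL_KEYS = (
    "sending_mta_ip",
    "receiving_mx_hostname",
    "receiving_mx_helo",
    "receiving_ip",
    "ip_address",
    "additional_information_uri",
    "failure_reason_code",
)


def normalize_failure_detail(failure_detail: dict) -> dict:
    detail = {key: failure_detail.get(key) for key in OPTIONAL_KEYS}
    # result_type and failed_session_count are treated as required above.
    detail["result_type"] = failure_detail["result_type"]
    detail["failed_session_count"] = failure_detail["failed_session_count"]
    return detail
```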
File diff suppressed because it is too large
pyproject.toml: 2 changes

```diff
@@ -1,6 +1,6 @@
 [build-system]
 requires = [
-    "hatchling>=1.8.1",
+    "hatchling>=1.27.0",
 ]
 build-backend = "hatchling.build"
 
```
```diff
@@ -59,7 +59,7 @@ dependencies = [
 
 [project.optional-dependencies]
 build = [
-    "hatch",
+    "hatch>=1.14.0",
     "myst-parser[linkify]",
     "nose",
     "pytest",
```
sortmaps.py (new executable file): 25 additions

```diff
@@ -0,0 +1,25 @@
+#!/usr/bin/env python3
+
+import os
+import glob
+import csv
+
+
+maps_dir = os.path.join("parsedmarc", "resources", "maps")
+csv_files = glob.glob(os.path.join(maps_dir, "*.csv"))
+
+
+def sort_csv(filepath, column=0):
+    with open(filepath, mode="r", newline="") as infile:
+        reader = csv.reader(infile)
+        header = next(reader)
+        sorted_rows = sorted(reader, key=lambda row: row[column])
+
+    with open(filepath, mode="w", newline="\n") as outfile:
+        writer = csv.writer(outfile)
+        writer.writerow(header)
+        writer.writerows(sorted_rows)
+
+
+for csv_file in csv_files:
+    sort_csv(csv_file)
```
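A quick way to see what `sort_csv()` does: the header row stays first and the data rows come back sorted by the chosen column. The path and rows below are invented for the demonstration, and `sort_csv` is assumed to be in scope (pasted from the script above):

```python
# Demonstration of sort_csv() from the script above; path and rows invented.
import csv
import os
import tempfile

rows = [
    ["domain", "category"],  # header row, preserved in place
    ["zulu.example", "ISP"],
    ["alpha.example", "Email Provider"],
]
path = os.path.join(tempfile.mkdtemp(), "demo.csv")
with open(path, "w", newline="\n") as f:
    csv.writer(f).writerows(rows)

sort_csv(path)  # sorts data rows by column 0, keeping the header first

with open(path, newline="") as f:
    print(list(csv.reader(f)))
# [['domain', 'category'], ['alpha.example', 'Email Provider'],
#  ['zulu.example', 'ISP']]
```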