- Ignore duplicate aggregate DMARC reports with the same `org_name` and `report_id` seen within the same hour ([#539](https://github.com/domainaware/parsedmarc/issues/539))
- Fix saving SMTP TLS reports to OpenSearch (PR #585, closes issue #576)
- Add 303 entries to `base_reverse_dns_map.csv`
This commit is contained in:
Sean Whalen
2025-01-09 22:22:55 -05:00
parent 7da57c6382
commit 010f1f84a7
3 changed files with 10 additions and 5 deletions

View File

@@ -1,10 +1,17 @@
Changelog
=========
8.17.0
------
- Ignore duplicate aggregate DMARC reports with the same `org_name` and `report_id` seen within the same hour ([#539](https://github.com/domainaware/parsedmarc/issues/539))
- Fix saving SMTP TLS reports to OpenSearch (PR #585, closes issue #576)
- Add 303 entries to `base_reverse_dns_map.csv`
8.16.1
------
- Ignore aggregate DMARC reports seen within a period of one hour (#535)
- Failed attempt to ignore aggregate DMARC reports seen within a period of one hour (#535)
8.16.0
------

View File

@@ -39,7 +39,7 @@ from parsedmarc.utils import is_outlook_msg, convert_outlook_msg
from parsedmarc.utils import parse_email
from parsedmarc.utils import timestamp_to_human, human_timestamp_to_datetime
__version__ = "8.16.1"
__version__ = "8.17.0"
logger.debug("parsedmarc v{0}".format(__version__))
@@ -1666,8 +1666,7 @@ def get_dmarc_reports_from_mailbox(
aggregate_reports.append(parsed_email["report"])
else:
logger.debug(
"Skipping duplicate aggregate report "
f"with ID: {report_id}"
"Skipping duplicate aggregate report " f"with ID: {report_id}"
)
aggregate_report_msg_uids.append(msg_uid)
elif parsed_email["report_type"] == "forensic":

View File

@@ -1,7 +1,6 @@
#!/usr/bin/env python3
import os
import sys
import glob
import csv