Compare commits


2 Commits

| Author | SHA1 | Message | Date |
|---|---|---|---|
| copilot-swe-agent[bot] | b1e76c8569 | Fix IMAP since option date format to use RFC 3501 compliant DD-Mon-YYYY format (Co-authored-by: seanthegeek <44679+seanthegeek@users.noreply.github.com>) | 2025-12-29 17:49:27 +00:00 |
| copilot-swe-agent[bot] | a5de313a57 | Initial plan | 2025-12-29 17:43:14 +00:00 |
11 changed files with 30 additions and 133 deletions

View File

@@ -30,7 +30,7 @@ jobs:
 strategy:
   fail-fast: false
   matrix:
-    python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
+    python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
 steps:
   - uses: actions/checkout@v5

View File

@@ -1,28 +1,5 @@
 # Changelog
-## 9.0.10
-- Support Python 3.14+
-## 9.0.9
-### Fixes
-- Validate that a string is base64-encoded before trying to base64 decode it. (PRs #648 and #649)
-## 9.0.8
-### Fixes
-- Fix logging configuration not propagating to child parser processes (#646).
-- Update `mailsuite` dependency to `>=1.11.1` to solve issues with iCloud IMAP (#493).
-## 9.0.7
-### Fixes
-- Fix IMAP `since` option (PR #645 closes issues #581 and #643).
 ## 9.0.6
 ### Fixes

View File

@@ -61,4 +61,4 @@ for RHEL or Debian.
 | 3.11 | ✅ | Actively maintained; supported until June 2028 (Debian 12) |
 | 3.12 | ✅ | Actively maintained; supported until May 2035 (RHEL 10) |
 | 3.13 | ✅ | Actively maintained; supported until June 2030 (Debian 13) |
-| 3.14 | | Actively maintained |
+| 3.14 | | Not currently supported due to [this imapclient bug](https://github.com/mjs/imapclient/issues/618) |

ci.ini
View File

@@ -3,7 +3,6 @@ save_aggregate = True
 save_forensic = True
 save_smtp_tls = True
 debug = True
-offline = True
 [elasticsearch]
 hosts = http://localhost:9200

View File

@@ -61,7 +61,7 @@ for RHEL or Debian.
 | 3.11 | ✅ | Actively maintained; supported until June 2028 (Debian 12) |
 | 3.12 | ✅ | Actively maintained; supported until May 2035 (RHEL 10) |
 | 3.13 | ✅ | Actively maintained; supported until June 2030 (Debian 13) |
-| 3.14 | | Actively maintained |
+| 3.14 | | Not currently supported due to [this imapclient bug](https://github.com/mjs/imapclient/issues/618) |
 ```{toctree}
 :caption: 'Contents'

File diff suppressed because one or more lines are too long

View File

@@ -595,12 +595,18 @@ def parsed_smtp_tls_reports_to_csv_rows(
if "mx_host_patterns" in policy:
record["mx_host_patterns"] = "|".join(policy["mx_host_patterns"])
successful_record = record.copy()
successful_record["policy_domain"] = policy["policy_domain"]
successful_record["policy_type"] = policy["policy_type"]
successful_record["policy_domain"] = policy[
"policy_domain"
]
successful_record["policy_type"] = policy[
"policy_type"
]
successful_record["successful_session_count"] = policy[
"successful_session_count"
]
successful_record["failed_session_count"] = policy["failed_session_count"]
successful_record["failed_session_count"] = policy[
"failed_session_count"
]
rows.append(successful_record)
if "failure_details" in policy:
for failure_details in policy["failure_details"]:
@@ -751,8 +757,8 @@ def parse_aggregate_report_xml(
 new_report_metadata["report_id"] = report_id
 date_range = report["report_metadata"]["date_range"]
-begin_ts = int(date_range["begin"].split(".")[0])
-end_ts = int(date_range["end"].split(".")[0])
+begin_ts = int(date_range["begin"])
+end_ts = int(date_range["end"])
 span_seconds = end_ts - begin_ts
 normalize_timespan = span_seconds > normalize_timespan_threshold_hours * 3600
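The removed `.split(".")[0]` calls tolerate epoch timestamps that carry a fractional part, which some report generators emit; the plain `int()` that replaces them raises on such strings. A minimal sketch of the difference (the sample timestamp values are hypothetical):

```python
# int() accepts only integral strings; splitting on "." first also
# accepts values such as "1706227200.0".
for raw in ("1706227200", "1706227200.0"):
    print(int(raw.split(".")[0]))  # 1706227200 both times

try:
    int("1706227200.0")  # what the added lines would attempt
except ValueError as error:
    print(error)  # invalid literal for int() with base 10: '1706227200.0'
```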
@@ -892,11 +898,7 @@ def extract_report(content: Union[bytes, str, BinaryIO]) -> str:
 try:
     if isinstance(content, str):
         try:
-            file_object = BytesIO(
-                b64decode(
-                    content.replace("\n", "").replace("\r", ""), validate=True
-                )
-            )
+            file_object = BytesIO(b64decode(content))
         except binascii.Error:
             return content
         header = file_object.read(6)
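In the removed version, `validate=True` makes `b64decode()` raise `binascii.Error` on any non-alphabet character instead of silently discarding it, so plain-text input reliably falls through to the `return content` path; the newline stripping is needed because MIME base64 is line-wrapped. A small sketch with hypothetical inputs:

```python
import binascii
from base64 import b64decode

# Base64 of "<feedback></feedback>" plus a trailing newline, as a
# hypothetical stand-in for a line-wrapped report attachment.
wrapped = "PGZlZWRiYWNrPjwvZmVlZGJhY2s+\n"

# The removed code path: strip line breaks, then decode strictly.
print(b64decode(wrapped.replace("\n", "").replace("\r", ""), validate=True))
# b'<feedback></feedback>'

# Strict decoding rejects non-base64 text outright, so a raw XML or JSON
# report is returned as-is rather than mis-decoded.
try:
    b64decode("<feedback>not base64</feedback>", validate=True)
except binascii.Error as error:
    print("rejected:", error)
```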
@@ -1946,9 +1948,7 @@ def get_dmarc_reports_from_mailbox(
"Only days and weeks values in 'since' option are \
considered for IMAP connections. Examples: 2d or 1w"
)
since = (datetime.now(timezone.utc) - timedelta(minutes=_since)).strftime(
"%d-%b-%Y"
)
since = (datetime.now(timezone.utc) - timedelta(minutes=_since)).strftime("%d-%b-%Y")
current_time = datetime.now(timezone.utc).strftime("%d-%b-%Y")
elif isinstance(connection, MSGraphConnection):
since = (
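Both the removed and added lines compute the same date string; the hunk only collapses the line wrapping. The format itself is the point of the commit: RFC 3501 requires dates in IMAP search criteria as `DD-Mon-YYYY` (e.g. `29-Dec-2025`), so an ISO `YYYY-MM-DD` string is not a valid `SINCE` argument. A minimal sketch (the one-week value is hypothetical):

```python
from datetime import datetime, timedelta, timezone

_since = 7 * 24 * 60  # "1w" converted to minutes (hypothetical value)
since = (datetime.now(timezone.utc) - timedelta(minutes=_since)).strftime("%d-%b-%Y")
print(since)  # e.g. "22-Dec-2025", suitable for an IMAP SEARCH SINCE criterion

# Caveat: %b is locale-dependent, so a non-English locale can yield a
# month abbreviation that RFC 3501 does not define.
```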

View File

@@ -46,7 +46,6 @@ from parsedmarc.mail import (
     MSGraphConnection,
 )
 from parsedmarc.mail.graph import AuthMethod
-from parsedmarc.types import ParsingResults
 from parsedmarc.utils import get_base_domain, get_reverse_dns, is_mbox
 # Increase the max header limit for very large emails. `_MAXHEADERS` is a
@@ -68,48 +67,6 @@ def _str_to_list(s):
 return list(map(lambda i: i.lstrip(), _list))
-def _configure_logging(log_level, log_file=None):
-    """
-    Configure logging for the current process.
-    This is needed for child processes to properly log messages.
-    Args:
-        log_level: The logging level (e.g., logging.DEBUG, logging.WARNING)
-        log_file: Optional path to log file
-    """
-    # Get the logger
-    from parsedmarc.log import logger
-    # Set the log level
-    logger.setLevel(log_level)
-    # Add StreamHandler with formatter if not already present
-    # Check if we already have a StreamHandler to avoid duplicates
-    # Use exact type check to distinguish from FileHandler subclass
-    has_stream_handler = any(type(h) is logging.StreamHandler for h in logger.handlers)
-    if not has_stream_handler:
-        formatter = logging.Formatter(
-            fmt="%(levelname)8s:%(filename)s:%(lineno)d:%(message)s",
-            datefmt="%Y-%m-%d:%H:%M:%S",
-        )
-        handler = logging.StreamHandler()
-        handler.setFormatter(formatter)
-        logger.addHandler(handler)
-    # Add FileHandler if log_file is specified
-    if log_file:
-        try:
-            fh = logging.FileHandler(log_file, "a")
-            formatter = logging.Formatter(
-                "%(asctime)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(message)s"
-            )
-            fh.setFormatter(formatter)
-            logger.addHandler(fh)
-        except (IOError, OSError, PermissionError) as error:
-            logger.warning("Unable to write to log file: {}".format(error))
 def cli_parse(
     file_path,
     sa,
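One detail in the removed helper worth noting: it checks handler types with `type(h) is logging.StreamHandler` rather than `isinstance()`, because `FileHandler` subclasses `StreamHandler` and would otherwise count as an existing console handler. A minimal demonstration (the log path is hypothetical):

```python
import logging

file_handler = logging.FileHandler("/tmp/example.log")  # hypothetical path
print(isinstance(file_handler, logging.StreamHandler))  # True: subclass matches
print(type(file_handler) is logging.StreamHandler)      # False: exact-type check
```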
@@ -122,29 +79,8 @@ def cli_parse(
     reverse_dns_map_url,
     normalize_timespan_threshold_hours,
     conn,
-    log_level=logging.ERROR,
-    log_file=None,
 ):
-    """Separated this function for multiprocessing
-    Args:
-        file_path: Path to the report file
-        sa: Strip attachment payloads flag
-        nameservers: List of nameservers
-        dns_timeout: DNS timeout
-        ip_db_path: Path to IP database
-        offline: Offline mode flag
-        always_use_local_files: Always use local files flag
-        reverse_dns_map_path: Path to reverse DNS map
-        reverse_dns_map_url: URL to reverse DNS map
-        normalize_timespan_threshold_hours: Timespan threshold
-        conn: Pipe connection for IPC
-        log_level: Logging level for this process
-        log_file: Optional path to log file
-    """
-    # Configure logging in this child process
-    _configure_logging(log_level, log_file)
+    """Separated this function for multiprocessing"""
     try:
         file_results = parse_report_file(
             file_path,
@@ -1525,10 +1461,6 @@ def _main():
     if n_procs < 1:
         n_procs = 1
-    # Capture the current log level to pass to child processes
-    current_log_level = logger.level
-    current_log_file = opts.log_file
     for batch_index in range((len(file_paths) + n_procs - 1) // n_procs):
         processes = []
         connections = []
@@ -1554,8 +1486,6 @@ def _main():
                 opts.reverse_dns_map_url,
                 opts.normalize_timespan_threshold_hours,
                 child_conn,
-                current_log_level,
-                current_log_file,
             ),
         )
         processes.append(process)
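These two hunks strip the plumbing that forwarded the parent's log level and log file into each worker. The rationale for that plumbing (issue #646 in the removed changelog entry) is that child processes do not inherit runtime logging configuration, particularly under the `spawn` start method, where each worker begins with a fresh interpreter. A self-contained sketch of the pattern; the `parsedmarc` logger name is an assumption:

```python
import logging
import multiprocessing


def worker(log_level):
    # Under "spawn", handlers added in the parent no longer exist here,
    # so the child must configure its own, as _configure_logging did.
    logger = logging.getLogger("parsedmarc")  # assumed logger name
    logger.setLevel(log_level)
    logger.addHandler(logging.StreamHandler())
    logger.debug("visible only because this process added its own handler")


if __name__ == "__main__":
    multiprocessing.set_start_method("spawn", force=True)
    process = multiprocessing.Process(target=worker, args=(logging.DEBUG,))
    process.start()
    process.join()
```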
@@ -1758,13 +1688,13 @@ def _main():
logger.exception("Mailbox Error")
exit(1)
parsing_results: ParsingResults = {
results = {
"aggregate_reports": aggregate_reports,
"forensic_reports": forensic_reports,
"smtp_tls_reports": smtp_tls_reports,
}
process_reports(parsing_results)
process_reports(results)
if opts.smtp_host:
try:
@@ -1778,7 +1708,7 @@ def _main():
         else _str_to_list(str(opts.smtp_to))
     )
     email_results(
-        parsing_results,
+        results,
         opts.smtp_host,
         opts.smtp_from,
         smtp_to_value,
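The branch also drops the `ParsingResults` annotation along with the `parsedmarc.types` import removed above. For orientation, a hypothetical reconstruction of the shape, inferred only from the three keys visible in this diff; the real definition in `parsedmarc.types` may differ:

```python
from typing import Any, Dict, List, TypedDict


class ParsingResults(TypedDict):
    # Hypothetical: key names taken from the dict literal in this diff.
    aggregate_reports: List[Dict[str, Any]]
    forensic_reports: List[Dict[str, Any]]
    smtp_tls_reports: List[Dict[str, Any]]
```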

View File

@@ -1,3 +1,3 @@
__version__ = "9.0.10"
__version__ = "9.0.6"
USER_AGENT = f"parsedmarc/{__version__}"

View File

@@ -29,7 +29,7 @@ classifiers = [
"Operating System :: OS Independent",
"Programming Language :: Python :: 3"
]
requires-python = ">=3.9"
requires-python = ">=3.9, <3.14"
dependencies = [
"azure-identity>=1.8.0",
"azure-monitor-ingestion>=1.0.0",
@@ -48,7 +48,7 @@ dependencies = [
"imapclient>=2.1.0",
"kafka-python-ng>=2.2.2",
"lxml>=4.4.0",
"mailsuite>=1.11.2",
"mailsuite>=1.11.0",
"msgraph-core==0.2.2",
"opensearch-py>=2.4.2,<=3.0.0",
"publicsuffixlist>=0.10.0",

View File

@@ -12,9 +12,6 @@ from lxml import etree
 import parsedmarc
 import parsedmarc.utils
-# Detect if running in GitHub Actions to skip DNS lookups
-OFFLINE_MODE = os.environ.get("GITHUB_ACTIONS", "false").lower() == "true"
 def minify_xml(xml_string):
     parser = etree.XMLParser(remove_blank_text=True)
@@ -124,7 +121,7 @@ class Test(unittest.TestCase):
         continue
     print("Testing {0}: ".format(sample_path), end="")
     parsed_report = parsedmarc.parse_report_file(
-        sample_path, always_use_local_files=True, offline=OFFLINE_MODE
+        sample_path, always_use_local_files=True
     )["report"]
     parsedmarc.parsed_aggregate_reports_to_csv(parsed_report)
     print("Passed!")
@@ -132,7 +129,7 @@ class Test(unittest.TestCase):
 def testEmptySample(self):
     """Test empty/unparsable report"""
     with self.assertRaises(parsedmarc.ParserError):
-        parsedmarc.parse_report_file("samples/empty.xml", offline=OFFLINE_MODE)
+        parsedmarc.parse_report_file("samples/empty.xml")
 def testForensicSamples(self):
     """Test sample forensic/ruf/failure DMARC reports"""
@@ -142,12 +139,8 @@ class Test(unittest.TestCase):
print("Testing {0}: ".format(sample_path), end="")
with open(sample_path) as sample_file:
sample_content = sample_file.read()
parsed_report = parsedmarc.parse_report_email(
sample_content, offline=OFFLINE_MODE
)["report"]
parsed_report = parsedmarc.parse_report_file(
sample_path, offline=OFFLINE_MODE
)["report"]
parsed_report = parsedmarc.parse_report_email(sample_content)["report"]
parsed_report = parsedmarc.parse_report_file(sample_path)["report"]
parsedmarc.parsed_forensic_reports_to_csv(parsed_report)
print("Passed!")
@@ -159,9 +152,7 @@ class Test(unittest.TestCase):
     if os.path.isdir(sample_path):
         continue
     print("Testing {0}: ".format(sample_path), end="")
-    parsed_report = parsedmarc.parse_report_file(
-        sample_path, offline=OFFLINE_MODE
-    )["report"]
+    parsed_report = parsedmarc.parse_report_file(sample_path)["report"]
     parsedmarc.parsed_smtp_tls_reports_to_csv(parsed_report)
     print("Passed!")