mirror of
https://github.com/domainaware/parsedmarc.git
synced 2026-03-21 05:55:59 +00:00
Compare commits
15 Commits
9.2.0
...
copilot/su
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
11e615565d | ||
|
|
e3a6ea7e92 | ||
|
|
6fceb3e2ce | ||
|
|
510d5d05a9 | ||
|
|
3445438684 | ||
|
|
17defb75b0 | ||
|
|
893d0a4f03 | ||
|
|
58e07140a8 | ||
|
|
dfdffe4947 | ||
|
|
dd1a8fd461 | ||
|
|
81656c75e9 | ||
|
|
691b0fcd41 | ||
|
|
b9343a295f | ||
|
|
b51a62463f | ||
|
|
66ba5b0e5e |
16
.claude/settings.json
Normal file
16
.claude/settings.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"permissions": {
|
||||
"allow": [
|
||||
"Bash(python -c \"import py_compile; py_compile.compile\\(''parsedmarc/cli.py'', doraise=True\\)\")",
|
||||
"Bash(ruff check:*)",
|
||||
"Bash(ruff format:*)",
|
||||
"Bash(GITHUB_ACTIONS=true pytest --cov tests.py)",
|
||||
"Bash(ls tests*)",
|
||||
"Bash(GITHUB_ACTIONS=true python -m pytest --cov tests.py -x)",
|
||||
"Bash(GITHUB_ACTIONS=true python -m pytest tests.py -x -v)"
|
||||
],
|
||||
"additionalDirectories": [
|
||||
"/tmp"
|
||||
]
|
||||
}
|
||||
}
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -137,7 +137,7 @@ samples/private
|
||||
*.html
|
||||
*.sqlite-journal
|
||||
|
||||
parsedmarc.ini
|
||||
parsedmarc*.ini
|
||||
scratch.py
|
||||
|
||||
parsedmarc/resources/maps/base_reverse_dns.csv
|
||||
|
||||
24
CHANGELOG.md
24
CHANGELOG.md
@@ -1,5 +1,29 @@
|
||||
# Changelog
|
||||
|
||||
## 9.3.0
|
||||
|
||||
### Added
|
||||
|
||||
- SIGHUP-based configuration reload for watch mode — update output
|
||||
destinations, DNS/GeoIP settings, processing flags, and log level
|
||||
without restarting the service or interrupting in-progress report
|
||||
processing. Use `systemctl reload parsedmarc` when running under
|
||||
systemd.
|
||||
- Extracted `_parse_config_file()` and `_init_output_clients()` from
|
||||
`_main()` in `cli.py` to support config reload and reduce code
|
||||
duplication.
|
||||
|
||||
## 9.2.1
|
||||
|
||||
### Added
|
||||
|
||||
- Better checking of `msconfig` configuration (PR #695)
|
||||
|
||||
### Changed
|
||||
|
||||
- Updated `dbip-country-lite` database to version `2026-03`
|
||||
- DNS query error logging level from `warning` to `debug`
|
||||
|
||||
## 9.2.0
|
||||
|
||||
### Added
|
||||
|
||||
45
docker-compose.dashboard-dev.yml
Normal file
45
docker-compose.dashboard-dev.yml
Normal file
@@ -0,0 +1,45 @@
|
||||
name: parsedmarc-dashboards
|
||||
|
||||
include:
|
||||
- docker-compose.yml
|
||||
|
||||
services:
|
||||
kibana:
|
||||
image: docker.elastic.co/kibana/kibana:8.19.7
|
||||
environment:
|
||||
- ELASTICSEARCH_HOSTS=http://elasticsearch:9200
|
||||
ports:
|
||||
- "127.0.0.1:5601:5601"
|
||||
depends_on:
|
||||
elasticsearch:
|
||||
condition: service_healthy
|
||||
|
||||
opensearch-dashboards:
|
||||
image: opensearchproject/opensearch-dashboards:2
|
||||
environment:
|
||||
- OPENSEARCH_HOSTS=["https://opensearch:9200"]
|
||||
ports:
|
||||
- "127.0.0.1:5602:5601"
|
||||
depends_on:
|
||||
opensearch:
|
||||
condition: service_healthy
|
||||
|
||||
grafana:
|
||||
image: grafana/grafana:latest
|
||||
environment:
|
||||
- GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_PASSWORD}
|
||||
- GF_INSTALL_PLUGINS=grafana-piechart-panel,grafana-worldmap-panel
|
||||
ports:
|
||||
- "127.0.0.1:3000:3000"
|
||||
depends_on:
|
||||
elasticsearch:
|
||||
condition: service_healthy
|
||||
|
||||
splunk:
|
||||
image: splunk/splunk:latest
|
||||
environment:
|
||||
- SPLUNK_START_ARGS=--accept-license
|
||||
- "SPLUNK_GENERAL_TERMS=--accept-sgt-current-at-splunk-com"
|
||||
- SPLUNK_PASSWORD=${SPLUNK_PASSWORD}
|
||||
ports:
|
||||
- "127.0.0.1:8000:8000"
|
||||
@@ -48,7 +48,7 @@ services:
|
||||
test:
|
||||
[
|
||||
"CMD-SHELL",
|
||||
"curl -s -XGET http://localhost:9201/_cluster/health?pretty | grep status | grep -q '\\(green\\|yellow\\)'"
|
||||
"curl -sk -u admin:${OPENSEARCH_INITIAL_ADMIN_PASSWORD} -XGET https://localhost:9200/_cluster/health?pretty | grep status | grep -q '\\(green\\|yellow\\)'"
|
||||
]
|
||||
interval: 10s
|
||||
timeout: 10s
|
||||
|
||||
@@ -404,6 +404,7 @@ The full set of configuration options are:
|
||||
retry_attempts = 3
|
||||
retry_delay = 5
|
||||
```
|
||||
|
||||
- `gmail_api`
|
||||
- `credentials_file` - str: Path to file containing the
|
||||
credentials, None to disable (Default: `None`)
|
||||
@@ -442,7 +443,7 @@ The full set of configuration options are:
|
||||
- `dcr_smtp_tls_stream` - str: The stream name for the SMTP TLS reports in the DCR
|
||||
|
||||
:::{note}
|
||||
Information regarding the setup of the Data Collection Rule can be found [here](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal).
|
||||
Information regarding the setup of the Data Collection Rule can be found [in the Azure documentation](https://learn.microsoft.com/en-us/azure/azure-monitor/logs/tutorial-logs-ingestion-portal).
|
||||
:::
|
||||
- `gelf`
|
||||
- `host` - str: The GELF server name or IP address
|
||||
@@ -602,6 +603,7 @@ After=network.target network-online.target elasticsearch.service
|
||||
|
||||
[Service]
|
||||
ExecStart=/opt/parsedmarc/venv/bin/parsedmarc -c /etc/parsedmarc.ini
|
||||
ExecReload=/bin/kill -HUP $MAINPID
|
||||
User=parsedmarc
|
||||
Group=parsedmarc
|
||||
Restart=always
|
||||
@@ -634,6 +636,44 @@ sudo service parsedmarc restart
|
||||
|
||||
:::
|
||||
|
||||
### Reloading configuration without restarting
|
||||
|
||||
When running in watch mode, `parsedmarc` supports reloading its
|
||||
configuration file without restarting the service or interrupting
|
||||
report processing that is already in progress. Send a `SIGHUP` signal
|
||||
to the process, or use `systemctl reload` if the unit file includes
|
||||
the `ExecReload` line shown above:
|
||||
|
||||
```bash
|
||||
sudo systemctl reload parsedmarc
|
||||
```
|
||||
|
||||
The reload takes effect after the current batch of reports finishes
|
||||
processing and all output operations (Elasticsearch, Kafka, S3, etc.)
|
||||
for that batch have completed. The following settings are reloaded:
|
||||
|
||||
- All output destinations (Elasticsearch, OpenSearch, Kafka, S3,
|
||||
Splunk, syslog, GELF, webhooks, Log Analytics)
|
||||
- Multi-tenant index prefix domain map (`index_prefix_domain_map` —
|
||||
the referenced YAML file is re-read on reload)
|
||||
- DNS and GeoIP settings (`nameservers`, `dns_timeout`, `ip_db_path`,
|
||||
`offline`, etc.)
|
||||
- Processing flags (`strip_attachment_payloads`, `batch_size`,
|
||||
`check_timeout`, etc.)
|
||||
- Log level (`debug`, `verbose`, `warnings`, `silent`)
|
||||
|
||||
Mailbox connection settings (IMAP host/credentials, Microsoft Graph,
|
||||
Gmail API, Maildir path) are **not** reloaded — changing those still
|
||||
requires a full restart.
|
||||
|
||||
If the new configuration file contains errors, the reload is aborted
|
||||
and the previous configuration remains active. Check the logs for
|
||||
details:
|
||||
|
||||
```bash
|
||||
journalctl -u parsedmarc.service -r
|
||||
```
|
||||
|
||||
To check the status of the service, run:
|
||||
|
||||
```bash
|
||||
|
||||
@@ -2195,6 +2195,7 @@ def watch_inbox(
|
||||
batch_size: int = 10,
|
||||
since: Optional[Union[datetime, date, str]] = None,
|
||||
normalize_timespan_threshold_hours: float = 24,
|
||||
should_reload: Optional[Callable] = None,
|
||||
):
|
||||
"""
|
||||
Watches the mailbox for new messages and
|
||||
@@ -2222,6 +2223,8 @@ def watch_inbox(
|
||||
batch_size (int): Number of messages to read and process before saving
|
||||
since: Search for messages since certain time
|
||||
normalize_timespan_threshold_hours (float): Normalize timespans beyond this
|
||||
should_reload: Optional callable that returns True when a config
|
||||
reload has been requested (e.g. via SIGHUP)
|
||||
"""
|
||||
|
||||
def check_callback(connection):
|
||||
@@ -2246,7 +2249,14 @@ def watch_inbox(
|
||||
)
|
||||
callback(res)
|
||||
|
||||
mailbox_connection.watch(check_callback=check_callback, check_timeout=check_timeout)
|
||||
watch_kwargs: dict = {
|
||||
"check_callback": check_callback,
|
||||
"check_timeout": check_timeout,
|
||||
}
|
||||
if should_reload is not None:
|
||||
watch_kwargs["should_reload"] = should_reload
|
||||
|
||||
mailbox_connection.watch(**watch_kwargs)
|
||||
|
||||
|
||||
def append_json(
|
||||
|
||||
1887
parsedmarc/cli.py
1887
parsedmarc/cli.py
File diff suppressed because it is too large
Load Diff
@@ -1,3 +1,3 @@
|
||||
__version__ = "9.2.0"
|
||||
__version__ = "9.2.1"
|
||||
|
||||
USER_AGENT = f"parsedmarc/{__version__}"
|
||||
|
||||
@@ -69,3 +69,8 @@ class GelfClient(object):
|
||||
for row in rows:
|
||||
log_context_data.parsedmarc = row
|
||||
self.logger.info("parsedmarc smtptls report")
|
||||
|
||||
def close(self):
|
||||
"""Remove and close the GELF handler, releasing its connection."""
|
||||
self.logger.removeHandler(self.handler)
|
||||
self.handler.close()
|
||||
|
||||
@@ -62,6 +62,10 @@ class KafkaClient(object):
|
||||
except NoBrokersAvailable:
|
||||
raise KafkaError("No Kafka brokers available")
|
||||
|
||||
def close(self):
|
||||
"""Close the Kafka producer, releasing background threads and sockets."""
|
||||
self.producer.close()
|
||||
|
||||
@staticmethod
|
||||
def strip_metadata(report: dict[str, Any]):
|
||||
"""
|
||||
|
||||
@@ -175,10 +175,14 @@ class GmailConnection(MailboxConnection):
|
||||
# Not needed
|
||||
pass
|
||||
|
||||
def watch(self, check_callback, check_timeout):
|
||||
def watch(self, check_callback, check_timeout, should_reload=None):
|
||||
"""Checks the mailbox for new messages every n seconds"""
|
||||
while True:
|
||||
if should_reload and should_reload():
|
||||
return
|
||||
sleep(check_timeout)
|
||||
if should_reload and should_reload():
|
||||
return
|
||||
check_callback(self)
|
||||
|
||||
@lru_cache(maxsize=10)
|
||||
|
||||
@@ -278,9 +278,11 @@ class MSGraphConnection(MailboxConnection):
|
||||
# Not needed
|
||||
pass
|
||||
|
||||
def watch(self, check_callback, check_timeout):
|
||||
def watch(self, check_callback, check_timeout, should_reload=None):
|
||||
"""Checks the mailbox for new messages every n seconds"""
|
||||
while True:
|
||||
if should_reload and should_reload():
|
||||
return
|
||||
sleep(check_timeout)
|
||||
check_callback(self)
|
||||
|
||||
|
||||
@@ -81,7 +81,7 @@ class IMAPConnection(MailboxConnection):
|
||||
def keepalive(self):
|
||||
self._client.noop()
|
||||
|
||||
def watch(self, check_callback, check_timeout):
|
||||
def watch(self, check_callback, check_timeout, should_reload=None):
|
||||
"""
|
||||
Use an IDLE IMAP connection to parse incoming emails,
|
||||
and pass the results to a callback function
|
||||
@@ -111,3 +111,5 @@ class IMAPConnection(MailboxConnection):
|
||||
except Exception as e:
|
||||
logger.warning("IMAP connection error. {0}. Reconnecting...".format(e))
|
||||
sleep(check_timeout)
|
||||
if should_reload and should_reload():
|
||||
return
|
||||
|
||||
@@ -28,5 +28,5 @@ class MailboxConnection(ABC):
|
||||
def keepalive(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def watch(self, check_callback, check_timeout):
|
||||
def watch(self, check_callback, check_timeout, should_reload=None):
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -63,10 +63,12 @@ class MaildirConnection(MailboxConnection):
|
||||
def keepalive(self):
|
||||
return
|
||||
|
||||
def watch(self, check_callback, check_timeout):
|
||||
def watch(self, check_callback, check_timeout, should_reload=None):
|
||||
while True:
|
||||
try:
|
||||
check_callback(self)
|
||||
except Exception as e:
|
||||
logger.warning("Maildir init error. {0}".format(e))
|
||||
if should_reload and should_reload():
|
||||
return
|
||||
sleep(check_timeout)
|
||||
|
||||
@@ -316,9 +316,7 @@ def set_hosts(
|
||||
raise OpenSearchError(
|
||||
"Unable to load AWS credentials for OpenSearch SigV4 authentication"
|
||||
)
|
||||
conn_params["http_auth"] = AWSV4SignerAuth(
|
||||
credentials, aws_region, aws_service
|
||||
)
|
||||
conn_params["http_auth"] = AWSV4SignerAuth(credentials, aws_region, aws_service)
|
||||
conn_params["connection_class"] = RequestsHttpConnection
|
||||
elif normalized_auth_type == "basic":
|
||||
if username and password:
|
||||
|
||||
Binary file not shown.
@@ -57,7 +57,7 @@ class SyslogClient(object):
|
||||
self.logger.setLevel(logging.INFO)
|
||||
|
||||
# Create the appropriate syslog handler based on protocol
|
||||
log_handler = self._create_syslog_handler(
|
||||
self.log_handler = self._create_syslog_handler(
|
||||
server_name,
|
||||
server_port,
|
||||
self.protocol,
|
||||
@@ -69,7 +69,7 @@ class SyslogClient(object):
|
||||
retry_delay,
|
||||
)
|
||||
|
||||
self.logger.addHandler(log_handler)
|
||||
self.logger.addHandler(self.log_handler)
|
||||
|
||||
def _create_syslog_handler(
|
||||
self,
|
||||
@@ -179,3 +179,8 @@ class SyslogClient(object):
|
||||
rows = parsed_smtp_tls_reports_to_csv_rows(smtp_tls_reports)
|
||||
for row in rows:
|
||||
self.logger.info(json.dumps(row))
|
||||
|
||||
def close(self):
|
||||
"""Remove and close the syslog handler, releasing its socket."""
|
||||
self.logger.removeHandler(self.log_handler)
|
||||
self.log_handler.close()
|
||||
|
||||
@@ -205,8 +205,7 @@ def get_reverse_dns(
|
||||
)[0]
|
||||
|
||||
except dns.exception.DNSException as e:
|
||||
logger.warning(f"get_reverse_dns({ip_address}) exception: {e}")
|
||||
pass
|
||||
logger.debug(f"get_reverse_dns({ip_address}) exception: {e}")
|
||||
|
||||
return hostname
|
||||
|
||||
|
||||
@@ -63,3 +63,7 @@ class WebhookClient(object):
|
||||
self.session.post(webhook_url, data=payload, timeout=self.timeout)
|
||||
except Exception as error_:
|
||||
logger.error("Webhook Error: {0}".format(error_.__str__()))
|
||||
|
||||
def close(self):
|
||||
"""Close the underlying HTTP session."""
|
||||
self.session.close()
|
||||
|
||||
761
tests.py
761
tests.py
@@ -4,6 +4,7 @@
|
||||
from __future__ import absolute_import, print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import tempfile
|
||||
import unittest
|
||||
@@ -298,7 +299,9 @@ authentication_type = awssigv4
|
||||
aws_region = eu-west-1
|
||||
aws_service = aoss
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as config_file:
|
||||
with tempfile.NamedTemporaryFile(
|
||||
"w", suffix=".ini", delete=False
|
||||
) as config_file:
|
||||
config_file.write(config)
|
||||
config_path = config_file.name
|
||||
self.addCleanup(lambda: os.path.exists(config_path) and os.remove(config_path))
|
||||
@@ -347,7 +350,9 @@ password = test-password
|
||||
[elasticsearch]
|
||||
hosts = localhost
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as config_file:
|
||||
with tempfile.NamedTemporaryFile(
|
||||
"w", suffix=".ini", delete=False
|
||||
) as config_file:
|
||||
config_file.write(config)
|
||||
config_path = config_file.name
|
||||
self.addCleanup(lambda: os.path.exists(config_path) and os.remove(config_path))
|
||||
@@ -395,7 +400,9 @@ password = test-password
|
||||
[elasticsearch]
|
||||
hosts = localhost
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as config_file:
|
||||
with tempfile.NamedTemporaryFile(
|
||||
"w", suffix=".ini", delete=False
|
||||
) as config_file:
|
||||
config_file.write(config)
|
||||
config_path = config_file.name
|
||||
self.addCleanup(lambda: os.path.exists(config_path) and os.remove(config_path))
|
||||
@@ -435,8 +442,8 @@ hosts = localhost
|
||||
mock_save_aggregate.side_effect = parsedmarc.elastic.ElasticsearchError(
|
||||
"aggregate sink failed"
|
||||
)
|
||||
mock_save_forensic_opensearch.side_effect = parsedmarc.cli.opensearch.OpenSearchError(
|
||||
"forensic sink failed"
|
||||
mock_save_forensic_opensearch.side_effect = (
|
||||
parsedmarc.cli.opensearch.OpenSearchError("forensic sink failed")
|
||||
)
|
||||
|
||||
config = """[general]
|
||||
@@ -456,7 +463,9 @@ hosts = localhost
|
||||
[opensearch]
|
||||
hosts = localhost
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as config_file:
|
||||
with tempfile.NamedTemporaryFile(
|
||||
"w", suffix=".ini", delete=False
|
||||
) as config_file:
|
||||
config_file.write(config)
|
||||
config_path = config_file.name
|
||||
self.addCleanup(lambda: os.path.exists(config_path) and os.remove(config_path))
|
||||
@@ -555,9 +564,7 @@ class TestGmailConnection(unittest.TestCase):
|
||||
"from_authorized_user_file",
|
||||
return_value=creds,
|
||||
):
|
||||
returned = _get_creds(
|
||||
token_path, "credentials.json", ["scope"], 8080
|
||||
)
|
||||
returned = _get_creds(token_path, "credentials.json", ["scope"], 8080)
|
||||
finally:
|
||||
os.remove(token_path)
|
||||
self.assertEqual(returned, creds)
|
||||
@@ -611,9 +618,7 @@ class TestGmailConnection(unittest.TestCase):
|
||||
"from_authorized_user_file",
|
||||
return_value=expired_creds,
|
||||
):
|
||||
returned = _get_creds(
|
||||
token_path, "credentials.json", ["scope"], 8080
|
||||
)
|
||||
returned = _get_creds(token_path, "credentials.json", ["scope"], 8080)
|
||||
finally:
|
||||
os.remove(token_path)
|
||||
|
||||
@@ -671,7 +676,9 @@ class TestGraphConnection(unittest.TestCase):
|
||||
with patch.object(graph_module, "sleep") as mocked_sleep:
|
||||
messages = connection._get_all_messages("/url", batch_size=0, since=None)
|
||||
self.assertEqual([msg["id"] for msg in messages], ["1"])
|
||||
mocked_sleep.assert_called_once_with(graph_module.GRAPH_REQUEST_RETRY_DELAY_SECONDS)
|
||||
mocked_sleep.assert_called_once_with(
|
||||
graph_module.GRAPH_REQUEST_RETRY_DELAY_SECONDS
|
||||
)
|
||||
|
||||
def testGetAllMessagesRaisesAfterRetryExhaustion(self):
|
||||
connection = MSGraphConnection.__new__(MSGraphConnection)
|
||||
@@ -715,7 +722,9 @@ class TestGraphConnection(unittest.TestCase):
|
||||
def testFetchMessagesPassesSinceAndBatchSize(self):
|
||||
connection = MSGraphConnection.__new__(MSGraphConnection)
|
||||
connection.mailbox_name = "mailbox@example.com"
|
||||
connection._find_folder_id_from_folder_path = MagicMock(return_value="folder-id")
|
||||
connection._find_folder_id_from_folder_path = MagicMock(
|
||||
return_value="folder-id"
|
||||
)
|
||||
connection._get_all_messages = MagicMock(return_value=[{"id": "1"}])
|
||||
self.assertEqual(
|
||||
connection.fetch_messages("Inbox", since="2026-03-01", batch_size=5), ["1"]
|
||||
@@ -776,7 +785,9 @@ class TestGraphConnection(unittest.TestCase):
|
||||
|
||||
def testGenerateCredentialDeviceCode(self):
|
||||
fake_credential = object()
|
||||
with patch.object(graph_module, "_get_cache_args", return_value={"cached": True}):
|
||||
with patch.object(
|
||||
graph_module, "_get_cache_args", return_value={"cached": True}
|
||||
):
|
||||
with patch.object(
|
||||
graph_module,
|
||||
"DeviceCodeCredential",
|
||||
@@ -916,14 +927,18 @@ class TestGraphConnection(unittest.TestCase):
|
||||
fake_credential.authenticate.assert_called_once_with(scopes=["Mail.ReadWrite"])
|
||||
cache_auth.assert_called_once()
|
||||
graph_client.assert_called_once()
|
||||
self.assertEqual(graph_client.call_args.kwargs.get("scopes"), ["Mail.ReadWrite"])
|
||||
self.assertEqual(
|
||||
graph_client.call_args.kwargs.get("scopes"), ["Mail.ReadWrite"]
|
||||
)
|
||||
|
||||
def testInitCertificateAuthSkipsInteractiveAuthenticate(self):
|
||||
class DummyCertificateCredential:
|
||||
pass
|
||||
|
||||
fake_credential = DummyCertificateCredential()
|
||||
with patch.object(graph_module, "CertificateCredential", DummyCertificateCredential):
|
||||
with patch.object(
|
||||
graph_module, "CertificateCredential", DummyCertificateCredential
|
||||
):
|
||||
with patch.object(
|
||||
graph_module, "_generate_credential", return_value=fake_credential
|
||||
):
|
||||
@@ -1023,8 +1038,12 @@ class TestImapConnection(unittest.TestCase):
|
||||
with self.assertRaises(_BreakLoop):
|
||||
connection.watch(callback, check_timeout=1)
|
||||
callback.assert_called_once_with(connection)
|
||||
|
||||
|
||||
class TestGmailAuthModes(unittest.TestCase):
|
||||
@patch("parsedmarc.mail.gmail.service_account.Credentials.from_service_account_file")
|
||||
@patch(
|
||||
"parsedmarc.mail.gmail.service_account.Credentials.from_service_account_file"
|
||||
)
|
||||
def testGetCredsServiceAccountWithoutSubject(self, mock_from_service_account_file):
|
||||
service_creds = MagicMock()
|
||||
service_creds.with_subject.return_value = MagicMock()
|
||||
@@ -1046,7 +1065,9 @@ class TestGmailAuthModes(unittest.TestCase):
|
||||
)
|
||||
service_creds.with_subject.assert_not_called()
|
||||
|
||||
@patch("parsedmarc.mail.gmail.service_account.Credentials.from_service_account_file")
|
||||
@patch(
|
||||
"parsedmarc.mail.gmail.service_account.Credentials.from_service_account_file"
|
||||
)
|
||||
def testGetCredsServiceAccountWithSubject(self, mock_from_service_account_file):
|
||||
base_creds = MagicMock()
|
||||
delegated_creds = MagicMock()
|
||||
@@ -1252,11 +1273,12 @@ class TestImapFallbacks(unittest.TestCase):
|
||||
connection.move_message(99, "Archive")
|
||||
delete_mock.assert_not_called()
|
||||
|
||||
|
||||
class TestMailboxWatchSince(unittest.TestCase):
|
||||
def testWatchInboxPassesSinceToMailboxFetch(self):
|
||||
mailbox_connection = SimpleNamespace()
|
||||
|
||||
def fake_watch(check_callback, check_timeout):
|
||||
def fake_watch(check_callback, check_timeout, should_reload=None):
|
||||
check_callback(mailbox_connection)
|
||||
raise _BreakLoop()
|
||||
|
||||
@@ -1353,6 +1375,7 @@ class TestMailboxPerformance(unittest.TestCase):
|
||||
create_folders=False,
|
||||
)
|
||||
self.assertEqual(len(connection.fetch_calls), 1)
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
def testCliPassesMsGraphCertificateAuthSettings(
|
||||
@@ -1423,12 +1446,85 @@ mailbox = shared@example.com
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(system_exit.exception.code, -1)
|
||||
mock_logger.critical.assert_called_once_with(
|
||||
mock_logger.error.assert_called_once_with(
|
||||
"certificate_path setting missing from the msgraph config section"
|
||||
)
|
||||
mock_graph_connection.assert_not_called()
|
||||
mock_get_mailbox_reports.assert_not_called()
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
def testCliUsesMsGraphUserAsMailboxForUsernamePasswordAuth(
|
||||
self, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
mock_graph_connection.return_value = object()
|
||||
mock_get_mailbox_reports.return_value = {
|
||||
"aggregate_reports": [],
|
||||
"forensic_reports": [],
|
||||
"smtp_tls_reports": [],
|
||||
}
|
||||
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = UsernamePassword
|
||||
client_id = client-id
|
||||
client_secret = client-secret
|
||||
user = owner@example.com
|
||||
password = test-password
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(
|
||||
mock_graph_connection.call_args.kwargs.get("mailbox"),
|
||||
"owner@example.com",
|
||||
)
|
||||
self.assertEqual(
|
||||
mock_graph_connection.call_args.kwargs.get("username"),
|
||||
"owner@example.com",
|
||||
)
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
@patch("parsedmarc.cli.logger")
|
||||
def testCliRequiresMsGraphPasswordForUsernamePasswordAuth(
|
||||
self, mock_logger, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = UsernamePassword
|
||||
client_id = client-id
|
||||
client_secret = client-secret
|
||||
user = owner@example.com
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
with self.assertRaises(SystemExit) as system_exit:
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(system_exit.exception.code, -1)
|
||||
mock_logger.error.assert_called_once_with(
|
||||
"password setting missing from the msgraph config section"
|
||||
)
|
||||
mock_graph_connection.assert_not_called()
|
||||
mock_get_mailbox_reports.assert_not_called()
|
||||
|
||||
|
||||
class _FakeGraphClient:
|
||||
def get(self, url, params=None):
|
||||
if "/mailFolders/inbox?$select=id,displayName" in url:
|
||||
@@ -1467,15 +1563,14 @@ class TestMSGraphFolderFallback(unittest.TestCase):
|
||||
connection._request_with_retries = MagicMock(
|
||||
side_effect=lambda method_name, *args, **kwargs: getattr(
|
||||
connection._client, method_name
|
||||
)(
|
||||
*args, **kwargs
|
||||
)
|
||||
)(*args, **kwargs)
|
||||
)
|
||||
|
||||
folder_id = connection._find_folder_id_with_parent("Inbox", None)
|
||||
self.assertEqual(folder_id, "inbox-id")
|
||||
connection._request_with_retries.assert_any_call(
|
||||
"get", "/users/shared@example.com/mailFolders?$filter=displayName eq 'Inbox'"
|
||||
"get",
|
||||
"/users/shared@example.com/mailFolders?$filter=displayName eq 'Inbox'",
|
||||
)
|
||||
connection._request_with_retries.assert_any_call(
|
||||
"get", "/users/shared@example.com/mailFolders/inbox?$select=id,displayName"
|
||||
@@ -1488,9 +1583,7 @@ class TestMSGraphFolderFallback(unittest.TestCase):
|
||||
connection._request_with_retries = MagicMock(
|
||||
side_effect=lambda method_name, *args, **kwargs: getattr(
|
||||
connection._client, method_name
|
||||
)(
|
||||
*args, **kwargs
|
||||
)
|
||||
)(*args, **kwargs)
|
||||
)
|
||||
|
||||
with self.assertRaises(RuntimeWarning):
|
||||
@@ -1509,5 +1602,617 @@ class TestMSGraphFolderFallback(unittest.TestCase):
|
||||
connection._get_well_known_folder_id.assert_not_called()
|
||||
|
||||
|
||||
class TestMSGraphCliValidation(unittest.TestCase):
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
def testCliPassesMsGraphClientSecretAuthSettings(
|
||||
self, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
mock_graph_connection.return_value = object()
|
||||
mock_get_mailbox_reports.return_value = {
|
||||
"aggregate_reports": [],
|
||||
"forensic_reports": [],
|
||||
"smtp_tls_reports": [],
|
||||
}
|
||||
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = ClientSecret
|
||||
client_id = client-id
|
||||
client_secret = client-secret
|
||||
tenant_id = tenant-id
|
||||
mailbox = shared@example.com
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(
|
||||
mock_graph_connection.call_args.kwargs.get("auth_method"), "ClientSecret"
|
||||
)
|
||||
self.assertEqual(
|
||||
mock_graph_connection.call_args.kwargs.get("client_secret"),
|
||||
"client-secret",
|
||||
)
|
||||
self.assertEqual(
|
||||
mock_graph_connection.call_args.kwargs.get("tenant_id"), "tenant-id"
|
||||
)
|
||||
self.assertEqual(
|
||||
mock_graph_connection.call_args.kwargs.get("mailbox"),
|
||||
"shared@example.com",
|
||||
)
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
@patch("parsedmarc.cli.logger")
|
||||
def testCliRequiresMsGraphClientSecretForClientSecretAuth(
|
||||
self, mock_logger, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = ClientSecret
|
||||
client_id = client-id
|
||||
tenant_id = tenant-id
|
||||
mailbox = shared@example.com
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
with self.assertRaises(SystemExit) as system_exit:
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(system_exit.exception.code, -1)
|
||||
mock_logger.error.assert_called_once_with(
|
||||
"client_secret setting missing from the msgraph config section"
|
||||
)
|
||||
mock_graph_connection.assert_not_called()
|
||||
mock_get_mailbox_reports.assert_not_called()
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
@patch("parsedmarc.cli.logger")
|
||||
def testCliRequiresMsGraphTenantIdForClientSecretAuth(
|
||||
self, mock_logger, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = ClientSecret
|
||||
client_id = client-id
|
||||
client_secret = client-secret
|
||||
mailbox = shared@example.com
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
with self.assertRaises(SystemExit) as system_exit:
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(system_exit.exception.code, -1)
|
||||
mock_logger.error.assert_called_once_with(
|
||||
"tenant_id setting missing from the msgraph config section"
|
||||
)
|
||||
mock_graph_connection.assert_not_called()
|
||||
mock_get_mailbox_reports.assert_not_called()
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
@patch("parsedmarc.cli.logger")
|
||||
def testCliRequiresMsGraphMailboxForClientSecretAuth(
|
||||
self, mock_logger, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = ClientSecret
|
||||
client_id = client-id
|
||||
client_secret = client-secret
|
||||
tenant_id = tenant-id
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
with self.assertRaises(SystemExit) as system_exit:
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(system_exit.exception.code, -1)
|
||||
mock_logger.error.assert_called_once_with(
|
||||
"mailbox setting missing from the msgraph config section"
|
||||
)
|
||||
mock_graph_connection.assert_not_called()
|
||||
mock_get_mailbox_reports.assert_not_called()
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
def testCliAllowsMsGraphDeviceCodeWithoutUser(
|
||||
self, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
mock_graph_connection.return_value = object()
|
||||
mock_get_mailbox_reports.return_value = {
|
||||
"aggregate_reports": [],
|
||||
"forensic_reports": [],
|
||||
"smtp_tls_reports": [],
|
||||
}
|
||||
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = DeviceCode
|
||||
client_id = client-id
|
||||
tenant_id = tenant-id
|
||||
mailbox = shared@example.com
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(
|
||||
mock_graph_connection.call_args.kwargs.get("auth_method"), "DeviceCode"
|
||||
)
|
||||
self.assertEqual(
|
||||
mock_graph_connection.call_args.kwargs.get("mailbox"),
|
||||
"shared@example.com",
|
||||
)
|
||||
self.assertIsNone(mock_graph_connection.call_args.kwargs.get("username"))
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
@patch("parsedmarc.cli.logger")
|
||||
def testCliRequiresMsGraphTenantIdForDeviceCodeAuth(
|
||||
self, mock_logger, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = DeviceCode
|
||||
client_id = client-id
|
||||
mailbox = shared@example.com
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
with self.assertRaises(SystemExit) as system_exit:
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(system_exit.exception.code, -1)
|
||||
mock_logger.error.assert_called_once_with(
|
||||
"tenant_id setting missing from the msgraph config section"
|
||||
)
|
||||
mock_graph_connection.assert_not_called()
|
||||
mock_get_mailbox_reports.assert_not_called()
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
@patch("parsedmarc.cli.logger")
|
||||
def testCliRequiresMsGraphMailboxForDeviceCodeAuth(
|
||||
self, mock_logger, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = DeviceCode
|
||||
client_id = client-id
|
||||
tenant_id = tenant-id
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
with self.assertRaises(SystemExit) as system_exit:
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(system_exit.exception.code, -1)
|
||||
mock_logger.error.assert_called_once_with(
|
||||
"mailbox setting missing from the msgraph config section"
|
||||
)
|
||||
mock_graph_connection.assert_not_called()
|
||||
mock_get_mailbox_reports.assert_not_called()
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
@patch("parsedmarc.cli.logger")
|
||||
def testCliRequiresMsGraphTenantIdForCertificateAuth(
|
||||
self, mock_logger, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = Certificate
|
||||
client_id = client-id
|
||||
mailbox = shared@example.com
|
||||
certificate_path = /tmp/msgraph-cert.pem
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
with self.assertRaises(SystemExit) as system_exit:
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(system_exit.exception.code, -1)
|
||||
mock_logger.error.assert_called_once_with(
|
||||
"tenant_id setting missing from the msgraph config section"
|
||||
)
|
||||
mock_graph_connection.assert_not_called()
|
||||
mock_get_mailbox_reports.assert_not_called()
|
||||
|
||||
@patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
|
||||
@patch("parsedmarc.cli.MSGraphConnection")
|
||||
@patch("parsedmarc.cli.logger")
|
||||
def testCliRequiresMsGraphMailboxForCertificateAuth(
|
||||
self, mock_logger, mock_graph_connection, mock_get_mailbox_reports
|
||||
):
|
||||
config_text = """[general]
|
||||
silent = true
|
||||
|
||||
[msgraph]
|
||||
auth_method = Certificate
|
||||
client_id = client-id
|
||||
tenant_id = tenant-id
|
||||
certificate_path = /tmp/msgraph-cert.pem
|
||||
"""
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
|
||||
cfg.write(config_text)
|
||||
cfg_path = cfg.name
|
||||
self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))
|
||||
|
||||
with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
|
||||
with self.assertRaises(SystemExit) as system_exit:
|
||||
parsedmarc.cli._main()
|
||||
|
||||
self.assertEqual(system_exit.exception.code, -1)
|
||||
mock_logger.error.assert_called_once_with(
|
||||
"mailbox setting missing from the msgraph config section"
|
||||
)
|
||||
mock_graph_connection.assert_not_called()
|
||||
mock_get_mailbox_reports.assert_not_called()
|
||||
|
||||
|
||||
class TestSighupReload(unittest.TestCase):
    """Tests for SIGHUP-driven configuration reload in watch mode."""

    # Minimal watch-mode config: IMAP credentials plus [mailbox] watch = true.
    # Written to a temp .ini file by each test.
    _BASE_CONFIG = """[general]
silent = true

[imap]
host = imap.example.com
user = user
password = pass

[mailbox]
watch = true
"""

    @unittest.skipUnless(hasattr(signal, "SIGHUP"), "SIGHUP not available on this platform")
    @patch("parsedmarc.cli._init_output_clients")
    @patch("parsedmarc.cli._parse_config_file")
    @patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
    @patch("parsedmarc.cli.watch_inbox")
    @patch("parsedmarc.cli.IMAPConnection")
    def testSighupTriggersReloadAndWatchRestarts(
        self,
        mock_imap,
        mock_watch,
        mock_get_reports,
        mock_parse_config,
        mock_init_clients,
    ):
        """SIGHUP causes watch to return, config is re-parsed, and watch restarts."""
        import signal as signal_module

        mock_imap.return_value = object()
        mock_get_reports.return_value = {
            "aggregate_reports": [],
            "forensic_reports": [],
            "smtp_tls_reports": [],
        }

        # Stand-in for _parse_config_file: mutates opts in place the way the
        # real parser does, supplying the minimum IMAP/watch settings.
        def parse_side_effect(config_file, opts):
            opts.imap_host = "imap.example.com"
            opts.imap_user = "user"
            opts.imap_password = "pass"
            opts.mailbox_watch = True
            return None

        mock_parse_config.side_effect = parse_side_effect
        mock_init_clients.return_value = {}

        # Mutable single-element list so the nested closure can count calls.
        call_count = [0]

        def watch_side_effect(*args, **kwargs):
            call_count[0] += 1
            if call_count[0] == 1:
                # Simulate SIGHUP arriving while watch is running
                if hasattr(signal_module, "SIGHUP"):
                    import os

                    os.kill(os.getpid(), signal_module.SIGHUP)
                return  # Normal return — reload loop will continue
            else:
                raise FileExistsError("stop-watch-loop")

        mock_watch.side_effect = watch_side_effect

        # delete=False so the CLI can reopen the file by path after the
        # context manager closes it; cleanup happens via addCleanup below.
        with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
            cfg.write(self._BASE_CONFIG)
            cfg_path = cfg.name
        self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))

        with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
            with self.assertRaises(SystemExit) as cm:
                parsedmarc.cli._main()

        # Exited with code 1 (from FileExistsError handler)
        self.assertEqual(cm.exception.code, 1)
        # watch_inbox was called twice: initial run + after reload
        self.assertEqual(mock_watch.call_count, 2)
        # _parse_config_file called for initial load + reload
        self.assertGreaterEqual(mock_parse_config.call_count, 2)

    @unittest.skipUnless(hasattr(signal, "SIGHUP"), "SIGHUP not available on this platform")
    @patch("parsedmarc.cli._init_output_clients")
    @patch("parsedmarc.cli._parse_config_file")
    @patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
    @patch("parsedmarc.cli.watch_inbox")
    @patch("parsedmarc.cli.IMAPConnection")
    def testInvalidConfigOnReloadKeepsPreviousState(
        self,
        mock_imap,
        mock_watch,
        mock_get_reports,
        mock_parse_config,
        mock_init_clients,
    ):
        """A failing reload leaves opts and clients unchanged."""
        import signal as signal_module

        mock_imap.return_value = object()
        mock_get_reports.return_value = {
            "aggregate_reports": [],
            "forensic_reports": [],
            "smtp_tls_reports": [],
        }

        # Initial parse sets required opts; reload parse raises
        initial_map = {"prefix_": ["example.com"]}
        call_count = [0]

        def parse_side_effect(config_file, opts):
            call_count[0] += 1
            opts.imap_host = "imap.example.com"
            opts.imap_user = "user"
            opts.imap_password = "pass"
            opts.mailbox_watch = True
            if call_count[0] == 1:
                return initial_map
            raise RuntimeError("bad config")

        mock_parse_config.side_effect = parse_side_effect

        initial_clients = {"s3_client": MagicMock()}
        mock_init_clients.return_value = initial_clients

        watch_calls = [0]

        def watch_side_effect(*args, **kwargs):
            watch_calls[0] += 1
            if watch_calls[0] == 1:
                # First watch call: deliver SIGHUP to ourselves, then return
                # normally so the reload loop attempts a (failing) re-parse.
                if hasattr(signal_module, "SIGHUP"):
                    import os

                    os.kill(os.getpid(), signal_module.SIGHUP)
                return
            else:
                raise FileExistsError("stop")

        mock_watch.side_effect = watch_side_effect

        with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
            cfg.write(self._BASE_CONFIG)
            cfg_path = cfg.name
        self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))

        with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
            with self.assertRaises(SystemExit) as cm:
                parsedmarc.cli._main()

        self.assertEqual(cm.exception.code, 1)
        # watch was still called twice (reload loop continued after failed reload)
        self.assertEqual(mock_watch.call_count, 2)
        # The failed reload must not have closed the original clients
        initial_clients["s3_client"].close.assert_not_called()

    @unittest.skipUnless(hasattr(signal, "SIGHUP"), "SIGHUP not available on this platform")
    @patch("parsedmarc.cli._init_output_clients")
    @patch("parsedmarc.cli._parse_config_file")
    @patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
    @patch("parsedmarc.cli.watch_inbox")
    @patch("parsedmarc.cli.IMAPConnection")
    def testReloadClosesOldClients(
        self,
        mock_imap,
        mock_watch,
        mock_get_reports,
        mock_parse_config,
        mock_init_clients,
    ):
        """Successful reload closes the old output clients before replacing them."""
        import signal as signal_module

        mock_imap.return_value = object()
        mock_get_reports.return_value = {
            "aggregate_reports": [],
            "forensic_reports": [],
            "smtp_tls_reports": [],
        }

        def parse_side_effect(config_file, opts):
            opts.imap_host = "imap.example.com"
            opts.imap_user = "user"
            opts.imap_password = "pass"
            opts.mailbox_watch = True
            return None

        mock_parse_config.side_effect = parse_side_effect

        # First _init_output_clients call returns old_client; the reload's
        # call returns new_client, so we can verify old_client was closed.
        old_client = MagicMock()
        new_client = MagicMock()
        init_call = [0]

        def init_side_effect(opts):
            init_call[0] += 1
            if init_call[0] == 1:
                return {"kafka_client": old_client}
            return {"kafka_client": new_client}

        mock_init_clients.side_effect = init_side_effect

        watch_calls = [0]

        def watch_side_effect(*args, **kwargs):
            watch_calls[0] += 1
            if watch_calls[0] == 1:
                if hasattr(signal_module, "SIGHUP"):
                    import os

                    os.kill(os.getpid(), signal_module.SIGHUP)
                return
            else:
                raise FileExistsError("stop")

        mock_watch.side_effect = watch_side_effect

        with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
            cfg.write(self._BASE_CONFIG)
            cfg_path = cfg.name
        self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))

        with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
            with self.assertRaises(SystemExit):
                parsedmarc.cli._main()

        # Old client must have been closed when reload succeeded
        old_client.close.assert_called_once()

    @unittest.skipUnless(hasattr(signal, "SIGHUP"), "SIGHUP not available on this platform")
    @patch("parsedmarc.cli._init_output_clients")
    @patch("parsedmarc.cli.get_dmarc_reports_from_mailbox")
    @patch("parsedmarc.cli.watch_inbox")
    @patch("parsedmarc.cli.IMAPConnection")
    def testRemovedConfigSectionTakesEffectOnReload(
        self,
        mock_imap,
        mock_watch,
        mock_get_reports,
        mock_init_clients,
    ):
        """Removing a config section on reload resets that option to its default."""
        # NOTE: unlike the other tests here, _parse_config_file is NOT patched —
        # the real parser runs so the on-disk config rewrite takes effect.
        import signal as signal_module

        mock_imap.return_value = object()
        mock_get_reports.return_value = {
            "aggregate_reports": [],
            "forensic_reports": [],
            "smtp_tls_reports": [],
        }
        mock_init_clients.return_value = {}

        # First config sets kafka_hosts (with required topics); second removes it.
        config_v1 = (
            self._BASE_CONFIG
            + "\n[kafka]\nhosts = kafka.example.com:9092\n"
            + "aggregate_topic = dmarc_agg\n"
            + "forensic_topic = dmarc_forensic\n"
            + "smtp_tls_topic = smtp_tls\n"
        )
        config_v2 = self._BASE_CONFIG  # no [kafka] section

        with tempfile.NamedTemporaryFile("w", suffix=".ini", delete=False) as cfg:
            cfg.write(config_v1)
            cfg_path = cfg.name
        self.addCleanup(lambda: os.path.exists(cfg_path) and os.remove(cfg_path))

        watch_calls = [0]

        def watch_side_effect(*args, **kwargs):
            watch_calls[0] += 1
            if watch_calls[0] == 1:
                # Rewrite config to remove kafka before triggering reload
                with open(cfg_path, "w") as f:
                    f.write(config_v2)
                if hasattr(signal_module, "SIGHUP"):
                    import os

                    os.kill(os.getpid(), signal_module.SIGHUP)
                return
            else:
                raise FileExistsError("stop")

        mock_watch.side_effect = watch_side_effect

        # Capture opts used on each _init_output_clients call
        init_opts_captures = []

        def init_side_effect(opts):
            from argparse import Namespace as NS

            # Snapshot opts by value; the CLI mutates the same Namespace later.
            init_opts_captures.append(NS(**vars(opts)))
            return {}

        mock_init_clients.side_effect = init_side_effect

        with patch.object(sys, "argv", ["parsedmarc", "-c", cfg_path]):
            with self.assertRaises(SystemExit):
                parsedmarc.cli._main()

        # First init: kafka_hosts should be set from v1 config
        self.assertIsNotNone(init_opts_captures[0].kafka_hosts)
        # Second init (after reload with v2 config): kafka_hosts should be None
        self.assertIsNone(init_opts_captures[1].kafka_hosts)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main(verbosity=2)
|
||||
|
||||
Reference in New Issue
Block a user