mirror of
https://github.com/domainaware/parsedmarc.git
synced 2026-03-11 01:01:26 +00:00
Compare commits
42 Commits
copilot/ad
...
9.2.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
691b0fcd41 | ||
|
|
b9343a295f | ||
|
|
b51a62463f | ||
|
|
66ba5b0e5e | ||
|
|
7929919223 | ||
|
|
faa68333a9 | ||
|
|
d34a33e980 | ||
|
|
9040a38842 | ||
|
|
ea0e3b11c1 | ||
|
|
199b782191 | ||
|
|
25f3c3e1d0 | ||
|
|
a14ff66f5a | ||
|
|
fb738bf9c4 | ||
|
|
0e811fe0ff | ||
|
|
56eb565ad2 | ||
|
|
2c3abb3e8c | ||
|
|
326e630f50 | ||
|
|
cdc30e6780 | ||
|
|
f2febf21d3 | ||
|
|
79f47121a4 | ||
|
|
6e6c90e19b | ||
|
|
c4d7455839 | ||
|
|
95e6fb85a1 | ||
|
|
298d5b6e6e | ||
|
|
a3c5bb906b | ||
|
|
d49ce6a13f | ||
|
|
adb0d31382 | ||
|
|
ae5d20ecf5 | ||
|
|
e98fdfa96b | ||
|
|
9551c8b467 | ||
|
|
d987943c22 | ||
|
|
3d8a99b5d3 | ||
|
|
5aaaedf463 | ||
|
|
2e3ee25ec9 | ||
|
|
33eb2aaf62 | ||
|
|
1387fb4899 | ||
|
|
4d97bd25aa | ||
|
|
17a612df0c | ||
|
|
221bc332ef | ||
|
|
a2a75f7a81 | ||
|
|
50fcb51577 | ||
|
|
dd9ef90773 |
72
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
72
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,72 @@
|
||||
name: Bug report
|
||||
description: Report a reproducible parsedmarc bug
|
||||
title: "[Bug]: "
|
||||
labels:
|
||||
- bug
|
||||
body:
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: parsedmarc version
|
||||
description: Include the parsedmarc version or commit if known.
|
||||
placeholder: 9.x.x
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: input_backend
|
||||
attributes:
|
||||
label: Input backend
|
||||
description: Which input path or mailbox backend is involved?
|
||||
options:
|
||||
- IMAP
|
||||
- MS Graph
|
||||
- Gmail API
|
||||
- Maildir
|
||||
- mbox
|
||||
- Local file / direct parse
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: environment
|
||||
attributes:
|
||||
label: Environment
|
||||
description: Runtime, container image, OS, Python version, or deployment details.
|
||||
placeholder: Docker on Debian, Python 3.12, parsedmarc installed from PyPI
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Sanitized config
|
||||
description: Include the relevant config fragment with secrets removed.
|
||||
render: ini
|
||||
- type: textarea
|
||||
id: steps
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
description: Describe the smallest reproducible sequence you can.
|
||||
placeholder: |
|
||||
1. Configure parsedmarc with ...
|
||||
2. Run ...
|
||||
3. Observe ...
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected_actual
|
||||
attributes:
|
||||
label: Expected vs actual behavior
|
||||
description: What did you expect, and what happened instead?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Logs or traceback
|
||||
description: Paste sanitized logs or a traceback if available.
|
||||
render: text
|
||||
- type: textarea
|
||||
id: samples
|
||||
attributes:
|
||||
label: Sample report availability
|
||||
description: If you can share a sanitized sample report or message, note that here.
|
||||
5
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
5
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: Security issue
|
||||
url: https://github.com/domainaware/parsedmarc/security/policy
|
||||
about: Please use the security policy and avoid filing public issues for undisclosed vulnerabilities.
|
||||
30
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
30
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
name: Feature request
|
||||
description: Suggest a new feature or behavior change
|
||||
title: "[Feature]: "
|
||||
labels:
|
||||
- enhancement
|
||||
body:
|
||||
- type: textarea
|
||||
id: problem
|
||||
attributes:
|
||||
label: Problem statement
|
||||
description: What workflow or limitation are you trying to solve?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: proposal
|
||||
attributes:
|
||||
label: Proposed behavior
|
||||
description: Describe the feature or behavior you want.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: alternatives
|
||||
attributes:
|
||||
label: Alternatives considered
|
||||
description: Describe workarounds or alternative approaches you considered.
|
||||
- type: textarea
|
||||
id: impact
|
||||
attributes:
|
||||
label: Compatibility or operational impact
|
||||
description: Note config, output, performance, or deployment implications if relevant.
|
||||
24
.github/pull_request_template.md
vendored
Normal file
24
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
## Summary
|
||||
|
||||
-
|
||||
|
||||
## Why
|
||||
|
||||
-
|
||||
|
||||
## Testing
|
||||
|
||||
-
|
||||
|
||||
## Backward Compatibility / Risk
|
||||
|
||||
-
|
||||
|
||||
## Related Issue
|
||||
|
||||
- Closes #
|
||||
|
||||
## Checklist
|
||||
|
||||
- [ ] Tests added or updated if behavior changed
|
||||
- [ ] Docs updated if config or user-facing behavior changed
|
||||
39
.github/workflows/python-tests.yml
vendored
39
.github/workflows/python-tests.yml
vendored
@@ -10,7 +10,32 @@ on:
|
||||
branches: [ master ]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
lint-docs-build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: "3.13"
|
||||
- name: Install Python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install .[build]
|
||||
- name: Check code style
|
||||
run: |
|
||||
ruff check .
|
||||
- name: Test building documentation
|
||||
run: |
|
||||
cd docs
|
||||
make html
|
||||
- name: Test building packages
|
||||
run: |
|
||||
hatch build
|
||||
|
||||
test:
|
||||
needs: lint-docs-build
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
services:
|
||||
@@ -30,7 +55,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
|
||||
python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
@@ -46,13 +71,6 @@ jobs:
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install .[build]
|
||||
- name: Test building documentation
|
||||
run: |
|
||||
cd docs
|
||||
make html
|
||||
- name: Check code style
|
||||
run: |
|
||||
ruff check .
|
||||
- name: Run unit tests
|
||||
run: |
|
||||
pytest --cov --cov-report=xml tests.py
|
||||
@@ -61,9 +79,6 @@ jobs:
|
||||
pip install -e .
|
||||
parsedmarc --debug -c ci.ini samples/aggregate/*
|
||||
parsedmarc --debug -c ci.ini samples/forensic/*
|
||||
- name: Test building packages
|
||||
run: |
|
||||
hatch build
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
|
||||
64
AGENTS.md
Normal file
64
AGENTS.md
Normal file
@@ -0,0 +1,64 @@
|
||||
# AGENTS.md
|
||||
|
||||
This file provides guidance to AI agents when working with code in this repository.
|
||||
|
||||
## Project Overview
|
||||
|
||||
parsedmarc is a Python module and CLI utility for parsing DMARC aggregate (RUA), forensic (RUF), and SMTP TLS reports. It reads reports from IMAP, Microsoft Graph, Gmail API, Maildir, mbox files, or direct file paths, and outputs to JSON/CSV, Elasticsearch, OpenSearch, Splunk, Kafka, S3, Azure Log Analytics, syslog, or webhooks.
|
||||
|
||||
## Common Commands
|
||||
|
||||
```bash
|
||||
# Install with dev/build dependencies
|
||||
pip install .[build]
|
||||
|
||||
# Run all tests with coverage
|
||||
pytest --cov --cov-report=xml tests.py
|
||||
|
||||
# Run a single test
|
||||
pytest tests.py::Test::testAggregateSamples
|
||||
|
||||
# Lint and format
|
||||
ruff check .
|
||||
ruff format .
|
||||
|
||||
# Test CLI with sample reports
|
||||
parsedmarc --debug -c ci.ini samples/aggregate/*
|
||||
parsedmarc --debug -c ci.ini samples/forensic/*
|
||||
|
||||
# Build docs
|
||||
cd docs && make html
|
||||
|
||||
# Build distribution
|
||||
hatch build
|
||||
```
|
||||
|
||||
To skip DNS lookups during testing, set `GITHUB_ACTIONS=true`.
|
||||
|
||||
## Architecture
|
||||
|
||||
**Data flow:** Input sources → CLI (`cli.py:_main`) → Parse (`__init__.py`) → Enrich (DNS/GeoIP via `utils.py`) → Output integrations
|
||||
|
||||
### Key modules
|
||||
|
||||
- `parsedmarc/__init__.py` — Core parsing logic. Main functions: `parse_report_file()`, `parse_report_email()`, `parse_aggregate_report_xml()`, `parse_forensic_report()`, `parse_smtp_tls_report_json()`, `get_dmarc_reports_from_mailbox()`, `watch_inbox()`
|
||||
- `parsedmarc/cli.py` — CLI entry point (`_main`), config file parsing, output orchestration
|
||||
- `parsedmarc/types.py` — TypedDict definitions for all report types (`AggregateReport`, `ForensicReport`, `SMTPTLSReport`, `ParsingResults`)
|
||||
- `parsedmarc/utils.py` — IP/DNS/GeoIP enrichment, base64 decoding, compression handling
|
||||
- `parsedmarc/mail/` — Polymorphic mail connections: `IMAPConnection`, `GmailConnection`, `MSGraphConnection`, `MaildirConnection`
|
||||
- `parsedmarc/{elastic,opensearch,splunk,kafkaclient,loganalytics,syslog,s3,webhook,gelf}.py` — Output integrations
|
||||
|
||||
### Report type system
|
||||
|
||||
`ReportType = Literal["aggregate", "forensic", "smtp_tls"]`. Exception hierarchy: `ParserError` → `InvalidDMARCReport` → `InvalidAggregateReport`/`InvalidForensicReport`, and `InvalidSMTPTLSReport`.
|
||||
|
||||
### Caching
|
||||
|
||||
IP address info cached for 4 hours, seen aggregate report IDs cached for 1 hour (via `ExpiringDict`).
|
||||
|
||||
## Code Style
|
||||
|
||||
- Ruff for formatting and linting (configured in `.vscode/settings.json`)
|
||||
- TypedDict for structured data, type hints throughout
|
||||
- Python ≥3.10 required
|
||||
- Tests are in a single `tests.py` file using unittest; sample reports live in `samples/`
|
||||
56
CHANGELOG.md
56
CHANGELOG.md
@@ -1,5 +1,61 @@
|
||||
# Changelog
|
||||
|
||||
## 9.2.1
|
||||
|
||||
### Added
|
||||
|
||||
- Better checking of `msconfig` configuration (PR #695)
|
||||
|
||||
### Changed
|
||||
|
||||
- Updated `dbip-country-lite` database to version `2026-03`
|
||||
- DNS query error logging level from `warning` to `debug`
|
||||
|
||||
## 9.2.0
|
||||
|
||||
### Added
|
||||
|
||||
- OpenSearch AWS SigV4 authentication support (PR #673)
|
||||
- IMAP move/delete compatibility fallbacks (PR #671)
|
||||
- `fail_on_output_error` CLI option for sink failures (PR #672)
|
||||
- Gmail service account auth mode for non-interactive runs (PR #676)
|
||||
- Microsoft Graph certificate authentication support (PRs #692 and #693)
|
||||
- Microsoft Graph well-known folder fallback for root listing failures (PRs #618 and #684 close #609)
|
||||
|
||||
### Fixed
|
||||
|
||||
- Pass mailbox since filter through `watch_inbox` callback (PR #670 closes issue #581)
|
||||
- `parsedmarc.mail.gmail.GmailConnection.delete_message` now properly calls the Gmail API (PR #668)
|
||||
- Avoid extra mailbox fetch in batch and test mode (PR #691 closes #533)
|
||||
|
||||
## 9.1.2
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fix duplicate detection for normalized aggregate reports in Elasticsearch/OpenSearch (PR #666 fixes issue #665)
|
||||
|
||||
## 9.1.1
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fix the use of Elasticsearch and OpenSearch API keys (PR #660 fixes issue #653)
|
||||
|
||||
### Changed
|
||||
|
||||
- Drop support for Python 3.9 (PR #661)
|
||||
|
||||
## 9.1.0
|
||||
|
||||
### Enhancements
|
||||
|
||||
- Add TCP and TLS support for syslog output. (#656)
|
||||
- Skip DNS lookups in GitHub Actions to prevent DNS timeouts during tests. (#657)
|
||||
- Remove microseconds from DMARC aggregate report time ranges before parsing them.
|
||||
|
||||
## 9.0.10
|
||||
|
||||
- Support Python 3.14+
|
||||
|
||||
## 9.0.9
|
||||
|
||||
### Fixes
|
||||
|
||||
78
CONTRIBUTING.md
Normal file
78
CONTRIBUTING.md
Normal file
@@ -0,0 +1,78 @@
|
||||
# Contributing
|
||||
|
||||
Thanks for contributing to parsedmarc.
|
||||
|
||||
## Local setup
|
||||
|
||||
Use a virtual environment for local development.
|
||||
|
||||
```bash
|
||||
python3 -m venv .venv
|
||||
. .venv/bin/activate
|
||||
python -m pip install --upgrade pip
|
||||
pip install .[build]
|
||||
```
|
||||
|
||||
## Before opening a pull request
|
||||
|
||||
Run the checks that match your change:
|
||||
|
||||
```bash
|
||||
ruff check .
|
||||
pytest --cov --cov-report=xml tests.py
|
||||
```
|
||||
|
||||
If you changed documentation:
|
||||
|
||||
```bash
|
||||
cd docs
|
||||
make html
|
||||
```
|
||||
|
||||
If you changed CLI behavior or parsing logic, it is also useful to exercise the
|
||||
sample reports:
|
||||
|
||||
```bash
|
||||
parsedmarc --debug -c ci.ini samples/aggregate/*
|
||||
parsedmarc --debug -c ci.ini samples/forensic/*
|
||||
```
|
||||
|
||||
To skip DNS lookups during tests, set:
|
||||
|
||||
```bash
|
||||
GITHUB_ACTIONS=true
|
||||
```
|
||||
|
||||
## Pull request guidelines
|
||||
|
||||
- Keep pull requests small and focused. Separate bug fixes, docs updates, and
|
||||
repo-maintenance changes where practical.
|
||||
- Add or update tests when behavior changes.
|
||||
- Update docs when configuration or user-facing behavior changes.
|
||||
- Include a short summary, the reason for the change, and the testing you ran.
|
||||
- Link the related issue when there is one.
|
||||
|
||||
## Branch maintenance
|
||||
|
||||
Upstream `master` may move quickly. Before asking for review or after another PR
|
||||
lands, rebase your branch onto the current upstream branch and force-push with
|
||||
lease if needed:
|
||||
|
||||
```bash
|
||||
git fetch upstream
|
||||
git rebase upstream/master
|
||||
git push --force-with-lease
|
||||
```
|
||||
|
||||
## CI and coverage
|
||||
|
||||
GitHub Actions is the source of truth for linting, docs, and test status.
|
||||
|
||||
Codecov patch coverage is usually the most relevant signal for small PRs. Project
|
||||
coverage can be noisier when the base comparison is stale, so interpret it in
|
||||
the context of the actual diff.
|
||||
|
||||
## Questions
|
||||
|
||||
Use GitHub issues for bugs and feature requests. If you are not sure whether a
|
||||
change is wanted, opening an issue first is usually the safest path.
|
||||
@@ -44,7 +44,6 @@ Thanks to all
|
||||
- Optionally send the results to Elasticsearch, Opensearch, and/or Splunk, for
|
||||
use with premade dashboards
|
||||
- Optionally send reports to Apache Kafka
|
||||
- Optionally send reports to Google SecOps (Chronicle) in UDM format via API or stdout
|
||||
|
||||
## Python Compatibility
|
||||
|
||||
@@ -57,9 +56,9 @@ for RHEL or Debian.
|
||||
| 3.6 | ❌ | Used in RHEL 8, but not supported by project dependencies |
|
||||
| 3.7 | ❌ | End of Life (EOL) |
|
||||
| 3.8 | ❌ | End of Life (EOL) |
|
||||
| 3.9 | ✅ | Supported until August 2026 (Debian 11); May 2032 (RHEL 9) |
|
||||
| 3.9 | ❌ | Used in Debian 11 and RHEL 9, but not supported by project dependencies |
|
||||
| 3.10 | ✅ | Actively maintained |
|
||||
| 3.11 | ✅ | Actively maintained; supported until June 2028 (Debian 12) |
|
||||
| 3.12 | ✅ | Actively maintained; supported until May 2035 (RHEL 10) |
|
||||
| 3.13 | ✅ | Actively maintained; supported until June 2030 (Debian 13) |
|
||||
| 3.14 | ❌ | Not currently supported due to [this imapclient bug](https://github.com/mjs/imapclient/issues/618)|
|
||||
| 3.14 | ✅ | Supported (requires `imapclient>=3.1.0`) |
|
||||
|
||||
29
SECURITY.md
Normal file
29
SECURITY.md
Normal file
@@ -0,0 +1,29 @@
|
||||
# Security Policy
|
||||
|
||||
## Reporting a vulnerability
|
||||
|
||||
Please do not open a public GitHub issue for an undisclosed security
|
||||
vulnerability. Use GitHub private vulnerability reporting in the Security tab of this project instead.
|
||||
|
||||
When reporting a vulnerability, include:
|
||||
|
||||
- the affected parsedmarc version or commit
|
||||
- the component or integration involved
|
||||
- clear reproduction details if available
|
||||
- potential impact
|
||||
- any suggested mitigation or workaround
|
||||
|
||||
## Supported versions
|
||||
|
||||
Security fixes will be applied to the latest released version and
|
||||
the current `master` branch.
|
||||
|
||||
Older versions will not receive backported fixes.
|
||||
|
||||
## Disclosure process
|
||||
|
||||
After a report is received, maintainers will validate the issue, assess its impact,
|
||||
and coordinate a fix before public disclosure.
|
||||
|
||||
Please avoid publishing proof-of-concept details until maintainers have had a
|
||||
reasonable opportunity to investigate and release a fix or mitigation.
|
||||
1
ci.ini
1
ci.ini
@@ -3,6 +3,7 @@ save_aggregate = True
|
||||
save_forensic = True
|
||||
save_smtp_tls = True
|
||||
debug = True
|
||||
offline = True
|
||||
|
||||
[elasticsearch]
|
||||
hosts = http://localhost:9200
|
||||
|
||||
11
codecov.yml
Normal file
11
codecov.yml
Normal file
@@ -0,0 +1,11 @@
|
||||
codecov:
|
||||
require_ci_to_pass: true
|
||||
|
||||
coverage:
|
||||
status:
|
||||
project:
|
||||
default:
|
||||
informational: true
|
||||
patch:
|
||||
default:
|
||||
informational: false
|
||||
@@ -1,494 +0,0 @@
|
||||
# Google SecOps (Chronicle) Output
|
||||
|
||||
`parsedmarc` can output DMARC reports to Google SecOps (Chronicle) in UDM (Unified Data Model) format.
|
||||
|
||||
## Configuration
|
||||
|
||||
To enable Google SecOps output, add a `[google_secops]` section to your configuration file:
|
||||
|
||||
### Primary Method: Chronicle Ingestion API
|
||||
|
||||
The recommended approach is to send events directly to Chronicle via the Ingestion API:
|
||||
|
||||
```ini
|
||||
[general]
|
||||
save_aggregate = True
|
||||
save_forensic = True
|
||||
|
||||
[google_secops]
|
||||
# Required: Path to Google service account JSON credentials file
|
||||
api_credentials_file = /path/to/service-account-credentials.json
|
||||
|
||||
# Required: Chronicle customer ID
|
||||
api_customer_id = your-customer-id-here
|
||||
|
||||
# Optional: Chronicle region (default: us)
|
||||
# Options: us, europe, asia-southeast1, me-central2, australia-southeast1
|
||||
api_region = us
|
||||
|
||||
# Optional: Log type for Chronicle ingestion (default: DMARC)
|
||||
api_log_type = DMARC
|
||||
|
||||
# Optional: Include forensic report message payload (default: False)
|
||||
# For privacy, message bodies are excluded by default
|
||||
include_ruf_payload = False
|
||||
|
||||
# Optional: Maximum bytes of forensic message payload to include (default: 4096)
|
||||
ruf_payload_max_bytes = 4096
|
||||
|
||||
# Optional: Static observer name for telemetry identification
|
||||
static_observer_name = my-parsedmarc-instance
|
||||
|
||||
# Optional: Static observer vendor (default: parsedmarc)
|
||||
static_observer_vendor = parsedmarc
|
||||
|
||||
# Optional: Static environment label (e.g., prod, dev)
|
||||
static_environment = prod
|
||||
```
|
||||
|
||||
### Alternative Method: stdout Output
|
||||
|
||||
If you prefer to use an external log shipper (Fluentd, Logstash, Chronicle forwarder), set `use_stdout = True`:
|
||||
|
||||
```ini
|
||||
[google_secops]
|
||||
# Output to stdout instead of Chronicle API
|
||||
use_stdout = True
|
||||
|
||||
# Other optional configuration options (as above)
|
||||
include_ruf_payload = False
|
||||
ruf_payload_max_bytes = 4096
|
||||
static_observer_name = my-instance
|
||||
static_observer_vendor = parsedmarc
|
||||
static_environment = prod
|
||||
```
|
||||
|
||||
## Output Format
|
||||
|
||||
The Google SecOps output produces newline-delimited JSON (NDJSON) in Chronicle UDM format, which can be ingested into Google SecOps for hunting and dashboarding.
|
||||
|
||||
### Event Types
|
||||
|
||||
1. **DMARC_AGGREGATE**: One event per aggregate report row, preserving count and period information
|
||||
2. **DMARC_FORENSIC**: One event per forensic report
|
||||
3. **SMTP_TLS_REPORT**: One event per SMTP TLS failure detail
|
||||
4. **DMARC_PARSE_ERROR**: Generated when parsing fails (does not crash)
|
||||
|
||||
### UDM Schema
|
||||
|
||||
Each event includes:
|
||||
|
||||
- **metadata**: Event timestamp, type, product name, and vendor
|
||||
- `event_timestamp`: RFC 3339 formatted timestamp
|
||||
- `event_type`: Always "GENERIC_EVENT" for UDM
|
||||
- `product_name`: Always "parsedmarc"
|
||||
- `vendor_name`: Configurable via `static_observer_vendor` (default: "parsedmarc")
|
||||
- `product_deployment_id` (optional): Set via `static_observer_name` config
|
||||
|
||||
- **principal**: Source IP address, location (country), and hostname (reverse DNS)
|
||||
- `ip`: Array containing source IP address (always present)
|
||||
- `location.country_or_region` (optional): ISO country code from IP geolocation
|
||||
- `hostname` (optional): Reverse DNS hostname for source IP
|
||||
|
||||
- **target**: Domain name (from DMARC policy)
|
||||
- `domain.name`: The domain being protected by DMARC
|
||||
|
||||
- **security_result**: Severity level, description, and detection fields for dashboarding
|
||||
- `severity`: Derived severity level (HIGH/MEDIUM/LOW/ERROR)
|
||||
- `description`: Human-readable event description
|
||||
- **detection_fields**: Key DMARC dimensions for filtering and grouping
|
||||
- All dashboard-relevant fields use `dmarc.*` or `smtp_tls.*` prefixes for easy identification
|
||||
- Includes IP enrichment data (service name and type from reverse DNS mapping) for enhanced filtering
|
||||
- See "Detection Fields" section below for complete field listings
|
||||
|
||||
- **additional.fields** (optional): Low-value context fields not typically used for dashboarding
|
||||
- SPF/DKIM authentication details (e.g., `spf_0_domain`, `spf_0_result`)
|
||||
- Forensic report metadata (e.g., `feedback_type`, `message_id`, `authentication_results`)
|
||||
- Base domain, environment tags, optional message samples
|
||||
|
||||
**Design Rationale**: DMARC dimensions are placed in `security_result[].detection_fields` rather than `additional.fields` because Chronicle dashboards, stats searches, and aggregations work best with UDM label arrays. `additional.fields` is a protobuf Struct intended for opaque context and is not reliably queryable for dashboard operations.
|
||||
|
||||
### Detection Fields
|
||||
|
||||
**Aggregate Report Fields** (`DMARC_AGGREGATE` events):
|
||||
- `dmarc.disposition`: DMARC policy action (none, quarantine, reject)
|
||||
- `dmarc.policy`: Published DMARC policy (none, quarantine, reject)
|
||||
- `dmarc.pass`: Boolean - overall DMARC authentication result
|
||||
- `dmarc.spf_aligned`: Boolean - SPF alignment status
|
||||
- `dmarc.dkim_aligned`: Boolean - DKIM alignment status
|
||||
- `dmarc.header_from`: Header From domain
|
||||
- `dmarc.envelope_from`: Envelope From domain (MAIL FROM)
|
||||
- `dmarc.report_org`: Reporting organization name
|
||||
- `dmarc.report_id`: Unique report identifier
|
||||
- `dmarc.report_begin`: Report period start timestamp
|
||||
- `dmarc.report_end`: Report period end timestamp
|
||||
- `dmarc.row_count`: Number of messages represented by this record
|
||||
- `dmarc.spf_result` (optional): SPF authentication result (pass, fail, etc.)
|
||||
- `dmarc.dkim_result` (optional): DKIM authentication result (pass, fail, etc.)
|
||||
- `dmarc.source_service_name` (optional): Enriched service name from reverse DNS mapping
|
||||
- `dmarc.source_service_type` (optional): Enriched service type (e.g., "Email Provider", "Webmail", "Marketing")
|
||||
|
||||
**Forensic Report Fields** (`DMARC_FORENSIC` events):
|
||||
- `dmarc.auth_failure`: Authentication failure type(s) (dmarc, spf, dkim)
|
||||
- `dmarc.reported_domain`: Domain that failed DMARC authentication
|
||||
- `dmarc.source_service_name` (optional): Enriched service name from reverse DNS mapping
|
||||
- `dmarc.source_service_type` (optional): Enriched service type (e.g., "Email Provider", "Webmail", "Marketing")
|
||||
|
||||
**SMTP TLS Report Fields** (`SMTP_TLS_REPORT` events):
|
||||
- `smtp_tls.policy_domain`: Domain being monitored for TLS policy
|
||||
- `smtp_tls.result_type`: Type of TLS failure (certificate-expired, validation-failure, etc.)
|
||||
- `smtp_tls.failed_session_count`: Number of failed sessions
|
||||
- `smtp_tls.report_org`: Reporting organization name
|
||||
- `smtp_tls.report_begin`: Report period start timestamp
|
||||
- `smtp_tls.report_end`: Report period end timestamp
|
||||
|
||||
### Severity Heuristics
|
||||
|
||||
- **HIGH**: DMARC disposition = reject
|
||||
- **MEDIUM**: DMARC disposition = quarantine with partial SPF/DKIM failures
|
||||
- **LOW**: DMARC disposition = none or pass
|
||||
|
||||
## Example Output
|
||||
|
||||
### Aggregate Report Event
|
||||
|
||||
```json
|
||||
{
|
||||
"event_type": "DMARC_AGGREGATE",
|
||||
"metadata": {
|
||||
"event_timestamp": "2018-06-19T00:00:00+00:00",
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": "parsedmarc"
|
||||
},
|
||||
"principal": {
|
||||
"ip": ["199.230.200.36"],
|
||||
"location": {"country_or_region": "US"}
|
||||
},
|
||||
"target": {
|
||||
"domain": {"name": "example.com"}
|
||||
},
|
||||
"security_result": [{
|
||||
"severity": "LOW",
|
||||
"description": "DMARC fail; SPF=pass; DKIM=pass; SPF not aligned; DKIM not aligned; disposition=none",
|
||||
"detection_fields": [
|
||||
{"key": "dmarc.disposition", "value": "none"},
|
||||
{"key": "dmarc.policy", "value": "none"},
|
||||
{"key": "dmarc.pass", "value": false},
|
||||
{"key": "dmarc.spf_aligned", "value": false},
|
||||
{"key": "dmarc.dkim_aligned", "value": false},
|
||||
{"key": "dmarc.header_from", "value": "example.com"},
|
||||
{"key": "dmarc.envelope_from", "value": "example.com"},
|
||||
{"key": "dmarc.report_org", "value": "example.net"},
|
||||
{"key": "dmarc.report_id", "value": "b043f0e264cf4ea995e93765242f6dfb"},
|
||||
{"key": "dmarc.report_begin", "value": "2018-06-19 00:00:00"},
|
||||
{"key": "dmarc.report_end", "value": "2018-06-19 23:59:59"},
|
||||
{"key": "dmarc.row_count", "value": 1},
|
||||
{"key": "dmarc.spf_result", "value": "pass"},
|
||||
{"key": "dmarc.dkim_result", "value": "pass"},
|
||||
{"key": "dmarc.source_service_name", "value": "Example Mail Service"},
|
||||
{"key": "dmarc.source_service_type", "value": "Email Provider"}
|
||||
]
|
||||
}],
|
||||
"additional": {
|
||||
"fields": [
|
||||
{"key": "spf_0_domain", "value": "example.com"},
|
||||
{"key": "spf_0_result", "value": "pass"},
|
||||
{"key": "dkim_0_domain", "value": "example.com"},
|
||||
{"key": "dkim_0_result", "value": "pass"}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Forensic Report Event
|
||||
|
||||
```json
|
||||
{
|
||||
"event_type": "DMARC_FORENSIC",
|
||||
"metadata": {
|
||||
"event_timestamp": "2019-04-30T02:09:00+00:00",
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": "parsedmarc"
|
||||
},
|
||||
"principal": {
|
||||
"ip": ["10.10.10.10"],
|
||||
"location": {"country_or_region": "US"},
|
||||
"hostname": "mail.example-sender.com"
|
||||
},
|
||||
"target": {
|
||||
"domain": {"name": "example.com"}
|
||||
},
|
||||
"security_result": [{
|
||||
"severity": "MEDIUM",
|
||||
"description": "DMARC forensic report: authentication failure (dmarc)",
|
||||
"detection_fields": [
|
||||
{"key": "dmarc.auth_failure", "value": "dmarc"},
|
||||
{"key": "dmarc.reported_domain", "value": "example.com"},
|
||||
{"key": "dmarc.source_service_name", "value": "Example Mail Provider"},
|
||||
{"key": "dmarc.source_service_type", "value": "Email Provider"}
|
||||
]
|
||||
}],
|
||||
"additional": {
|
||||
"fields": [
|
||||
{"key": "feedback_type", "value": "auth-failure"},
|
||||
{"key": "message_id", "value": "<01010101010101010101010101010101@ABAB01MS0016.someserver.loc>"},
|
||||
{"key": "authentication_results", "value": "dmarc=fail (p=none; dis=none) header.from=example.com"},
|
||||
{"key": "delivery_result", "value": "delivered"}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### SMTP TLS Report Event
|
||||
|
||||
```json
|
||||
{
|
||||
"event_type": "SMTP_TLS_REPORT",
|
||||
"metadata": {
|
||||
"event_timestamp": "2016-04-01T00:00:00+00:00",
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": "parsedmarc"
|
||||
},
|
||||
"target": {
|
||||
"domain": {
|
||||
"name": "company-y.example"
|
||||
}
|
||||
},
|
||||
"security_result": [{
|
||||
"severity": "LOW",
|
||||
"description": "SMTP TLS failure: certificate-expired",
|
||||
"detection_fields": [
|
||||
{"key": "smtp_tls.policy_domain", "value": "company-y.example"},
|
||||
{"key": "smtp_tls.result_type", "value": "certificate-expired"},
|
||||
{"key": "smtp_tls.failed_session_count", "value": 100},
|
||||
{"key": "smtp_tls.report_org", "value": "Company-X"},
|
||||
{"key": "smtp_tls.report_begin", "value": "2016-04-01T00:00:00Z"},
|
||||
{"key": "smtp_tls.report_end", "value": "2016-04-01T23:59:59Z"}
|
||||
]
|
||||
}],
|
||||
"principal": {
|
||||
"ip": ["2001:db8:abcd:0012::1"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Parse Error Event
|
||||
|
||||
```json
|
||||
{
|
||||
"event_type": "DMARC_PARSE_ERROR",
|
||||
"metadata": {
|
||||
"event_timestamp": "2026-01-09T16:22:10.933751+00:00",
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": "parsedmarc"
|
||||
},
|
||||
"security_result": [{
|
||||
"severity": "ERROR",
|
||||
"description": "Failed to parse DMARC report: Invalid XML structure"
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
## Google SecOps Searches
|
||||
|
||||
Here are some example YARA-L rules you can use in Google SecOps to hunt for DMARC issues:
|
||||
|
||||
### Find all DMARC aggregate report failures
|
||||
|
||||
```yara-l
|
||||
rule dmarc_aggregate_failures {
|
||||
meta:
|
||||
author = "parsedmarc"
|
||||
description = "Detect DMARC authentication failures in aggregate reports"
|
||||
|
||||
events:
|
||||
$e.metadata.product_name = "parsedmarc"
|
||||
$e.event_type = "DMARC_AGGREGATE"
|
||||
$e.security_result.detection_fields.key = "dmarc.pass"
|
||||
$e.security_result.detection_fields.value = false
|
||||
|
||||
condition:
|
||||
$e
|
||||
}
|
||||
```
|
||||
|
||||
### Find high severity DMARC events (rejected mail)
|
||||
|
||||
```yara-l
|
||||
rule high_severity_dmarc_events {
|
||||
meta:
|
||||
author = "parsedmarc"
|
||||
description = "Detect high severity DMARC aggregate events (rejected mail)"
|
||||
|
||||
events:
|
||||
$e.metadata.product_name = "parsedmarc"
|
||||
$e.event_type = "DMARC_AGGREGATE"
|
||||
$e.security_result.severity = "HIGH"
|
||||
|
||||
condition:
|
||||
$e
|
||||
}
|
||||
```
|
||||
|
||||
### Find repeated DMARC failures from same source IP
|
||||
|
||||
```yara-l
|
||||
rule repeated_dmarc_failures {
|
||||
meta:
|
||||
author = "parsedmarc"
|
||||
description = "Detect repeated DMARC failures from the same source IP"
|
||||
|
||||
events:
|
||||
$e.metadata.product_name = "parsedmarc"
|
||||
$e.event_type = "DMARC_AGGREGATE"
|
||||
$e.security_result.detection_fields.key = "dmarc.pass"
|
||||
$e.security_result.detection_fields.value = false
|
||||
$e.principal.ip = $source_ip
|
||||
|
||||
match:
|
||||
$source_ip over 1h
|
||||
|
||||
condition:
|
||||
#e > 5
|
||||
}
|
||||
```
|
||||
|
||||
### Find DMARC forensic reports with authentication failures
|
||||
|
||||
```yara-l
|
||||
rule dmarc_forensic_failures {
|
||||
meta:
|
||||
author = "parsedmarc"
|
||||
description = "Detect DMARC forensic reports with authentication failures"
|
||||
|
||||
events:
|
||||
$e.metadata.product_name = "parsedmarc"
|
||||
$e.event_type = "DMARC_FORENSIC"
|
||||
$e.security_result.detection_fields.key = "dmarc.auth_failure"
|
||||
|
||||
condition:
|
||||
$e
|
||||
}
|
||||
```
|
||||
|
||||
### Find DMARC failures from specific mail service types
|
||||
|
||||
```yara-l
|
||||
rule dmarc_failures_by_service_type {
|
||||
meta:
|
||||
author = "parsedmarc"
|
||||
description = "Detect DMARC failures from specific mail service types"
|
||||
|
||||
events:
|
||||
$e.metadata.product_name = "parsedmarc"
|
||||
$e.event_type = "DMARC_AGGREGATE"
|
||||
$e.security_result.detection_fields.key = "dmarc.pass"
|
||||
$e.security_result.detection_fields.value = false
|
||||
$e.security_result.detection_fields.key = "dmarc.source_service_type"
|
||||
$e.security_result.detection_fields.value = "Email Provider"
|
||||
|
||||
condition:
|
||||
$e
|
||||
}
|
||||
```
|
||||
|
||||
### Find SMTP TLS failures
|
||||
|
||||
```yara-l
|
||||
rule smtp_tls_failures {
|
||||
meta:
|
||||
author = "parsedmarc"
|
||||
description = "Detect SMTP TLS failures"
|
||||
|
||||
events:
|
||||
$e.metadata.product_name = "parsedmarc"
|
||||
$e.event_type = "SMTP_TLS_REPORT"
|
||||
|
||||
condition:
|
||||
$e
|
||||
}
|
||||
```
|
||||
|
||||
## Privacy Considerations
|
||||
|
||||
By default, forensic report message bodies are **excluded** from the output to protect privacy. If you need to include message samples for investigation:
|
||||
|
||||
1. Set `include_ruf_payload = True` in your configuration
|
||||
2. Adjust `ruf_payload_max_bytes` to limit the amount of data included (default: 4096 bytes)
|
||||
3. Message samples will be truncated if they exceed the configured maximum
|
||||
|
||||
**Note**: Be aware of data privacy regulations (GDPR, CCPA, etc.) when including message payloads in security telemetry.
|
||||
|
||||
## Usage
|
||||
|
||||
The Google SecOps output works with all parsedmarc input methods, including file processing and mailbox monitoring.
|
||||
|
||||
### Primary Method: Direct API Ingestion
|
||||
|
||||
With Chronicle Ingestion API configured, events are sent directly to Chronicle:
|
||||
|
||||
```bash
|
||||
# Process files - events are sent to Chronicle API automatically
|
||||
parsedmarc -c config.ini samples/aggregate/*.xml
|
||||
|
||||
# Monitor mailbox - events are sent to Chronicle API in real-time
|
||||
parsedmarc -c config.ini
|
||||
```
|
||||
|
||||
No additional log shippers or pipelines are needed. The Google SecOps client handles authentication and batching automatically.
|
||||
|
||||
### Alternative Method: stdout Output with Log Shipper
|
||||
|
||||
If using `use_stdout = True` in your configuration, output DMARC reports to an external log shipper:
|
||||
|
||||
#### Processing Files
|
||||
|
||||
```bash
|
||||
# Output to stdout
|
||||
parsedmarc -c config.ini samples/aggregate/*.xml > dmarc_events.ndjson
|
||||
|
||||
# Stream to file
|
||||
parsedmarc -c config.ini samples/aggregate/*.xml >> /var/log/dmarc/events.ndjson
|
||||
|
||||
# Pipe to log shipper (e.g., Fluentd, Logstash, Chronicle forwarder)
|
||||
parsedmarc -c config.ini samples/aggregate/*.xml | fluentd
|
||||
```
|
||||
|
||||
#### Monitoring Mailboxes
|
||||
|
||||
The Google SecOps output automatically works when monitoring mailboxes via IMAP, Microsoft Graph, or Gmail API. Configure your mailbox connection and enable watching:
|
||||
|
||||
```ini
|
||||
[general]
|
||||
save_aggregate = True
|
||||
save_forensic = True
|
||||
|
||||
[mailbox]
|
||||
watch = True
|
||||
delete = False
|
||||
batch_size = 10
|
||||
|
||||
[imap]
|
||||
host = imap.example.com
|
||||
user = dmarc@example.com
|
||||
password = yourpassword
|
||||
|
||||
[google_secops]
|
||||
# Use stdout mode for log shipper integration
|
||||
use_stdout = True
|
||||
include_ruf_payload = False
|
||||
static_observer_name = mailbox-monitor
|
||||
static_environment = prod
|
||||
```
|
||||
|
||||
When watching a mailbox with stdout mode, parsedmarc continuously outputs UDM events as new reports arrive:
|
||||
|
||||
```bash
|
||||
parsedmarc -c config.ini | fluentd
|
||||
```
|
||||
|
||||
The output is in newline-delimited JSON format, with one UDM event per line, ready for collection by your log shipper.
|
||||
@@ -44,7 +44,6 @@ and Valimail.
|
||||
- Optionally send the results to Elasticsearch, Opensearch, and/or Splunk, for use
|
||||
with premade dashboards
|
||||
- Optionally send reports to Apache Kafka
|
||||
- Optionally send reports to Google SecOps (Chronicle) in UDM format
|
||||
|
||||
## Python Compatibility
|
||||
|
||||
@@ -57,12 +56,12 @@ for RHEL or Debian.
|
||||
| 3.6 | ❌ | Used in RHEL 8, but not supported by project dependencies |
|
||||
| 3.7 | ❌ | End of Life (EOL) |
|
||||
| 3.8 | ❌ | End of Life (EOL) |
|
||||
| 3.9 | ✅ | Supported until August 2026 (Debian 11); May 2032 (RHEL 9) |
|
||||
| 3.9 | ❌ | Used in Debian 11 and RHEL 9, but not supported by project dependencies |
|
||||
| 3.10 | ✅ | Actively maintained |
|
||||
| 3.11 | ✅ | Actively maintained; supported until June 2028 (Debian 12) |
|
||||
| 3.12 | ✅ | Actively maintained; supported until May 2035 (RHEL 10) |
|
||||
| 3.13 | ✅ | Actively maintained; supported until June 2030 (Debian 13) |
|
||||
| 3.14 | ❌ | Not currently supported due to [this imapclient bug](https://github.com/mjs/imapclient/issues/618)|
|
||||
| 3.14 | ✅ | Supported (requires `imapclient>=3.1.0`) |
|
||||
|
||||
```{toctree}
|
||||
:caption: 'Contents'
|
||||
@@ -75,7 +74,6 @@ elasticsearch
|
||||
opensearch
|
||||
kibana
|
||||
splunk
|
||||
google_secops
|
||||
davmail
|
||||
dmarc
|
||||
contributing
|
||||
|
||||
@@ -162,10 +162,10 @@ sudo -u parsedmarc virtualenv /opt/parsedmarc/venv
|
||||
```
|
||||
|
||||
CentOS/RHEL 8 systems use Python 3.6 by default, so on those systems
|
||||
explicitly tell `virtualenv` to use `python3.9` instead
|
||||
explicitly tell `virtualenv` to use `python3.10` instead
|
||||
|
||||
```bash
|
||||
sudo -u parsedmarc virtualenv -p python3.9 /opt/parsedmarc/venv
|
||||
sudo -u parsedmarc virtualenv -p python3.10 /opt/parsedmarc/venv
|
||||
```
|
||||
|
||||
Activate the virtualenv
|
||||
|
||||
@@ -146,6 +146,9 @@ The full set of configuration options are:
|
||||
- `dns_timeout` - float: DNS timeout period
|
||||
- `debug` - bool: Print debugging messages
|
||||
- `silent` - bool: Only print errors (Default: `True`)
|
||||
- `fail_on_output_error` - bool: Exit with a non-zero status code if
|
||||
any configured output destination fails while saving/publishing
|
||||
reports (Default: `False`)
|
||||
- `log_file` - str: Write log messages to a file at this path
|
||||
- `n_procs` - int: Number of process to run in parallel when
|
||||
parsing in CLI mode (Default: `1`)
|
||||
@@ -171,8 +174,8 @@ The full set of configuration options are:
|
||||
- `check_timeout` - int: Number of seconds to wait for a IMAP
|
||||
IDLE response or the number of seconds until the next
|
||||
mail check (Default: `30`)
|
||||
- `since` - str: Search for messages since certain time. (Examples: `5m|3h|2d|1w`)
|
||||
Acceptable units - {"m":"minutes", "h":"hours", "d":"days", "w":"weeks"}.
|
||||
- `since` - str: Search for messages since certain time. (Examples: `5m|3h|2d|1w`)
|
||||
Acceptable units - {"m":"minutes", "h":"hours", "d":"days", "w":"weeks"}.
|
||||
Defaults to `1d` if incorrect value is provided.
|
||||
- `imap`
|
||||
- `host` - str: The IMAP server hostname or IP address
|
||||
@@ -200,7 +203,7 @@ The full set of configuration options are:
|
||||
- `password` - str: The IMAP password
|
||||
- `msgraph`
|
||||
- `auth_method` - str: Authentication method, valid types are
|
||||
`UsernamePassword`, `DeviceCode`, or `ClientSecret`
|
||||
`UsernamePassword`, `DeviceCode`, `ClientSecret`, or `Certificate`
|
||||
(Default: `UsernamePassword`).
|
||||
- `user` - str: The M365 user, required when the auth method is
|
||||
UsernamePassword
|
||||
@@ -208,6 +211,11 @@ The full set of configuration options are:
|
||||
method is UsernamePassword
|
||||
- `client_id` - str: The app registration's client ID
|
||||
- `client_secret` - str: The app registration's secret
|
||||
- `certificate_path` - str: Path to a PEM or PKCS12 certificate
|
||||
including the private key. Required when the auth method is
|
||||
`Certificate`
|
||||
- `certificate_password` - str: Optional password for the
|
||||
certificate file when using `Certificate` auth
|
||||
- `tenant_id` - str: The Azure AD tenant ID. This is required
|
||||
for all auth methods except UsernamePassword.
|
||||
- `mailbox` - str: The mailbox name. This defaults to the
|
||||
@@ -240,11 +248,14 @@ The full set of configuration options are:
|
||||
group and use that as the group id.
|
||||
|
||||
```powershell
|
||||
New-ApplicationAccessPolicy -AccessRight RestrictAccess
|
||||
New-ApplicationAccessPolicy -AccessRight RestrictAccess
|
||||
-AppId "<CLIENT_ID>" -PolicyScopeGroupId "<MAILBOX>"
|
||||
-Description "Restrict access to dmarc reports mailbox."
|
||||
```
|
||||
|
||||
The same application permission and mailbox scoping guidance
|
||||
applies to the `Certificate` auth method.
|
||||
|
||||
:::
|
||||
- `elasticsearch`
|
||||
- `hosts` - str: A comma separated list of hostnames and ports
|
||||
@@ -281,6 +292,10 @@ The full set of configuration options are:
|
||||
- `user` - str: Basic auth username
|
||||
- `password` - str: Basic auth password
|
||||
- `api_key` - str: API key
|
||||
- `auth_type` - str: Authentication type: `basic` (default) or `awssigv4` (the key `authentication_type` is accepted as an alias for this option)
|
||||
- `aws_region` - str: AWS region for SigV4 authentication
|
||||
(required when `auth_type = awssigv4`)
|
||||
- `aws_service` - str: AWS service for SigV4 signing (Default: `es`)
|
||||
- `ssl` - bool: Use an encrypted SSL/TLS connection
|
||||
(Default: `True`)
|
||||
- `timeout` - float: Timeout in seconds (Default: 60)
|
||||
@@ -336,16 +351,77 @@ The full set of configuration options are:
|
||||
- `secret_access_key` - str: The secret access key (Optional)
|
||||
- `syslog`
|
||||
- `server` - str: The Syslog server name or IP address
|
||||
- `port` - int: The UDP port to use (Default: `514`)
|
||||
- `port` - int: The port to use (Default: `514`)
|
||||
- `protocol` - str: The protocol to use: `udp`, `tcp`, or `tls` (Default: `udp`)
|
||||
- `cafile_path` - str: Path to CA certificate file for TLS server verification (Optional)
|
||||
- `certfile_path` - str: Path to client certificate file for TLS authentication (Optional)
|
||||
- `keyfile_path` - str: Path to client private key file for TLS authentication (Optional)
|
||||
- `timeout` - float: Connection timeout in seconds for TCP/TLS (Default: `5.0`)
|
||||
- `retry_attempts` - int: Number of retry attempts for failed connections (Default: `3`)
|
||||
- `retry_delay` - int: Delay in seconds between retry attempts (Default: `5`)
|
||||
|
||||
**Example UDP configuration (default):**
|
||||
|
||||
```ini
|
||||
[syslog]
|
||||
server = syslog.example.com
|
||||
port = 514
|
||||
```
|
||||
|
||||
**Example TCP configuration:**
|
||||
|
||||
```ini
|
||||
[syslog]
|
||||
server = syslog.example.com
|
||||
port = 6514
|
||||
protocol = tcp
|
||||
timeout = 10.0
|
||||
retry_attempts = 5
|
||||
```
|
||||
|
||||
**Example TLS configuration with server verification:**
|
||||
|
||||
```ini
|
||||
[syslog]
|
||||
server = syslog.example.com
|
||||
port = 6514
|
||||
protocol = tls
|
||||
cafile_path = /path/to/ca-cert.pem
|
||||
timeout = 10.0
|
||||
```
|
||||
|
||||
**Example TLS configuration with mutual authentication:**
|
||||
|
||||
```ini
|
||||
[syslog]
|
||||
server = syslog.example.com
|
||||
port = 6514
|
||||
protocol = tls
|
||||
cafile_path = /path/to/ca-cert.pem
|
||||
certfile_path = /path/to/client-cert.pem
|
||||
keyfile_path = /path/to/client-key.pem
|
||||
timeout = 10.0
|
||||
retry_attempts = 3
|
||||
retry_delay = 5
|
||||
```
|
||||
- `gmail_api`
|
||||
- `credentials_file` - str: Path to file containing the
|
||||
credentials, None to disable (Default: `None`)
|
||||
- `token_file` - str: Path to save the token file
|
||||
(Default: `.token`)
|
||||
|
||||
- `auth_mode` - str: Authentication mode, `installed_app` (default)
|
||||
or `service_account`
|
||||
- `service_account_user` - str: Delegated mailbox user for Gmail
|
||||
service account auth (required for domain-wide delegation). Also
|
||||
accepted as `delegated_user` for backward compatibility.
|
||||
|
||||
:::{note}
|
||||
credentials_file and token_file can be got with [quickstart](https://developers.google.com/gmail/api/quickstart/python).Please change the scope to `https://www.googleapis.com/auth/gmail.modify`.
|
||||
:::
|
||||
:::{note}
|
||||
When `auth_mode = service_account`, `credentials_file` must point to a
|
||||
Google service account key JSON file, and `token_file` is not used.
|
||||
:::
|
||||
- `include_spam_trash` - bool: Include messages in Spam and
|
||||
Trash when searching reports (Default: `False`)
|
||||
- `scopes` - str: Comma separated list of scopes to use when
|
||||
@@ -442,7 +518,7 @@ Update the limit to 2k per example:
|
||||
PUT _cluster/settings
|
||||
{
|
||||
"persistent" : {
|
||||
"cluster.max_shards_per_node" : 2000
|
||||
"cluster.max_shards_per_node" : 2000
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -450,6 +526,33 @@ PUT _cluster/settings
|
||||
Increasing this value increases resource usage.
|
||||
:::
|
||||
|
||||
## Performance tuning
|
||||
|
||||
For large mailbox imports or backfills, parsedmarc can consume a noticeable amount
|
||||
of memory, especially when it runs on the same host as Elasticsearch or
|
||||
OpenSearch. The following settings can reduce peak memory usage and make long
|
||||
imports more predictable:
|
||||
|
||||
- Reduce `mailbox.batch_size` to smaller values such as `100-500` instead of
|
||||
processing a very large message set at once. Smaller batches trade throughput
|
||||
for lower peak memory use and less sink pressure.
|
||||
- Keep `n_procs` low for mailbox-heavy runs. In practice, `1-2` workers is often
|
||||
a safer starting point for large backfills than aggressive parallelism.
|
||||
- Use `mailbox.since` to process reports in smaller time windows such as `1d`,
|
||||
`7d`, or another interval that fits the backlog. This makes it easier to catch
|
||||
up incrementally instead of loading an entire mailbox history in one run.
|
||||
- Set `strip_attachment_payloads = True` when forensic reports contain large
|
||||
attachments and you do not need to retain the raw payloads in the parsed
|
||||
output.
|
||||
- Prefer running parsedmarc separately from Elasticsearch or OpenSearch, or
|
||||
reserve enough RAM for both services if they must share a host.
|
||||
- For very large imports, prefer incremental supervised runs, such as a
|
||||
scheduler or systemd service, over infrequent massive backfills.
|
||||
|
||||
These are operational tuning recommendations rather than hard requirements, but
|
||||
they are often enough to avoid memory pressure and reduce failures during
|
||||
high-volume mailbox processing.
|
||||
|
||||
## Multi-tenant support
|
||||
|
||||
Starting in `8.19.0`, ParseDMARC provides multi-tenant support by placing data into separate OpenSearch or Elasticsearch index prefixes. To set this up, create a YAML file that is formatted where each key is a tenant name, and the value is a list of domains related to that tenant, not including subdomains, like this:
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -751,8 +751,8 @@ def parse_aggregate_report_xml(
|
||||
new_report_metadata["report_id"] = report_id
|
||||
date_range = report["report_metadata"]["date_range"]
|
||||
|
||||
begin_ts = int(date_range["begin"])
|
||||
end_ts = int(date_range["end"])
|
||||
begin_ts = int(date_range["begin"].split(".")[0])
|
||||
end_ts = int(date_range["end"].split(".")[0])
|
||||
span_seconds = end_ts - begin_ts
|
||||
|
||||
normalize_timespan = span_seconds > normalize_timespan_threshold_hours * 3600
|
||||
@@ -962,10 +962,12 @@ def extract_report(content: Union[bytes, str, BinaryIO]) -> str:
|
||||
return report
|
||||
|
||||
|
||||
def extract_report_from_file_path(file_path: str):
|
||||
def extract_report_from_file_path(
|
||||
file_path: Union[str, bytes, os.PathLike[str], os.PathLike[bytes]],
|
||||
) -> str:
|
||||
"""Extracts report from a file at the given file_path"""
|
||||
try:
|
||||
with open(file_path, "rb") as report_file:
|
||||
with open(os.fspath(file_path), "rb") as report_file:
|
||||
return extract_report(report_file.read())
|
||||
except FileNotFoundError:
|
||||
raise ParserError("File was not found")
|
||||
@@ -1660,7 +1662,7 @@ def parse_report_email(
|
||||
|
||||
|
||||
def parse_report_file(
|
||||
input_: Union[bytes, str, BinaryIO],
|
||||
input_: Union[bytes, str, os.PathLike[str], os.PathLike[bytes], BinaryIO],
|
||||
*,
|
||||
nameservers: Optional[list[str]] = None,
|
||||
dns_timeout: float = 2.0,
|
||||
@@ -1677,7 +1679,8 @@ def parse_report_file(
|
||||
file-like object. or bytes
|
||||
|
||||
Args:
|
||||
input_ (str | bytes | BinaryIO): A path to a file, a file like object, or bytes
|
||||
input_ (str | os.PathLike | bytes | BinaryIO): A path to a file,
|
||||
a file-like object, or bytes
|
||||
nameservers (list): A list of one or more nameservers to use
|
||||
(Cloudflare's public DNS resolvers by default)
|
||||
dns_timeout (float): Sets the DNS timeout in seconds
|
||||
@@ -1694,9 +1697,10 @@ def parse_report_file(
|
||||
dict: The parsed DMARC report
|
||||
"""
|
||||
file_object: BinaryIO
|
||||
if isinstance(input_, str):
|
||||
logger.debug("Parsing {0}".format(input_))
|
||||
file_object = open(input_, "rb")
|
||||
if isinstance(input_, (str, os.PathLike)):
|
||||
file_path = os.fspath(input_)
|
||||
logger.debug("Parsing {0}".format(file_path))
|
||||
file_object = open(file_path, "rb")
|
||||
elif isinstance(input_, (bytes, bytearray, memoryview)):
|
||||
file_object = BytesIO(bytes(input_))
|
||||
else:
|
||||
@@ -2137,14 +2141,17 @@ def get_dmarc_reports_from_mailbox(
|
||||
"smtp_tls_reports": smtp_tls_reports,
|
||||
}
|
||||
|
||||
if current_time:
|
||||
total_messages = len(
|
||||
connection.fetch_messages(reports_folder, since=current_time)
|
||||
)
|
||||
if not test and not batch_size:
|
||||
if current_time:
|
||||
total_messages = len(
|
||||
connection.fetch_messages(reports_folder, since=current_time)
|
||||
)
|
||||
else:
|
||||
total_messages = len(connection.fetch_messages(reports_folder))
|
||||
else:
|
||||
total_messages = len(connection.fetch_messages(reports_folder))
|
||||
total_messages = 0
|
||||
|
||||
if not test and not batch_size and total_messages > 0:
|
||||
if total_messages > 0:
|
||||
# Process emails that came in during the last run
|
||||
results = get_dmarc_reports_from_mailbox(
|
||||
connection=connection,
|
||||
@@ -2186,6 +2193,7 @@ def watch_inbox(
|
||||
dns_timeout: float = 6.0,
|
||||
strip_attachment_payloads: bool = False,
|
||||
batch_size: int = 10,
|
||||
since: Optional[Union[datetime, date, str]] = None,
|
||||
normalize_timespan_threshold_hours: float = 24,
|
||||
):
|
||||
"""
|
||||
@@ -2212,6 +2220,7 @@ def watch_inbox(
|
||||
strip_attachment_payloads (bool): Replace attachment payloads in
|
||||
forensic report samples with None
|
||||
batch_size (int): Number of messages to read and process before saving
|
||||
since: Search for messages since certain time
|
||||
normalize_timespan_threshold_hours (float): Normalize timespans beyond this
|
||||
"""
|
||||
|
||||
@@ -2231,6 +2240,7 @@ def watch_inbox(
|
||||
dns_timeout=dns_timeout,
|
||||
strip_attachment_payloads=strip_attachment_payloads,
|
||||
batch_size=batch_size,
|
||||
since=since,
|
||||
create_folders=False,
|
||||
normalize_timespan_threshold_hours=normalize_timespan_threshold_hours,
|
||||
)
|
||||
|
||||
@@ -27,7 +27,6 @@ from parsedmarc import (
|
||||
gelf,
|
||||
get_dmarc_reports_from_mailbox,
|
||||
get_dmarc_reports_from_mbox,
|
||||
google_secops,
|
||||
kafkaclient,
|
||||
loganalytics,
|
||||
opensearch,
|
||||
@@ -195,6 +194,13 @@ def _main():
|
||||
return None
|
||||
|
||||
def process_reports(reports_):
|
||||
output_errors = []
|
||||
|
||||
def log_output_error(destination, error):
|
||||
message = f"{destination} Error: {error}"
|
||||
logger.error(message)
|
||||
output_errors.append(message)
|
||||
|
||||
indent_value = 2 if opts.prettify_json else None
|
||||
output_str = "{0}\n".format(
|
||||
json.dumps(reports_, ensure_ascii=False, indent=indent_value)
|
||||
@@ -231,11 +237,9 @@ def _main():
|
||||
except elastic.AlreadySaved as warning:
|
||||
logger.warning(warning.__str__())
|
||||
except elastic.ElasticsearchError as error_:
|
||||
logger.error("Elasticsearch Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Elasticsearch", error_.__str__())
|
||||
except Exception as error_:
|
||||
logger.error(
|
||||
"Elasticsearch exception error: {}".format(error_.__str__())
|
||||
)
|
||||
log_output_error("Elasticsearch exception", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.opensearch_hosts:
|
||||
@@ -253,11 +257,9 @@ def _main():
|
||||
except opensearch.AlreadySaved as warning:
|
||||
logger.warning(warning.__str__())
|
||||
except opensearch.OpenSearchError as error_:
|
||||
logger.error("OpenSearch Error: {0}".format(error_.__str__()))
|
||||
log_output_error("OpenSearch", error_.__str__())
|
||||
except Exception as error_:
|
||||
logger.error(
|
||||
"OpenSearch exception error: {}".format(error_.__str__())
|
||||
)
|
||||
log_output_error("OpenSearch exception", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.kafka_hosts:
|
||||
@@ -265,33 +267,25 @@ def _main():
|
||||
report, kafka_aggregate_topic
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Kafka Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Kafka", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.s3_bucket:
|
||||
s3_client.save_aggregate_report_to_s3(report)
|
||||
except Exception as error_:
|
||||
logger.error("S3 Error: {0}".format(error_.__str__()))
|
||||
log_output_error("S3", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.syslog_server:
|
||||
syslog_client.save_aggregate_report_to_syslog(report)
|
||||
except Exception as error_:
|
||||
logger.error("Syslog Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Syslog", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.gelf_host:
|
||||
gelf_client.save_aggregate_report_to_gelf(report)
|
||||
except Exception as error_:
|
||||
logger.error("GELF Error: {0}".format(error_.__str__()))
|
||||
|
||||
try:
|
||||
if opts.google_secops:
|
||||
events = google_secops_client.save_aggregate_report_to_google_secops(report)
|
||||
for event in events:
|
||||
print(event)
|
||||
except Exception as error_:
|
||||
logger.error("Google SecOps Error: {0}".format(error_.__str__()))
|
||||
log_output_error("GELF", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.webhook_aggregate_url:
|
||||
@@ -300,7 +294,7 @@ def _main():
|
||||
json.dumps(report, ensure_ascii=False, indent=indent_value)
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Webhook Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Webhook", error_.__str__())
|
||||
|
||||
if opts.hec:
|
||||
try:
|
||||
@@ -308,7 +302,7 @@ def _main():
|
||||
if len(aggregate_reports_) > 0:
|
||||
hec_client.save_aggregate_reports_to_splunk(aggregate_reports_)
|
||||
except splunk.SplunkError as e:
|
||||
logger.error("Splunk HEC error: {0}".format(e.__str__()))
|
||||
log_output_error("Splunk HEC", e.__str__())
|
||||
|
||||
if opts.save_forensic:
|
||||
for report in reports_["forensic_reports"]:
|
||||
@@ -328,9 +322,9 @@ def _main():
|
||||
except elastic.AlreadySaved as warning:
|
||||
logger.warning(warning.__str__())
|
||||
except elastic.ElasticsearchError as error_:
|
||||
logger.error("Elasticsearch Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Elasticsearch", error_.__str__())
|
||||
except InvalidDMARCReport as error_:
|
||||
logger.error(error_.__str__())
|
||||
log_output_error("Invalid DMARC report", error_.__str__())
|
||||
|
||||
try:
|
||||
shards = opts.opensearch_number_of_shards
|
||||
@@ -348,9 +342,9 @@ def _main():
|
||||
except opensearch.AlreadySaved as warning:
|
||||
logger.warning(warning.__str__())
|
||||
except opensearch.OpenSearchError as error_:
|
||||
logger.error("OpenSearch Error: {0}".format(error_.__str__()))
|
||||
log_output_error("OpenSearch", error_.__str__())
|
||||
except InvalidDMARCReport as error_:
|
||||
logger.error(error_.__str__())
|
||||
log_output_error("Invalid DMARC report", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.kafka_hosts:
|
||||
@@ -358,33 +352,25 @@ def _main():
|
||||
report, kafka_forensic_topic
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Kafka Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Kafka", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.s3_bucket:
|
||||
s3_client.save_forensic_report_to_s3(report)
|
||||
except Exception as error_:
|
||||
logger.error("S3 Error: {0}".format(error_.__str__()))
|
||||
log_output_error("S3", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.syslog_server:
|
||||
syslog_client.save_forensic_report_to_syslog(report)
|
||||
except Exception as error_:
|
||||
logger.error("Syslog Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Syslog", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.gelf_host:
|
||||
gelf_client.save_forensic_report_to_gelf(report)
|
||||
except Exception as error_:
|
||||
logger.error("GELF Error: {0}".format(error_.__str__()))
|
||||
|
||||
try:
|
||||
if opts.google_secops:
|
||||
events = google_secops_client.save_forensic_report_to_google_secops(report)
|
||||
for event in events:
|
||||
print(event)
|
||||
except Exception as error_:
|
||||
logger.error("Google SecOps Error: {0}".format(error_.__str__()))
|
||||
log_output_error("GELF", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.webhook_forensic_url:
|
||||
@@ -393,7 +379,7 @@ def _main():
|
||||
json.dumps(report, ensure_ascii=False, indent=indent_value)
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Webhook Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Webhook", error_.__str__())
|
||||
|
||||
if opts.hec:
|
||||
try:
|
||||
@@ -401,7 +387,7 @@ def _main():
|
||||
if len(forensic_reports_) > 0:
|
||||
hec_client.save_forensic_reports_to_splunk(forensic_reports_)
|
||||
except splunk.SplunkError as e:
|
||||
logger.error("Splunk HEC error: {0}".format(e.__str__()))
|
||||
log_output_error("Splunk HEC", e.__str__())
|
||||
|
||||
if opts.save_smtp_tls:
|
||||
for report in reports_["smtp_tls_reports"]:
|
||||
@@ -421,9 +407,9 @@ def _main():
|
||||
except elastic.AlreadySaved as warning:
|
||||
logger.warning(warning.__str__())
|
||||
except elastic.ElasticsearchError as error_:
|
||||
logger.error("Elasticsearch Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Elasticsearch", error_.__str__())
|
||||
except InvalidDMARCReport as error_:
|
||||
logger.error(error_.__str__())
|
||||
log_output_error("Invalid DMARC report", error_.__str__())
|
||||
|
||||
try:
|
||||
shards = opts.opensearch_number_of_shards
|
||||
@@ -441,9 +427,9 @@ def _main():
|
||||
except opensearch.AlreadySaved as warning:
|
||||
logger.warning(warning.__str__())
|
||||
except opensearch.OpenSearchError as error_:
|
||||
logger.error("OpenSearch Error: {0}".format(error_.__str__()))
|
||||
log_output_error("OpenSearch", error_.__str__())
|
||||
except InvalidDMARCReport as error_:
|
||||
logger.error(error_.__str__())
|
||||
log_output_error("Invalid DMARC report", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.kafka_hosts:
|
||||
@@ -451,33 +437,25 @@ def _main():
|
||||
smtp_tls_reports, kafka_smtp_tls_topic
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Kafka Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Kafka", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.s3_bucket:
|
||||
s3_client.save_smtp_tls_report_to_s3(report)
|
||||
except Exception as error_:
|
||||
logger.error("S3 Error: {0}".format(error_.__str__()))
|
||||
log_output_error("S3", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.syslog_server:
|
||||
syslog_client.save_smtp_tls_report_to_syslog(report)
|
||||
except Exception as error_:
|
||||
logger.error("Syslog Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Syslog", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.gelf_host:
|
||||
gelf_client.save_smtp_tls_report_to_gelf(report)
|
||||
except Exception as error_:
|
||||
logger.error("GELF Error: {0}".format(error_.__str__()))
|
||||
|
||||
try:
|
||||
if opts.google_secops:
|
||||
events = google_secops_client.save_smtp_tls_report_to_google_secops(report)
|
||||
for event in events:
|
||||
print(event)
|
||||
except Exception as error_:
|
||||
logger.error("Google SecOps Error: {0}".format(error_.__str__()))
|
||||
log_output_error("GELF", error_.__str__())
|
||||
|
||||
try:
|
||||
if opts.webhook_smtp_tls_url:
|
||||
@@ -486,7 +464,7 @@ def _main():
|
||||
json.dumps(report, ensure_ascii=False, indent=indent_value)
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Webhook Error: {0}".format(error_.__str__()))
|
||||
log_output_error("Webhook", error_.__str__())
|
||||
|
||||
if opts.hec:
|
||||
try:
|
||||
@@ -494,7 +472,7 @@ def _main():
|
||||
if len(smtp_tls_reports_) > 0:
|
||||
hec_client.save_smtp_tls_reports_to_splunk(smtp_tls_reports_)
|
||||
except splunk.SplunkError as e:
|
||||
logger.error("Splunk HEC error: {0}".format(e.__str__()))
|
||||
log_output_error("Splunk HEC", e.__str__())
|
||||
|
||||
if opts.la_dce:
|
||||
try:
|
||||
@@ -515,14 +493,16 @@ def _main():
|
||||
opts.save_smtp_tls,
|
||||
)
|
||||
except loganalytics.LogAnalyticsException as e:
|
||||
logger.error("Log Analytics error: {0}".format(e.__str__()))
|
||||
log_output_error("Log Analytics", e.__str__())
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Unknown error occurred"
|
||||
+ " during the publishing"
|
||||
+ " to Log Analytics: "
|
||||
+ e.__str__()
|
||||
log_output_error("Log Analytics", f"Unknown publishing error: {e}")
|
||||
|
||||
if opts.fail_on_output_error and output_errors:
|
||||
raise ParserError(
|
||||
"Output destination failures detected: {0}".format(
|
||||
" | ".join(output_errors)
|
||||
)
|
||||
)
|
||||
|
||||
arg_parser = ArgumentParser(description="Parses DMARC reports")
|
||||
arg_parser.add_argument(
|
||||
@@ -664,6 +644,8 @@ def _main():
|
||||
graph_password=None,
|
||||
graph_client_id=None,
|
||||
graph_client_secret=None,
|
||||
graph_certificate_path=None,
|
||||
graph_certificate_password=None,
|
||||
graph_tenant_id=None,
|
||||
graph_mailbox=None,
|
||||
graph_allow_unencrypted_storage=False,
|
||||
@@ -696,6 +678,9 @@ def _main():
|
||||
opensearch_username=None,
|
||||
opensearch_password=None,
|
||||
opensearch_api_key=None,
|
||||
opensearch_auth_type="basic",
|
||||
opensearch_aws_region=None,
|
||||
opensearch_aws_service="es",
|
||||
kafka_hosts=None,
|
||||
kafka_username=None,
|
||||
kafka_password=None,
|
||||
@@ -722,12 +707,21 @@ def _main():
|
||||
s3_secret_access_key=None,
|
||||
syslog_server=None,
|
||||
syslog_port=None,
|
||||
syslog_protocol=None,
|
||||
syslog_cafile_path=None,
|
||||
syslog_certfile_path=None,
|
||||
syslog_keyfile_path=None,
|
||||
syslog_timeout=None,
|
||||
syslog_retry_attempts=None,
|
||||
syslog_retry_delay=None,
|
||||
gmail_api_credentials_file=None,
|
||||
gmail_api_token_file=None,
|
||||
gmail_api_include_spam_trash=False,
|
||||
gmail_api_paginate_messages=True,
|
||||
gmail_api_scopes=[],
|
||||
gmail_api_oauth2_port=8080,
|
||||
gmail_api_auth_mode="installed_app",
|
||||
gmail_api_service_account_user=None,
|
||||
maildir_path=None,
|
||||
maildir_create=False,
|
||||
log_file=args.log_file,
|
||||
@@ -747,22 +741,12 @@ def _main():
|
||||
gelf_host=None,
|
||||
gelf_port=None,
|
||||
gelf_mode=None,
|
||||
google_secops=False,
|
||||
google_secops_include_ruf_payload=False,
|
||||
google_secops_ruf_payload_max_bytes=4096,
|
||||
google_secops_static_observer_name=None,
|
||||
google_secops_static_observer_vendor="parsedmarc",
|
||||
google_secops_static_environment=None,
|
||||
google_secops_api_credentials_file=None,
|
||||
google_secops_api_customer_id=None,
|
||||
google_secops_api_region="us",
|
||||
google_secops_api_log_type="DMARC",
|
||||
google_secops_use_stdout=False,
|
||||
webhook_aggregate_url=None,
|
||||
webhook_forensic_url=None,
|
||||
webhook_smtp_tls_url=None,
|
||||
webhook_timeout=60,
|
||||
normalize_timespan_threshold_hours=24.0,
|
||||
fail_on_output_error=False,
|
||||
)
|
||||
args = arg_parser.parse_args()
|
||||
|
||||
@@ -845,6 +829,10 @@ def _main():
|
||||
opts.silent = bool(general_config.getboolean("silent"))
|
||||
if "warnings" in general_config:
|
||||
opts.warnings = bool(general_config.getboolean("warnings"))
|
||||
if "fail_on_output_error" in general_config:
|
||||
opts.fail_on_output_error = bool(
|
||||
general_config.getboolean("fail_on_output_error")
|
||||
)
|
||||
if "log_file" in general_config:
|
||||
opts.log_file = general_config["log_file"]
|
||||
if "n_procs" in general_config:
|
||||
@@ -996,6 +984,7 @@ def _main():
|
||||
logger.critical(
|
||||
"password setting missing from the msgraph config section"
|
||||
)
|
||||
exit(-1)
|
||||
if "client_secret" in graph_config:
|
||||
opts.graph_client_secret = graph_config["client_secret"]
|
||||
else:
|
||||
@@ -1026,6 +1015,19 @@ def _main():
|
||||
)
|
||||
exit(-1)
|
||||
|
||||
if opts.graph_auth_method == AuthMethod.Certificate.name:
|
||||
if "certificate_path" in graph_config:
|
||||
opts.graph_certificate_path = graph_config["certificate_path"]
|
||||
else:
|
||||
logger.critical(
|
||||
"certificate_path setting missing from the msgraph config section"
|
||||
)
|
||||
exit(-1)
|
||||
if "certificate_password" in graph_config:
|
||||
opts.graph_certificate_password = graph_config[
|
||||
"certificate_password"
|
||||
]
|
||||
|
||||
if "client_id" in graph_config:
|
||||
opts.graph_client_id = graph_config["client_id"]
|
||||
else:
|
||||
@@ -1087,10 +1089,10 @@ def _main():
|
||||
opts.elasticsearch_password = elasticsearch_config["password"]
|
||||
# Until 8.20
|
||||
if "apiKey" in elasticsearch_config:
|
||||
opts.elasticsearch_apiKey = elasticsearch_config["apiKey"]
|
||||
opts.elasticsearch_api_key = elasticsearch_config["apiKey"]
|
||||
# Since 8.20
|
||||
if "api_key" in elasticsearch_config:
|
||||
opts.elasticsearch_apiKey = elasticsearch_config["api_key"]
|
||||
opts.elasticsearch_api_key = elasticsearch_config["api_key"]
|
||||
|
||||
if "opensearch" in config:
|
||||
opensearch_config = config["opensearch"]
|
||||
@@ -1127,10 +1129,22 @@ def _main():
|
||||
opts.opensearch_password = opensearch_config["password"]
|
||||
# Until 8.20
|
||||
if "apiKey" in opensearch_config:
|
||||
opts.opensearch_apiKey = opensearch_config["apiKey"]
|
||||
opts.opensearch_api_key = opensearch_config["apiKey"]
|
||||
# Since 8.20
|
||||
if "api_key" in opensearch_config:
|
||||
opts.opensearch_apiKey = opensearch_config["api_key"]
|
||||
opts.opensearch_api_key = opensearch_config["api_key"]
|
||||
if "auth_type" in opensearch_config:
|
||||
opts.opensearch_auth_type = (
|
||||
opensearch_config["auth_type"].strip().lower()
|
||||
)
|
||||
elif "authentication_type" in opensearch_config:
|
||||
opts.opensearch_auth_type = (
|
||||
opensearch_config["authentication_type"].strip().lower()
|
||||
)
|
||||
if "aws_region" in opensearch_config:
|
||||
opts.opensearch_aws_region = opensearch_config["aws_region"].strip()
|
||||
if "aws_service" in opensearch_config:
|
||||
opts.opensearch_aws_service = opensearch_config["aws_service"].strip()
|
||||
|
||||
if "splunk_hec" in config.sections():
|
||||
hec_config = config["splunk_hec"]
|
||||
@@ -1275,6 +1289,28 @@ def _main():
|
||||
opts.syslog_port = syslog_config["port"]
|
||||
else:
|
||||
opts.syslog_port = 514
|
||||
if "protocol" in syslog_config:
|
||||
opts.syslog_protocol = syslog_config["protocol"]
|
||||
else:
|
||||
opts.syslog_protocol = "udp"
|
||||
if "cafile_path" in syslog_config:
|
||||
opts.syslog_cafile_path = syslog_config["cafile_path"]
|
||||
if "certfile_path" in syslog_config:
|
||||
opts.syslog_certfile_path = syslog_config["certfile_path"]
|
||||
if "keyfile_path" in syslog_config:
|
||||
opts.syslog_keyfile_path = syslog_config["keyfile_path"]
|
||||
if "timeout" in syslog_config:
|
||||
opts.syslog_timeout = float(syslog_config["timeout"])
|
||||
else:
|
||||
opts.syslog_timeout = 5.0
|
||||
if "retry_attempts" in syslog_config:
|
||||
opts.syslog_retry_attempts = int(syslog_config["retry_attempts"])
|
||||
else:
|
||||
opts.syslog_retry_attempts = 3
|
||||
if "retry_delay" in syslog_config:
|
||||
opts.syslog_retry_delay = int(syslog_config["retry_delay"])
|
||||
else:
|
||||
opts.syslog_retry_delay = 5
|
||||
|
||||
if "gmail_api" in config.sections():
|
||||
gmail_api_config = config["gmail_api"]
|
||||
@@ -1294,6 +1330,16 @@ def _main():
|
||||
opts.gmail_api_oauth2_port = gmail_api_config.getint(
|
||||
"oauth2_port", 8080
|
||||
)
|
||||
if "auth_mode" in gmail_api_config:
|
||||
opts.gmail_api_auth_mode = gmail_api_config.get("auth_mode").strip()
|
||||
if "service_account_user" in gmail_api_config:
|
||||
opts.gmail_api_service_account_user = gmail_api_config.get(
|
||||
"service_account_user"
|
||||
).strip()
|
||||
elif "delegated_user" in gmail_api_config:
|
||||
opts.gmail_api_service_account_user = gmail_api_config.get(
|
||||
"delegated_user"
|
||||
).strip()
|
||||
|
||||
if "maildir" in config.sections():
|
||||
maildir_api_config = config["maildir"]
|
||||
@@ -1337,50 +1383,6 @@ def _main():
|
||||
logger.critical("mode setting missing from the gelf config section")
|
||||
exit(-1)
|
||||
|
||||
if "google_secops" in config.sections():
|
||||
google_secops_config = config["google_secops"]
|
||||
opts.google_secops = True
|
||||
if "include_ruf_payload" in google_secops_config:
|
||||
opts.google_secops_include_ruf_payload = bool(
|
||||
google_secops_config.getboolean("include_ruf_payload")
|
||||
)
|
||||
if "ruf_payload_max_bytes" in google_secops_config:
|
||||
opts.google_secops_ruf_payload_max_bytes = google_secops_config.getint(
|
||||
"ruf_payload_max_bytes"
|
||||
)
|
||||
if "static_observer_name" in google_secops_config:
|
||||
opts.google_secops_static_observer_name = google_secops_config[
|
||||
"static_observer_name"
|
||||
]
|
||||
if "static_observer_vendor" in google_secops_config:
|
||||
opts.google_secops_static_observer_vendor = google_secops_config[
|
||||
"static_observer_vendor"
|
||||
]
|
||||
if "static_environment" in google_secops_config:
|
||||
opts.google_secops_static_environment = google_secops_config[
|
||||
"static_environment"
|
||||
]
|
||||
if "api_credentials_file" in google_secops_config:
|
||||
opts.google_secops_api_credentials_file = google_secops_config[
|
||||
"api_credentials_file"
|
||||
]
|
||||
if "api_customer_id" in google_secops_config:
|
||||
opts.google_secops_api_customer_id = google_secops_config[
|
||||
"api_customer_id"
|
||||
]
|
||||
if "api_region" in google_secops_config:
|
||||
opts.google_secops_api_region = google_secops_config[
|
||||
"api_region"
|
||||
]
|
||||
if "api_log_type" in google_secops_config:
|
||||
opts.google_secops_api_log_type = google_secops_config[
|
||||
"api_log_type"
|
||||
]
|
||||
if "use_stdout" in google_secops_config:
|
||||
opts.google_secops_use_stdout = google_secops_config.getboolean(
|
||||
"use_stdout"
|
||||
)
|
||||
|
||||
if "webhook" in config.sections():
|
||||
webhook_config = config["webhook"]
|
||||
if "aggregate_url" in webhook_config:
|
||||
@@ -1489,6 +1491,9 @@ def _main():
|
||||
password=opts.opensearch_password,
|
||||
api_key=opts.opensearch_api_key,
|
||||
timeout=opensearch_timeout_value,
|
||||
auth_type=opts.opensearch_auth_type,
|
||||
aws_region=opts.opensearch_aws_region,
|
||||
aws_service=opts.opensearch_aws_service,
|
||||
)
|
||||
opensearch.migrate_indexes(
|
||||
aggregate_indexes=[os_aggregate_index],
|
||||
@@ -1516,6 +1521,17 @@ def _main():
|
||||
syslog_client = syslog.SyslogClient(
|
||||
server_name=opts.syslog_server,
|
||||
server_port=int(opts.syslog_port),
|
||||
protocol=opts.syslog_protocol or "udp",
|
||||
cafile_path=opts.syslog_cafile_path,
|
||||
certfile_path=opts.syslog_certfile_path,
|
||||
keyfile_path=opts.syslog_keyfile_path,
|
||||
timeout=opts.syslog_timeout if opts.syslog_timeout is not None else 5.0,
|
||||
retry_attempts=opts.syslog_retry_attempts
|
||||
if opts.syslog_retry_attempts is not None
|
||||
else 3,
|
||||
retry_delay=opts.syslog_retry_delay
|
||||
if opts.syslog_retry_delay is not None
|
||||
else 5,
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Syslog Error: {0}".format(error_.__str__()))
|
||||
@@ -1559,23 +1575,6 @@ def _main():
|
||||
except Exception as error_:
|
||||
logger.error("GELF Error: {0}".format(error_.__str__()))
|
||||
|
||||
if opts.google_secops:
|
||||
try:
|
||||
google_secops_client = google_secops.GoogleSecOpsClient(
|
||||
include_ruf_payload=opts.google_secops_include_ruf_payload,
|
||||
ruf_payload_max_bytes=opts.google_secops_ruf_payload_max_bytes,
|
||||
static_observer_name=opts.google_secops_static_observer_name,
|
||||
static_observer_vendor=opts.google_secops_static_observer_vendor,
|
||||
static_environment=opts.google_secops_static_environment,
|
||||
api_credentials_file=opts.google_secops_api_credentials_file,
|
||||
api_customer_id=opts.google_secops_api_customer_id,
|
||||
api_region=opts.google_secops_api_region,
|
||||
api_log_type=opts.google_secops_api_log_type,
|
||||
use_stdout=opts.google_secops_use_stdout,
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Google SecOps Error: {0}".format(error_.__str__()))
|
||||
|
||||
if (
|
||||
opts.webhook_aggregate_url
|
||||
or opts.webhook_forensic_url
|
||||
@@ -1767,6 +1766,8 @@ def _main():
|
||||
tenant_id=opts.graph_tenant_id,
|
||||
client_id=opts.graph_client_id,
|
||||
client_secret=opts.graph_client_secret,
|
||||
certificate_path=opts.graph_certificate_path,
|
||||
certificate_password=opts.graph_certificate_password,
|
||||
username=opts.graph_user,
|
||||
password=opts.graph_password,
|
||||
token_file=opts.graph_token_file,
|
||||
@@ -1798,6 +1799,8 @@ def _main():
|
||||
paginate_messages=opts.gmail_api_paginate_messages,
|
||||
reports_folder=opts.mailbox_reports_folder,
|
||||
oauth2_port=opts.gmail_api_oauth2_port,
|
||||
auth_mode=opts.gmail_api_auth_mode,
|
||||
service_account_user=opts.gmail_api_service_account_user,
|
||||
)
|
||||
|
||||
except Exception:
|
||||
@@ -1861,7 +1864,11 @@ def _main():
|
||||
"smtp_tls_reports": smtp_tls_reports,
|
||||
}
|
||||
|
||||
process_reports(parsing_results)
|
||||
try:
|
||||
process_reports(parsing_results)
|
||||
except ParserError as error:
|
||||
logger.error(error.__str__())
|
||||
exit(1)
|
||||
|
||||
if opts.smtp_host:
|
||||
try:
|
||||
@@ -1906,6 +1913,7 @@ def _main():
|
||||
dns_timeout=opts.dns_timeout,
|
||||
strip_attachment_payloads=opts.strip_attachment_payloads,
|
||||
batch_size=mailbox_batch_size_value,
|
||||
since=opts.mailbox_since,
|
||||
ip_db_path=opts.ip_db_path,
|
||||
always_use_local_files=opts.always_use_local_files,
|
||||
reverse_dns_map_path=opts.reverse_dns_map_path,
|
||||
@@ -1916,6 +1924,9 @@ def _main():
|
||||
except FileExistsError as error:
|
||||
logger.error("{0}".format(error.__str__()))
|
||||
exit(1)
|
||||
except ParserError as error:
|
||||
logger.error(error.__str__())
|
||||
exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
__version__ = "9.0.9"
|
||||
__version__ = "9.2.1"
|
||||
|
||||
USER_AGENT = f"parsedmarc/{__version__}"
|
||||
|
||||
@@ -413,8 +413,8 @@ def save_aggregate_report_to_elasticsearch(
|
||||
org_name_query = Q(dict(match_phrase=dict(org_name=org_name))) # type: ignore
|
||||
report_id_query = Q(dict(match_phrase=dict(report_id=report_id))) # pyright: ignore[reportArgumentType]
|
||||
domain_query = Q(dict(match_phrase={"published_policy.domain": domain})) # pyright: ignore[reportArgumentType]
|
||||
begin_date_query = Q(dict(match=dict(date_begin=begin_date))) # pyright: ignore[reportArgumentType]
|
||||
end_date_query = Q(dict(match=dict(date_end=end_date))) # pyright: ignore[reportArgumentType]
|
||||
begin_date_query = Q(dict(range=dict(date_begin=dict(gte=begin_date)))) # pyright: ignore[reportArgumentType]
|
||||
end_date_query = Q(dict(range=dict(date_end=dict(lte=end_date)))) # pyright: ignore[reportArgumentType]
|
||||
|
||||
if index_suffix is not None:
|
||||
search_index = "dmarc_aggregate_{0}*".format(index_suffix)
|
||||
|
||||
@@ -1,709 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""Google SecOps (Chronicle) output module for parsedmarc"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Optional
|
||||
|
||||
import requests
|
||||
from google.auth.transport.requests import Request
|
||||
from google.oauth2 import service_account
|
||||
|
||||
from parsedmarc.constants import USER_AGENT
|
||||
from parsedmarc.log import logger
|
||||
from parsedmarc.utils import human_timestamp_to_datetime
|
||||
|
||||
|
||||
class GoogleSecOpsError(RuntimeError):
|
||||
"""Raised when a Google SecOps API error occurs"""
|
||||
|
||||
|
||||
class GoogleSecOpsClient:
|
||||
"""A client for Google SecOps (Chronicle) UDM output"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
include_ruf_payload: bool = False,
|
||||
ruf_payload_max_bytes: int = 4096,
|
||||
static_observer_name: Optional[str] = None,
|
||||
static_observer_vendor: str = "parsedmarc",
|
||||
static_environment: Optional[str] = None,
|
||||
api_credentials_file: Optional[str] = None,
|
||||
api_customer_id: Optional[str] = None,
|
||||
api_region: str = "us",
|
||||
api_log_type: str = "DMARC",
|
||||
use_stdout: bool = False,
|
||||
):
|
||||
"""
|
||||
Initializes the GoogleSecOpsClient
|
||||
|
||||
Args:
|
||||
include_ruf_payload: Include RUF message payload in output
|
||||
ruf_payload_max_bytes: Maximum bytes of RUF payload to include
|
||||
static_observer_name: Static observer name for telemetry
|
||||
static_observer_vendor: Static observer vendor (default: parsedmarc)
|
||||
static_environment: Static environment (prod/dev/custom string)
|
||||
api_credentials_file: Path to Google service account JSON credentials
|
||||
api_customer_id: Chronicle customer ID (required for API)
|
||||
api_region: Chronicle region (us, europe, asia-southeast1, etc.)
|
||||
api_log_type: Log type for Chronicle ingestion (default: DMARC)
|
||||
use_stdout: Output to stdout instead of API (default: False)
|
||||
"""
|
||||
self.include_ruf_payload = include_ruf_payload
|
||||
self.ruf_payload_max_bytes = ruf_payload_max_bytes
|
||||
self.static_observer_name = static_observer_name
|
||||
self.static_observer_vendor = static_observer_vendor
|
||||
self.static_environment = static_environment
|
||||
self.use_stdout = use_stdout
|
||||
|
||||
# API configuration
|
||||
self.api_credentials_file = api_credentials_file
|
||||
self.api_customer_id = api_customer_id
|
||||
self.api_region = api_region
|
||||
self.api_log_type = api_log_type
|
||||
self.credentials = None
|
||||
self.session = None
|
||||
|
||||
# Initialize API client if not using stdout
|
||||
if not self.use_stdout:
|
||||
if not self.api_credentials_file or not self.api_customer_id:
|
||||
raise GoogleSecOpsError(
|
||||
"api_credentials_file and api_customer_id are required when not using stdout. "
|
||||
"Set use_stdout=True to output to stdout instead."
|
||||
)
|
||||
self._initialize_api_client()
|
||||
|
||||
def _initialize_api_client(self):
|
||||
"""Initialize the Chronicle API client with authentication"""
|
||||
try:
|
||||
logger.debug("Initializing Chronicle API client")
|
||||
|
||||
# Load service account credentials
|
||||
self.credentials = service_account.Credentials.from_service_account_file(
|
||||
self.api_credentials_file,
|
||||
scopes=["https://www.googleapis.com/auth/cloud-platform"],
|
||||
)
|
||||
|
||||
# Create session with authentication
|
||||
self.session = requests.Session()
|
||||
self.session.headers.update({"User-Agent": USER_AGENT})
|
||||
|
||||
logger.info("Chronicle API client initialized successfully")
|
||||
except Exception as e:
|
||||
raise GoogleSecOpsError(f"Failed to initialize Chronicle API client: {e}")
|
||||
|
||||
def _get_api_endpoint(self) -> str:
|
||||
"""Get the Chronicle Ingestion API endpoint based on region"""
|
||||
return f"https://{self.api_region}-chronicle.googleapis.com/v1alpha/projects/{self.api_customer_id}/locations/{self.api_region}/instances/default/logTypes/{self.api_log_type}/logs:import"
|
||||
|
||||
def _send_events_to_api(self, events: list[str]) -> None:
|
||||
"""
|
||||
Send UDM events to Chronicle Ingestion API
|
||||
|
||||
Args:
|
||||
events: List of NDJSON event strings
|
||||
"""
|
||||
if not events:
|
||||
return
|
||||
|
||||
try:
|
||||
# Refresh credentials if needed
|
||||
if not self.credentials.valid:
|
||||
self.credentials.refresh(Request())
|
||||
|
||||
# Prepare request
|
||||
endpoint = self._get_api_endpoint()
|
||||
headers = {
|
||||
"Authorization": f"Bearer {self.credentials.token}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
|
||||
# Chronicle expects events in inline_source format
|
||||
# Each event should be a separate log entry
|
||||
payload = {
|
||||
"inline_source": {
|
||||
"logs": [json.loads(event) for event in events]
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(f"Sending {len(events)} events to Chronicle API")
|
||||
|
||||
response = self.session.post(
|
||||
endpoint,
|
||||
headers=headers,
|
||||
json=payload,
|
||||
timeout=60,
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
logger.info(f"Successfully sent {len(events)} events to Chronicle")
|
||||
else:
|
||||
error_msg = f"Chronicle API error: {response.status_code} - {response.text}"
|
||||
logger.error(error_msg)
|
||||
raise GoogleSecOpsError(error_msg)
|
||||
|
||||
except GoogleSecOpsError:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise GoogleSecOpsError(f"Failed to send events to Chronicle API: {e}")
|
||||
|
||||
def _output_events(self, events: list[str]) -> None:
|
||||
"""
|
||||
Output events either to stdout or Chronicle API
|
||||
|
||||
Args:
|
||||
events: List of NDJSON event strings
|
||||
"""
|
||||
if self.use_stdout:
|
||||
# Output to stdout for collection by external log shippers
|
||||
for event in events:
|
||||
print(event)
|
||||
else:
|
||||
# Send directly to Chronicle API
|
||||
self._send_events_to_api(events)
|
||||
|
||||
def _get_severity(self, disposition: str, spf_aligned: bool, dkim_aligned: bool) -> str:
|
||||
"""
|
||||
Derive severity from DMARC disposition and alignment
|
||||
|
||||
Args:
|
||||
disposition: DMARC policy disposition
|
||||
spf_aligned: Whether SPF is aligned
|
||||
dkim_aligned: Whether DKIM is aligned
|
||||
|
||||
Returns:
|
||||
Severity level: HIGH, MEDIUM, or LOW
|
||||
"""
|
||||
if disposition == "reject":
|
||||
return "HIGH"
|
||||
elif disposition == "quarantine" and not (spf_aligned or dkim_aligned):
|
||||
return "MEDIUM"
|
||||
else:
|
||||
return "LOW"
|
||||
|
||||
def _get_description(
|
||||
self,
|
||||
dmarc_pass: bool,
|
||||
spf_result: Optional[str],
|
||||
dkim_result: Optional[str],
|
||||
spf_aligned: bool,
|
||||
dkim_aligned: bool,
|
||||
disposition: str,
|
||||
) -> str:
|
||||
"""
|
||||
Generate description for the event
|
||||
|
||||
Args:
|
||||
dmarc_pass: Whether DMARC passed
|
||||
spf_result: SPF result
|
||||
dkim_result: DKIM result
|
||||
spf_aligned: Whether SPF is aligned
|
||||
dkim_aligned: Whether DKIM is aligned
|
||||
disposition: DMARC disposition
|
||||
|
||||
Returns:
|
||||
Human-readable description
|
||||
"""
|
||||
parts = []
|
||||
|
||||
if dmarc_pass:
|
||||
parts.append("DMARC pass")
|
||||
else:
|
||||
parts.append("DMARC fail")
|
||||
|
||||
if spf_result:
|
||||
parts.append(f"SPF={spf_result}")
|
||||
if dkim_result:
|
||||
parts.append(f"DKIM={dkim_result}")
|
||||
|
||||
if spf_aligned:
|
||||
parts.append("SPF aligned")
|
||||
elif spf_result:
|
||||
parts.append("SPF not aligned")
|
||||
|
||||
if dkim_aligned:
|
||||
parts.append("DKIM aligned")
|
||||
elif dkim_result:
|
||||
parts.append("DKIM not aligned")
|
||||
|
||||
parts.append(f"disposition={disposition}")
|
||||
|
||||
return "; ".join(parts)
|
||||
|
||||
def _format_timestamp(self, timestamp_str: str) -> str:
|
||||
"""
|
||||
Convert parsedmarc timestamp to RFC 3339 format
|
||||
|
||||
Args:
|
||||
timestamp_str: Timestamp string from parsedmarc
|
||||
|
||||
Returns:
|
||||
RFC 3339 formatted timestamp
|
||||
"""
|
||||
try:
|
||||
dt = human_timestamp_to_datetime(timestamp_str)
|
||||
# Ensure timezone-aware datetime
|
||||
if dt.tzinfo is None:
|
||||
dt = dt.replace(tzinfo=timezone.utc)
|
||||
return dt.isoformat()
|
||||
except Exception:
|
||||
# Fallback to current time if parsing fails
|
||||
return datetime.now(timezone.utc).isoformat()
|
||||
|
||||
def save_aggregate_report_to_google_secops(
|
||||
self, aggregate_report: dict[str, Any]
|
||||
) -> list[str]:
|
||||
"""
|
||||
Convert aggregate DMARC report to Google SecOps UDM format and send to Chronicle
|
||||
|
||||
When use_stdout=False: Events are sent to Chronicle API, returns empty list
|
||||
When use_stdout=True: Returns list of NDJSON event strings for stdout
|
||||
|
||||
Args:
|
||||
aggregate_report: Aggregate report dictionary from parsedmarc
|
||||
|
||||
Returns:
|
||||
List of NDJSON event strings (empty if sent to API)
|
||||
"""
|
||||
logger.debug("Converting aggregate report to Google SecOps UDM format")
|
||||
events = []
|
||||
|
||||
try:
|
||||
report_metadata = aggregate_report["report_metadata"]
|
||||
policy_published = aggregate_report["policy_published"]
|
||||
|
||||
for record in aggregate_report["records"]:
|
||||
# Extract values
|
||||
source_ip = record["source"]["ip_address"]
|
||||
source_country = record["source"].get("country")
|
||||
source_reverse_dns = record["source"].get("reverse_dns")
|
||||
source_base_domain = record["source"].get("base_domain")
|
||||
source_name = record["source"].get("name")
|
||||
source_type = record["source"].get("type")
|
||||
|
||||
header_from = record["identifiers"]["header_from"]
|
||||
envelope_from = record["identifiers"]["envelope_from"]
|
||||
|
||||
disposition = record["policy_evaluated"]["disposition"]
|
||||
spf_aligned = record["alignment"]["spf"]
|
||||
dkim_aligned = record["alignment"]["dkim"]
|
||||
dmarc_pass = record["alignment"]["dmarc"]
|
||||
|
||||
count = record["count"]
|
||||
interval_begin = record["interval_begin"]
|
||||
|
||||
# Get auth results
|
||||
spf_results = record["auth_results"].get("spf", [])
|
||||
dkim_results = record["auth_results"].get("dkim", [])
|
||||
|
||||
spf_result = spf_results[0]["result"] if spf_results else None
|
||||
dkim_result = dkim_results[0]["result"] if dkim_results else None
|
||||
|
||||
# Build UDM event
|
||||
event: dict[str, Any] = {
|
||||
"event_type": "DMARC_AGGREGATE",
|
||||
"metadata": {
|
||||
"event_timestamp": self._format_timestamp(interval_begin),
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": self.static_observer_vendor,
|
||||
},
|
||||
"principal": {
|
||||
"ip": [source_ip],
|
||||
},
|
||||
"target": {
|
||||
"domain": {
|
||||
"name": header_from,
|
||||
}
|
||||
},
|
||||
"security_result": [
|
||||
{
|
||||
"severity": self._get_severity(
|
||||
disposition, spf_aligned, dkim_aligned
|
||||
),
|
||||
"description": self._get_description(
|
||||
dmarc_pass,
|
||||
spf_result,
|
||||
dkim_result,
|
||||
spf_aligned,
|
||||
dkim_aligned,
|
||||
disposition,
|
||||
),
|
||||
"detection_fields": [
|
||||
{"key": "dmarc.disposition", "value": disposition},
|
||||
{"key": "dmarc.policy", "value": policy_published["p"]},
|
||||
{"key": "dmarc.pass", "value": dmarc_pass},
|
||||
{"key": "dmarc.spf_aligned", "value": spf_aligned},
|
||||
{"key": "dmarc.dkim_aligned", "value": dkim_aligned},
|
||||
{"key": "dmarc.header_from", "value": header_from},
|
||||
{"key": "dmarc.envelope_from", "value": envelope_from},
|
||||
{"key": "dmarc.report_org", "value": report_metadata["org_name"]},
|
||||
{"key": "dmarc.report_id", "value": report_metadata["report_id"]},
|
||||
{"key": "dmarc.report_begin", "value": report_metadata["begin_date"]},
|
||||
{"key": "dmarc.report_end", "value": report_metadata["end_date"]},
|
||||
{"key": "dmarc.row_count", "value": count},
|
||||
],
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
# Add optional fields to detection_fields
|
||||
if spf_result:
|
||||
event["security_result"][0]["detection_fields"].append(
|
||||
{"key": "dmarc.spf_result", "value": spf_result}
|
||||
)
|
||||
if dkim_result:
|
||||
event["security_result"][0]["detection_fields"].append(
|
||||
{"key": "dmarc.dkim_result", "value": dkim_result}
|
||||
)
|
||||
if source_name:
|
||||
event["security_result"][0]["detection_fields"].append(
|
||||
{"key": "dmarc.source_service_name", "value": source_name}
|
||||
)
|
||||
if source_type:
|
||||
event["security_result"][0]["detection_fields"].append(
|
||||
{"key": "dmarc.source_service_type", "value": source_type}
|
||||
)
|
||||
|
||||
# Add optional context fields (low-value, not for dashboarding)
|
||||
additional_context = []
|
||||
|
||||
if source_base_domain:
|
||||
additional_context.append(
|
||||
{"key": "source_base_domain", "value": source_base_domain}
|
||||
)
|
||||
|
||||
if self.static_environment:
|
||||
additional_context.append(
|
||||
{"key": "environment", "value": self.static_environment}
|
||||
)
|
||||
|
||||
# Add SPF auth results to context
|
||||
if spf_results:
|
||||
for idx, spf in enumerate(spf_results):
|
||||
additional_context.append(
|
||||
{"key": f"spf_{idx}_domain", "value": spf.get("domain", "")}
|
||||
)
|
||||
additional_context.append(
|
||||
{"key": f"spf_{idx}_result", "value": spf.get("result", "")}
|
||||
)
|
||||
|
||||
# Add DKIM auth results to context
|
||||
if dkim_results:
|
||||
for idx, dkim in enumerate(dkim_results):
|
||||
additional_context.append(
|
||||
{"key": f"dkim_{idx}_domain", "value": dkim.get("domain", "")}
|
||||
)
|
||||
additional_context.append(
|
||||
{"key": f"dkim_{idx}_result", "value": dkim.get("result", "")}
|
||||
)
|
||||
|
||||
# Only add additional section if there's context to include
|
||||
if additional_context:
|
||||
event["additional"] = {"fields": additional_context}
|
||||
|
||||
# Add optional UDM fields
|
||||
if source_country:
|
||||
event["principal"]["location"] = {"country_or_region": source_country}
|
||||
|
||||
if source_reverse_dns:
|
||||
event["principal"]["hostname"] = source_reverse_dns
|
||||
|
||||
if self.static_observer_name:
|
||||
event["metadata"]["product_deployment_id"] = self.static_observer_name
|
||||
|
||||
events.append(json.dumps(event, ensure_ascii=False))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error converting aggregate report to Google SecOps format: {e}")
|
||||
# Generate error event
|
||||
error_event: dict[str, Any] = {
|
||||
"event_type": "DMARC_PARSE_ERROR",
|
||||
"metadata": {
|
||||
"event_timestamp": datetime.now(timezone.utc).isoformat(),
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": self.static_observer_vendor,
|
||||
},
|
||||
"security_result": [
|
||||
{
|
||||
"severity": "ERROR",
|
||||
"description": f"Failed to parse DMARC aggregate report: {str(e)}",
|
||||
}
|
||||
],
|
||||
}
|
||||
events.append(json.dumps(error_event, ensure_ascii=False))
|
||||
|
||||
# Output events (to stdout or API)
|
||||
self._output_events(events)
|
||||
|
||||
# Return events only if using stdout (for CLI to print)
|
||||
return events if self.use_stdout else []
|
||||
|
||||
def save_forensic_report_to_google_secops(
|
||||
self, forensic_report: dict[str, Any]
|
||||
) -> list[str]:
|
||||
"""
|
||||
Convert forensic DMARC report to Google SecOps UDM format and send to Chronicle
|
||||
|
||||
When use_stdout=False: Events are sent to Chronicle API, returns empty list
|
||||
When use_stdout=True: Returns list of NDJSON event strings for stdout
|
||||
|
||||
Args:
|
||||
forensic_report: Forensic report dictionary from parsedmarc
|
||||
|
||||
Returns:
|
||||
List of NDJSON event strings (empty if sent to API)
|
||||
"""
|
||||
logger.debug("Converting forensic report to Google SecOps UDM format")
|
||||
events = []
|
||||
|
||||
try:
|
||||
source_ip = forensic_report["source"]["ip_address"]
|
||||
source_country = forensic_report["source"].get("country")
|
||||
source_reverse_dns = forensic_report["source"].get("reverse_dns")
|
||||
source_base_domain = forensic_report["source"].get("base_domain")
|
||||
source_name = forensic_report["source"].get("name")
|
||||
source_type = forensic_report["source"].get("type")
|
||||
|
||||
reported_domain = forensic_report["reported_domain"]
|
||||
arrival_date = forensic_report["arrival_date_utc"]
|
||||
auth_failure = forensic_report.get("auth_failure", [])
|
||||
|
||||
# Determine severity - forensic reports indicate failures
|
||||
# Default to MEDIUM for authentication failures
|
||||
severity = "MEDIUM"
|
||||
|
||||
# Build description
|
||||
auth_failure_str = ", ".join(auth_failure) if auth_failure else "unknown"
|
||||
description = f"DMARC forensic report: authentication failure ({auth_failure_str})"
|
||||
|
||||
# Build UDM event
|
||||
event: dict[str, Any] = {
|
||||
"event_type": "DMARC_FORENSIC",
|
||||
"metadata": {
|
||||
"event_timestamp": self._format_timestamp(arrival_date),
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": self.static_observer_vendor,
|
||||
},
|
||||
"principal": {
|
||||
"ip": [source_ip],
|
||||
},
|
||||
"target": {
|
||||
"domain": {
|
||||
"name": reported_domain,
|
||||
}
|
||||
},
|
||||
"security_result": [
|
||||
{
|
||||
"severity": severity,
|
||||
"description": description,
|
||||
"detection_fields": [
|
||||
{"key": "dmarc.auth_failure", "value": auth_failure_str},
|
||||
{"key": "dmarc.reported_domain", "value": reported_domain},
|
||||
],
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
# Add optional fields to detection_fields (matching aggregate report field names)
|
||||
if source_name:
|
||||
event["security_result"][0]["detection_fields"].append(
|
||||
{"key": "dmarc.source_service_name", "value": source_name}
|
||||
)
|
||||
if source_type:
|
||||
event["security_result"][0]["detection_fields"].append(
|
||||
{"key": "dmarc.source_service_type", "value": source_type}
|
||||
)
|
||||
|
||||
# Add optional context fields (low-value, not for dashboarding)
|
||||
additional_context = []
|
||||
|
||||
if source_base_domain:
|
||||
additional_context.append(
|
||||
{"key": "source_base_domain", "value": source_base_domain}
|
||||
)
|
||||
|
||||
if forensic_report.get("feedback_type"):
|
||||
additional_context.append(
|
||||
{"key": "feedback_type", "value": forensic_report["feedback_type"]}
|
||||
)
|
||||
|
||||
if forensic_report.get("message_id"):
|
||||
additional_context.append(
|
||||
{"key": "message_id", "value": forensic_report["message_id"]}
|
||||
)
|
||||
|
||||
if forensic_report.get("authentication_results"):
|
||||
additional_context.append(
|
||||
{"key": "authentication_results", "value": forensic_report["authentication_results"]}
|
||||
)
|
||||
|
||||
if forensic_report.get("delivery_result"):
|
||||
additional_context.append(
|
||||
{"key": "delivery_result", "value": forensic_report["delivery_result"]}
|
||||
)
|
||||
|
||||
if self.static_environment:
|
||||
additional_context.append(
|
||||
{"key": "environment", "value": self.static_environment}
|
||||
)
|
||||
|
||||
# Add payload excerpt if enabled
|
||||
if self.include_ruf_payload and forensic_report.get("sample"):
|
||||
sample = forensic_report["sample"]
|
||||
if len(sample) > self.ruf_payload_max_bytes:
|
||||
sample = sample[:self.ruf_payload_max_bytes] + "... [truncated]"
|
||||
additional_context.append(
|
||||
{"key": "message_sample", "value": sample}
|
||||
)
|
||||
|
||||
# Only add additional section if there's context to include
|
||||
if additional_context:
|
||||
event["additional"] = {"fields": additional_context}
|
||||
|
||||
# Add optional UDM fields
|
||||
if source_country:
|
||||
event["principal"]["location"] = {"country_or_region": source_country}
|
||||
|
||||
if source_reverse_dns:
|
||||
event["principal"]["hostname"] = source_reverse_dns
|
||||
|
||||
if self.static_observer_name:
|
||||
event["metadata"]["product_deployment_id"] = self.static_observer_name
|
||||
|
||||
events.append(json.dumps(event, ensure_ascii=False))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error converting forensic report to Google SecOps format: {e}")
|
||||
# Generate error event
|
||||
error_event: dict[str, Any] = {
|
||||
"event_type": "DMARC_PARSE_ERROR",
|
||||
"metadata": {
|
||||
"event_timestamp": datetime.now(timezone.utc).isoformat(),
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": self.static_observer_vendor,
|
||||
},
|
||||
"security_result": [
|
||||
{
|
||||
"severity": "ERROR",
|
||||
"description": f"Failed to parse DMARC forensic report: {str(e)}",
|
||||
}
|
||||
],
|
||||
}
|
||||
events.append(json.dumps(error_event, ensure_ascii=False))
|
||||
|
||||
# Output events (to stdout or API)
|
||||
self._output_events(events)
|
||||
|
||||
# Return events only if using stdout (for CLI to print)
|
||||
return events if self.use_stdout else []
|
||||
|
||||
def save_smtp_tls_report_to_google_secops(
|
||||
self, smtp_tls_report: dict[str, Any]
|
||||
) -> list[str]:
|
||||
"""
|
||||
Convert SMTP TLS report to Google SecOps UDM format and send to Chronicle
|
||||
|
||||
When use_stdout=False: Events are sent to Chronicle API, returns empty list
|
||||
When use_stdout=True: Returns list of NDJSON event strings for stdout
|
||||
|
||||
Args:
|
||||
smtp_tls_report: SMTP TLS report dictionary from parsedmarc
|
||||
|
||||
Returns:
|
||||
List of NDJSON event strings (empty if sent to API)
|
||||
"""
|
||||
logger.debug("Converting SMTP TLS report to Google SecOps UDM format")
|
||||
events = []
|
||||
|
||||
try:
|
||||
organization_name = smtp_tls_report.get("organization_name", "")
|
||||
begin_date = smtp_tls_report["begin_date"]
|
||||
end_date = smtp_tls_report["end_date"]
|
||||
|
||||
for policy in smtp_tls_report.get("policies", []):
|
||||
policy_domain = policy["policy_domain"]
|
||||
|
||||
for failure in policy.get("failure_details", []):
|
||||
# Build UDM event for each failure
|
||||
event: dict[str, Any] = {
|
||||
"event_type": "SMTP_TLS_REPORT",
|
||||
"metadata": {
|
||||
"event_timestamp": self._format_timestamp(begin_date),
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": self.static_observer_vendor,
|
||||
},
|
||||
"target": {
|
||||
"domain": {
|
||||
"name": policy_domain,
|
||||
}
|
||||
},
|
||||
"security_result": [
|
||||
{
|
||||
"severity": "LOW",
|
||||
"description": f"SMTP TLS failure: {failure.get('result_type', 'unknown')}",
|
||||
"detection_fields": [
|
||||
{"key": "smtp_tls.policy_domain", "value": policy_domain},
|
||||
{"key": "smtp_tls.result_type", "value": failure.get("result_type", "")},
|
||||
{"key": "smtp_tls.failed_session_count", "value": failure.get("failed_session_count", 0)},
|
||||
{"key": "smtp_tls.report_org", "value": organization_name},
|
||||
{"key": "smtp_tls.report_begin", "value": begin_date},
|
||||
{"key": "smtp_tls.report_end", "value": end_date},
|
||||
],
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
# Add optional context fields (low-value, not for dashboarding)
|
||||
additional_context = []
|
||||
|
||||
if self.static_environment:
|
||||
additional_context.append(
|
||||
{"key": "environment", "value": self.static_environment}
|
||||
)
|
||||
|
||||
# Only add additional section if there's context to include
|
||||
if additional_context:
|
||||
event["additional"] = {"fields": additional_context}
|
||||
|
||||
# Add optional UDM fields
|
||||
if failure.get("sending_mta_ip"):
|
||||
event["principal"] = {"ip": [failure["sending_mta_ip"]]}
|
||||
|
||||
if self.static_observer_name:
|
||||
event["metadata"]["product_deployment_id"] = self.static_observer_name
|
||||
|
||||
events.append(json.dumps(event, ensure_ascii=False))
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error converting SMTP TLS report to Google SecOps format: {e}")
|
||||
# Generate error event
|
||||
error_event: dict[str, Any] = {
|
||||
"event_type": "DMARC_PARSE_ERROR",
|
||||
"metadata": {
|
||||
"event_timestamp": datetime.now(timezone.utc).isoformat(),
|
||||
"event_type": "GENERIC_EVENT",
|
||||
"product_name": "parsedmarc",
|
||||
"vendor_name": self.static_observer_vendor,
|
||||
},
|
||||
"security_result": [
|
||||
{
|
||||
"severity": "ERROR",
|
||||
"description": f"Failed to parse SMTP TLS report: {str(e)}",
|
||||
}
|
||||
],
|
||||
}
|
||||
events.append(json.dumps(error_event, ensure_ascii=False))
|
||||
|
||||
# Output events (to stdout or API)
|
||||
self._output_events(events)
|
||||
|
||||
# Return events only if using stdout (for CLI to print)
|
||||
return events if self.use_stdout else []
|
||||
@@ -10,6 +10,7 @@ from typing import List
|
||||
|
||||
from google.auth.transport.requests import Request
|
||||
from google.oauth2.credentials import Credentials
|
||||
from google.oauth2 import service_account
|
||||
from google_auth_oauthlib.flow import InstalledAppFlow
|
||||
from googleapiclient.discovery import build
|
||||
from googleapiclient.errors import HttpError
|
||||
@@ -18,7 +19,29 @@ from parsedmarc.log import logger
|
||||
from parsedmarc.mail.mailbox_connection import MailboxConnection
|
||||
|
||||
|
||||
def _get_creds(token_file, credentials_file, scopes, oauth2_port):
|
||||
def _get_creds(
|
||||
token_file,
|
||||
credentials_file,
|
||||
scopes,
|
||||
oauth2_port,
|
||||
auth_mode="installed_app",
|
||||
service_account_user=None,
|
||||
):
|
||||
normalized_auth_mode = (auth_mode or "installed_app").strip().lower()
|
||||
if normalized_auth_mode == "service_account":
|
||||
creds = service_account.Credentials.from_service_account_file(
|
||||
credentials_file,
|
||||
scopes=scopes,
|
||||
)
|
||||
if service_account_user:
|
||||
creds = creds.with_subject(service_account_user)
|
||||
return creds
|
||||
if normalized_auth_mode != "installed_app":
|
||||
raise ValueError(
|
||||
f"Unsupported Gmail auth_mode '{auth_mode}'. "
|
||||
"Expected 'installed_app' or 'service_account'."
|
||||
)
|
||||
|
||||
creds = None
|
||||
|
||||
if Path(token_file).exists():
|
||||
@@ -47,8 +70,17 @@ class GmailConnection(MailboxConnection):
|
||||
reports_folder: str,
|
||||
oauth2_port: int,
|
||||
paginate_messages: bool,
|
||||
auth_mode: str = "installed_app",
|
||||
service_account_user: str | None = None,
|
||||
):
|
||||
creds = _get_creds(token_file, credentials_file, scopes, oauth2_port)
|
||||
creds = _get_creds(
|
||||
token_file,
|
||||
credentials_file,
|
||||
scopes,
|
||||
oauth2_port,
|
||||
auth_mode=auth_mode,
|
||||
service_account_user=service_account_user,
|
||||
)
|
||||
self.service = build("gmail", "v1", credentials=creds)
|
||||
self.include_spam_trash = include_spam_trash
|
||||
self.reports_label_id = self._find_label_id_for_label(reports_folder)
|
||||
@@ -126,7 +158,7 @@ class GmailConnection(MailboxConnection):
|
||||
return urlsafe_b64decode(msg["raw"]).decode(errors="replace")
|
||||
|
||||
def delete_message(self, message_id: str):
|
||||
self.service.users().messages().delete(userId="me", id=message_id)
|
||||
self.service.users().messages().delete(userId="me", id=message_id).execute()
|
||||
|
||||
def move_message(self, message_id: str, folder_name: str):
|
||||
label_id = self._find_label_id_for_label(folder_name)
|
||||
|
||||
@@ -12,19 +12,25 @@ from azure.identity import (
|
||||
UsernamePasswordCredential,
|
||||
DeviceCodeCredential,
|
||||
ClientSecretCredential,
|
||||
CertificateCredential,
|
||||
TokenCachePersistenceOptions,
|
||||
AuthenticationRecord,
|
||||
)
|
||||
from msgraph.core import GraphClient
|
||||
from requests.exceptions import RequestException
|
||||
|
||||
from parsedmarc.log import logger
|
||||
from parsedmarc.mail.mailbox_connection import MailboxConnection
|
||||
|
||||
GRAPH_REQUEST_RETRY_ATTEMPTS = 3
|
||||
GRAPH_REQUEST_RETRY_DELAY_SECONDS = 5
|
||||
|
||||
|
||||
class AuthMethod(Enum):
|
||||
DeviceCode = 1
|
||||
UsernamePassword = 2
|
||||
ClientSecret = 3
|
||||
Certificate = 4
|
||||
|
||||
|
||||
def _get_cache_args(token_path: Path, allow_unencrypted_storage):
|
||||
@@ -83,30 +89,55 @@ def _generate_credential(auth_method: str, token_path: Path, **kwargs):
|
||||
tenant_id=kwargs["tenant_id"],
|
||||
client_secret=kwargs["client_secret"],
|
||||
)
|
||||
elif auth_method == AuthMethod.Certificate.name:
|
||||
cert_path = kwargs.get("certificate_path")
|
||||
if not cert_path:
|
||||
raise ValueError(
|
||||
"certificate_path is required when auth_method is 'Certificate'"
|
||||
)
|
||||
credential = CertificateCredential(
|
||||
client_id=kwargs["client_id"],
|
||||
tenant_id=kwargs["tenant_id"],
|
||||
certificate_path=cert_path,
|
||||
password=kwargs.get("certificate_password"),
|
||||
)
|
||||
else:
|
||||
raise RuntimeError(f"Auth method {auth_method} not found")
|
||||
return credential
|
||||
|
||||
|
||||
class MSGraphConnection(MailboxConnection):
|
||||
_WELL_KNOWN_FOLDERS = {
|
||||
"inbox": "inbox",
|
||||
"archive": "archive",
|
||||
"drafts": "drafts",
|
||||
"sentitems": "sentitems",
|
||||
"deleteditems": "deleteditems",
|
||||
"junkemail": "junkemail",
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
auth_method: str,
|
||||
mailbox: str,
|
||||
graph_url: str,
|
||||
client_id: str,
|
||||
client_secret: str,
|
||||
username: str,
|
||||
password: str,
|
||||
client_secret: Optional[str],
|
||||
username: Optional[str],
|
||||
password: Optional[str],
|
||||
tenant_id: str,
|
||||
token_file: str,
|
||||
allow_unencrypted_storage: bool,
|
||||
certificate_path: Optional[str] = None,
|
||||
certificate_password: Optional[Union[str, bytes]] = None,
|
||||
):
|
||||
token_path = Path(token_file)
|
||||
credential = _generate_credential(
|
||||
auth_method,
|
||||
client_id=client_id,
|
||||
client_secret=client_secret,
|
||||
certificate_path=certificate_path,
|
||||
certificate_password=certificate_password,
|
||||
username=username,
|
||||
password=password,
|
||||
tenant_id=tenant_id,
|
||||
@@ -117,10 +148,10 @@ class MSGraphConnection(MailboxConnection):
|
||||
"credential": credential,
|
||||
"cloud": graph_url,
|
||||
}
|
||||
if not isinstance(credential, ClientSecretCredential):
|
||||
if not isinstance(credential, (ClientSecretCredential, CertificateCredential)):
|
||||
scopes = ["Mail.ReadWrite"]
|
||||
# Detect if mailbox is shared
|
||||
if mailbox and username != mailbox:
|
||||
if mailbox and username and username != mailbox:
|
||||
scopes = ["Mail.ReadWrite.Shared"]
|
||||
auth_record = credential.authenticate(scopes=scopes)
|
||||
_cache_auth_record(auth_record, token_path)
|
||||
@@ -129,6 +160,23 @@ class MSGraphConnection(MailboxConnection):
|
||||
self._client = GraphClient(**client_params)
|
||||
self.mailbox_name = mailbox
|
||||
|
||||
def _request_with_retries(self, method_name: str, *args, **kwargs):
|
||||
for attempt in range(1, GRAPH_REQUEST_RETRY_ATTEMPTS + 1):
|
||||
try:
|
||||
return getattr(self._client, method_name)(*args, **kwargs)
|
||||
except RequestException as error:
|
||||
if attempt == GRAPH_REQUEST_RETRY_ATTEMPTS:
|
||||
raise
|
||||
logger.warning(
|
||||
"Transient MS Graph %s error on attempt %s/%s: %s",
|
||||
method_name.upper(),
|
||||
attempt,
|
||||
GRAPH_REQUEST_RETRY_ATTEMPTS,
|
||||
error,
|
||||
)
|
||||
sleep(GRAPH_REQUEST_RETRY_DELAY_SECONDS)
|
||||
raise RuntimeError("no retry attempts configured")
|
||||
|
||||
def create_folder(self, folder_name: str):
|
||||
sub_url = ""
|
||||
path_parts = folder_name.split("/")
|
||||
@@ -143,7 +191,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
|
||||
request_body = {"displayName": folder_name}
|
||||
request_url = f"/users/{self.mailbox_name}/mailFolders{sub_url}"
|
||||
resp = self._client.post(request_url, json=request_body)
|
||||
resp = self._request_with_retries("post", request_url, json=request_body)
|
||||
if resp.status_code == 409:
|
||||
logger.debug(f"Folder {folder_name} already exists, skipping creation")
|
||||
elif resp.status_code == 201:
|
||||
@@ -173,7 +221,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
params["$top"] = batch_size
|
||||
else:
|
||||
params["$top"] = 100
|
||||
result = self._client.get(url, params=params)
|
||||
result = self._request_with_retries("get", url, params=params)
|
||||
if result.status_code != 200:
|
||||
raise RuntimeError(f"Failed to fetch messages {result.text}")
|
||||
messages = result.json()["value"]
|
||||
@@ -181,7 +229,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
while "@odata.nextLink" in result.json() and (
|
||||
since is not None or (batch_size == 0 or batch_size - len(messages) > 0)
|
||||
):
|
||||
result = self._client.get(result.json()["@odata.nextLink"])
|
||||
result = self._request_with_retries("get", result.json()["@odata.nextLink"])
|
||||
if result.status_code != 200:
|
||||
raise RuntimeError(f"Failed to fetch messages {result.text}")
|
||||
messages.extend(result.json()["value"])
|
||||
@@ -190,7 +238,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
def mark_message_read(self, message_id: str):
|
||||
"""Marks a message as read"""
|
||||
url = f"/users/{self.mailbox_name}/messages/{message_id}"
|
||||
resp = self._client.patch(url, json={"isRead": "true"})
|
||||
resp = self._request_with_retries("patch", url, json={"isRead": "true"})
|
||||
if resp.status_code != 200:
|
||||
raise RuntimeWarning(
|
||||
f"Failed to mark message read{resp.status_code}: {resp.json()}"
|
||||
@@ -198,7 +246,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
|
||||
def fetch_message(self, message_id: str, **kwargs):
|
||||
url = f"/users/{self.mailbox_name}/messages/{message_id}/$value"
|
||||
result = self._client.get(url)
|
||||
result = self._request_with_retries("get", url)
|
||||
if result.status_code != 200:
|
||||
raise RuntimeWarning(
|
||||
f"Failed to fetch message{result.status_code}: {result.json()}"
|
||||
@@ -210,7 +258,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
|
||||
def delete_message(self, message_id: str):
|
||||
url = f"/users/{self.mailbox_name}/messages/{message_id}"
|
||||
resp = self._client.delete(url)
|
||||
resp = self._request_with_retries("delete", url)
|
||||
if resp.status_code != 204:
|
||||
raise RuntimeWarning(
|
||||
f"Failed to delete message {resp.status_code}: {resp.json()}"
|
||||
@@ -220,7 +268,7 @@ class MSGraphConnection(MailboxConnection):
|
||||
folder_id = self._find_folder_id_from_folder_path(folder_name)
|
||||
request_body = {"destinationId": folder_id}
|
||||
url = f"/users/{self.mailbox_name}/messages/{message_id}/move"
|
||||
resp = self._client.post(url, json=request_body)
|
||||
resp = self._request_with_retries("post", url, json=request_body)
|
||||
if resp.status_code != 201:
|
||||
raise RuntimeWarning(
|
||||
f"Failed to move message {resp.status_code}: {resp.json()}"
|
||||
@@ -248,6 +296,19 @@ class MSGraphConnection(MailboxConnection):
|
||||
else:
|
||||
return self._find_folder_id_with_parent(folder_name, None)
|
||||
|
||||
def _get_well_known_folder_id(self, folder_name: str) -> Optional[str]:
|
||||
folder_key = folder_name.lower().replace(" ", "").replace("-", "")
|
||||
alias = self._WELL_KNOWN_FOLDERS.get(folder_key)
|
||||
if alias is None:
|
||||
return None
|
||||
|
||||
url = f"/users/{self.mailbox_name}/mailFolders/{alias}?$select=id,displayName"
|
||||
folder_resp = self._request_with_retries("get", url)
|
||||
if folder_resp.status_code != 200:
|
||||
return None
|
||||
payload = folder_resp.json()
|
||||
return payload.get("id")
|
||||
|
||||
def _find_folder_id_with_parent(
|
||||
self, folder_name: str, parent_folder_id: Optional[str]
|
||||
):
|
||||
@@ -256,8 +317,12 @@ class MSGraphConnection(MailboxConnection):
|
||||
sub_url = f"/{parent_folder_id}/childFolders"
|
||||
url = f"/users/{self.mailbox_name}/mailFolders{sub_url}"
|
||||
filter = f"?$filter=displayName eq '{folder_name}'"
|
||||
folders_resp = self._client.get(url + filter)
|
||||
folders_resp = self._request_with_retries("get", url + filter)
|
||||
if folders_resp.status_code != 200:
|
||||
if parent_folder_id is None:
|
||||
well_known_folder_id = self._get_well_known_folder_id(folder_name)
|
||||
if well_known_folder_id:
|
||||
return well_known_folder_id
|
||||
raise RuntimeWarning(f"Failed to list folders.{folders_resp.json()}")
|
||||
folders: list = folders_resp.json()["value"]
|
||||
matched_folders = [
|
||||
|
||||
@@ -55,10 +55,28 @@ class IMAPConnection(MailboxConnection):
|
||||
return cast(str, self._client.fetch_message(message_id, parse=False))
|
||||
|
||||
def delete_message(self, message_id: int):
|
||||
self._client.delete_messages([message_id])
|
||||
try:
|
||||
self._client.delete_messages([message_id])
|
||||
except IMAPClientError as error:
|
||||
logger.warning(
|
||||
"IMAP delete fallback for message %s due to server error: %s",
|
||||
message_id,
|
||||
error,
|
||||
)
|
||||
self._client.add_flags([message_id], [r"\Deleted"], silent=True)
|
||||
self._client.expunge()
|
||||
|
||||
def move_message(self, message_id: int, folder_name: str):
|
||||
self._client.move_messages([message_id], folder_name)
|
||||
try:
|
||||
self._client.move_messages([message_id], folder_name)
|
||||
except IMAPClientError as error:
|
||||
logger.warning(
|
||||
"IMAP move fallback for message %s due to server error: %s",
|
||||
message_id,
|
||||
error,
|
||||
)
|
||||
self._client.copy([message_id], folder_name)
|
||||
self.delete_message(message_id)
|
||||
|
||||
def keepalive(self):
|
||||
self._client.noop()
|
||||
|
||||
@@ -4,7 +4,9 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import boto3
|
||||
from opensearchpy import (
|
||||
AWSV4SignerAuth,
|
||||
Boolean,
|
||||
Date,
|
||||
Document,
|
||||
@@ -15,6 +17,7 @@ from opensearchpy import (
|
||||
Nested,
|
||||
Object,
|
||||
Q,
|
||||
RequestsHttpConnection,
|
||||
Search,
|
||||
Text,
|
||||
connections,
|
||||
@@ -272,6 +275,9 @@ def set_hosts(
|
||||
password: Optional[str] = None,
|
||||
api_key: Optional[str] = None,
|
||||
timeout: Optional[float] = 60.0,
|
||||
auth_type: str = "basic",
|
||||
aws_region: Optional[str] = None,
|
||||
aws_service: str = "es",
|
||||
):
|
||||
"""
|
||||
Sets the OpenSearch hosts to use
|
||||
@@ -284,6 +290,9 @@ def set_hosts(
|
||||
password (str): The password to use for authentication
|
||||
api_key (str): The Base64 encoded API key to use for authentication
|
||||
timeout (float): Timeout in seconds
|
||||
auth_type (str): OpenSearch auth mode: basic (default) or awssigv4
|
||||
aws_region (str): AWS region for SigV4 auth (required for awssigv4)
|
||||
aws_service (str): AWS service for SigV4 signing (default: es)
|
||||
"""
|
||||
if not isinstance(hosts, list):
|
||||
hosts = [hosts]
|
||||
@@ -295,10 +304,30 @@ def set_hosts(
|
||||
conn_params["ca_certs"] = ssl_cert_path
|
||||
else:
|
||||
conn_params["verify_certs"] = False
|
||||
if username and password:
|
||||
conn_params["http_auth"] = username + ":" + password
|
||||
if api_key:
|
||||
conn_params["api_key"] = api_key
|
||||
normalized_auth_type = (auth_type or "basic").strip().lower()
|
||||
if normalized_auth_type == "awssigv4":
|
||||
if not aws_region:
|
||||
raise OpenSearchError(
|
||||
"OpenSearch AWS SigV4 auth requires 'aws_region' to be set"
|
||||
)
|
||||
session = boto3.Session()
|
||||
credentials = session.get_credentials()
|
||||
if credentials is None:
|
||||
raise OpenSearchError(
|
||||
"Unable to load AWS credentials for OpenSearch SigV4 authentication"
|
||||
)
|
||||
conn_params["http_auth"] = AWSV4SignerAuth(credentials, aws_region, aws_service)
|
||||
conn_params["connection_class"] = RequestsHttpConnection
|
||||
elif normalized_auth_type == "basic":
|
||||
if username and password:
|
||||
conn_params["http_auth"] = username + ":" + password
|
||||
if api_key:
|
||||
conn_params["api_key"] = api_key
|
||||
else:
|
||||
raise OpenSearchError(
|
||||
f"Unsupported OpenSearch auth_type '{auth_type}'. "
|
||||
"Expected 'basic' or 'awssigv4'."
|
||||
)
|
||||
connections.create_connection(**conn_params)
|
||||
|
||||
|
||||
@@ -413,8 +442,8 @@ def save_aggregate_report_to_opensearch(
|
||||
org_name_query = Q(dict(match_phrase=dict(org_name=org_name)))
|
||||
report_id_query = Q(dict(match_phrase=dict(report_id=report_id)))
|
||||
domain_query = Q(dict(match_phrase={"published_policy.domain": domain}))
|
||||
begin_date_query = Q(dict(match=dict(date_begin=begin_date)))
|
||||
end_date_query = Q(dict(match=dict(date_end=end_date)))
|
||||
begin_date_query = Q(dict(range=dict(date_begin=dict(gte=begin_date))))
|
||||
end_date_query = Q(dict(range=dict(date_end=dict(lte=end_date))))
|
||||
|
||||
if index_suffix is not None:
|
||||
search_index = "dmarc_aggregate_{0}*".format(index_suffix)
|
||||
|
||||
Binary file not shown.
@@ -6,7 +6,10 @@ from __future__ import annotations
|
||||
import json
|
||||
import logging
|
||||
import logging.handlers
|
||||
from typing import Any
|
||||
import socket
|
||||
import ssl
|
||||
import time
|
||||
from typing import Any, Optional
|
||||
|
||||
from parsedmarc import (
|
||||
parsed_aggregate_reports_to_csv_rows,
|
||||
@@ -18,20 +21,150 @@ from parsedmarc import (
|
||||
class SyslogClient(object):
|
||||
"""A client for Syslog"""
|
||||
|
||||
def __init__(self, server_name: str, server_port: int):
|
||||
def __init__(
|
||||
self,
|
||||
server_name: str,
|
||||
server_port: int,
|
||||
protocol: str = "udp",
|
||||
cafile_path: Optional[str] = None,
|
||||
certfile_path: Optional[str] = None,
|
||||
keyfile_path: Optional[str] = None,
|
||||
timeout: float = 5.0,
|
||||
retry_attempts: int = 3,
|
||||
retry_delay: int = 5,
|
||||
):
|
||||
"""
|
||||
Initializes the SyslogClient
|
||||
Args:
|
||||
server_name (str): The Syslog server
|
||||
server_port (int): The Syslog UDP port
|
||||
server_port (int): The Syslog port
|
||||
protocol (str): The protocol to use: "udp", "tcp", or "tls" (Default: "udp")
|
||||
cafile_path (str): Path to CA certificate file for TLS server verification (Optional)
|
||||
certfile_path (str): Path to client certificate file for TLS authentication (Optional)
|
||||
keyfile_path (str): Path to client private key file for TLS authentication (Optional)
|
||||
timeout (float): Connection timeout in seconds for TCP/TLS (Default: 5.0)
|
||||
retry_attempts (int): Number of retry attempts for failed connections (Default: 3)
|
||||
retry_delay (int): Delay in seconds between retry attempts (Default: 5)
|
||||
"""
|
||||
self.server_name = server_name
|
||||
self.server_port = server_port
|
||||
self.protocol = protocol.lower()
|
||||
self.timeout = timeout
|
||||
self.retry_attempts = retry_attempts
|
||||
self.retry_delay = retry_delay
|
||||
|
||||
self.logger = logging.getLogger("parsedmarc_syslog")
|
||||
self.logger.setLevel(logging.INFO)
|
||||
log_handler = logging.handlers.SysLogHandler(address=(server_name, server_port))
|
||||
|
||||
# Create the appropriate syslog handler based on protocol
|
||||
log_handler = self._create_syslog_handler(
|
||||
server_name,
|
||||
server_port,
|
||||
self.protocol,
|
||||
cafile_path,
|
||||
certfile_path,
|
||||
keyfile_path,
|
||||
timeout,
|
||||
retry_attempts,
|
||||
retry_delay,
|
||||
)
|
||||
|
||||
self.logger.addHandler(log_handler)
|
||||
|
||||
def _create_syslog_handler(
|
||||
self,
|
||||
server_name: str,
|
||||
server_port: int,
|
||||
protocol: str,
|
||||
cafile_path: Optional[str],
|
||||
certfile_path: Optional[str],
|
||||
keyfile_path: Optional[str],
|
||||
timeout: float,
|
||||
retry_attempts: int,
|
||||
retry_delay: int,
|
||||
) -> logging.handlers.SysLogHandler:
|
||||
"""
|
||||
Creates a SysLogHandler with the specified protocol and TLS settings
|
||||
"""
|
||||
if protocol == "udp":
|
||||
# UDP protocol (default, backward compatible)
|
||||
return logging.handlers.SysLogHandler(
|
||||
address=(server_name, server_port),
|
||||
socktype=socket.SOCK_DGRAM,
|
||||
)
|
||||
elif protocol in ["tcp", "tls"]:
|
||||
# TCP or TLS protocol with retry logic
|
||||
for attempt in range(1, retry_attempts + 1):
|
||||
try:
|
||||
if protocol == "tcp":
|
||||
# TCP without TLS
|
||||
handler = logging.handlers.SysLogHandler(
|
||||
address=(server_name, server_port),
|
||||
socktype=socket.SOCK_STREAM,
|
||||
)
|
||||
# Set timeout on the socket
|
||||
if hasattr(handler, "socket") and handler.socket:
|
||||
handler.socket.settimeout(timeout)
|
||||
return handler
|
||||
else:
|
||||
# TLS protocol
|
||||
# Create SSL context with secure defaults
|
||||
ssl_context = ssl.create_default_context()
|
||||
|
||||
# Explicitly set minimum TLS version to 1.2 for security
|
||||
ssl_context.minimum_version = ssl.TLSVersion.TLSv1_2
|
||||
|
||||
# Configure server certificate verification
|
||||
if cafile_path:
|
||||
ssl_context.load_verify_locations(cafile=cafile_path)
|
||||
|
||||
# Configure client certificate authentication
|
||||
if certfile_path and keyfile_path:
|
||||
ssl_context.load_cert_chain(
|
||||
certfile=certfile_path,
|
||||
keyfile=keyfile_path,
|
||||
)
|
||||
elif certfile_path or keyfile_path:
|
||||
# Warn if only one of the two required parameters is provided
|
||||
self.logger.warning(
|
||||
"Both certfile_path and keyfile_path are required for "
|
||||
"client certificate authentication. Client authentication "
|
||||
"will not be used."
|
||||
)
|
||||
|
||||
# Create TCP handler first
|
||||
handler = logging.handlers.SysLogHandler(
|
||||
address=(server_name, server_port),
|
||||
socktype=socket.SOCK_STREAM,
|
||||
)
|
||||
|
||||
# Wrap socket with TLS
|
||||
if hasattr(handler, "socket") and handler.socket:
|
||||
handler.socket = ssl_context.wrap_socket(
|
||||
handler.socket,
|
||||
server_hostname=server_name,
|
||||
)
|
||||
handler.socket.settimeout(timeout)
|
||||
|
||||
return handler
|
||||
|
||||
except Exception as e:
|
||||
if attempt < retry_attempts:
|
||||
self.logger.warning(
|
||||
f"Syslog connection attempt {attempt}/{retry_attempts} failed: {e}. "
|
||||
f"Retrying in {retry_delay} seconds..."
|
||||
)
|
||||
time.sleep(retry_delay)
|
||||
else:
|
||||
self.logger.error(
|
||||
f"Syslog connection failed after {retry_attempts} attempts: {e}"
|
||||
)
|
||||
raise
|
||||
else:
|
||||
raise ValueError(
|
||||
f"Invalid protocol '{protocol}'. Must be 'udp', 'tcp', or 'tls'."
|
||||
)
|
||||
|
||||
def save_aggregate_report_to_syslog(self, aggregate_reports: list[dict[str, Any]]):
|
||||
rows = parsed_aggregate_reports_to_csv_rows(aggregate_reports)
|
||||
for row in rows:
|
||||
|
||||
@@ -2,7 +2,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any, Dict, List, Literal, Optional, TypedDict, Union
|
||||
|
||||
# NOTE: This module is intentionally Python 3.9 compatible.
|
||||
# NOTE: This module is intentionally Python 3.10 compatible.
|
||||
# - No PEP 604 unions (A | B)
|
||||
# - No typing.NotRequired / Required (3.11+) to avoid an extra dependency.
|
||||
# For optional keys, use total=False TypedDicts.
|
||||
|
||||
@@ -205,8 +205,7 @@ def get_reverse_dns(
|
||||
)[0]
|
||||
|
||||
except dns.exception.DNSException as e:
|
||||
logger.warning(f"get_reverse_dns({ip_address}) exception: {e}")
|
||||
pass
|
||||
logger.debug(f"get_reverse_dns({ip_address}) exception: {e}")
|
||||
|
||||
return hostname
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
requires = [
|
||||
"hatchling>=1.27.0",
|
||||
]
|
||||
requires_python = ">=3.9,<3.14"
|
||||
requires_python = ">=3.10,<3.15"
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
@@ -29,7 +29,7 @@ classifiers = [
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python :: 3"
|
||||
]
|
||||
requires-python = ">=3.9, <3.14"
|
||||
requires-python = ">=3.10"
|
||||
dependencies = [
|
||||
"azure-identity>=1.8.0",
|
||||
"azure-monitor-ingestion>=1.0.0",
|
||||
@@ -45,10 +45,10 @@ dependencies = [
|
||||
"google-auth-httplib2>=0.1.0",
|
||||
"google-auth-oauthlib>=0.4.6",
|
||||
"google-auth>=2.3.3",
|
||||
"imapclient>=2.1.0",
|
||||
"imapclient>=3.1.0",
|
||||
"kafka-python-ng>=2.2.2",
|
||||
"lxml>=4.4.0",
|
||||
"mailsuite>=1.11.1",
|
||||
"mailsuite>=1.11.2",
|
||||
"msgraph-core==0.2.2",
|
||||
"opensearch-py>=2.4.2,<=3.0.0",
|
||||
"publicsuffixlist>=0.10.0",
|
||||
|
||||
Reference in New Issue
Block a user