mirror of
https://github.com/domainaware/parsedmarc.git
synced 2026-02-17 07:03:58 +00:00
Added output to webhook as an option (#558)
* Added output to webhook as an option * added documentation for new webhook configuration --------- Co-authored-by: Sean Whalen <44679+seanthegeek@users.noreply.github.com>
This commit is contained in:
@@ -103,6 +103,12 @@ port = 514
|
||||
host = logger
|
||||
port = 12201
|
||||
mode = tcp
|
||||
|
||||
[webhook]
|
||||
aggregate_url = https://aggregate_url.example.com
|
||||
forensic_url = https://forensic_url.example.com
|
||||
smtp_tls_url = https://smtp_tls_url.example.com
|
||||
timeout = 60
|
||||
```
|
||||
|
||||
The full set of configuration options are:
|
||||
@@ -359,6 +365,12 @@ The full set of configuration options are:
|
||||
- `reports_folder` - str: Full path for mailbox maildir location (Default: `INBOX`)
|
||||
- `maildir_create` - bool: Create maildir if not present (Default: False)
|
||||
|
||||
- `webhook` - Post the individual reports to a webhook url with the report as the JSON body
|
||||
- `aggregate_url` - str: URL of the webhook which should receive the aggregate reports
|
||||
- `forensic_url` - str: URL of the webhook which should receive the forensic reports
|
||||
- `smtp_tls_url` - str: URL of the webhook which should receive the smtp_tls reports
|
||||
- `timeout` - int: Timeout in seconds for each webhook call (Default: 60)
|
||||
|
||||
:::{warning}
|
||||
It is **strongly recommended** to **not** use the `nameservers`
|
||||
setting. By default, `parsedmarc` uses
|
||||
|
||||
@@ -19,7 +19,8 @@ from tqdm import tqdm
|
||||
from parsedmarc import get_dmarc_reports_from_mailbox, watch_inbox, \
|
||||
parse_report_file, get_dmarc_reports_from_mbox, elastic, opensearch, \
|
||||
kafkaclient, splunk, save_output, email_results, ParserError, \
|
||||
__version__, InvalidDMARCReport, s3, syslog, loganalytics, gelf
|
||||
__version__, InvalidDMARCReport, s3, syslog, loganalytics, gelf, \
|
||||
webhook
|
||||
from parsedmarc.mail import IMAPConnection, MSGraphConnection, \
|
||||
GmailConnection, MaildirConnection
|
||||
from parsedmarc.mail.graph import AuthMethod
|
||||
@@ -154,6 +155,18 @@ def _main():
|
||||
except Exception as error_:
|
||||
logger.error("GELF Error: {0}".format(error_.__str__()))
|
||||
|
||||
try:
|
||||
if opts.webhook_aggregate_url:
|
||||
webhook_client.save_aggregate_report_to_webhook(
|
||||
json.dumps(
|
||||
report,
|
||||
ensure_ascii=False,
|
||||
indent=2
|
||||
)
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Webhook Error: {0}".format(error_.__str__()))
|
||||
|
||||
if opts.hec:
|
||||
try:
|
||||
aggregate_reports_ = reports_["aggregate_reports"]
|
||||
@@ -229,6 +242,16 @@ def _main():
|
||||
except Exception as error_:
|
||||
logger.error("GELF Error: {0}".format(error_.__str__()))
|
||||
|
||||
try:
|
||||
if opts.webhook_forensic_url:
|
||||
webhook_client.save_forensic_report_to_webhook(
|
||||
json.dumps(
|
||||
report,
|
||||
ensure_ascii=False,
|
||||
indent=2))
|
||||
except Exception as error_:
|
||||
logger.error("Webhook Error: {0}".format(error_.__str__()))
|
||||
|
||||
if opts.hec:
|
||||
try:
|
||||
forensic_reports_ = reports_["forensic_reports"]
|
||||
@@ -304,6 +327,16 @@ def _main():
|
||||
except Exception as error_:
|
||||
logger.error("GELF Error: {0}".format(error_.__str__()))
|
||||
|
||||
try:
|
||||
if opts.webhook_smtp_tls_url:
|
||||
webhook_client.save_smtp_tls_report_to_webhook(
|
||||
json.dumps(
|
||||
report,
|
||||
ensure_ascii=False,
|
||||
indent=2))
|
||||
except Exception as error_:
|
||||
logger.error("Webhook Error: {0}".format(error_.__str__()))
|
||||
|
||||
if opts.hec:
|
||||
try:
|
||||
smtp_tls_reports_ = reports_["smtp_tls_reports"]
|
||||
@@ -524,6 +557,10 @@ def _main():
|
||||
gelf_host=None,
|
||||
gelf_port=None,
|
||||
gelf_mode=None,
|
||||
webhook_aggregate_url=None,
|
||||
webhook_forensic_url=None,
|
||||
webhook_smtp_tls_url=None,
|
||||
webhook_timeout=60
|
||||
)
|
||||
args = arg_parser.parse_args()
|
||||
|
||||
@@ -1058,6 +1095,17 @@ def _main():
|
||||
"gelf config section")
|
||||
exit(-1)
|
||||
|
||||
if "webhook" in config.sections():
|
||||
webhook_config = config["webhook"]
|
||||
if "aggregate_url" in webhook_config:
|
||||
opts.webhook_aggregate_url = webhook_config["aggregate_url"]
|
||||
if "forensic_url" in webhook_config:
|
||||
opts.webhook_forensic_url = webhook_config["forensic_url"]
|
||||
if "smtp_tls_url" in webhook_config:
|
||||
opts.webhook_smtp_tls_url = webhook_config["smtp_tls_url"]
|
||||
if "timeout" in webhook_config:
|
||||
opts.webhook_timeout = webhook_config["timeout"]
|
||||
|
||||
logger.setLevel(logging.ERROR)
|
||||
|
||||
if opts.warnings:
|
||||
@@ -1225,6 +1273,19 @@ def _main():
|
||||
except Exception as error_:
|
||||
logger.error("GELF Error: {0}".format(error_.__str__()))
|
||||
|
||||
if opts.webhook_aggregate_url or \
|
||||
opts.webhook_forensic_url or \
|
||||
opts.webhook_smtp_tls_url:
|
||||
try:
|
||||
webhook_client = webhook.WebhookClient(
|
||||
aggregate_url=opts.webhook_aggregate_url,
|
||||
forensic_url=opts.webhook_forensic_url,
|
||||
smtp_tls_url=opts.webhook_smtp_tls_url,
|
||||
timeout=opts.webhook_timeout
|
||||
)
|
||||
except Exception as error_:
|
||||
logger.error("Webhook Error: {0}".format(error_.__str__()))
|
||||
|
||||
kafka_aggregate_topic = opts.kafka_aggregate_topic
|
||||
kafka_forensic_topic = opts.kafka_forensic_topic
|
||||
kafka_smtp_tls_topic = opts.kafka_smtp_tls_topic
|
||||
|
||||
51
parsedmarc/webhook.py
Normal file
51
parsedmarc/webhook.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import requests
|
||||
|
||||
from parsedmarc import logger
|
||||
|
||||
|
||||
class WebhookClient(object):
    """A client that delivers parsed DMARC/SMTP-TLS reports to webhook
    URLs by POSTing the report as a JSON request body.

    Delivery is best-effort: failures are logged via the package logger
    and never raised to the caller.
    """

    def __init__(self, aggregate_url, forensic_url, smtp_tls_url,
                 timeout=60):
        """
        Initializes the WebhookClient

        Args:
            aggregate_url (str): The aggregate report webhook url
            forensic_url (str): The forensic report webhook url
            smtp_tls_url (str): The smtp_tls report webhook url
            timeout (int): The timeout to use when calling the webhooks
        """
        self.aggregate_url = aggregate_url
        self.forensic_url = forensic_url
        self.smtp_tls_url = smtp_tls_url
        self.timeout = timeout
        # One Session reuses TCP connections across repeated webhook calls
        # and carries the shared headers for every request.
        self.session = requests.Session()
        self.session.headers = {
            'User-Agent': 'parsedmarc',
            'Content-Type': 'application/json',
        }

    def save_forensic_report_to_webhook(self, report):
        """POST a forensic report (JSON string) to the forensic webhook.

        Args:
            report (str): The forensic report serialized as JSON
        """
        # _send_to_webhook handles and logs all errors itself, so no
        # additional exception handling is needed here.
        self._send_to_webhook(self.forensic_url, report)

    def save_smtp_tls_report_to_webhook(self, report):
        """POST an SMTP TLS report (JSON string) to the smtp_tls webhook.

        Args:
            report (str): The smtp_tls report serialized as JSON
        """
        self._send_to_webhook(self.smtp_tls_url, report)

    def save_aggregate_report_to_webhook(self, report):
        """POST an aggregate report (JSON string) to the aggregate webhook.

        Args:
            report (str): The aggregate report serialized as JSON
        """
        self._send_to_webhook(self.aggregate_url, report)

    def _send_to_webhook(self, webhook_url, payload):
        """POST ``payload`` to ``webhook_url``, logging (never raising)
        on any connection or HTTP-level failure.

        Args:
            webhook_url (str): The destination webhook URL
            payload (str): The JSON request body
        """
        try:
            response = self.session.post(webhook_url, data=payload,
                                         timeout=self.timeout)
            # Surface HTTP errors (4xx/5xx) in the log instead of
            # silently discarding undelivered reports.
            response.raise_for_status()
        except Exception as error_:
            logger.error("Webhook Error: {0}".format(error_.__str__()))
|
||||
Reference in New Issue
Block a user