diff --git a/README.rst b/README.rst
index 3d5684e..6184f7a 100644
--- a/README.rst
+++ b/README.rst
@@ -144,6 +144,10 @@ For example
     bucket = my-bucket
     path = parsedmarc
 
+    [syslog]
+    server = localhost
+    port = 514
+
 The full set of configuration options are:
 
 - ``general``
@@ -222,6 +226,9 @@ The full set of configuration options are:
 - ``s3``
   - ``bucket`` - str: The S3 bucket name
   - ``path`` - int: The path to upload reports to (Default: /)
+- ``syslog``
+  - ``server`` - str: The Syslog server name or IP address
+  - ``port`` - int: The UDP port to use (Default: 514)
 
 
 .. warning::
diff --git a/parsedmarc/cli.py b/parsedmarc/cli.py
index be2026a..5f85fff 100644
--- a/parsedmarc/cli.py
+++ b/parsedmarc/cli.py
@@ -19,7 +19,7 @@ from tqdm import tqdm
 from parsedmarc import get_dmarc_reports_from_inbox, watch_inbox, \
     parse_report_file, get_dmarc_reports_from_mbox, elastic, kafkaclient, \
     splunk, save_output, email_results, ParserError, __version__, \
-    InvalidDMARCReport, s3
+    InvalidDMARCReport, s3, syslog
 from parsedmarc.utils import is_mbox
 
 logger = logging.getLogger("parsedmarc")
@@ -87,6 +87,14 @@ def _main():
                 )
             except Exception as error_:
                 logger.error("S3 Error: {0}".format(error_.__str__()))
+        if opts.syslog_server:
+            try:
+                syslog_client = syslog.SyslogClient(
+                    server_name=opts.syslog_server,
+                    server_port=int(opts.syslog_port),
+                )
+            except Exception as error_:
+                logger.error("Syslog Error: {0}".format(error_.__str__()))
         if opts.save_aggregate:
             for report in reports_["aggregate_reports"]:
                 try:
@@ -117,6 +125,11 @@ def _main():
                         s3_client.save_aggregate_report_to_s3(report)
                 except Exception as error_:
                     logger.error("S3 Error: {0}".format(error_.__str__()))
+                try:
+                    if opts.syslog_server:
+                        syslog_client.save_aggregate_report_to_syslog(report)
+                except Exception as error_:
+                    logger.error("Syslog Error: {0}".format(error_.__str__()))
         if opts.hec:
             try:
                 aggregate_reports_ = reports_["aggregate_reports"]
@@ -156,6 +169,11 @@ def _main():
                         s3_client.save_forensic_report_to_s3(report)
                 except Exception as error_:
                     logger.error("S3 Error: {0}".format(error_.__str__()))
+                try:
+                    if opts.syslog_server:
+                        syslog_client.save_forensic_report_to_syslog(report)
+                except Exception as error_:
+                    logger.error("Syslog Error: {0}".format(error_.__str__()))
         if opts.hec:
             try:
                 forensic_reports_ = reports_["forensic_reports"]
@@ -279,6 +297,8 @@ def _main():
                      smtp_message="Please see the attached DMARC results.",
                      s3_bucket=None,
                      s3_path=None,
+                     syslog_server=None,
+                     syslog_port=None,
                      log_file=args.log_file,
                      n_procs=1,
                      chunk_size=1
@@ -539,6 +559,18 @@ def _main():
                 opts.s3_path = opts.s3_path[:-1]
             else:
                 opts.s3_path = ""
+        if "syslog" in config.sections():
+            syslog_config = config["syslog"]
+            if "server" in syslog_config:
+                opts.syslog_server = syslog_config["server"]
+            else:
+                logger.critical("server setting missing from the "
+                                "syslog config section")
+                exit(-1)
+            if "port" in syslog_config:
+                opts.syslog_port = syslog_config["port"]
+            else:
+                opts.syslog_port = 514
 
     logging.basicConfig(level=logging.WARNING)
     logger.setLevel(logging.WARNING)
diff --git a/parsedmarc/syslog.py b/parsedmarc/syslog.py
new file mode 100644
--- /dev/null
+++ b/parsedmarc/syslog.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+
+"""A client for sending parsed DMARC reports to a Syslog server"""
+
+import logging
+import logging.handlers
+import json
+
+from parsedmarc import parsed_aggregate_reports_to_csv_rows, \
+    parsed_forensic_reports_to_csv_rows
+
+logger = logging.getLogger("parsedmarc")
+
+
+class SyslogClient(object):
+    """A client for Syslog"""
+
+    def __init__(self, server_name, server_port):
+        """
+        Initializes the SyslogClient
+
+        Args:
+            server_name (str): The Syslog server
+            server_port (int): The Syslog UDP port
+        """
+        self.server_name = server_name
+        self.server_port = server_port
+        # "parsedmarc_syslog" is not a child of the "parsedmarc" logger,
+        # so report records do not pass through the application's handlers
+        self.logger = logging.getLogger('parsedmarc_syslog')
+        self.logger.setLevel(logging.INFO)
+        log_handler = logging.handlers.SysLogHandler(
+            address=(server_name, server_port))
+        self.logger.addHandler(log_handler)
+
+    def save_aggregate_report_to_syslog(self, aggregate_reports):
+        """
+        Sends each aggregate report row to Syslog as a JSON message
+
+        Args:
+            aggregate_reports: Parsed aggregate DMARC report(s)
+        """
+        rows = parsed_aggregate_reports_to_csv_rows(aggregate_reports)
+        for row in rows:
+            self.logger.info(json.dumps(row))
+
+    def save_forensic_report_to_syslog(self, forensic_reports):
+        """
+        Sends each forensic report row to Syslog as a JSON message
+
+        Args:
+            forensic_reports: Parsed forensic DMARC report(s)
+        """
+        rows = parsed_forensic_reports_to_csv_rows(forensic_reports)
+        for row in rows:
+            self.logger.info(json.dumps(row))