Move CLI options to a config file
This commit is contained in:
Sean Whalen
2019-02-04 17:03:33 -05:00
parent 5c0de87d4e
commit 528cfb2822
7 changed files with 309 additions and 220 deletions

5
.gitignore vendored
View File

@@ -121,4 +121,7 @@ output/
*.mmdb
# Temp files
tmp/
tmp/
# Config files
prod.ini

View File

@@ -1,3 +1,9 @@
6.0.0
-----
- Move options from CLI to a config file (see updated installation documentation)
- Refactoring to make argument names consistent
5.3.0
-----

20
docs/example.ini Normal file
View File

@@ -0,0 +1,20 @@
# This is an example comment
[general]
save_aggregate = True
save_forensic = False
[imap]
host = imap.example.com
user = dmarcreports@example.com
password = $uperSecure
watch = True
[elasticsearch]
hosts = 127.0.0.1:9200
ssl = False
[splunk_hec]
url = https://splunkhec.example.com
token = HECTokenGoesHere
index = email

View File

@@ -38,7 +38,7 @@ from parsedmarc.utils import is_outlook_msg, convert_outlook_msg
from parsedmarc.utils import timestamp_to_human, human_timestamp_to_datetime
from parsedmarc.utils import parse_email
__version__ = "5.3.0"
__version__ = "6.0.0"
logging.basicConfig(
format='%(levelname)8s:%(filename)s:%(lineno)d:'
@@ -83,7 +83,7 @@ class InvalidForensicReport(InvalidDMARCReport):
"""Raised when an invalid DMARC forensic report is encountered"""
def _parse_report_record(record, nameservers=None, timeout=2.0):
def _parse_report_record(record, nameservers=None, dns_timeout=2.0):
"""
Converts a record from a DMARC aggregate report into a more consistent
format
@@ -92,7 +92,7 @@ def _parse_report_record(record, nameservers=None, timeout=2.0):
record (OrderedDict): The record to convert
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
timeout (float): Sets the DNS timeout in seconds
dns_timeout (float): Sets the DNS timeout in seconds
Returns:
OrderedDict: The converted record
@@ -104,7 +104,7 @@ def _parse_report_record(record, nameservers=None, timeout=2.0):
new_record_source = get_ip_address_info(record["row"]["source_ip"],
cache=IP_ADDRESS_CACHE,
nameservers=nameservers,
timeout=timeout)
timeout=dns_timeout)
new_record["source"] = new_record_source
new_record["count"] = int(record["row"]["count"])
policy_evaluated = record["row"]["policy_evaluated"].copy()
@@ -303,13 +303,13 @@ def parse_aggregate_report_xml(xml, nameservers=None, timeout=2.0):
for record in report["record"]:
report_record = _parse_report_record(record,
nameservers=nameservers,
timeout=timeout)
dns_timeout=timeout)
records.append(report_record)
else:
report_record = _parse_report_record(report["record"],
nameservers=nameservers,
timeout=timeout)
dns_timeout=timeout)
records.append(report_record)
new_report["records"] = records
@@ -375,7 +375,7 @@ def extract_xml(input_):
return xml
def parse_aggregate_report_file(_input, nameservers=None, timeout=2.0):
def parse_aggregate_report_file(_input, nameservers=None, dns_timeout=2.0):
"""Parses a file at the given path, a file-like object. or bytes as a
aggregate DMARC report
@@ -383,7 +383,7 @@ def parse_aggregate_report_file(_input, nameservers=None, timeout=2.0):
_input: A path to a file, a file like object, or bytes
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
timeout (float): Sets the DNS timeout in seconds
dns_timeout (float): Sets the DNS timeout in seconds
Returns:
OrderedDict: The parsed DMARC aggregate report
@@ -392,7 +392,7 @@ def parse_aggregate_report_file(_input, nameservers=None, timeout=2.0):
return parse_aggregate_report_xml(xml,
nameservers=nameservers,
timeout=timeout)
timeout=dns_timeout)
def parsed_aggregate_reports_to_csv(reports):
@@ -506,7 +506,7 @@ def parsed_aggregate_reports_to_csv(reports):
def parse_forensic_report(feedback_report, sample, msg_date,
nameservers=None, timeout=2.0,
nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False):
"""
Converts a DMARC forensic report and sample to a ``OrderedDict``
@@ -517,7 +517,7 @@ def parse_forensic_report(feedback_report, sample, msg_date,
msg_date (str): The message's date header
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
timeout (float): Sets the DNS timeout in seconds
dns_timeout (float): Sets the DNS timeout in seconds
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
@@ -551,7 +551,7 @@ def parse_forensic_report(feedback_report, sample, msg_date,
ip_address = parsed_report["source_ip"]
parsed_report_source = get_ip_address_info(ip_address,
nameservers=nameservers,
timeout=timeout)
timeout=dns_timeout)
parsed_report["source"] = parsed_report_source
del parsed_report["source_ip"]
@@ -650,7 +650,7 @@ def parsed_forensic_reports_to_csv(reports):
return csv_file.getvalue()
def parse_report_email(input_, nameservers=None, timeout=2.0,
def parse_report_email(input_, nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False):
"""
Parses a DMARC report from an email
@@ -658,7 +658,7 @@ def parse_report_email(input_, nameservers=None, timeout=2.0,
Args:
input_: An emailed DMARC report in RFC 822 format, as bytes or a string
nameservers (list): A list of one or more nameservers to use
timeout (float): Sets the DNS timeout in seconds
dns_timeout (float): Sets the DNS timeout in seconds
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
@@ -720,7 +720,7 @@ def parse_report_email(input_, nameservers=None, timeout=2.0,
sample,
date,
nameservers=nameservers,
timeout=timeout,
dns_timeout=dns_timeout,
strip_attachment_payloads=strip_attachment_payloads)
except Exception as e:
raise InvalidForensicReport(e.__str__())
@@ -735,9 +735,10 @@ def parse_report_email(input_, nameservers=None, timeout=2.0,
payload.startswith(MAGIC_GZIP) or \
payload.startswith(MAGIC_XML):
ns = nameservers
aggregate_report = parse_aggregate_report_file(payload,
nameservers=ns,
timeout=timeout)
aggregate_report = parse_aggregate_report_file(
payload,
nameservers=ns,
dns_timeout=dns_timeout)
result = OrderedDict([("report_type", "aggregate"),
("report", aggregate_report)])
except (TypeError, ValueError, binascii.Error):
@@ -768,7 +769,7 @@ def parse_report_email(input_, nameservers=None, timeout=2.0,
return result
def parse_report_file(input_, nameservers=None, timeout=2.0,
def parse_report_file(input_, nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False):
"""Parses a DMARC aggregate or forensic file at the given path, a
file-like object. or bytes
@@ -777,7 +778,7 @@ def parse_report_file(input_, nameservers=None, timeout=2.0,
input_: A path to a file, a file like object, or bytes
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
timeout (float): Sets the DNS timeout in seconds
dns_timeout (float): Sets the DNS timeout in seconds
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
@@ -794,7 +795,7 @@ def parse_report_file(input_, nameservers=None, timeout=2.0,
content = file_object.read()
try:
report = parse_aggregate_report_file(content, nameservers=nameservers,
timeout=timeout)
dns_timeout=dns_timeout)
results = OrderedDict([("report_type", "aggregate"),
("report", report)])
except InvalidAggregateReport:
@@ -802,7 +803,7 @@ def parse_report_file(input_, nameservers=None, timeout=2.0,
sa = strip_attachment_payloads
results = parse_report_email(content,
nameservers=nameservers,
timeout=timeout,
dns_timeout=dns_timeout,
strip_attachment_payloads=sa)
except InvalidDMARCReport:
raise InvalidDMARCReport("Not a valid aggregate or forensic "
@@ -1018,7 +1019,7 @@ def get_dmarc_reports_from_inbox(host=None,
sa = strip_attachment_payloads
parsed_email = parse_report_email(msg_content,
nameservers=nameservers,
timeout=dns_timeout,
dns_timeout=dns_timeout,
strip_attachment_payloads=sa)
if parsed_email["report_type"] == "aggregate":
aggregate_reports.append(parsed_email["report"])

View File

@@ -3,8 +3,8 @@
"""A CLI for parsing DMARC reports"""
from argparse import ArgumentParser
from argparse import Namespace, ArgumentParser
from configparser import ConfigParser
from glob import glob
import logging
from collections import OrderedDict
@@ -18,45 +18,50 @@ from parsedmarc import IMAPError, get_dmarc_reports_from_inbox, \
logger = logging.getLogger("parsedmarc")
def _str_to_list(s):
"""Converts a comma separated string to a list"""
_list = s.split(",")
return list(map(lambda i: i.lstrip(), _list))
def _main():
"""Called when the module is executed"""
def process_reports(reports_):
output_str = "{0}\n".format(json.dumps(reports_,
ensure_ascii=False,
indent=2))
if not args.silent:
if not opts.silent:
print(output_str)
if args.kafka_hosts:
if opts.kafka_hosts:
try:
kafka_client = kafkaclient.KafkaClient(
args.kafka_hosts,
username=args.kafka_username,
password=args.kafka_password
opts.kafka_hosts,
username=opts.kafka_username,
password=opts.kafka_password
)
except Exception as error_:
logger.error("Kafka Error: {0}".format(error_.__str__()))
if args.save_aggregate:
if opts.save_aggregate:
for report in reports_["aggregate_reports"]:
try:
if args.elasticsearch_host:
if opts.elasticsearch_host:
elastic.save_aggregate_report_to_elasticsearch(
report,
index_suffix=args.elasticsearch_index_suffix,
monthly_indexes=args.elasticsearch_monthly_indexes)
index_suffix=opts.elasticsearch_index_suffix,
monthly_indexes=opts.elasticsearch_monthly_indexes)
except elastic.AlreadySaved as warning:
logger.warning(warning.__str__())
except elastic.ElasticsearchError as error_:
logger.error("Elasticsearch Error: {0}".format(
error_.__str__()))
try:
if args.kafka_hosts:
if opts.kafka_hosts:
kafka_client.save_aggregate_reports_to_kafka(
report, kafka_aggregate_topic)
except Exception as error_:
logger.error("Kafka Error: {0}".format(
error_.__str__()))
if args.hec:
if opts.hec:
try:
aggregate_reports_ = reports_["aggregate_reports"]
if len(aggregate_reports_) > 0:
@@ -64,27 +69,27 @@ def _main():
aggregate_reports_)
except splunk.SplunkError as e:
logger.error("Splunk HEC error: {0}".format(e.__str__()))
if args.save_forensic:
if opts.save_forensic:
for report in reports_["forensic_reports"]:
try:
if args.elasticsearch_host:
if opts.elasticsearch_host:
elastic.save_forensic_report_to_elasticsearch(
report,
index_suffix=args.elasticsearch_index_suffix,
monthly_indexes=args.elasticsearch_monthly_indexes)
index_suffix=opts.elasticsearch_index_suffix,
monthly_indexes=opts.elasticsearch_monthly_indexes)
except elastic.AlreadySaved as warning:
logger.warning(warning.__str__())
except elastic.ElasticsearchError as error_:
logger.error("Elasticsearch Error: {0}".format(
error_.__str__()))
try:
if args.kafka_hosts:
if opts.kafka_hosts:
kafka_client.save_forensic_reports_to_kafka(
report, kafka_forensic_topic)
except Exception as error_:
logger.error("Kafka Error: {0}".format(
error_.__str__()))
if args.hec:
if opts.hec:
try:
forensic_reports_ = reports_["forensic_reports"]
if len(forensic_reports_) > 0:
@@ -95,7 +100,8 @@ def _main():
arg_parser = ArgumentParser(description="Parses DMARC reports")
arg_parser.add_argument("-c", "--config-file",
help="A path to a configuration file")
help="A path to a configuration file "
"(--silent implied)")
arg_parser.add_argument("file_path", nargs="*",
help="one or more paths to aggregate or forensic "
"report files or emails")
@@ -108,119 +114,11 @@ def _main():
arg_parser.add_argument("-n", "--nameservers", nargs="+",
help="nameservers to query "
"(default is Cloudflare's nameservers)")
arg_parser.add_argument("-t", "--timeout",
arg_parser.add_argument("-t", "--dns_timeout",
help="number of seconds to wait for an answer "
"from DNS (default: 6.0)",
type=float,
default=6.0)
arg_parser.add_argument("-H", "--host",
help="an IMAP hostname or IP address")
arg_parser.add_argument("-u", "--user", help="an IMAP user")
arg_parser.add_argument("-p", "--password", help="an IMAP password")
arg_parser.add_argument("--imap-port", default=None, help="an IMAP port")
arg_parser.add_argument("--imap-skip-certificate-verification",
action="store_true",
default=False,
help="skip certificate verification for IMAP")
arg_parser.add_argument("--imap-no-ssl", action="store_true",
default=False,
help="do not use SSL/TLS when connecting to IMAP")
arg_parser.add_argument("-r", "--reports-folder", default="INBOX",
help="the IMAP folder containing the reports\n"
"(default: INBOX)")
arg_parser.add_argument("-a", "--archive-folder",
help="specifies the IMAP folder to move "
"messages to after processing them\n"
"(default: Archive)",
default="Archive")
arg_parser.add_argument("-d", "--delete",
help="delete the reports after processing them",
action="store_true", default=False)
arg_parser.add_argument("-E", "--elasticsearch-host", nargs="*",
help="one or more Elasticsearch "
"hostnames or URLs to use (e.g. "
"localhost:9200)")
arg_parser.add_argument("--elasticsearch-index-suffix",
help="append this suffix to the "
"dmarc_aggregate and dmarc_forensic "
"Elasticsearch index names, joined by _")
arg_parser.add_argument("--elasticsearch-use-ssl", default=False,
action="store_true",
help="Use SSL when connecting to Elasticsearch")
arg_parser.add_argument("--elasticsearch-ssl-cert-path", default=None,
help="Path to the Elasticsearch SSL certificate")
arg_parser.add_argument("--elasticsearch-monthly-indexes",
action="store_true", default=False,
help="Use monthly Elasticsearch indexes instead "
"of daily indexes")
arg_parser.add_argument("--hec", help="the URL to a Splunk HTTP Event "
"Collector (HEC)")
arg_parser.add_argument("--hec-token", help="the authorization token for "
"a Splunk "
"HTTP Event Collector (HEC)")
arg_parser.add_argument("--hec-index", help="the index to use when "
"sending events to the "
"Splunk HTTP Event Collector "
"(HEC)")
arg_parser.add_argument("--hec-skip-certificate-verification",
action="store_true",
default=False,
help="skip certificate verification for Splunk "
"HEC")
arg_parser.add_argument("-K", "--kafka-hosts", nargs="*",
help="a list of one or more Kafka hostnames")
arg_parser.add_argument("--kafka-username",
help='an optional Kafka username')
arg_parser.add_argument("--kafka-password",
help="an optional Kafka password")
arg_parser.add_argument("--kafka-use-ssl",
action="store_true",
help="use SSL/TLS to connect to Kafka "
"(implied when --kafka-username or "
"--kafka-password are provided)")
arg_parser.add_argument("--kafka-aggregate-topic",
help="the Kafka topic to publish aggregate "
"reports to (default: dmarc_aggregate)",
default="dmarc_aggregate")
arg_parser.add_argument("--kafka-forensic_topic",
help="the Kafka topic to publish forensic reports"
" to (default: dmarc_forensic)",
default="dmarc_forensic")
arg_parser.add_argument("--save-aggregate", action="store_true",
default=False,
help="save aggregate reports to search indexes")
arg_parser.add_argument("--save-forensic", action="store_true",
default=False,
help="save forensic reports to search indexes")
arg_parser.add_argument("-O", "--outgoing-host",
help="email the results using this host")
arg_parser.add_argument("-U", "--outgoing-user",
help="email the results using this user")
arg_parser.add_argument("-P", "--outgoing-password",
help="email the results using this password")
arg_parser.add_argument("--outgoing-port",
help="email the results using this port")
arg_parser.add_argument("--outgoing-ssl",
help="use SSL/TLS instead of STARTTLS (more "
"secure, and required by some providers, "
"like Gmail)")
arg_parser.add_argument("-F", "--outgoing-from",
help="email the results using this from address")
arg_parser.add_argument("-T", "--outgoing-to", nargs="+",
help="email the results to these addresses")
arg_parser.add_argument("-S", "--outgoing-subject",
help="email the results using this subject")
arg_parser.add_argument("-A", "--outgoing-attachment",
help="email the results using this filename")
arg_parser.add_argument("-M", "--outgoing-message",
help="email the results using this message")
arg_parser.add_argument("-w", "--watch", action="store_true",
help="use an IMAP IDLE connection to process "
"reports as they arrive in the inbox")
arg_parser.add_argument("--test",
help="do not move or delete IMAP messages",
action="store_true", default=False)
arg_parser.add_argument("-s", "--silent", action="store_true",
help="only print errors and warnings")
arg_parser.add_argument("--debug", action="store_true",
@@ -234,62 +132,221 @@ def _main():
forensic_reports = []
args = arg_parser.parse_args()
opts = Namespace(file_path=args.file_path,
config_file=args.config_file,
strip_attachment_payloads=args.strip_attachment_payloads,
output=args.output,
nameservers=args.nameservers,
silent=args.silent,
dns_timeout=args.dns_timeout,
debug=args.debug,
save_aggregate=False,
save_forensic=False,
imap_host=None,
imap_skip_certificate_verification=False,
imap_ssl=True,
imap_port=993,
imap_user=None,
imap_password=None,
imap_reports_folder="INBOX",
imap_archive_folder="Archive",
imap_watch=False,
imap_delete=False,
imap_test=False,
hec=None,
hec_token=None,
hec_index=None,
hec_skip_certificate_verification=False,
elasticsearch_host=None,
elasticsearch_index_suffix=None,
elasticsearch_monthly_indexes=False,
elasticsearch_ssl=True,
elasticsearch_ssl_cert_path=None,
kafka_hosts=None,
kafka_username=None,
kafka_password=None,
kafka_aggregate_topic=None,
kafka_forensic_topic=None,
kafka_ssl=False,
smtp_host=None,
smtp_port=25,
smtp_ssl=False,
smtp_user=None,
smtp_password=None,
smtp_from=None,
smtp_to=[],
smtp_subject="parsedmarc report",
smtp_message="Please see the attached DMARC results.",
log_file=args.log_file
)
args = arg_parser.parse_args()
if args.config_file:
opts.silent = True
config = ConfigParser()
config.read(args.config_file)
if "general" in config.sections():
general_config = config["general"]
if "strip_attachment_payloads" in general_config:
opts.strip_attachment_payloads = general_config[
"strip_attachment_payloads"]
if "output" in general_config:
opts.output = general_config["output"]
if "nameservers" in general_config:
opts.nameservers = _str_to_list(general_config["nameservers"])
if "dns_timeout" in general_config:
opts.dns_timeout = general_config.getfloat("dns_timeout")
if "save_aggregate" in general_config:
opts.save_aggregate = general_config.getboolean("save_aggregate")
if "save_forensic" in general_config:
opts.save_forensic = general_config.getboolean("save_forensic")
if "debug" in general_config:
opts.debug = general_config.getboolean("debug")
if "silent" in general_config:
opts.silent = general_config.getboolean("silent")
if "log_file" in general_config:
opts.log_file = general_config["log_file"]
if "imap" in config.sections():
imap_config = config["imap"]
if "host" in imap_config:
opts.imap_host = imap_config["host"]
if "port" in imap_config:
opts.imap_port = imap_config["port"]
if "ssl" in imap_config:
opts.imap_ssl = imap_config.getboolean("ssl")
if "skip_certificate_verification" in imap_config:
imap_verify = imap_config.getboolean(
"skip_certificate_verification")
opts.imap_skip_certificate_verification = imap_verify
if "user" in imap_config:
opts.imap_user = imap_config["user"]
if "password" in imap_config:
opts.imap_password = imap_config["password"]
if "reports_folder" in imap_config:
opts.imap_reports_folder = imap_config["reports_folder"]
if "archive_folder" in imap_config:
opts.imap_archive_folder = imap_config["archive_folder"]
if "watch" in imap_config:
opts.imap_watch = imap_config.getboolean("watch")
if "delete" in imap_config:
opts.imap_delete = imap_config.getboolean("delete")
if "test" in imap_config:
opts.imap_test = imap_config.getboolean("test")
if "elasticsearch" in config:
elasticsearch_config = config["elasticsearch"]
if "hosts" in elasticsearch_config:
opts.elasticsearch_host = _str_to_list(elasticsearch_config[
"hosts"])
if "index_suffix" in elasticsearch_config:
opts.elasticsearch_index_suffix = elasticsearch_config[
"index_suffix"]
if "monthly_indexes" in elasticsearch_config:
opts.elasticsearch_monthly_indexes = elasticsearch_config.getboolean(
"monthly_indexes")
if "ssl" in elasticsearch_config:
opts.elasticsearch_ssl = elasticsearch_config.getboolean(
"ssl")
if "cert_path" in elasticsearch_config:
opts.elasticsearch_ssl_cert_path = elasticsearch_config[
"cert_path"]
if "splunk_hec" in config.sections():
hec_config = config["splunk_hec"]
if "url" in hec_config:
opts.hec = hec_config["url"]
if "token" in hec_config:
opts.hec_token = hec_config["token"]
if "index" in hec_config:
opts.hec_index = hec_config["index"]
if "skip_certificate_verification" in hec_config:
opts.hec_skip_certificate_verification = hec_config.getboolean(
"skip_certificate_verification")
if "kafka" in config.sections():
kafka_config = config["kafka"]
if "hosts" in kafka_config:
opts.kafka_hosts = _str_to_list(kafka_config["hosts"])
if "user" in kafka_config:
opts.kafka_username = kafka_config["user"]
if "password" in kafka_config:
opts.kafka_password = kafka_config["password"]
if "ssl" in kafka_config:
opts.kafka_ssl = kafka_config.getboolean("ssl")
if "aggregate_topic" in kafka_config:
opts.kafka_aggregate_topic = kafka_config["aggregate_topic"]
if "forensic_topic" in kafka_config:
opts.kafka_forensic_topic = kafka_config["forensic_topic"]
if "smtp" in config.sections():
smtp_config = config["smtp"]
if "host" in smtp_config:
opts.smtp_host = smtp_config["host"]
if "port" in smtp_config:
opts.smtp_port = smtp_config["port"]
if "ssl" in smtp_config:
opts.smtp_ssl = smtp_config.getboolean("ssl")
if "username" in smtp_config:
opts.smtp_user = smtp_config["username"]
if "password" in smtp_config:
opts.smtp_password = smtp_config["password"]
if "from" in smtp_config:
opts.smtp_from = smtp_config["from"]
if "to" in smtp_config:
opts.smtp_to = _str_to_list(smtp_config["to"])
if "subject" in smtp_config:
opts.smtp_subject = smtp_config["subject"]
if "attachment" in smtp_config:
opts.smtp_attachment = smtp_config["attachment"]
if "message" in smtp_config:
opts.smtp_message = smtp_config["message"]
logging.basicConfig(level=logging.WARNING)
logger.setLevel(logging.WARNING)
if args.debug:
if opts.debug:
logging.basicConfig(level=logging.DEBUG)
logger.setLevel(logging.DEBUG)
if args.log_file:
fh = logging.FileHandler(args.log_file)
if opts.log_file:
fh = logging.FileHandler(opts.log_file)
formatter = logging.Formatter(
'%(asctime)s - '
'%(levelname)s - [%(filename)s:%(lineno)d] - %(message)s')
fh.setFormatter(formatter)
logger.addHandler(fh)
if args.host is None and len(args.file_path) == 0:
if opts.imap_host is None and len(opts.file_path) == 0:
arg_parser.print_help()
exit(1)
if args.save_aggregate or args.save_forensic:
if (args.elasticsearch_host is None and args.hec is None
and args.kafka_hosts is None):
args.elasticsearch_host = ["localhost:9200"]
if opts.save_aggregate or opts.save_forensic:
try:
if args.elasticsearch_host:
if opts.elasticsearch_host:
es_aggregate_index = "dmarc_aggregate"
es_forensic_index = "dmarc_forensic"
if args.elasticsearch_index_suffix:
suffix = args.elasticsearch_index_suffix
if opts.elasticsearch_index_suffix:
suffix = opts.elasticsearch_index_suffix
es_aggregate_index = "{0}_{1}".format(
es_aggregate_index, suffix)
es_forensic_index = "{0}_{1}".format(
es_forensic_index, suffix)
elastic.set_hosts(args.elasticsearch_host,
args.elasticsearch_use_ssl,
args.elasticsearch_ssl_cert_path)
elastic.set_hosts(opts.elasticsearch_host,
opts.elasticsearch_ssl,
opts.elasticsearch_ssl_cert_path)
elastic.migrate_indexes(aggregate_indexes=[es_aggregate_index],
forensic_indexes=[es_forensic_index])
except elastic.ElasticsearchError as error:
logger.error("Elasticsearch Error: {0}".format(error.__str__()))
exit(1)
if args.hec:
if args.hec_token is None or args.hec_index is None:
if opts.hec:
if opts.hec_token is None or opts.hec_index is None:
logger.error("HEC token and HEC index are required when "
"using HEC URL")
exit(1)
verify = True
if args.hec_skip_certificate_verification:
if opts.hec_skip_certificate_verification:
verify = False
hec_client = splunk.HECClient(args.hec, args.hec_token,
args.hec_index,
hec_client = splunk.HECClient(opts.hec, opts.hec_token,
opts.hec_index,
verify=verify)
kafka_aggregate_topic = args.kafka_aggregate_topic
kafka_forensic_topic = args.kafka_forensic_topic
kafka_aggregate_topic = opts.kafka_aggregate_topic
kafka_forensic_topic = opts.kafka_forensic_topic
file_paths = []
for file_path in args.file_path:
@@ -298,10 +355,10 @@ def _main():
for file_path in file_paths:
try:
sa = args.strip_attachment_payloads
sa = opts.strip_attachment_payloads
file_results = parse_report_file(file_path,
nameservers=args.nameservers,
timeout=args.timeout,
nameservers=opts.nameservers,
dns_timeout=opts.dns_timeout,
strip_attachment_payloads=sa)
if file_results["report_type"] == "aggregate":
aggregate_reports.append(file_results["report"])
@@ -312,36 +369,36 @@ def _main():
logger.error("Failed to parse {0} - {1}".format(file_path,
error))
if args.host:
if opts.imap_host:
try:
if args.user is None or args.password is None:
logger.error("user and password must be specified if"
if opts.imap_user is None or opts.imap_password is None:
logger.error("IMAP user and password must be specified if "
"host is specified")
rf = args.reports_folder
af = args.archive_folder
ns = args.nameservers
sa = args.strip_attachment_payloads
rf = opts.imap_reports_folder
af = opts.imap_archive_folder
ns = opts.nameservers
sa = opts.strip_attachment_payloads
ssl = True
ssl_context = None
if args.imap_skip_certificate_verification:
if opts.imap_skip_certificate_verification:
logger.debug("Skipping IMAP certificate verification")
ssl_context = create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = CERT_NONE
if args.imap_no_ssl:
if opts.imap_ssl is False:
ssl = False
reports = get_dmarc_reports_from_inbox(host=args.host,
port=args.imap_port,
reports = get_dmarc_reports_from_inbox(host=opts.imap_host,
port=opts.imap_port,
ssl=ssl,
ssl_context=ssl_context,
user=args.user,
password=args.password,
user=opts.imap_user,
password=opts.imap_password,
reports_folder=rf,
archive_folder=af,
delete=args.delete,
delete=opts.imap_delete,
nameservers=ns,
test=args.test,
test=opts.imap_test,
strip_attachment_payloads=sa
)
@@ -355,46 +412,48 @@ def _main():
results = OrderedDict([("aggregate_reports", aggregate_reports),
("forensic_reports", forensic_reports)])
if args.output:
save_output(results, output_directory=args.output)
if opts.output:
save_output(results, output_directory=opts.output)
process_reports(results)
if args.outgoing_host:
if args.outgoing_from is None or args.outgoing_to is None:
logger.error("--outgoing-from and --outgoing-to must "
"be provided if --outgoing-host is used")
if opts.smtp_host:
if opts.smtp_from is None or opts.smtp_to is None:
logger.error("Missing mail from and/or mail to")
exit(1)
try:
email_results(results, args.outgoing_host, args.outgoing_from,
args.outgoing_to, use_ssl=args.outgoing_ssl,
user=args.outgoing_user,
password=args.outgoing_password,
subject=args.outgoing_subject)
email_results(results, opts.smtp_host, opts.smtp_from,
opts.smtp_to, ssl=opts.smtp_ssl,
user=opts.smtp_user,
password=opts.smtp_password,
subject=opts.smtp_subject)
except SMTPError as error:
logger.error("SMTP Error: {0}".format(error.__str__()))
exit(1)
if args.host and args.watch:
if opts.imap_host and opts.imap_watch:
logger.info("Watching for email - Quit with ctrl-c")
ssl = True
ssl_context = None
if args.imap_skip_certificate_verification:
if opts.imap_skip_certificate_verification:
logger.debug("Skipping IMAP certificate verification")
ssl_context = create_default_context()
ssl_context.check_hostname = False
ssl_context.verify_mode = CERT_NONE
if args.imap_no_ssl:
if opts.imap_ssl is False:
ssl = False
try:
sa = args.strip_attachment_payloads
watch_inbox(args.host, args.user, args.password, process_reports,
port=args.imap_port, ssl=ssl, ssl_context=ssl_context,
reports_folder=args.reports_folder,
archive_folder=args.archive_folder, delete=args.delete,
test=args.test, nameservers=args.nameservers,
dns_timeout=args.timeout, strip_attachment_payloads=sa)
sa = opts.strip_attachment_payloads
watch_inbox(opts.imap_host, opts.imap_user, opts.imap_password,
process_reports, port=opts.imap_port, ssl=ssl,
ssl_context=ssl_context,
reports_folder=opts.imap_reports_folder,
archive_folder=opts.imap_archive_folder,
delete=opts.imap_delete,
test=opts.imap_test, nameservers=opts.nameservers,
dns_timeout=opts.dns_timeout,
strip_attachment_payloads=sa)
except IMAPError as error:
logger.error("IMAP error: {0}".format(error.__str__()))
exit(1)

View File

@@ -2,7 +2,7 @@
import logging
import json
import ssl
from ssl import create_default_context
from kafka import KafkaProducer
from kafka.errors import NoBrokersAvailable, UnknownTopicOrPartitionError
@@ -19,14 +19,14 @@ class KafkaError(RuntimeError):
class KafkaClient(object):
def __init__(self, kafka_hosts, use_ssl=False, username=None,
def __init__(self, kafka_hosts, ssl=False, username=None,
password=None):
"""
Initializes the Kafka client
Args:
kafka_hosts (list): A list of Kafka hostnames
(with optional port numbers)
use_ssl (bool): Use a SSL/TLS connection
ssl (bool): Use a SSL/TLS connection
username (str): An optional username
password (str): An optional password
@@ -42,9 +42,9 @@ class KafkaClient(object):
'utf-8'),
bootstrap_servers=kafka_hosts,
client_id="parsedmarc-{0}".format(__version__))
if use_ssl or username or password:
if ssl or username or password:
config["security_protocol"] = "SSL"
config["ssl_context"] = ssl.create_default_context()
config["ssl_context"] = create_default_context()
if username or password:
config["sasl_plain_username"] = username or ""
config["sasl_plain_password"] = password or ""

View File

@@ -14,7 +14,7 @@ from setuptools import setup
from codecs import open
from os import path
__version__ = "5.3.0"
__version__ = "6.0.0"
description = "A Python package and CLI for parsing aggregate and " \
"forensic DMARC reports"