Mirror of https://github.com/domainaware/parsedmarc.git (synced 2026-04-04 20:58:53 +00:00)
Rename forensic references to failure in cli.py
- Rename all forensic_* variables to failure_*
- Update CLI argument names (--forensic-* to --failure-*)
- Update default filenames (forensic.json/csv to failure.json/csv)
- Update function calls to match renamed output module functions
- Update index names (dmarc_forensic to dmarc_failure)
- Update report type strings and dict keys
- Add backward-compatible config key reading (accept both old and new names)
- Update help text and log messages

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
Committed by: Sean Whalen
Parent: 07c518f19d
Commit: 8ebc290ea1
@@ -319,14 +319,18 @@ def _parse_config(config: ConfigParser, opts):
             opts.output = _expand_path(general_config["output"])
         if "aggregate_json_filename" in general_config:
             opts.aggregate_json_filename = general_config["aggregate_json_filename"]
-        if "forensic_json_filename" in general_config:
-            opts.forensic_json_filename = general_config["forensic_json_filename"]
+        if "failure_json_filename" in general_config:
+            opts.failure_json_filename = general_config["failure_json_filename"]
+        elif "forensic_json_filename" in general_config:
+            opts.failure_json_filename = general_config["forensic_json_filename"]
         if "smtp_tls_json_filename" in general_config:
             opts.smtp_tls_json_filename = general_config["smtp_tls_json_filename"]
         if "aggregate_csv_filename" in general_config:
             opts.aggregate_csv_filename = general_config["aggregate_csv_filename"]
-        if "forensic_csv_filename" in general_config:
-            opts.forensic_csv_filename = general_config["forensic_csv_filename"]
+        if "failure_csv_filename" in general_config:
+            opts.failure_csv_filename = general_config["failure_csv_filename"]
+        elif "forensic_csv_filename" in general_config:
+            opts.failure_csv_filename = general_config["forensic_csv_filename"]
         if "smtp_tls_csv_filename" in general_config:
             opts.smtp_tls_csv_filename = general_config["smtp_tls_csv_filename"]
         if "dns_timeout" in general_config:
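
The hunk above is the backward-compatible key reading the commit message describes: prefer the new failure_* key and fall back to the legacy forensic_* key only when the new one is absent. A minimal standalone sketch of the same pattern (the config contents here are hypothetical):

from configparser import ConfigParser

# Hypothetical config that still uses the legacy key name.
config = ConfigParser()
config.read_string("""
[general]
forensic_json_filename = fo.json
""")
general_config = config["general"]

# Prefer the new key; fall back to the legacy one, as _parse_config does above.
if "failure_json_filename" in general_config:
    failure_json_filename = general_config["failure_json_filename"]
elif "forensic_json_filename" in general_config:
    failure_json_filename = general_config["forensic_json_filename"]
else:
    failure_json_filename = "failure.json"  # the CLI default per the argparse hunk below

print(failure_json_filename)  # -> fo.json
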
@@ -357,8 +361,10 @@ def _parse_config(config: ConfigParser, opts):
             )
         if "save_aggregate" in general_config:
             opts.save_aggregate = bool(general_config.getboolean("save_aggregate"))
-        if "save_forensic" in general_config:
-            opts.save_forensic = bool(general_config.getboolean("save_forensic"))
+        if "save_failure" in general_config:
+            opts.save_failure = bool(general_config.getboolean("save_failure"))
+        elif "save_forensic" in general_config:
+            opts.save_failure = bool(general_config.getboolean("save_forensic"))
         if "save_smtp_tls" in general_config:
             opts.save_smtp_tls = bool(general_config.getboolean("save_smtp_tls"))
         if "debug" in general_config:
@@ -737,11 +743,13 @@ def _parse_config(config: ConfigParser, opts):
             raise ConfigurationError(
                 "aggregate_topic setting missing from the kafka config section"
             )
-        if "forensic_topic" in kafka_config:
-            opts.kafka_forensic_topic = kafka_config["forensic_topic"]
+        if "failure_topic" in kafka_config:
+            opts.kafka_failure_topic = kafka_config["failure_topic"]
+        elif "forensic_topic" in kafka_config:
+            opts.kafka_failure_topic = kafka_config["forensic_topic"]
         else:
             raise ConfigurationError(
-                "forensic_topic setting missing from the kafka config section"
+                "failure_topic setting missing from the kafka config section"
             )
         if "smtp_tls_topic" in kafka_config:
             opts.kafka_smtp_tls_topic = kafka_config["smtp_tls_topic"]
@@ -905,7 +913,9 @@ def _parse_config(config: ConfigParser, opts):
         opts.la_dce = log_analytics_config.get("dce")
         opts.la_dcr_immutable_id = log_analytics_config.get("dcr_immutable_id")
         opts.la_dcr_aggregate_stream = log_analytics_config.get("dcr_aggregate_stream")
-        opts.la_dcr_forensic_stream = log_analytics_config.get("dcr_forensic_stream")
+        opts.la_dcr_failure_stream = log_analytics_config.get(
+            "dcr_failure_stream"
+        ) or log_analytics_config.get("dcr_forensic_stream")
         opts.la_dcr_smtp_tls_stream = log_analytics_config.get("dcr_smtp_tls_stream")

     if "gelf" in config.sections():
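
One nuance of the fallback above: .get() returns None for a missing key, so `or` moves on to the legacy key; an empty value for the new key triggers the fallback too, because an empty string is falsy. A quick illustration with a plain dict standing in for the config section (values hypothetical):

# Stand-in for the log_analytics config section; values are hypothetical.
settings = {"dcr_forensic_stream": "Custom-DmarcFailureReports"}

# Same fallback shape as opts.la_dcr_failure_stream above.
stream = settings.get("dcr_failure_stream") or settings.get("dcr_forensic_stream")
print(stream)  # -> Custom-DmarcFailureReports

settings["dcr_failure_stream"] = ""  # an empty new key also falls back
stream = settings.get("dcr_failure_stream") or settings.get("dcr_forensic_stream")
print(stream)  # -> Custom-DmarcFailureReports
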
@@ -933,8 +943,10 @@ def _parse_config(config: ConfigParser, opts):
         webhook_config = config["webhook"]
         if "aggregate_url" in webhook_config:
             opts.webhook_aggregate_url = webhook_config["aggregate_url"]
-        if "forensic_url" in webhook_config:
-            opts.webhook_forensic_url = webhook_config["forensic_url"]
+        if "failure_url" in webhook_config:
+            opts.webhook_failure_url = webhook_config["failure_url"]
+        elif "forensic_url" in webhook_config:
+            opts.webhook_failure_url = webhook_config["forensic_url"]
         if "smtp_tls_url" in webhook_config:
             opts.webhook_smtp_tls_url = webhook_config["smtp_tls_url"]
         if "timeout" in webhook_config:
@@ -1064,12 +1076,12 @@ def _init_output_clients(opts):
     try:
         if (
             opts.webhook_aggregate_url
-            or opts.webhook_forensic_url
+            or opts.webhook_failure_url
             or opts.webhook_smtp_tls_url
         ):
             clients["webhook_client"] = webhook.WebhookClient(
                 aggregate_url=opts.webhook_aggregate_url,
-                forensic_url=opts.webhook_forensic_url,
+                failure_url=opts.webhook_failure_url,
                 smtp_tls_url=opts.webhook_smtp_tls_url,
                 timeout=opts.webhook_timeout,
             )
@@ -1081,21 +1093,21 @@ def _init_output_clients(opts):
     # step fails. Initialise them last so that all other clients are created
     # successfully first; this minimises the window for partial-init problems
     # during config reload.
-    if opts.save_aggregate or opts.save_forensic or opts.save_smtp_tls:
+    if opts.save_aggregate or opts.save_failure or opts.save_smtp_tls:
         try:
             if opts.elasticsearch_hosts:
                 es_aggregate_index = "dmarc_aggregate"
-                es_forensic_index = "dmarc_forensic"
+                es_failure_index = "dmarc_failure"
                 es_smtp_tls_index = "smtp_tls"
                 if opts.elasticsearch_index_suffix:
                     suffix = opts.elasticsearch_index_suffix
                     es_aggregate_index = "{0}_{1}".format(es_aggregate_index, suffix)
-                    es_forensic_index = "{0}_{1}".format(es_forensic_index, suffix)
+                    es_failure_index = "{0}_{1}".format(es_failure_index, suffix)
                     es_smtp_tls_index = "{0}_{1}".format(es_smtp_tls_index, suffix)
                 if opts.elasticsearch_index_prefix:
                     prefix = opts.elasticsearch_index_prefix
                     es_aggregate_index = "{0}{1}".format(prefix, es_aggregate_index)
-                    es_forensic_index = "{0}{1}".format(prefix, es_forensic_index)
+                    es_failure_index = "{0}{1}".format(prefix, es_failure_index)
                     es_smtp_tls_index = "{0}{1}".format(prefix, es_smtp_tls_index)
                 elastic_timeout_value = (
                     float(opts.elasticsearch_timeout)
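
Note the composition order in the index-name logic above: the suffix is appended with an underscore first, then the prefix is prepended verbatim. A small sketch with hypothetical prefix/suffix settings:

def build_index_name(base, prefix=None, suffix=None):
    # Mirrors the suffix-then-prefix composition used in _init_output_clients.
    if suffix:
        base = "{0}_{1}".format(base, suffix)
    if prefix:
        base = "{0}{1}".format(prefix, base)
    return base

# Hypothetical settings: index prefix "prod-", index suffix "2026".
print(build_index_name("dmarc_failure", prefix="prod-", suffix="2026"))
# -> prod-dmarc_failure_2026
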
@@ -1114,7 +1126,7 @@ def _init_output_clients(opts):
                 )
                 elastic.migrate_indexes(
                     aggregate_indexes=[es_aggregate_index],
-                    forensic_indexes=[es_forensic_index],
+                    failure_indexes=[es_failure_index],
                 )
                 clients["elasticsearch"] = _ElasticsearchHandle()
         except Exception as e:
@@ -1123,17 +1135,17 @@ def _init_output_clients(opts):
         try:
             if opts.opensearch_hosts:
                 os_aggregate_index = "dmarc_aggregate"
-                os_forensic_index = "dmarc_forensic"
+                os_failure_index = "dmarc_failure"
                 os_smtp_tls_index = "smtp_tls"
                 if opts.opensearch_index_suffix:
                     suffix = opts.opensearch_index_suffix
                     os_aggregate_index = "{0}_{1}".format(os_aggregate_index, suffix)
-                    os_forensic_index = "{0}_{1}".format(os_forensic_index, suffix)
+                    os_failure_index = "{0}_{1}".format(os_failure_index, suffix)
                     os_smtp_tls_index = "{0}_{1}".format(os_smtp_tls_index, suffix)
                 if opts.opensearch_index_prefix:
                     prefix = opts.opensearch_index_prefix
                     os_aggregate_index = "{0}{1}".format(prefix, os_aggregate_index)
-                    os_forensic_index = "{0}{1}".format(prefix, os_forensic_index)
+                    os_failure_index = "{0}{1}".format(prefix, os_failure_index)
                     os_smtp_tls_index = "{0}{1}".format(prefix, os_smtp_tls_index)
                 opensearch_timeout_value = (
                     float(opts.opensearch_timeout)
@@ -1155,7 +1167,7 @@ def _init_output_clients(opts):
                 )
                 opensearch.migrate_indexes(
                     aggregate_indexes=[os_aggregate_index],
-                    forensic_indexes=[os_forensic_index],
+                    failure_indexes=[os_failure_index],
                 )
                 clients["opensearch"] = _OpenSearchHandle()
         except Exception as e:
@@ -1247,10 +1259,10 @@ def _main():
             reports_,
             output_directory=opts.output,
             aggregate_json_filename=opts.aggregate_json_filename,
-            forensic_json_filename=opts.forensic_json_filename,
+            failure_json_filename=opts.failure_json_filename,
             smtp_tls_json_filename=opts.smtp_tls_json_filename,
             aggregate_csv_filename=opts.aggregate_csv_filename,
-            forensic_csv_filename=opts.forensic_csv_filename,
+            failure_csv_filename=opts.failure_csv_filename,
             smtp_tls_csv_filename=opts.smtp_tls_csv_filename,
         )

@@ -1262,7 +1274,7 @@ def _main():
         webhook_client = clients.get("webhook_client")

         kafka_aggregate_topic = opts.kafka_aggregate_topic
-        kafka_forensic_topic = opts.kafka_forensic_topic
+        kafka_failure_topic = opts.kafka_failure_topic
         kafka_smtp_tls_topic = opts.kafka_smtp_tls_topic

         if opts.save_aggregate:
@@ -1350,13 +1362,13 @@ def _main():
             except splunk.SplunkError as e:
                 log_output_error("Splunk HEC", e.__str__())

-        if opts.save_forensic:
-            for report in reports_["forensic_reports"]:
+        if opts.save_failure:
+            for report in reports_["failure_reports"]:
                 try:
                     shards = opts.elasticsearch_number_of_shards
                     replicas = opts.elasticsearch_number_of_replicas
                     if opts.elasticsearch_hosts:
-                        elastic.save_forensic_report_to_elasticsearch(
+                        elastic.save_failure_report_to_elasticsearch(
                             report,
                             index_suffix=opts.elasticsearch_index_suffix,
                             index_prefix=opts.elasticsearch_index_prefix
@@ -1376,7 +1388,7 @@ def _main():
                     shards = opts.opensearch_number_of_shards
                     replicas = opts.opensearch_number_of_replicas
                     if opts.opensearch_hosts:
-                        opensearch.save_forensic_report_to_opensearch(
+                        opensearch.save_failure_report_to_opensearch(
                             report,
                             index_suffix=opts.opensearch_index_suffix,
                             index_prefix=opts.opensearch_index_prefix
@@ -1394,34 +1406,34 @@ def _main():

                 try:
                     if kafka_client:
-                        kafka_client.save_forensic_reports_to_kafka(
-                            report, kafka_forensic_topic
+                        kafka_client.save_failure_reports_to_kafka(
+                            report, kafka_failure_topic
                         )
                 except Exception as error_:
                     log_output_error("Kafka", error_.__str__())

                 try:
                     if s3_client:
-                        s3_client.save_forensic_report_to_s3(report)
+                        s3_client.save_failure_report_to_s3(report)
                 except Exception as error_:
                     log_output_error("S3", error_.__str__())

                 try:
                     if syslog_client:
-                        syslog_client.save_forensic_report_to_syslog(report)
+                        syslog_client.save_failure_report_to_syslog(report)
                 except Exception as error_:
                     log_output_error("Syslog", error_.__str__())

                 try:
                     if gelf_client:
-                        gelf_client.save_forensic_report_to_gelf(report)
+                        gelf_client.save_failure_report_to_gelf(report)
                 except Exception as error_:
                     log_output_error("GELF", error_.__str__())

                 try:
-                    if opts.webhook_forensic_url and webhook_client:
+                    if opts.webhook_failure_url and webhook_client:
                         indent_value = 2 if opts.prettify_json else None
-                        webhook_client.save_forensic_report_to_webhook(
+                        webhook_client.save_failure_report_to_webhook(
                             json.dumps(report, ensure_ascii=False, indent=indent_value)
                         )
                 except Exception as error_:
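
Each output sink in the loop above gets its own try/except, so a failure in one client (say, S3) does not prevent delivery of the same failure report to the remaining clients. The shape, reduced to its essentials (the sink functions here are hypothetical stand-ins for the S3/Syslog/GELF/webhook clients):

def failing_sink(report):
    raise RuntimeError("bucket unreachable")  # simulate an S3 outage

def syslog_sink(report):
    print("sent to syslog:", report)

def deliver(report, sinks):
    # Per-sink isolation: one failing sink must not block the others,
    # matching the independent try/except blocks in _main() above.
    for name, send in sinks:
        try:
            send(report)
        except Exception as error_:
            print("Error in {0}: {1}".format(name, error_))

deliver({"sample": True}, [("S3", failing_sink), ("Syslog", syslog_sink)])
# -> Error in S3: bucket unreachable
# -> sent to syslog: {'sample': True}
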
@@ -1429,9 +1441,9 @@ def _main():

         if hec_client:
             try:
-                forensic_reports_ = reports_["forensic_reports"]
-                if len(forensic_reports_) > 0:
-                    hec_client.save_forensic_reports_to_splunk(forensic_reports_)
+                failure_reports_ = reports_["failure_reports"]
+                if len(failure_reports_) > 0:
+                    hec_client.save_failure_reports_to_splunk(failure_reports_)
             except splunk.SplunkError as e:
                 log_output_error("Splunk HEC", e.__str__())

@@ -1529,13 +1541,13 @@ def _main():
                 dce=opts.la_dce,
                 dcr_immutable_id=opts.la_dcr_immutable_id,
                 dcr_aggregate_stream=opts.la_dcr_aggregate_stream,
-                dcr_forensic_stream=opts.la_dcr_forensic_stream,
+                dcr_failure_stream=opts.la_dcr_failure_stream,
                 dcr_smtp_tls_stream=opts.la_dcr_smtp_tls_stream,
             )
             la_client.publish_results(
                 reports_,
                 opts.save_aggregate,
-                opts.save_forensic,
+                opts.save_failure,
                 opts.save_smtp_tls,
             )
         except loganalytics.LogAnalyticsException as e:
@@ -1575,9 +1587,9 @@ def _main():
         default="aggregate.json",
     )
     arg_parser.add_argument(
-        "--forensic-json-filename",
-        help="filename for the forensic JSON output file",
-        default="forensic.json",
+        "--failure-json-filename",
+        help="filename for the failure JSON output file",
+        default="failure.json",
     )
     arg_parser.add_argument(
         "--smtp-tls-json-filename",
@@ -1590,9 +1602,9 @@ def _main():
         default="aggregate.csv",
     )
     arg_parser.add_argument(
-        "--forensic-csv-filename",
-        help="filename for the forensic CSV output file",
-        default="forensic.csv",
+        "--failure-csv-filename",
+        help="filename for the failure CSV output file",
+        default="failure.csv",
     )
     arg_parser.add_argument(
         "--smtp-tls-csv-filename",
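
Because argparse converts hyphens in option strings to underscores, the renamed flags surface as args.failure_json_filename and args.failure_csv_filename, matching the keyword arguments passed further down. A self-contained sketch of the two renamed options (the parser itself is illustrative):

import argparse

# Illustrative parser containing only the two renamed options.
arg_parser = argparse.ArgumentParser(description="renamed failure output options")
arg_parser.add_argument(
    "--failure-json-filename",
    help="filename for the failure JSON output file",
    default="failure.json",
)
arg_parser.add_argument(
    "--failure-csv-filename",
    help="filename for the failure CSV output file",
    default="failure.csv",
)

args = arg_parser.parse_args(["--failure-json-filename", "fo.json"])
print(args.failure_json_filename)  # -> fo.json
print(args.failure_csv_filename)   # -> failure.csv (the default)
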
@@ -1639,7 +1651,7 @@ def _main():
     arg_parser.add_argument("-v", "--version", action="version", version=__version__)

     aggregate_reports = []
-    forensic_reports = []
+    failure_reports = []
     smtp_tls_reports = []

     args = arg_parser.parse_args()
@@ -1652,8 +1664,8 @@ def _main():
         output=args.output,
         aggregate_csv_filename=args.aggregate_csv_filename,
         aggregate_json_filename=args.aggregate_json_filename,
-        forensic_csv_filename=args.forensic_csv_filename,
-        forensic_json_filename=args.forensic_json_filename,
+        failure_csv_filename=args.failure_csv_filename,
+        failure_json_filename=args.failure_json_filename,
         smtp_tls_json_filename=args.smtp_tls_json_filename,
         smtp_tls_csv_filename=args.smtp_tls_csv_filename,
         nameservers=args.nameservers,
@@ -1665,7 +1677,7 @@ def _main():
         verbose=args.verbose,
         prettify_json=args.prettify_json,
         save_aggregate=False,
-        save_forensic=False,
+        save_failure=False,
         save_smtp_tls=False,
         mailbox_reports_folder="INBOX",
         mailbox_archive_folder="Archive",
@@ -1731,7 +1743,7 @@ def _main():
         kafka_username=None,
         kafka_password=None,
         kafka_aggregate_topic=None,
-        kafka_forensic_topic=None,
+        kafka_failure_topic=None,
         kafka_smtp_tls_topic=None,
         kafka_ssl=False,
         kafka_skip_certificate_verification=False,
@@ -1782,13 +1794,13 @@ def _main():
         la_dce=None,
         la_dcr_immutable_id=None,
         la_dcr_aggregate_stream=None,
-        la_dcr_forensic_stream=None,
+        la_dcr_failure_stream=None,
         la_dcr_smtp_tls_stream=None,
         gelf_host=None,
         gelf_port=None,
         gelf_mode=None,
         webhook_aggregate_url=None,
-        webhook_forensic_url=None,
+        webhook_failure_url=None,
         webhook_smtp_tls_url=None,
         webhook_timeout=60,
         normalize_timespan_threshold_hours=24.0,
@@ -1952,8 +1964,8 @@ def _main():
                         "Skipping duplicate aggregate report "
                         f"from {report_org} with ID: {report_id}"
                     )
-                elif result[0]["report_type"] == "forensic":
-                    forensic_reports.append(result[0]["report"])
+                elif result[0]["report_type"] == "failure":
+                    failure_reports.append(result[0]["report"])
                 elif result[0]["report_type"] == "smtp_tls":
                     smtp_tls_reports.append(result[0]["report"])

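
The dispatch above routes each parsed result into a per-type list keyed on report_type; after the rename, the "failure" string replaces "forensic". A condensed sketch of the same routing (the result structure is hypothetical but mirrors the shape used above):

aggregate_reports, failure_reports, smtp_tls_reports = [], [], []

# Hypothetical parsed result, shaped like result[0] in _main() above.
result = [{"report_type": "failure", "report": {"arrival_date": "2026-01-01"}}]

if result[0]["report_type"] == "aggregate":
    aggregate_reports.append(result[0]["report"])
elif result[0]["report_type"] == "failure":   # was "forensic" before the rename
    failure_reports.append(result[0]["report"])
elif result[0]["report_type"] == "smtp_tls":
    smtp_tls_reports.append(result[0]["report"])

print(len(failure_reports))  # -> 1
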
@@ -1977,7 +1989,7 @@ def _main():
             normalize_timespan_threshold_hours=normalize_timespan_threshold_hours_value,
         )
         aggregate_reports += reports["aggregate_reports"]
-        forensic_reports += reports["forensic_reports"]
+        failure_reports += reports["failure_reports"]
         smtp_tls_reports += reports["smtp_tls_reports"]

         mailbox_connection = None
@@ -2117,7 +2129,7 @@ def _main():
             )

             aggregate_reports += reports["aggregate_reports"]
-            forensic_reports += reports["forensic_reports"]
+            failure_reports += reports["failure_reports"]
             smtp_tls_reports += reports["smtp_tls_reports"]

         except Exception:
@@ -2126,7 +2138,7 @@ def _main():

     parsing_results: ParsingResults = {
         "aggregate_reports": aggregate_reports,
-        "forensic_reports": forensic_reports,
+        "failure_reports": failure_reports,
         "smtp_tls_reports": smtp_tls_reports,
     }
