mirror of
https://github.com/domainaware/parsedmarc.git
synced 2026-02-23 17:46:24 +00:00
4.5.0
This commit is contained in:
@@ -1,8 +1,8 @@
|
||||
4.5.0
|
||||
-----
|
||||
|
||||
- Bugfix: `fo` tag is a string, not an integer (closes issue #31)
|
||||
- Bugfix: IDLE email processing in Gmail/G-Suite accounts (closes issue #33)
|
||||
- Bugfix: IDLE email processing in Gmail/G-Suite accounts (closes issue #33)
|
||||
- Bugfix: Fix inaccurate DNS timeout in CLI documentation (closes issue #34)
|
||||
- Cache DNS queries in memory
|
||||
|
||||
4.4.1
|
||||
|
||||
@@ -100,7 +100,7 @@ def _main():
|
||||
"(Default is Cloudflare's nameservers)")
|
||||
arg_parser.add_argument("-t", "--timeout",
|
||||
help="number of seconds to wait for an answer "
|
||||
"from DNS (Default: 2.0)",
|
||||
"from DNS (Default: 6.0)",
|
||||
type=float,
|
||||
default=6.0)
|
||||
arg_parser.add_argument("-H", "--host", help="IMAP hostname or IP address")
|
||||
@@ -240,6 +240,8 @@ def _main():
|
||||
if args.elasticsearch_host:
|
||||
elastic.set_hosts(args.elasticsearch_host)
|
||||
elastic.create_indexes([es_aggregate_index, es_forensic_index])
|
||||
elastic.migrate_indexes(aggregate_indexes=[es_aggregate_index],
|
||||
forensic_indexes=[es_forensic_index])
|
||||
except elastic.ElasticsearchError as error:
|
||||
logger.error("Elasticsearch Error: {0}".format(error.__str__()))
|
||||
exit(1)
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import logging
|
||||
from collections import OrderedDict
|
||||
import json
|
||||
|
||||
from elasticsearch_dsl.search import Q
|
||||
from elasticsearch_dsl import connections, Object, Document, Index, Nested, \
|
||||
@@ -28,7 +29,7 @@ class _PublishedPolicy(InnerDoc):
|
||||
p = Text()
|
||||
sp = Text()
|
||||
pct = Integer()
|
||||
fo = Text()
|
||||
fo = Integer() # TODO: Change this to Text (issue #31)
|
||||
|
||||
|
||||
class _DKIMResult(InnerDoc):
|
||||
@@ -202,6 +203,32 @@ def create_indexes(names=None, settings=None):
|
||||
"Elasticsearch error: {0}".format(e.__str__()))
|
||||
|
||||
|
||||
def migrate_indexes(aggregate_indexes=None, forensic_indexes=None):
    """
    Updates index mappings

    Args:
        aggregate_indexes (list): A list of aggregate index names
        forensic_indexes (list): A list of forensic index names
    """
    # Normalize missing arguments to empty lists (never mutate a shared default)
    aggregate_indexes = [] if aggregate_indexes is None else aggregate_indexes
    forensic_indexes = [] if forensic_indexes is None else forensic_indexes

    for index_name in aggregate_indexes:
        index = Index(index_name)
        # Inspect the live mapping of published_policy.fo; it was originally
        # (incorrectly) mapped as an integer, which Elasticsearch reports as
        # "long" (see issue #31).
        field_path = "published_policy.fo"
        mappings = index.get_field_mapping(fields=[field_path])
        fo_mapping = mappings[index_name]["mappings"]["doc"][
            field_path]["mapping"]["fo"]
        if fo_mapping["type"] == "long":
            pass  # TODO: Do reindex, delete, and alias here (issue #31)

    for _ in forensic_indexes:
        # No forensic index migrations are required yet.
        pass
|
||||
|
||||
|
||||
def save_aggregate_report_to_elasticsearch(aggregate_report,
|
||||
index="dmarc_aggregate"):
|
||||
"""
|
||||
|
||||
Reference in New Issue
Block a user