Added argument parsing, changed name of config file
[dmarc.git] / dmarc_to_database
#!/usr/bin/python3

import configparser
import imaplib
import email
import io
import zipfile
import xml.etree.ElementTree
import psycopg2
import re
import datetime
import argparse

parser = argparse.ArgumentParser(description='Process DMARC records.')
parser.add_argument('-c', '--config', action='store',
                    default='', dest='config_file',
                    help='Path to config file')
parser.add_argument('-t', '--test', action='store_true',
                    default=False,
                    help='Test, but do not add records to the database')
args = parser.parse_args()
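
# Typical invocations (paths shown here are illustrative):
#   ./dmarc_to_database --config /etc/dmarc_to_database.ini
#   ./dmarc_to_database --test     # parse reports but skip the database writes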

def fetch_msg(num):
    # Fetch one message by IMAP UID and return its raw RFC822 bytes
    # (uses the module-level `mailbox` connection opened further down).
    return mailbox.uid('FETCH', num, '(RFC822)')[1][0][1]

def xml_of_part(part):
    # Unpack a zipped report attachment and parse its first member as XML.
    with zipfile.ZipFile(io.BytesIO(part.get_payload(decode=True))) as zf:
        fn = zf.infolist()[0].filename
        contents = zf.read(fn).decode('utf-8')
        return xml.etree.ElementTree.fromstring(contents)


def xml_of(message):
    # Collect the parsed XML of every zip attachment in a message,
    # whether or not the message is multipart.
    reports = []
    if message.is_multipart():
        for p in message.get_payload():
            if 'zip' in p.get_content_type():
                reports += [xml_of_part(p)]
    else:
        reports = [xml_of_part(message)]
    return reports

def extract_report(msg):
    # Parse raw message bytes into a list of DMARC report XML trees.
    pmsg = email.message_from_bytes(msg)
    return xml_of(pmsg)

def maybe_strip(text):
    # strip() that tolerates None (e.g. an empty XML element).
    if text:
        return text.strip()
    else:
        return ''

# Map each XPath in an aggregate report ('{}' is filled in with the record
# index for per-record fields) to its destination column, table, and type.
field_maps = {
    './policy_published/adkim': {'pg_field_name': 'policy_published_adkim',
                                 'pg_table': 'reports',
                                 'pg_type': 'varchar'},
    './policy_published/aspf': {'pg_field_name': 'policy_published_aspf',
                                'pg_table': 'reports',
                                'pg_type': 'varchar'},
    './policy_published/domain': {'pg_field_name': 'policy_published_domain',
                                  'pg_table': 'reports',
                                  'pg_type': 'varchar'},
    './policy_published/p': {'pg_field_name': 'policy_published_p',
                             'pg_table': 'reports',
                             'pg_type': 'varchar'},
    './policy_published/pct': {'pg_field_name': 'policy_published_pct',
                               'pg_table': 'reports',
                               'pg_type': 'int'},
    './record[{}]/auth_results/dkim/domain': {'pg_field_name': 'auth_results_dkim_domain',
                                              'pg_table': 'report_items',
                                              'pg_type': 'varchar'},
    './record[{}]/auth_results/dkim/result': {'pg_field_name': 'auth_results_dkim_result',
                                              'pg_table': 'report_items',
                                              'pg_type': 'varchar'},
    './record[{}]/auth_results/spf/domain': {'pg_field_name': 'auth_results_spf_domain',
                                             'pg_table': 'report_items',
                                             'pg_type': 'varchar'},
    './record[{}]/auth_results/spf/result': {'pg_field_name': 'auth_results_spf_result',
                                             'pg_table': 'report_items',
                                             'pg_type': 'varchar'},
    './record[{}]/identifiers/header_from': {'pg_field_name': 'identifiers_header_from',
                                             'pg_table': 'report_items',
                                             'pg_type': 'varchar'},
    './record[{}]/row/count': {'pg_field_name': 'count',
                               'pg_table': 'report_items',
                               'pg_type': 'int'},
    './record[{}]/row/policy_evaluated/disposition': {'pg_field_name': 'policy_evaluated_disposition',
                                                      'pg_table': 'report_items',
                                                      'pg_type': 'varchar'},
    './record[{}]/row/policy_evaluated/dkim': {'pg_field_name': 'policy_evaluated_dkim',
                                               'pg_table': 'report_items',
                                               'pg_type': 'varchar'},
    './record[{}]/row/policy_evaluated/spf': {'pg_field_name': 'policy_evaluated_spf',
                                              'pg_table': 'report_items',
                                              'pg_type': 'varchar'},
    './record[{}]/row/source_ip': {'pg_field_name': 'source_ip',
                                   'pg_table': 'report_items',
                                   'pg_type': 'inet'},
    './report_metadata/date_range/begin': {'pg_field_name': 'report_metadata_date_range_begin',
                                           'pg_table': 'reports',
                                           'pg_type': 'timestamptz'},
    './report_metadata/date_range/end': {'pg_field_name': 'report_metadata_date_range_end',
                                         'pg_table': 'reports',
                                         'pg_type': 'timestamptz'},
    './report_metadata/email': {'pg_field_name': 'report_metadata_email',
                                'pg_table': 'reports',
                                'pg_type': 'varchar'},
    './report_metadata/org_name': {'pg_field_name': 'report_metadata_org_name',
                                   'pg_table': 'reports',
                                   'pg_type': 'varchar'},
    './report_metadata/report_id': {'pg_field_name': 'report_metadata_report_id',
                                    'pg_table': 'reports',
                                    'pg_type': 'varchar'}}
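
# For reference only, a sketch of a schema compatible with the mappings above.
# Column names and types mirror field_maps; reports.id and report_items.report_id
# are implied by the insert/select code below; the key and constraint choices
# here are assumptions, not taken from this script.
#
#   create table reports (
#       id serial primary key,
#       policy_published_adkim varchar,
#       policy_published_aspf varchar,
#       policy_published_domain varchar,
#       policy_published_p varchar,
#       policy_published_pct int,
#       report_metadata_date_range_begin timestamptz,
#       report_metadata_date_range_end timestamptz,
#       report_metadata_email varchar,
#       report_metadata_org_name varchar,
#       report_metadata_report_id varchar
#   );
#
#   create table report_items (
#       report_id int references reports (id),
#       auth_results_dkim_domain varchar,
#       auth_results_dkim_result varchar,
#       auth_results_spf_domain varchar,
#       auth_results_spf_result varchar,
#       identifiers_header_from varchar,
#       count int,
#       policy_evaluated_disposition varchar,
#       policy_evaluated_dkim varchar,
#       policy_evaluated_spf varchar,
#       source_ip inet
#   );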


def build_insert_command(table_name, report, preamble_values=None, i=None):
    # Build an INSERT statement (and its parameter dict) for one table from
    # the XML report, using the XPath-to-column mappings in field_maps.
    # preamble_values holds extra columns (e.g. the parent report id);
    # i selects the nth <record> element when filling report_items fields.
    field_names = []
    if preamble_values:
        values = preamble_values.copy()
    else:
        values = {}
    for f in [f for f in field_maps if field_maps[f]['pg_table'] == table_name]:
        if i:
            fp = f.format(i)
        else:
            fp = f
        field_names += [field_maps[f]['pg_field_name']]
        if field_maps[f]['pg_type'] == 'int':
            values[field_maps[f]['pg_field_name']] = int(report.find(fp).text)
        elif field_maps[f]['pg_type'] == 'timestamptz':
            values[field_maps[f]['pg_field_name']] = \
                datetime.datetime.fromtimestamp(int(report.find(fp).text),
                                                tz=datetime.timezone.utc)
        elif field_maps[f]['pg_type'] == 'inet':
            values[field_maps[f]['pg_field_name']] = maybe_strip(report.find(fp).text)
        else:
            values[field_maps[f]['pg_field_name']] = maybe_strip(report.find(fp).text)
    insert_string = 'insert into {} ('.format(table_name)
    if preamble_values:
        insert_string += ', '.join(sorted(preamble_values.keys())) + ', '
    insert_string += ', '.join(field_names) + ') '
    insert_string += 'values ('
    if preamble_values:
        insert_string += ', '.join('%({})s'.format(fn) for fn in sorted(preamble_values.keys())) + ', '
    insert_string += ', '.join('%({})s'.format(f) for f in field_names) + ');'
    return insert_string, values
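
# The generated statement has the shape (illustrative, fields elided; the
# actual field order follows iteration over field_maps):
#   insert into report_items (report_id, auth_results_dkim_domain, ...)
#   values (%(report_id)s, %(auth_results_dkim_domain)s, ...);
# and the returned dict maps those names to the extracted values, ready to be
# passed to cursor.execute(insert_string, values).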


def write_report(connection, cursor, report):
    # Insert the report header row, then one report_items row per <record>,
    # linked back to the reports row via its id.
    insert_string, values = build_insert_command('reports', report)
    # print(insert_string, values)
    cursor.execute(insert_string, values)

    for i in range(1, len(report.findall('./record')) + 1):
        cursor.execute('select id, report_metadata_report_id from reports where report_metadata_report_id = %s;',
                       [report.find('./report_metadata/report_id').text])
        results = cursor.fetchall()
        if len(results) != 1:
            raise RuntimeError('Could not find report record for report item')
        else:
            report_id = results[0][0]
        insert_string, values = build_insert_command('report_items', report, i=i,
                                                     preamble_values={'report_id': report_id})
        # print(insert_string, values)
        cursor.execute(insert_string, values)
    connection.commit()

config = configparser.ConfigParser()
if args.config_file:
    config.read(args.config_file)
else:
    config.read(['/etc/dmarc_to_database.ini', './dmarc_to_database.ini'])

if not config.sections():
    raise RuntimeError('Could not find configuration file')
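
# A minimal configuration file for the paths above might look like this
# (values are placeholders; only the section and key names are what the
# script reads):
#
#   [database]
#   server = localhost
#   database = dmarc
#   username = dmarc
#   password = secret
#
#   [imap]
#   server = mail.example.org
#   port = 143
#   username = dmarc-reports
#   password = secret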

conn = psycopg2.connect(host=config['database']['server'],
                        database=config['database']['database'],
                        user=config['database']['username'],
                        password=config['database']['password'])

cur = conn.cursor()
cur.execute('select max(report_metadata_date_range_end) from reports')
results = cur.fetchall()
most_recent_date = results[0][0]

mailbox = imaplib.IMAP4(host=config['imap']['server'],
                        port=config['imap']['port'])
mailbox.starttls()
mailbox.login(config['imap']['username'], config['imap']['password'])
mailbox.select('INBOX', readonly=True)


# Search back to two days before the most recently stored report so nothing
# around that boundary is missed; with an empty database, fetch everything.
if most_recent_date:
    mails_from = "SINCE " + (most_recent_date - datetime.timedelta(days=2)).strftime("%d-%b-%Y")
else:
    mails_from = "ALL"
resp, nums = mailbox.uid('SEARCH', None, mails_from)


# Each message may carry several zipped reports; flatten them into one list.
dmarc_reports = [report for report_set in [extract_report(fetch_msg(n)) for n in nums[0].split()]
                 for report in report_set]

mailbox.close()
mailbox.logout()

# Write each report that is not already in the database, matching on the
# sender-assigned report_id; in --test mode only print what would be written.
for report in dmarc_reports:
    cur.execute('select id, report_metadata_report_id from reports where report_metadata_report_id = %s;',
                [report.find('./report_metadata/report_id').text])
    results = cur.fetchall()
    if not results:
        print('write', report.find('./report_metadata/report_id').text)
        if not args.test:
            write_report(conn, cur, report)

conn.close()