url_prefix = 'https://www.berlin.de'
pm_dir = '/sen/gpg/service/presse/2020/'
pm_nav_path = pm_dir + '?page_at_1_0='
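# The press releases live under url_prefix + pm_dir; appending a page number
# to pm_nav_path (see the pagination scan below) addresses the individual
# pages of the paginated listing.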
# Map abbreviations to full names (and their alternate spellings).
abbrevs = {
    'CW': {'Charlottenburg-Wilmersdorf'},
    'FK': {'Friedrichshain-Kreuzberg'},
    'Li': {'Lichtenberg'},
    'MH': {'Marzahn-Hellersdorf'},
    'Mi': {'Mitte'},
    'Ne': {'Neukölln', 'Neuköln'},
    'Pa': {'Pankow'},
    'Re': {'Reinickendorf'},
    'Sp': {'Spandau'},
    'SZ': {'Steglitz-Zehlendorf'},
    'TS': {'Tempelhof-Schöneberg'},
    'TK': {'Treptow-Köpenick'},
    'sum': {'Summe', 'Berlin'},
}
# Some pre-filled values.
data = {
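    # Layout: data[<datetime>][<district abbreviation>] = {'growth': <int>, 'total': <int>}.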
    # For these, only image files are available for the table data.
    datetime.datetime(2020, 7, 2): {
        'CW': {'growth': 4, 'total': 851},
        'FK': {'growth': 10, 'total': 681},
        'Li': {'growth': 3, 'total': 427},
        'MH': {'growth': 4, 'total': 468},
        'Mi': {'growth': 0, 'total': 1202},
        'Ne': {'growth': 7, 'total': 1031},
        'Pa': {'growth': 3, 'total': 784},
        'Re': {'growth': 6, 'total': 660},
        'Sp': {'growth': 3, 'total': 450},
        'SZ': {'growth': 0, 'total': 591},
        'TS': {'growth': 3, 'total': 798},
        'TK': {'growth': 0, 'total': 401},
        'sum': {'growth': 43, 'total': 8344}
    },
    datetime.datetime(2020, 4, 5): {
        'CW': {'growth': 9, 'total': 462},
        'FK': {'growth': 2, 'total': 352},
        'Li': {'growth': 0, 'total': 142},
        'MH': {'growth': 3, 'total': 127},
        'Mi': {'growth': 14, 'total': 537},
        'Ne': {'growth': 0, 'total': 392},
        'Pa': {'growth': 10, 'total': 378},
        'Re': {'growth': 9, 'total': 248},
        'Sp': {'growth': 3, 'total': 150},
        'SZ': {'growth': 0, 'total': 312},
        'TS': {'growth': 8, 'total': 394},
        'TK': {'growth': 3, 'total': 193},
        'sum': {'growth': 61, 'total': 3687}
    },
    datetime.datetime(2020, 4, 4): {
        'CW': {'growth': 2, 'total': 453},
        'FK': {'growth': 7, 'total': 350},
        'Li': {'growth': 0, 'total': 142},
        'MH': {'growth': 15, 'total': 124},
        'Mi': {'growth': 22, 'total': 523},
        'Ne': {'growth': 15, 'total': 392},
        'Pa': {'growth': 10, 'total': 368},
        'Re': {'growth': 5, 'total': 239},
        'Sp': {'growth': 21, 'total': 147},
        'SZ': {'growth': 12, 'total': 312},
        'TS': {'growth': 24, 'total': 386},
        'TK': {'growth': 7, 'total': 190},
        'sum': {'growth': 140, 'total': 3626}
    },
    datetime.datetime(2020, 4, 3): {
        'CW': {'growth': 44, 'total': 451},
        'FK': {'growth': 17, 'total': 343},
        'Li': {'growth': 7, 'total': 142},
        'MH': {'growth': 4, 'total': 109},
        'Mi': {'growth': 4, 'total': 501},
        'Ne': {'growth': 40, 'total': 377},
        'Pa': {'growth': 39, 'total': 358},
        'Re': {'growth': 26, 'total': 234},
        'Sp': {'growth': 9, 'total': 126},
        'SZ': {'growth': 18, 'total': 300},
        'TS': {'growth': 41, 'total': 362},
        'TK': {'growth': 14, 'total': 183},
        'sum': {'growth': 263, 'total': 3486}
    },
    # Here the growth numbers needed to be reconstructed.
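    # (Reconstruction: growth = that day's total minus the previous day's
    # total, e.g. for CW: 15 - 13 = 2.)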
    datetime.datetime(2020, 3, 10): {
        'CW': {'growth': 2, 'total': 15},
        'FK': {'growth': 0, 'total': 12},
        'Li': {'growth': 4, 'total': 5},
        'MH': {'growth': 1, 'total': 3},
        'Mi': {'growth': 0, 'total': 8},
        'Ne': {'growth': 2, 'total': 5},
        'Pa': {'growth': 2, 'total': 8},
        'Re': {'growth': 0, 'total': 3},
        'Sp': {'growth': 4, 'total': 6},
        'SZ': {'growth': 3, 'total': 6},
        'TS': {'growth': 2, 'total': 7},
        'TK': {'growth': 3, 'total': 3},
        'sum': {'growth': 23, 'total': 81}
    },
    # Here the totals needed to be reconstructed.
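    # (Reconstruction: total = the following day's total minus the following
    # day's growth, e.g. for CW: 15 - 2 = 13.)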
    datetime.datetime(2020, 3, 9): {
        'CW': {'growth': 4, 'total': 13},
        'FK': {'growth': 3, 'total': 12},
        'Li': {'growth': 0, 'total': 1},
        'MH': {'growth': 1, 'total': 2},
        'Mi': {'growth': 0, 'total': 8},
        'Ne': {'growth': 1, 'total': 3},
        'Pa': {'growth': 1, 'total': 6},
        'Re': {'growth': 0, 'total': 3},
        'Sp': {'growth': 0, 'total': 2},
        'SZ': {'growth': 0, 'total': 3},
        'TS': {'growth': 0, 'total': 5},
        'TK': {'growth': 0, 'total': 0},
        'sum': {'growth': 10, 'total': 58}
    },
    # Here the growth numbers needed to be reconstructed.
    datetime.datetime(2020, 3, 8): {
        'CW': {'growth': 0, 'total': 9},
        'FK': {'growth': 4, 'total': 9},
        'Li': {'growth': 1, 'total': 1},
        'MH': {'growth': 0, 'total': 1},
        'Mi': {'growth': 0, 'total': 8},
        'Ne': {'growth': 0, 'total': 2},
        'Pa': {'growth': 0, 'total': 5},
        'Re': {'growth': 0, 'total': 3},
        'Sp': {'growth': 2, 'total': 2},
        'SZ': {'growth': 1, 'total': 3},
        'TS': {'growth': 0, 'total': 5},
        'TK': {'growth': 0, 'total': 0},
        'sum': {'growth': 8, 'total': 48}
    },
    # Here the growth numbers needed to be reconstructed.
    datetime.datetime(2020, 3, 7): {
        'CW': {'growth': 6, 'total': 9},
        'FK': {'growth': 1, 'total': 5},
        'Li': {'growth': 0, 'total': 0},
        'MH': {'growth': 0, 'total': 1},
        'Mi': {'growth': 1, 'total': 8},
        'Ne': {'growth': 0, 'total': 2},
        'Pa': {'growth': 1, 'total': 5},
        'Re': {'growth': 0, 'total': 3},
        'Sp': {'growth': 0, 'total': 0},
        'SZ': {'growth': 2, 'total': 2},
        'TS': {'growth': 1, 'total': 5},
        'TK': {'growth': 0, 'total': 0},
        'sum': {'growth': 12, 'total': 40}
    },
    # Here the growth numbers needed to be reconstructed.
    datetime.datetime(2020, 3, 6): {
        'CW': {'growth': 1, 'total': 3},
        'FK': {'growth': 0, 'total': 4},
        'Li': {'growth': 0, 'total': 0},
        'MH': {'growth': 0, 'total': 1},
        'Mi': {'growth': 4, 'total': 7},
        'Ne': {'growth': 1, 'total': 2},
        'Pa': {'growth': 1, 'total': 4},
        'Re': {'growth': 0, 'total': 3},
        'Sp': {'growth': 0, 'total': 0},
        'SZ': {'growth': 0, 'total': 0},
        'TS': {'growth': 2, 'total': 4},
        'TK': {'growth': 0, 'total': 0},
        'sum': {'growth': 9, 'total': 28}
    },
    # Here the growth numbers needed to be reconstructed.
    datetime.datetime(2020, 3, 5): {
        'CW': {'growth': 2, 'total': 2},
        'FK': {'growth': 0, 'total': 4},
        'Li': {'growth': 0, 'total': 0},
        'MH': {'growth': 0, 'total': 1},
        'Mi': {'growth': 0, 'total': 3},
        'Ne': {'growth': 0, 'total': 1},
        'Pa': {'growth': 1, 'total': 3},
        'Re': {'growth': 2, 'total': 3},
        'Sp': {'growth': 0, 'total': 0},
        'SZ': {'growth': 0, 'total': 0},
        'TS': {'growth': 1, 'total': 2},
        'TK': {'growth': 0, 'total': 0},
        'sum': {'growth': 6, 'total': 19}
    },
    # Here the growth numbers needed to be reconstructed.
    datetime.datetime(2020, 3, 4): {
        'CW': {'growth': 0, 'total': 0},
        'FK': {'growth': 2, 'total': 4},
        'Li': {'growth': 0, 'total': 0},
        'MH': {'growth': 0, 'total': 1},
        'Mi': {'growth': 0, 'total': 3},
        'Ne': {'growth': 0, 'total': 1},
        'Pa': {'growth': 1, 'total': 2},
        'Re': {'growth': 1, 'total': 1},
        'Sp': {'growth': 0, 'total': 0},
        'SZ': {'growth': 0, 'total': 0},
        'TS': {'growth': 0, 'total': 1},
        'TK': {'growth': 0, 'total': 0},
        'sum': {'growth': 4, 'total': 13}
    },
    # Here the growth numbers needed to be reconstructed.
    datetime.datetime(2020, 3, 3): {
        'CW': {'growth': 0, 'total': 0},
        'FK': {'growth': 2, 'total': 2},
        'Li': {'growth': 0, 'total': 0},
        'MH': {'growth': 0, 'total': 1},
        'Mi': {'growth': 0, 'total': 3},
        'Ne': {'growth': 0, 'total': 1},
        'Pa': {'growth': 1, 'total': 1},
        'Re': {'growth': 0, 'total': 0},
        'Sp': {'growth': 0, 'total': 0},
        'SZ': {'growth': 0, 'total': 0},
        'TS': {'growth': 0, 'total': 1},
        'TK': {'growth': 0, 'total': 0},
        'sum': {'growth': 3, 'total': 9}
    },
}
# Fixes for dates whose published numbers are internally inconsistent.
fixes = {
    # Here the official total is 215, while the district numbers only add
    # up to 125 – which looks like a simple transposition of digits.
    datetime.datetime(2020, 3, 27): {
    # Here the official total is 1937, while the district numbers only add
    # up to 1917; furthermore, the original value for SZ is 118 (+18), which
    # makes no sense, as the day before is 120 (+15) and the day after is
    # 147 (+15). The values below are a compromise that keeps as many
    # surrounding numbers stable as possible.
    datetime.datetime(2020, 3, 26): {
    # Here the official total is 220, while the district numbers add up
    # to 228 – it looks like someone misread an 8 as a 0.
    datetime.datetime(2020, 3, 25): {
# Scan navigation bar for maximum pagination value.
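# Pagination links carry the page number after the '=', e.g. a href of
# '/sen/gpg/service/presse/2020/?page_at_1_0=3' points at listing page 3.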
url = url_prefix + pm_dir
with urllib.request.urlopen(url) as response:
    html = response.read()
soup = bs4.BeautifulSoup(html, 'html.parser')
max_page = 0
for link in soup.find_all('a'):
    href = link.get('href', '')
    if href.startswith(pm_nav_path):
        max_test = int(href.split('=')[1])
        max_page = max(max_page, max_test)
# Scan the paginated press release listings for links to the daily Corona number briefings.
day_urls = []
for i in range(max_page):
    url = url_prefix + pm_nav_path + str(i + 1)
    with urllib.request.urlopen(url) as response:
        html = response.read()
    soup = bs4.BeautifulSoup(html, 'html.parser')
    for link in soup.find_all('a'):
        # Only keep links whose title marks them as a daily Corona briefing.
        if (not link.string) or \
           (not link.string.startswith('Coronavirus: Derzeit') and
            not link.string.startswith('Coronavirus in Berlin: Bestätigte Fälle')):
            continue
        day_urls += [link['href']]
# Collect infection data.
districts_sorted = []
# TODO: Push limit further back (might need more data fixes for that).
date_limit = datetime.datetime(2020, 3, 12)
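# Press releases older than date_limit get skipped below; those days are
# instead covered by the pre-filled entries in data above.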
for path in day_urls:
    url = url_prefix + path
    with urllib.request.urlopen(url) as response:
        html = response.read()
    soup = bs4.BeautifulSoup(html, 'html.parser')
    date_title = soup.find('div', class_='pressnumber')
    m = re.search(r'[0-9]+\.[0-9]+\.[0-9]+', date_title.string)
    date_formatted = m.group(0)
    date = datetime.datetime.strptime(date_formatted, '%d.%m.%Y')
    # Skip press releases older than the date limit.
    if date_limit > date:
        continue
    # On 15 March 2020 two press releases were published: one for that day
    # and one for the previous day.
    if date == datetime.datetime(2020, 3, 15) and date in data:
        date = datetime.datetime(2020, 3, 14)
    # From here on, press releases describe the numbers of the previous day.
    if date <= datetime.datetime(2020, 3, 13):
        date = date - datetime.timedelta(days=1)
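    # Example: the press release dated 13 March 2020 carries the numbers
    # for 12 March 2020.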
    table = soup.find('table')
    # For 13th of March we lack a press release; that day is reconstructed below.
    if table is None and (date in data or date == datetime.datetime(2020, 3, 13)):
        continue
    data[date] = {}
    # Skip the header row, then read the first two cells of each district row.
    for tr in [tr for tr in table.children if type(tr) == bs4.element.Tag][1:]:
        printable_tds = []
        for td in [td for td in tr.children if type(td) == bs4.element.Tag][:2]:
            printable_string = ' '.join([s for s in td.strings])
            printable_tds += [printable_string.strip()]
        district_long = printable_tds[0]
        district_short = [k for k in abbrevs if district_long in abbrevs[k]][0]
        if district_short not in districts_sorted:
            districts_sorted += [district_short]
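        # The second cell combines total and growth, e.g. something like
        # '1.202 (+14)'; the parsing below strips the thousands separator
        # and the '(+...)' decoration.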
        if split_char not in printable_tds[1]:
            continue
        total_str, growth_str = printable_tds[1].split(split_char)
        growth = int(growth_str.replace('(', '').replace(')', '').replace('+', ''))
        total = int(total_str.replace('.', ''))
        data[date][district_short] = {'growth': growth, 'total': total}
# Reconstruct data for 13th of March.
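# Using the relation total(day) = total(previous day) + growth(day):
#   total(Mar 13) = total(Mar 14) - growth(Mar 14)
#   growth(Mar 13) = total(Mar 13) - total(Mar 12)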
day_target = datetime.datetime(2020, 3, 13)
day_after = day_target + datetime.timedelta(days=1)
day_before = day_target - datetime.timedelta(days=1)
data[day_target] = {}
for district in [d for d in districts_sorted]:
    data[day_target][district] = {}
    total_after = data[day_after][district]['total']
    growth_after = data[day_after][district]['growth']
    total_target = total_after - growth_after
    data[day_target][district]['total'] = total_target
    total_before = data[day_before][district]['total']
    data[day_target][district]['growth'] = total_target - total_before
dates_sorted = list(data.keys())
dates_sorted.sort()
dates_sorted.reverse()
# Apply fixes and ensure integrity of results.
for date in fixes:
    for district in fixes[date]:
        for type_ in fixes[date][district]:
            data[date][district][type_] = fixes[date][district][type_]
for date in dates_sorted:
    # Each day's total must equal the previous day's total plus that day's growth.
    for district in [d for d in districts_sorted if d != 'sum']:
        prev_date = date - datetime.timedelta(days=1)
        if prev_date not in dates_sorted:
            if prev_date >= date_limit:
                raise Exception('Dates not contiguous: %s missing' % prev_date)
            continue
        prev_total = data[prev_date][district]['total']
        cur_total = data[date][district]['total']
        if cur_total - data[date][district]['growth'] != prev_total:
            raise Exception('Questionable district infection total in %s/%s' % (district, date))
    # The district totals must add up to the reported 'sum' total.
    day_sum = 0
    for district in [d for d in districts_sorted if d != 'sum']:
        day_sum += data[date][district]['total']
    if day_sum != data[date]['sum']['total']:
        raise Exception('Questionable district infection total sum in %s' % date)
    # The district growth numbers must add up to the reported 'sum' growth.
    day_sum = 0
    for district in [d for d in districts_sorted if d != 'sum']:
        day_sum += data[date][district]['growth']
    if day_sum != data[date]['sum']['growth']:
        raise Exception('Questionable district infection growth sum in %s' % date)
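# Finally, print a table of the daily growth numbers: one column per district
# (in the order the districts appear in the scraped tables, plus 'sum'), one
# row per day, newest day first.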
print(' ' * 10, ' '.join(['%3s' % d for d in districts_sorted]))
for date in dates_sorted:
    growths = []
    for d in districts_sorted:
        growths += [data[date][d]['growth']]
    print(date.strftime('%Y-%m-%d'), ' '.join(['%3s' % g for g in growths]))