#!/usr/bin/env python3
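"""Fetch the SecureDrop directory API and write each listed instance as a row of
securedrop-api.csv (columns: flaky, category, site_name, onion_name, onion_url,
proof_url, comment)."""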

import csv
import html
import json
import pprint

import requests

output_csv = 'securedrop-api.csv'
sd_url = 'https://securedrop.org/api/v1/directory/'

# Column order for the generated CSV.
fieldnames = "flaky category site_name onion_name onion_url proof_url comment".split()


def xx(thing, key):
    """Return the HTML-escaped value of thing[key], or '' if it is missing."""
    val = thing.get(key, None) or ''  # the `or ''` also catches the case where the key exists but its value is None
    return html.escape(val)


def push(stack, entry):
    """Flatten one directory entry into a CSV row dict and append it to stack."""
    method = 'http'  # this needs some discussion with SecureDrop
    result = dict()
    result['flaky'] = ''
    result['category'] = 'SecureDrop'
    result['site_name'] = xx(entry, 'title')
    result['onion_url'] = '{0}://{1}'.format(method, xx(entry, 'onion_address'))
    result['onion_name'] = xx(entry, 'onion_name')
    result['proof_url'] = xx(entry, 'landing_page_url')
    result['comment'] = 'via: {}'.format(sd_url)
    stack.append(result)


if __name__ == '__main__':
    session = requests.Session()
    response = session.get(sd_url)
    data = response.json()
    # pprint.pprint(data)

    entries = []
    for entry in data:
        push(entries, entry)

    with open(output_csv, 'w', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(entries)