###############################################################################
# This script assumes the presence of the real-time-wiki-covid-tracker repo
# (its sqlite database is passed via -d/--input_db).
# It (1) reads in the article list and then (2) calls the MediaWiki API to
# fetch revision data for each article. Output is (3) written to JSON and TSV.
###############################################################################

import argparse
import datetime
import sqlite3
import digobs
from os import path, mkdir
from functools import partial
from itertools import chain
from csv import DictWriter
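
# Command-line interface: output location, an optional page-name list for English
# Wikipedia, the tracker's sqlite database, and logging options.
# Example invocation (script name and paths are illustrative):
#   python3 fetch_revisions.py -o wikipedia/data -L debug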
parser = argparse.ArgumentParser(description='Call the MediaWiki API to collect Wikipedia revision data.')
parser.add_argument('-o', '--output_folder', help='Where to save output', default="wikipedia/data", type=str)
parser.add_argument('-i', '--input_file', help="Input a file of page names from the English Wikiproject.", type=argparse.FileType('r'), default='./wikipedia/resources/enwp_wikiproject_covid19_articles.txt')
parser.add_argument('-d', '--input_db', help="Input a path to a sqlite3 database from the real-time-covid-tracker project", type=sqlite3.connect, default='real-time-wiki-covid-tracker/AllWikidataItems.sqlite')
parser.add_argument('-L', '--logging_level', help='Logging level. Options are debug, info, warning, error, critical. Default: info.', default='info', type=digobs.get_loglevel)
parser.add_argument('-W', '--logging_destination', help='Logging destination file. (default: standard error)', type=argparse.FileType('a'))

args = parser.parse_args()

logging = digobs.init_logging(args)
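
# The -d/--input_db argument is opened by argparse itself (type=sqlite3.connect),
# so it is already a live connection; dict-style rows let us address columns by name.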
conn = args.input_db
conn.row_factory = sqlite3.Row
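
# Every distinct wiki project (e.g. "en.wikipedia") tracked in the database.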
projects = (row['project'] for row in conn.execute("SELECT DISTINCT project FROM pagesPerProjectTable;").fetchall())

tsv_fields = ['title', 'pageid', 'namespace']

# list of properties from the API we want to gather (basically all of
# them supported by mediawiki-utilities)
rv_props = {'revid' : 'ids',
            'timestamp' : 'timestamp',
            'user' : 'user',
            'userid' : 'userid',
            'size' : 'size',
            'sha1' : 'sha1',
            'contentmodel' : 'contentmodel',
            'tags' : 'tags',
            'flags' : 'flags',
            'comment' : 'comment',
            'content' : 'content' }
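
# Yield the distinct pages the tracker database records for a single project.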
def get_project_pages(project):
    res = conn.execute("SELECT DISTINCT page FROM pagesPerProjectTable WHERE project = ?;", (project,))
    return (row['page'] for row in res.fetchall())
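
# Fetch revisions for all of a project's pages via the digobs helper; for English
# Wikipedia, the WikiProject COVID-19 article list from --input_file is appended.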
def get_project_revisions(project):
    pages = get_project_pages(project)
    if project == "en.wikipedia":
        pages = chain(pages, map(str.strip, args.input_file))
    return digobs.get_pages_revisions(pages, project=project, logging=logging, rv_props=rv_props)

tsv_fields = tsv_fields + list(rv_props.keys())

exclude_from_tsv = ['tags', 'comment', 'content', 'flags']

# drop fields that we identified for exclusion
tsv_fields = [e for e in tsv_fields if e not in exclude_from_tsv]

# add special export fields
tsv_fields = tsv_fields + ['anon', 'minor', 'url', 'export_timestamp', 'export_commit']

export_time = str(datetime.datetime.now())
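
# Pre-bind the output schema and export metadata with functools.partial so the
# per-batch writers below only need the revision batch itself.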
rev_batch_to_tsv = partial(digobs.rev_batch_to_tsv,
                           tsv_fields=tsv_fields,
                           export_info={'export_timestamp': export_time,
                                        'export_commit': digobs.git_hash(short=True)})

export_info = {'git_commit': digobs.git_hash(),
               'timestamp': export_time}

export_date = datetime.datetime.today().strftime("%Y%m%d")
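
# The JSON export records the full git commit hash and the export timestamp as metadata.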
rev_batch_to_json = partial(digobs.rev_batch_to_json,
                            export_info=export_info)
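
# Write one JSON file and one TSV file of revisions per project per export date.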
def write_project_pages(project):
    project_folder = path.join(args.output_folder, project)
    if not path.exists(project_folder):
        mkdir(project_folder)

    dump_folder = path.join(project_folder, export_date)
    if not path.exists(dump_folder):
        mkdir(dump_folder)

    project_revs = get_project_revisions(project)

    json_output_filename = path.join(dump_folder, f"digobs_covid19_{project}_revisions-{export_date}.json")
    tsv_output_filename = path.join(dump_folder, f"digobs_covid19_{project}_revisions-{export_date}.tsv")

    with open(json_output_filename, 'w') as json_output, \
         open(tsv_output_filename, 'w') as tsv_output:
        tsv_writer = DictWriter(tsv_output, fieldnames=tsv_fields, delimiter="\t")
        tsv_writer.writeheader()
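
        # Stream each revision batch straight to both outputs as it arrives.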
        for rev_batch in project_revs:
            logging.debug(f"processing raw revision: {rev_batch}")
            rev_batch_to_json(rev_batch, json_output=json_output)
            rev_batch_to_tsv(rev_batch, project=project, tsv_writer=tsv_writer)

if __name__ == "__main__":
    for project in projects:
        write_project_pages(project)