parser = argparse.ArgumentParser(description='Call the revisions API to collect Wikipedia revision data.')
parser.add_argument('-o', '--output_folder', help='Where to save output', default="wikipedia/data", type=str)
parser.add_argument('-i', '--article_file', help='File listing article names', default="wikipedia/resources/enwp_wikiproject_covid19_articles.txt", type=str)
- parser.add_argument('-d', '--query_date', help='Date if not yesterday, in YYYYMMDD format.', type=str)
parser.add_argument('-L', '--logging_level', help='Logging level. Options are debug, info, warning, error, critical. Default: info.', default='info', type=str)
parser.add_argument('-W', '--logging_destination', help='Logging destination file. (default: standard error)', type=str)
args = parser.parse_args()
output_path = args.output_folder
article_filename = args.article_file
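+ # example invocation (the script name below is illustrative; flags match the parser above):
+ #   python3 collect_revisions.py -o wikipedia/data -L debug -W revisions.log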
- #handle -d
- if args.query_date:
- query_date = args.query_date
- else:
- yesterday = datetime.datetime.today() - datetime.timedelta(days=1)
- query_date = yesterday.strftime("%Y%m%d")
-
- query_data = query_date + "00" #requires specifying hours
#handle -L
loglevel_mapping = { 'debug' : logging.DEBUG,
export_git_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
export_git_short_hash = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().strip()
export_time = str(datetime.datetime.now())
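+ # with -d/--query_date removed above, output files are always stamped with today's date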
+ export_date = datetime.datetime.today().strftime("%Y%m%d")
logging.info(f"Starting run at {export_time}")
logging.info(f"Last commit: {export_git_hash}")
- json_output_filename = os.path.join(output_path, f"digobs_covid19-wikipedia-enwiki_revisions-{query_date}.json")
- tsv_output_filename = os.path.join(output_path, f"digobs_covid19-wikipedia-enwiki_revisions-{query_data}.tsv")
+ json_output_filename = os.path.join(output_path, f"digobs_covid19-wikipedia-enwiki_revisions-{export_date}.json")
+ tsv_output_filename = os.path.join(output_path, f"digobs_covid19-wikipedia-enwiki_revisions-{export_date}.tsv")
api_session = api.Session("https://en.wikipedia.org/w/api.php")
'sha1' : 'sha1',
'contentmodel' : 'contentmodel',
'tags' : 'tags',
+ 'flags' : 'flags',
'comment' : 'comment',
'content' : 'content' }
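+ # these props are still requested (and kept in the JSON output) but are left out of the TSV columns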
- exclude_from_tsv = ['tags', 'comment', 'content']
+ exclude_from_tsv = ['tags', 'comment', 'content', 'flags']
# load the list of articles
with open(article_filename, 'r') as infile:
tsv_fields = [e for e in tsv_fields if e not in exclude_from_tsv]
# add special export fields
- tsv_fields = tsv_fields + ['url', 'export_timestamp', 'export_commit']
+ tsv_fields = tsv_fields + ['anon', 'minor', 'url', 'export_timestamp', 'export_commit']
export_info = { 'git_commit' : export_git_hash,
'timestamp' : export_time }
tsv_writer.writeheader()
for article in article_list:
- logging.info(f"pulling revisiosn for: {article}")
+ logging.info(f"pulling revisions for: {article}")
for rev in get_revisions_for_page(article):
logging.debug(f"processing raw revision: {rev}")
if "sha1" not in rev:
rev["sha1"] = ""
+ if "userhidden" in rev:
+ rev["user"] = ""
+ rev["userid"] = ""
+
+ # recode "anon" and "minor" from present/missing markers to explicit booleans
+ rev["anon"] = "anon" in rev
+ rev["minor"] = "minor" in rev
+
# add page title information
rev['title'] = rev['page']['title']
rev['pageid'] = rev['page']['pageid']
rev['export_commit'] = export_git_short_hash
tsv_writer.writerow({k: rev[k] for k in tsv_fields})
- break
if __name__ == "__main__":
-
main()