###############################################################################
import sys
-import subprocess
import requests
import argparse
import json
import time
import os.path
-import argparse
import datetime
import logging
from csv import DictWriter
+import digobs
#import feather #TBD
def parse_args():
-
parser = argparse.ArgumentParser(description='Call the views API to collect Wikipedia view data.')
parser.add_argument('-o', '--output_folder', help='Where to save output', default="wikipedia/data", type=str)
parser.add_argument('-i', '--article_file', help='File listing article names', default="wikipedia/resources/enwp_wikiproject_covid19_articles.txt", type=str)
parser.add_argument('-d', '--query_date', help='Date if not yesterday, in YYYYMMDD format.', type=str)
- parser.add_argument('-L', '--logging_level', help='Logging level. Options are debug, info, warning, error, critical. Default: info.', default='info', type=str),
+    parser.add_argument('-L', '--logging_level', help='Logging level. Options are debug, info, warning, error, critical. Default: info.', default='info', type=digobs.get_loglevel)
-    parser.add_argument('-W', '--logging_destination', help='Logging destination file. (default: standard error)', type=str),
+    parser.add_argument('-W', '--logging_destination', help='Logging destination file. (default: standard error)', type=str)
args = parser.parse_args()
-    return(args)
+    return args
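+# For reference, a typical invocation of this script (the filename
+# 'wikipedia_views.py' is assumed here for illustration):
+#
+#   python3 wikipedia_views.py -d 20200401 -L debug -W views.log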
args = parse_args()
outputPath = args.output_folder
articleFile = args.article_file

#handle -d
if args.query_date:
- queryDate = args.query_date
+ query_date = args.query_date
else:
yesterday = datetime.datetime.today() - datetime.timedelta(days=1)
- queryDate = yesterday.strftime("%Y%m%d")
-
- queryDate = queryDate + "00" #requires specifying hours
-
- #handle -L
- loglevel_mapping = { 'debug' : logging.DEBUG,
- 'info' : logging.INFO,
- 'warning' : logging.WARNING,
- 'error' : logging.ERROR,
- 'critical' : logging.CRITICAL }
-
- if args.logging_level in loglevel_mapping:
- loglevel = loglevel_mapping[args.logging_level]
- else:
- print("Choose a valid log level: debug, info, warning, error, or critical")
- exit
+ query_date = yesterday.strftime("%Y%m%d")
#handle -W
if args.logging_destination:
- logging.basicConfig(filename=args.logging_destination, filemode='a', level=loglevel)
+ logging.basicConfig(filename=args.logging_destination, filemode='a', level=args.logging_level)
else:
- logging.basicConfig(level=loglevel)
+ logging.basicConfig(level=args.logging_level)
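+# digobs.get_loglevel is the argparse type converter that replaces the inline
+# loglevel mapping removed above. A minimal sketch of the conversion it must
+# perform (the real implementation lives in the digobs module; the INFO
+# fallback is an assumption):
+#
+#   def get_loglevel(arg_loglevel):
+#       mapping = {'debug': logging.DEBUG, 'info': logging.INFO,
+#                  'warning': logging.WARNING, 'error': logging.ERROR,
+#                  'critical': logging.CRITICAL}
+#       return mapping.get(arg_loglevel.lower(), logging.INFO)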
- export_git_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
- export_git_short_hash = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().strip()
export_time = str(datetime.datetime.now())
+ export_date = datetime.datetime.today().strftime("%Y%m%d")
logging.info(f"Starting run at {export_time}")
- logging.info(f"Last commit: {export_git_hash}")
+ logging.info(f"Last commit: {digobs.git_hash()}")
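+# digobs.git_hash centralizes the provenance lookup that the removed
+# subprocess calls performed. Roughly (a sketch only; the optional 'short'
+# parameter is an assumption):
+#
+#   def git_hash(short=False):
+#       cmd = ['git', 'rev-parse', '--short', 'HEAD'] if short else ['git', 'rev-parse', 'HEAD']
+#       return subprocess.check_output(cmd).decode().strip()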
#1 Load up the list of article names
- j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{queryDate}.json")
- t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{queryDate}.tsv")
+ j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.json")
+ t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.tsv")
with open(articleFile, 'r') as infile:
- articleList = list(infile)
+ articleList = list(map(str.strip, infile))
success = 0 #for logging how many work/fail
failure = 0
#2 Repeatedly call the API with that list of names
for a in articleList:
- a = a.strip("\"\n") #destringify
- url= f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/{a}/daily/{queryDate}/{queryDate}"
+    url = f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/{a}/daily/{query_date}00/{query_date}00" #the API requires an hour suffix ("00") on timestamps
response = requests.get(url)
    if response.ok:
        jd = response.json()["items"][0] #the API wraps each day's record in an "items" list
        success = success + 1
else:
failure = failure + 1
logging.warning(f"Failure: {response.status_code} from {url}")
+ continue
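+    # Expected shape of a successful response, per the public pageviews API
+    # documentation (field values here are illustrative only):
+    #   {"items": [{"project": "en.wikipedia", "article": "...",
+    #               "granularity": "daily", "timestamp": "2020040100",
+    #               "access": "all-access", "agent": "all-agents", "views": 1234}]}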
    # start writing the CSV file if it doesn't exist yet
    try:
        # write out to the csv file
        dw.writerow(jd)
    except NameError: #first successful row: create the writer and its header
        dw = DictWriter(open(t_outfilename, 'w'), sorted(jd.keys()), delimiter='\t')
        dw.writeheader()
        dw.writerow(jd)
- # f_Out = outputPath + "dailyviews" + queryDate + ".feather"
+ # f_Out = outputPath + "dailyviews" + query_date + ".feather"
# read the json back in and make a feather file?
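+# One possible shape for the feather TODO above (a sketch only: the pandas and
+# pyarrow dependencies are assumptions, as is a one-record-per-line layout for
+# the JSON output file):
+#
+#   import pandas as pd
+#   records = [json.loads(line) for line in open(j_outfilename)]
+#   pd.DataFrame(records).to_feather(j_outfilename.replace('.json', '.feather'))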
logging.debug(f"Run complete at {datetime.datetime.now()}")
logging.info(f"Processed {success} successful URLs and {failure} failures.")