X-Git-Url: https://code.communitydata.science/covid19.git/blobdiff_plain/17c3f75389d7cadba4400ee5a0d2281f71fcdc4c..refs/remotes/gh-cdsc/master:/wikipedia/scripts/fetch_enwiki_daily_views.py

diff --git a/wikipedia/scripts/fetch_enwiki_daily_views.py b/wikipedia/scripts/fetch_enwiki_daily_views.py
index 7015a3b..9f147e0 100755
--- a/wikipedia/scripts/fetch_enwiki_daily_views.py
+++ b/wikipedia/scripts/fetch_enwiki_daily_views.py
@@ -26,7 +26,7 @@ def parse_args():
     parser.add_argument('-o', '--output_folder', help='Where to save output', default="wikipedia/data", type=str)
     parser.add_argument('-i', '--article_file', help='File listing article names', default="wikipedia/resources/enwp_wikiproject_covid19_articles.txt", type=str)
     parser.add_argument('-d', '--query_date', help='Date if not yesterday, in YYYYMMDD format.', type=str)
-    parser.add_argument('-L', '--logging_level', help='Logging level. Options are debug, info, warning, error, critical. Default: info.', default='info', type=str),
+    parser.add_argument('-L', '--logging_level', help='Logging level. Options are debug, info, warning, error, critical. Default: info.', default='info', type=digobs.get_loglevel),
     parser.add_argument('-W', '--logging_destination', help='Logging destination file. (default: standard error)', type=str),
     args = parser.parse_args()
     return(args)
@@ -45,14 +45,11 @@ def main():
         yesterday = datetime.datetime.today() - datetime.timedelta(days=1)
         query_date = yesterday.strftime("%Y%m%d")
 
-    #handle -L
-    loglevel = digobs.get_loglevel(args.logging_level)
-
     #handle -W
     if args.logging_destination:
-        logging.basicConfig(filename=args.logging_destination, filemode='a', level=loglevel)
+        logging.basicConfig(filename=args.logging_destination, filemode='a', level=args.logging_level)
     else:
-        logging.basicConfig(level=loglevel)
+        logging.basicConfig(level=args.logging_level)
 
     export_time = str(datetime.datetime.now())
     export_date = datetime.datetime.today().strftime("%Y%m%d")
@@ -61,8 +58,8 @@ def main():
     logging.info(f"Last commit: {digobs.git_hash()}")
 
     #1 Load up the list of article names
-    j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.json")
-    t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.tsv")
+    j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.json")
+    t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.tsv")
 
     with open(articleFile, 'r') as infile:
         articleList = list(map(str.strip, infile))
@@ -85,6 +82,7 @@ def main():
         else:
            failure = failure + 1
            logging.warning(f"Failure: {response.status_code} from {url}")
+           continue
 
        # start writing the CSV File if it doesn't exist yet
        try:
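
Note: the '-L' change above passes digobs.get_loglevel as the argparse type= callable, so the
string option (e.g. "info") is converted to a numeric logging level during argument parsing and
args.logging_level can be handed straight to logging.basicConfig(). The helper itself lives in the
repository's digobs module and is not shown in this diff; the following is only a minimal sketch of
what such a helper presumably does, with the mapping and fallback behavior assumed rather than taken
from the project's actual code:

    # Hypothetical sketch, not the digobs module's real implementation.
    import logging

    def get_loglevel(arg_loglevel):
        """Map a command-line string such as 'info' to a logging constant."""
        loglevel_mapping = {'debug': logging.DEBUG,
                            'info': logging.INFO,
                            'warning': logging.WARNING,
                            'error': logging.ERROR,
                            'critical': logging.CRITICAL}
        if arg_loglevel in loglevel_mapping:
            return loglevel_mapping[arg_loglevel]
        else:
            # Assumed fallback: warn and default to INFO for unrecognized values.
            logging.warning("Invalid logging level. Defaulting to INFO.")
            return logging.INFO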