Merge pull request #20 from makoshark/master

diff --git a/wikipedia/scripts/fetch_enwiki_daily_views.py b/wikipedia/scripts/fetch_enwiki_daily_views.py
index 829343de318c9aaf906350d2ad57880482547ce8..9f147e07a30cc20e78b27ec1afb71c64be582b1e 100755
--- a/wikipedia/scripts/fetch_enwiki_daily_views.py
+++ b/wikipedia/scripts/fetch_enwiki_daily_views.py
@@ -58,8 +58,8 @@ def main():
     logging.info(f"Last commit: {digobs.git_hash()}")
 
     #1 Load up the list of article names
-    j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.json")
-    t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.tsv")
+    j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.json")
+    t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.tsv")
 
     with open(articleFile, 'r') as infile:
         articleList = list(map(str.strip, infile))
@@ -82,6 +82,7 @@ def main():
             else:
                 failure = failure + 1
                 logging.warning(f"Failure: {response.status_code} from {url}")
+                continue
 
             # start writing the CSV File if it doesn't exist yet
             try:

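The added continue is the substantive fix in the second hunk: without it, execution falls through to the output-writing code even when the pageviews API returns a non-200 status. Below is a minimal sketch of that loop pattern, assuming the Wikimedia per-article pageviews REST endpoint and using illustrative stand-ins (articleList, query_date, rows) for the script's own variables and its JSON/TSV output step.

import logging
import requests

# Illustrative stand-ins for the script's own setup; these names are
# assumptions, not the script's actual values beyond those visible in the diff.
articleList = ["COVID-19_pandemic", "Coronavirus"]
query_date = "20200401"
success = 0
failure = 0
rows = []  # stands in for the script's JSON/TSV output step

for article in articleList:
    # Assumed endpoint: the Wikimedia per-article pageviews REST API.
    url = (
        "https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/"
        f"en.wikipedia/all-access/all-agents/{article}/daily/"
        f"{query_date}00/{query_date}00"
    )
    response = requests.get(url)
    if response.ok:
        success = success + 1
    else:
        failure = failure + 1
        logging.warning(f"Failure: {response.status_code} from {url}")
        # Without this continue, the loop would fall through and try to
        # record output for a request that returned no usable data.
        continue

    # Only reached on success.
    rows.append(response.json())

logging.info(f"Collected {success} successes and {failure} failures")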