code.communitydata.science - covid19.git/commitdiff
changes to allow historical view data collection
authorBenjamin Mako Hill <mako@atdot.cc>
Thu, 2 Apr 2020 18:28:34 +0000 (13:28 -0500)
committerBenjamin Mako Hill <mako@atdot.cc>
Thu, 2 Apr 2020 18:28:34 +0000 (13:28 -0500)
- fix bug where it would fail if the first article had no view data
- add ability to override dates in the cron script

cron-wikipedia_views.sh
wikipedia/scripts/fetch_enwiki_daily_views.py

index 4b39caab30117cab45941f89fd9c563b60934625..4afe380ee1a75902e5e019061924c52dee6d4c78 100644 (file)
@@ -1,13 +1,13 @@
 #!/bin/bash -x
 
 TZ="UTC"
-date_string=$(date +%Y%m%d)
+date_string=${OVERRIDE_DATE_STRING:-$(date +%Y%m%d)}
 
 view_log="enwp-daily_views-${date_string}.log"
-./wikipedia/scripts/wikiproject_scraper.py 2> >(tee wikipedia/logs/{$view_log})
+./wikipedia/scripts/wikiproject_scraper.py 2> >(tee wikipedia/logs/${view_log})
 
 # get the list of files
-./wikipedia/scripts/fetch_enwiki_daily_views.py 2> >(tee -a wikipedia/logs/${view_log})
+./wikipedia/scripts/fetch_enwiki_daily_views.py -d "${date_string}" 2> >(tee -a wikipedia/logs/${view_log})
 mv wikipedia/logs/${view_log} /var/www/covid19/wikipedia/logs/${view_log}
 mv wikipedia/data/digobs_covid19-wikipedia-enwiki_dailyviews-${date_string}.tsv /var/www/covid19/wikipedia/
 
index 829343de318c9aaf906350d2ad57880482547ce8..9f147e07a30cc20e78b27ec1afb71c64be582b1e 100755 (executable)
@@ -58,8 +58,8 @@ def main():
     logging.info(f"Last commit: {digobs.git_hash()}")
 
     #1 Load up the list of article names
-    j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.json")
-    t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.tsv")
+    j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.json")
+    t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.tsv")
 
     with open(articleFile, 'r') as infile:
         articleList = list(map(str.strip, infile))
@@ -82,6 +82,7 @@ def main():
             else:
                 failure = failure + 1
                 logging.warning(f"Failure: {response.status_code} from {url}")
+                continue
 
             # start writing the CSV File if it doesn't exist yet
             try:

Community Data Science Collective || Want to submit a patch?