#!/bin/bash -x
export TZ="UTC"  # export, so date(1) in the command substitution actually sees it
-date_string=$(date +%Y%m%d)
+date_string=${OVERRIDE_DATE_STRING:-$(date +%Y%m%d)}
view_log="enwp-daily_views-${date_string}.log"
-./wikipedia/scripts/wikiproject_scraper.py 2> >(tee wikipedia/logs/{$view_log})
+./wikipedia/scripts/wikiproject_scraper.py 2> >(tee wikipedia/logs/${view_log})
# get the list of files
-./wikipedia/scripts/fetch_enwiki_daily_views.py 2> >(tee -a wikipedia/logs/${view_log})
+./wikipedia/scripts/fetch_enwiki_daily_views.py -d "${date_string}" 2> >(tee -a wikipedia/logs/${view_log})
mv wikipedia/logs/${view_log} /var/www/covid19/wikipedia/logs/${view_log}
mv wikipedia/data/digobs_covid19-wikipedia-enwiki_dailyviews-${date_string}.tsv /var/www/covid19/wikipedia/
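On the Python side, the `-d` flag added above has to land somewhere. Here is a minimal argparse sketch of how `fetch_enwiki_daily_views.py` might accept it; the long option `--query_date` is an assumption (only `-d` and the name `query_date` appear in the diff), and defaulting to today's date mirrors the `${OVERRIDE_DATE_STRING:-$(date +%Y%m%d)}` fallback in the shell script:

```python
import argparse
from datetime import date

def parse_args():
    parser = argparse.ArgumentParser(
        description="Fetch daily English Wikipedia pageviews.")
    # -d is the flag the shell script passes; --query_date is an assumed
    # long form. The default mirrors the shell script's date fallback.
    parser.add_argument("-d", "--query_date",
                        default=date.today().strftime("%Y%m%d"),
                        help="date to query, formatted YYYYMMDD")
    return parser.parse_args()

args = parse_args()
query_date = args.query_date
```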
logging.info(f"Last commit: {digobs.git_hash()}")
# 1: Load up the list of article names
- j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.json")
- t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.tsv")
+ j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.json")
+ t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{query_date}.tsv")
with open(articleFile, 'r') as infile:
articleList = list(map(str.strip, infile))
else:
failure = failure + 1
logging.warning(f"Failure: {response.status_code} from {url}")
+ continue
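The added `continue` is the substantive fix here: without it, execution falls through to the output-writing code below and a row is emitted even for a failed request. A minimal sketch of the surrounding fetch loop, reusing `articleList` and `query_date` from above; the endpoint URL and the `success` counter are illustrative, not taken from the script:

```python
import logging
import requests

success, failure = 0, 0
for article in articleList:
    # Illustrative Wikimedia pageviews endpoint; the script's actual URL
    # construction may differ.
    url = (f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/"
           f"en.wikipedia/all-access/all-agents/{article}/daily/"
           f"{query_date}00/{query_date}00")
    response = requests.get(url)
    if response.status_code == 200:
        success = success + 1
    else:
        failure = failure + 1
        logging.warning(f"Failure: {response.status_code} from {url}")
        continue  # skip the JSON/TSV writes below for this article
    # ... parse response.json() and append to the JSON and TSV outputs ...
```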
# start writing the TSV file if it doesn't exist yet
try:
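The truncated `try:` presumably guards that first write. One common shape for the create-or-append pattern, sketched here with an existence check in place of the script's try/except, writing to `t_outfilename` from above (the field names are hypothetical):

```python
import csv
import os

fieldnames = ["article", "views", "timestamp"]  # hypothetical columns
header_needed = not os.path.exists(t_outfilename)
with open(t_outfilename, "a", newline="") as t_outfile:
    writer = csv.DictWriter(t_outfile, fieldnames=fieldnames, delimiter="\t")
    if header_needed:
        writer.writeheader()  # write the header only on first creation
```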