code.communitydata.science - covid19.git/commitdiff
changes in response to code review by nate
author    Benjamin Mako Hill <mako@atdot.cc>
Wed, 1 Apr 2020 22:16:34 +0000 (17:16 -0500)
committer Benjamin Mako Hill <mako@atdot.cc>
Wed, 1 Apr 2020 22:16:34 +0000 (17:16 -0500)
- moved some common functions into files
- other smaller changes

wikipedia/scripts/digobs.py [new file with mode: 0644]
wikipedia/scripts/fetch_enwiki_daily_views.py
wikipedia/scripts/fetch_enwiki_revisions.py

diff --git a/wikipedia/scripts/digobs.py b/wikipedia/scripts/digobs.py
new file mode 100644 (file)
index 0000000..0bce250
--- /dev/null
+++ b/wikipedia/scripts/digobs.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python3
+
+import sys
+import subprocess
+import logging
+
+def git_hash(short=False):
+    if short:
+        return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().strip()
+    else:
+        return subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
+
+def get_loglevel(arg_loglevel):
+    loglevel_mapping = { 'debug' : logging.DEBUG,
+                         'info' : logging.INFO,
+                         'warning' : logging.WARNING,
+                         'error' : logging.ERROR,
+                         'critical' : logging.CRITICAL }
+
+    if arg_loglevel in loglevel_mapping:
+        loglevel = loglevel_mapping[arg_loglevel]
+        return loglevel
+    else:
+        print("Choose a valid log level: debug, info, warning, error, or critical", file=sys.stderr)
+        return logging.INFO
+
+
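For context, both fetch scripts below consume these helpers the same way; here is a minimal usage sketch (the literal 'info' stands in for the value of the scripts' -L/--logging_level argument, and git_hash() assumes the script is run from inside the covid19.git checkout):

    import logging
    import digobs

    # map the command-line log level string to a logging constant;
    # get_loglevel() falls back to logging.INFO on unrecognized input
    loglevel = digobs.get_loglevel('info')
    logging.basicConfig(level=loglevel)

    # record which commit of the scraper produced this export;
    # requires the working directory to be inside the git repository
    logging.info(f"Last commit: {digobs.git_hash()}")
    logging.info(f"Short hash: {digobs.git_hash(short=True)}")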
diff --git a/wikipedia/scripts/fetch_enwiki_daily_views.py b/wikipedia/scripts/fetch_enwiki_daily_views.py
index f766ed8eb6ffaa56ba1fa1bf08614cd7fb4c12ba..7015a3b5a2ac03fa4ec1bf5ce4ee06d2958f8a4e 100755 (executable)
--- a/wikipedia/scripts/fetch_enwiki_daily_views.py
+++ b/wikipedia/scripts/fetch_enwiki_daily_views.py
 ###############################################################################
 
 import sys
-import subprocess
 import requests
 import argparse
 import json
 import time
 import os.path
-import argparse
 import datetime
 import logging
 from csv import DictWriter
+import digobs
 #import feather #TBD
 
 def parse_args():
@@ -47,17 +46,7 @@ def main():
         query_date = yesterday.strftime("%Y%m%d")
 
     #handle -L
-    loglevel_mapping = { 'debug' : logging.DEBUG,
-                         'info' : logging.INFO,
-                         'warning' : logging.WARNING,
-                         'error' : logging.ERROR,
-                         'critical' : logging.CRITICAL }
-
-    if args.logging_level in loglevel_mapping:
-        loglevel = loglevel_mapping[args.logging_level]
-    else:
-        print("Choose a valid log level: debug, info, warning, error, or critical")
-        exit
+    loglevel = digobs.get_loglevel(args.logging_level)
 
     #handle -W
     if args.logging_destination:
@@ -65,20 +54,18 @@ def main():
     else:
         logging.basicConfig(level=loglevel)
 
-    export_git_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
-    export_git_short_hash = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().strip()
     export_time = str(datetime.datetime.now())
     export_date = datetime.datetime.today().strftime("%Y%m%d")
 
     logging.info(f"Starting run at {export_time}")
-    logging.info(f"Last commit: {export_git_hash}")
+    logging.info(f"Last commit: {digobs.git_hash()}")
 
     #1 Load up the list of article names
     j_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.json")
     t_outfilename = os.path.join(outputPath, f"digobs_covid19-wikipedia-enwiki_dailyviews-{export_date}.tsv")
 
     with open(articleFile, 'r') as infile:
-        articleList = list(infile)
+        articleList = list(map(str.strip, infile))
 
     success = 0 #for logging how many work/fail
     failure = 0
@@ -89,7 +76,6 @@ def main():
 
         #2 Repeatedly call the API with that list of names
         for a in articleList:
-            a = a.strip("\"\n") #destringify
             url= f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/{a}/daily/{query_date}00/{query_date}00"
 
             response = requests.get(url)
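For readers following along: each request above should return a JSON payload whose items array holds one record per day in the requested range, each with a views count. A minimal sketch of the fetch-and-parse step for a hypothetical article title (field names per the Wikimedia pageviews REST API; error handling elided):

    import requests

    article = "COVID-19_pandemic"  # hypothetical example title
    query_date = "20200401"
    url = f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/{article}/daily/{query_date}00/{query_date}00"

    response = requests.get(url)
    if response.ok:
        # a single day was requested, so items should hold one record
        for item in response.json()['items']:
            print(item['article'], item['timestamp'], item['views'])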
diff --git a/wikipedia/scripts/fetch_enwiki_revisions.py b/wikipedia/scripts/fetch_enwiki_revisions.py
index 6a3563f2588b99192280e22bfc52d73f29c373e4..2c0ef7ab1aed2d179507258416ef9544b550cbc5 100755 (executable)
--- a/wikipedia/scripts/fetch_enwiki_revisions.py
+++ b/wikipedia/scripts/fetch_enwiki_revisions.py
@@ -13,12 +13,12 @@ import argparse
 import logging
 import os.path
 import json
-import subprocess
 import datetime
 
 from requests import Request
 from csv import DictWriter
 from mw import api
+import digobs
 
 
 def parse_args():
@@ -38,17 +38,7 @@ def main():
     article_filename = args.article_file
 
     #handle -L
-    loglevel_mapping = { 'debug' : logging.DEBUG,
-                         'info' : logging.INFO,
-                         'warning' : logging.WARNING,
-                         'error' : logging.ERROR,
-                         'critical' : logging.CRITICAL }
-
-    if args.logging_level in loglevel_mapping:
-        loglevel = loglevel_mapping[args.logging_level]
-    else:
-        print("Choose a valid log level: debug, info, warning, error, or critical")
-        exit
+    loglevel = digobs.get_loglevel(args.logging_level)
 
     #handle -W
     if args.logging_destination:
@@ -56,13 +46,11 @@ def main():
     else:
         logging.basicConfig(level=loglevel)
 
-    export_git_hash = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
-    export_git_short_hash = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode().strip()
     export_time = str(datetime.datetime.now())
     export_date = datetime.datetime.today().strftime("%Y%m%d")
 
     logging.info(f"Starting run at {export_time}")
-    logging.info(f"Last commit: {export_git_hash}")
+    logging.info(f"Last commit: {digobs.git_hash()}")
 
     json_output_filename = os.path.join(output_path, f"digobs_covid19-wikipedia-enwiki_revisions-{export_date}.json")
     tsv_output_filename =  os.path.join(output_path, f"digobs_covid19-wikipedia-enwiki_revisions-{export_date}.tsv")
@@ -88,7 +76,7 @@ def main():
 
     # load the list of articles
     with open(article_filename, 'r') as infile:
-        article_list = [art.strip() for art in list(infile)]
+        article_list= list(map(str.strip, infile))
 
     def get_revisions_for_page(title):
         return api_session.revisions.query(properties=rv_props.values(),
@@ -104,7 +92,7 @@ def main():
     # add special export fields
     tsv_fields = tsv_fields + ['anon', 'minor', 'url', 'export_timestamp', 'export_commit']
 
-    export_info = { 'git_commit' : export_git_hash,
+    export_info = { 'git_commit' : digobs.git_hash(),
                     'timestamp' : export_time }
 
     with open(json_output_filename, 'w') as json_output, \
@@ -155,7 +143,7 @@ def main():
                                             'oldid' : rev['revid']}).prepare().url
 
                 rev['export_timestamp'] = export_time
-                rev['export_commit'] = export_git_short_hash
+                rev['export_commit'] = digobs.git_hash(short=True)
 
                 tsv_writer.writerow({k: rev[k] for k in tsv_fields})
 
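A note on the last hunk: tsv_writer is a csv.DictWriter opened earlier in the enclosing with block, and export_commit is the value digobs.git_hash(short=True) now supplies. A minimal, self-contained sketch of that output path (the field list and revision record are abbreviated, hypothetical stand-ins for the real rv_props-derived fields):

    from csv import DictWriter

    tsv_fields = ['title', 'revid', 'export_timestamp', 'export_commit']  # abbreviated
    rev = {'title': 'Example_article', 'revid': 12345,
           'export_timestamp': '2020-04-01 17:16:34',
           'export_commit': 'abc1234'}  # hypothetical revision record

    with open('revisions.tsv', 'w', newline='') as tsv_output:
        tsv_writer = DictWriter(tsv_output, fieldnames=tsv_fields, delimiter='\t')
        tsv_writer.writeheader()
        # same projection the script uses to keep only the TSV columns
        tsv_writer.writerow({k: rev[k] for k in tsv_fields})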