code.communitydata.science - covid19.git/commitdiff
Merge branch 'kaylea/master' of github.com:CommunityDataScienceCollective/COVID-19_Di... kaylea/master
authorNathan TeBlunthuis <nathante@uw.edu>
Sat, 28 Mar 2020 21:13:46 +0000 (14:13 -0700)
committerNathan TeBlunthuis <nathante@uw.edu>
Sat, 28 Mar 2020 21:13:46 +0000 (14:13 -0700)
wikipedia_views/scripts/fetch_daily_views.py

index ab824c20db507d4a2c47fb064582d09b1ac08109..b604e264ac2a1292b577e7b3aa407138d946380d 100755 (executable)
@@ -57,23 +57,23 @@ def main():
         next(infile) #skip header
         articleList = list(infile)
 
-        j = []
-
-        #2 Repeatedly call the API with that list of names
-
-        for a in articleList:
-            a = a.strip("\"\n") #destringify
-            url= f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/{a}/daily/{queryDate}/{queryDate}"
-
-            response = requests.get(url)
-            if response.ok:
-                jd = json.loads(response.content)
-                j.append(jd["items"][0])
-                time.sleep(.1)
-            else:
-                print(f"Not ok response: {response.status_code} from {url}")
-                
-        #3 Save results as a JSON and TSV
+    j = []
+
+    #2 Repeatedly call the API with that list of names
+
+    for a in articleList:
+        a = a.strip("\"\n") #destringify
+        url= f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/{a}/daily/{queryDate}/{queryDate}"
+
+        response = requests.get(url)
+        if response.ok:
+            jd = json.loads(response.content)
+            j.append(jd["items"][0])
+            time.sleep(.1)
+        else:
+            print(f"Not ok response: {response.status_code} from {url}")
+
+    #3 Save results as a JSON and TSV
 
     #all data in j now, make json file
     with open(j_Out, 'w') as j_outfile: 

Community Data Science Collective || Want to submit a patch?