Merge branch 'kaylea/master' of github.com:CommunityDataScienceCollective/COVID-19_Di...
[covid19.git] / wikipedia_views / scripts / fetch_daily_views.py
index 18bc01fdd512a7268d6377a7261be0a236701524..b604e264ac2a1292b577e7b3aa407138d946380d 100755 (executable)
@@ -55,25 +55,25 @@ def main():
 
     with open(articleFile, 'r') as infile:
         next(infile) #skip header
-        articleList = infile
+        articleList = list(infile)
 
-        j = []
+    j = []
 
-        #2 Repeatedly call the API with that list of names
+    #2 Repeatedly call the API with that list of names
 
-        for a in articleList:
-            a = a.strip("\"\n") #destringify
-            url= f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/{a}/daily/{queryDate}/{queryDate}"
+    for a in articleList:
+        a = a.strip("\"\n") #destringify
+        url= f"https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/en.wikipedia/all-access/all-agents/{a}/daily/{queryDate}/{queryDate}"
 
-            response = requests.get(url)
-            if response.ok:
-                jd = json.loads(response.content)
-                j.append(jd["items"][0])
-                time.sleep(.1)
-            else:
-                print(f"Not ok response: {response.status_code} from {url}")
-                
-        #3 Save results as a JSON and TSV
+        response = requests.get(url)
+        if response.ok:
+            jd = json.loads(response.content)
+            j.append(jd["items"][0])
+            time.sleep(.1)
+        else:
+            print(f"Not ok response: {response.status_code} from {url}")
+
+    #3 Save results as a JSON and TSV
 
     #all data in j now, make json file
     with open(j_Out, 'w') as j_outfile: 

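Note on the change: this hunk materializes the article file with list(infile) and dedents the request loop out of the `with` block, so the file handle is fully read and closed before any API calls are made. The sketch below shows the resulting flow in a self-contained form, assuming the variable names from the diff (articleFile, queryDate, j); the function wrapper and the example arguments are illustrative assumptions, not part of the original script.

# Minimal sketch of the post-change pattern: read titles eagerly, close the
# file, then query the Wikimedia pageviews REST API once per title.
# The fetch_daily_views wrapper and the sample arguments are hypothetical.
import json
import time

import requests

def fetch_daily_views(articleFile, queryDate):
    # list(infile) consumes the file inside the `with` block, unlike
    # iterating the handle lazily, so the loop below runs after close().
    with open(articleFile, 'r') as infile:
        next(infile)                 # skip header row
        articleList = list(infile)

    j = []
    for a in articleList:
        a = a.strip("\"\n")          # drop surrounding quotes and newline
        url = (f"https://wikimedia.org/api/rest_v1/metrics/pageviews/"
               f"per-article/en.wikipedia/all-access/all-agents/"
               f"{a}/daily/{queryDate}/{queryDate}")
        response = requests.get(url)
        if response.ok:
            jd = json.loads(response.content)
            j.append(jd["items"][0])
            time.sleep(.1)           # brief pause between requests
        else:
            print(f"Not ok response: {response.status_code} from {url}")
    return j

# Hypothetical usage: a file of quoted titles (one per line after a header)
# and a YYYYMMDDHH timestamp, as the per-article daily endpoint expects.
# views = fetch_daily_views("articles.csv", "2020032600")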