### COVID-19 Digital Observatory
### Minimal example analysis file using pageview data

library(tidyverse)
library(scales)

### Import and clean up one datafile from the observatory

DataURL <-
    url("https://covid19.communitydata.science/datasets/wikipedia/digobs_covid19-wikipedia-enwiki_dailyviews-20200401.tsv")

views <-
    read.table(DataURL, sep="\t", header=TRUE, stringsAsFactors=FALSE)
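
### Optional sanity check (not part of the original pipeline): confirm
### the import produced the "article", "project", "timestamp", and
### "views" columns used below
## str(views)
## head(views)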

### Alternatively, uncomment and run the following if working locally
### with a full git tree

### Identify the data source directory and file
## DataDir <- ("../data/")
## DataFile <- ("dailyviews2020032600.tsv")

## views <- read.table(paste(DataDir, DataFile, sep=""),
##                     sep="\t", header=TRUE,
##                     stringsAsFactors=FALSE)

### Clean up and do the grouping with functions from the Tidyverse
### (see https://www.tidyverse.org for more info)

## Keep only the columns we need and turn missing timestamps into an
## explicit factor level
views <- views[, c("article", "project", "timestamp", "views")]
views$timestamp <- fct_explicit_na(views$timestamp)
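
### The same cleanup in pipe style with dplyr/forcats, in case that
### reads more clearly (an equivalent sketch, not the original code):
## views <- views %>%
##     select(article, project, timestamp, views) %>%
##     mutate(timestamp = fct_explicit_na(timestamp))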

### Sort and group at the same time
views.by.proj.date <- arrange(group_by(views, project, timestamp),
                              desc(views))
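
### Equivalent pipe form of the call above (a sketch; produces the same
### grouped table sorted by descending views):
## views.by.proj.date <- views %>%
##     group_by(project, timestamp) %>%
##     arrange(desc(views))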

### Export just the top 10 by pageviews
write.table(head(views.by.proj.date, 10),
            file="output/top10_views_by_project_date.csv", sep=",",
            row.names=FALSE)

### A simple visualization

p <- ggplot(data=views.by.proj.date, aes(x=views))

## Density plot with a log-transformed x axis
p + geom_density() + scale_x_log10(labels=comma)
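
### To write the figure to disk, ggsave() saves the last plot drawn;
### the filename here is just an illustration:
## ggsave("output/views_density.png", width=6, height=4)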