"term","date","query.1","query.2","query.3","query.4","query.5"
-"coronavirus","2020-03-27",coronavirus update,corona,coronavirus symptoms,news coronavirus,coronavirus cases
-"covid-19","2020-03-27",covid-19 coronavirus,coronavirus,covid,covid-19 cases,covid 19
-"covid-19 pandemic","2020-03-27",coronavirus,covid-19 coronavirus pandemic,coronavirus pandemic,who,is covid-19 a pandemic
-"covid19","2020-03-27",covid,covid 19,coronavirus covid19,coronavirus,covid19 cases
-"sars-cov-2","2020-03-27",coronavirus,coronavirus sars-cov-2,covid-19,covid-19 sars-cov-2,sars
+"coronavirus",2020-03-27,coronavirus update,corona,coronavirus symptoms,news coronavirus,coronavirus cases
+"covid-19",2020-03-27,covid-19 coronavirus,coronavirus,covid,covid-19 cases,covid 19
+"covid-19 pandemic",2020-03-27,coronavirus,covid-19 coronavirus pandemic,coronavirus pandemic,who,is covid-19 a pandemic
+"covid19",2020-03-27,covid,covid 19,coronavirus covid19,coronavirus,covid19 cases
+"sars-cov-2",2020-03-27,coronavirus,coronavirus sars-cov-2,covid-19,covid-19 sars-cov-2,sars
+"coronavirus",2020-03-28,coronavirus update,corona,coronavirus symptoms,news coronavirus,coronavirus cases
+"covid-19",2020-03-28,coronavirus,coronavirus covid-19,covid,covid-19 cases,covid 19
+"covid-19 pandemic",2020-03-28,coronavirus pandemic,coronavirus,covid-19 coronavirus pandemic,is covid-19 a pandemic,who pandemic
+"covid19",2020-03-28,covid,covid 19,coronavirus covid19,coronavirus,covid19 cases
+"sars-cov-2",2020-03-28,coronavirus sars-cov-2,coronavirus,sars-cov-2 covid-19,covid-19,sars
###
### Minimal example analysis file using trending search data
-### Identify data source directory and file
-DataDir <- ("../data/output/")
-DataFile <- ("related_searches_top.csv")
-
### Import and cleanup data
-related.searches.top <- read.table(paste(DataDir,DataFile,
- sep=""),
+
+DataURL <-
+ url("https://github.com/CommunityDataScienceCollective/COVID-19_Digital_Observatory/blob/master/transliterations/data/output/related_searches_top.csv")
+
+related.searches.top <- read.table(DataURL,
sep=",", header=TRUE,
stringsAsFactors=FALSE)
+### Alternatively, uncomment and run if working locally with full git tree
+### Identify data source directory and file
+## DataDir <- ("../data/output/")
+## DataFile <- ("related_searches_top.csv")
+
+## related.searches.top <- read.table(paste(DataDir,DataFile, sep=""),
+## sep=",", header=TRUE,
+## stringsAsFactors=FALSE)
+
### Aggregate top 5 search queries by term/day
top5.per.term.date <- aggregate(query ~ term + date,
data=related.searches.top,
--- /dev/null
+"article","project","timestamp","views"
+"2019–20_coronavirus_pandemic","en.wikipedia","2020032600",1148284
+"2020_coronavirus_pandemic_in_India","en.wikipedia","2020032600",513901
+"Coronavirus","en.wikipedia","2020032600",397959
+"2020_coronavirus_pandemic_in_the_United_States","en.wikipedia","2020032600",337676
+"2019–20_coronavirus_pandemic_by_country_and_territory","en.wikipedia","2020032600",298603
+"2020_coronavirus_pandemic_in_Italy","en.wikipedia","2020032600",297687
+"Coronavirus_disease_2019","en.wikipedia","2020032600",292272
+"2020_coronavirus_pandemic_in_Spain","en.wikipedia","2020032600",114732
+"2020_coronavirus_pandemic_in_the_United_Kingdom","en.wikipedia","2020032600",111856
+"Anthony_Fauci","en.wikipedia","2020032600",103205
--- /dev/null
+### COVID-19 Digital Observatory
+### 2020-03-28
+###
+### Minimal example analysis file using pageview data
+
+library(tidyverse)
+library(ggplot2)
+library(scales)
+
+### Import and cleanup data
+
+DataURL <-
+ url("https://github.com/CommunityDataScienceCollective/COVID-19_Digital_Observatory/raw/master/wikipedia_views/data/dailyviews2020032600.tsv")
+
+views <-
+ read.table(DataURL, sep="\t", header=TRUE, stringsAsFactors=FALSE)
+
+### Alternatively, uncomment and run if working locally with full git
+### tree
+###
+### Identify data source directory and file
+## DataDir <- ("../data/")
+## DataFile <- ("dailyviews2020032600.tsv")
+
+## views <- read.table(paste(DataDir,DataFile, sep=""),
+## sep="\t", header=TRUE,
+## stringsAsFactors=FALSE)
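+
+### Optional sanity check on the import, whichever source you used:
+### str() and head() are base R and just print the structure and the
+### first few rows so you can confirm the columns came through
+## str(views)
+## head(views)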
+
+### Cleanup and do the grouping with functions from the Tidyverse
+### (see https://www.tidyverse.org for more info)
+
+views <- views[,c("article", "project", "timestamp", "views")]
+views$timestamp <- factor(views$timestamp)
+
+### Sorts and groups at the same time
+views.by.proj.date <- arrange(group_by(views, project, timestamp),
+ desc(views))
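+
+### The same grouping and sorting can also be written with the
+### magrittr pipe that the tidyverse loads, if that reads more
+### clearly; the result is identical
+## views.by.proj.date <- views %>%
+##   group_by(project, timestamp) %>%
+##   arrange(desc(views))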
+
+### Export just the top 10 by pageviews
+write.table(head(views.by.proj.date, 10),
+ file="output/top10_views_by_project_date.csv", sep=",",
+ row.names=FALSE)
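+
+### Note: write.table() will fail if the output/ directory does not
+### already exist; if needed, this base R call creates it (and does
+### nothing when the directory is already there)
+## dir.create("output", showWarnings=FALSE)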
+
+### A simple visualization
+p <- ggplot(data=views.by.proj.date, aes(views))
+
+## Density plot with log-transformed axis
+p + geom_density() + scale_x_log10(labels=comma)
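+
+### To keep a copy of the figure, ggsave() writes the most recently
+### displayed ggplot to disk; the filename and size here are just an
+### example
+## ggsave("output/views_density.png", width=6, height=4)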
+