# generate a list of wikidata entities related to keywords
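#
# Example invocations (script and file names here are illustrative, not part of the repo):
#   python wikidata_search.py base_terms.txt --output wikidata_entities.csv
#   python wikidata_search.py gtrends_queries.csv --use-gtrends --output wikidata_entities.csv --overwrite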
from os import path
from sys import stdout
from itertools import chain
import argparse
import csv

from wikidata_api_calls import search_wikidata, get_wikidata_api

class Wikidata_ResultSet:

    def __init__(self):
        # one generator of Wikidata_Result objects per search term
        self.results = []

    def extend(self, term, results):
        self.results.append(
            (Wikidata_Result(term, result, i)
             for i, result in enumerate(results))
        )

    def to_csv(self, outfile=None, mode='w'):
        # default to stdout when no output file is given
        if outfile is None:
            of = stdout
        elif path.exists(outfile) and mode != 'w':
            of = open(outfile, 'a', newline='')
        else:
            of = open(outfile, 'w', newline='')

        writer = csv.writer(of)
        writer.writerow(Wikidata_Result.__slots__)
        writer.writerows(map(Wikidata_Result.to_list, chain(*self.results)))
        if of is not stdout:
            of.close()

class Wikidata_Result:
    # store unique entities found in the search results, the position in the search result, and the date
    __slots__ = ['search_term', 'entityid', 'pageid', 'search_position', 'timestamp']

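    # each search_result is expected to be a dict from the Wikidata search API
    # (as returned via wikidata_api_calls) carrying at least 'title' (the entity QID),
    # 'pageid', and 'timestamp'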
    def __init__(self, term, search_result, position):
        self.search_term = term.strip()
        self.entityid = search_result['title']
        self.pageid = int(search_result['pageid'])
        self.search_position = int(position)
        self.timestamp = search_result['timestamp']

    def to_list(self):
        # values in the same order as __slots__ (and the CSV header)
        return [self.search_term,
                self.entityid,
                self.pageid,
                self.search_position,
                self.timestamp]

def run_wikidata_searches(terms):
    # run a Wikidata search for each term and collect the results
    api = get_wikidata_api()
    resultset = Wikidata_ResultSet()
    for term in terms:
        search_results = search_wikidata(api, term)
        resultset.extend(term, search_results)
    return resultset

def read_google_trends_files(terms_files):
    # yield the query terms from one or more Google Trends CSV exports
    def _read_file(infile):
        return csv.DictReader(open(infile, 'r', newline=''))

    for row in chain(*[_read_file(terms_file) for terms_file in terms_files]):
        # assumes each row of the trends CSV has a 'query' column holding the search term
        yield row['query']

def trawl_google_trends(terms_files, outfile=None, mode='w'):
    terms = read_google_trends_files(terms_files)
    resultset = run_wikidata_searches(terms)
    resultset.to_csv(outfile, mode)

def trawl_base_terms(infiles, outfile=None, mode='w'):
    terms = chain(*(open(infile, 'r') for infile in infiles))
    resultset = run_wikidata_searches(terms)
    resultset.to_csv(outfile, mode)

## search each of the base terms in wikidata
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Search wikidata for entities related to a set of terms.")
    parser.add_argument('inputs', type=str, nargs='+', help='one or more files to read')
    parser.add_argument('--use-gtrends', action='store_true', help='toggle whether the input is the output from google trends')
    parser.add_argument('--output', type=str, help='an output file. defaults to stdout')
    parser.add_argument('--overwrite', action='store_true', help='overwrite existing output files instead of appending')
    args = parser.parse_args()
    # append to an existing output file unless --overwrite is given
    mode = 'w' if args.overwrite else 'a'

    if args.use_gtrends:
        trawl_google_trends(args.inputs, args.output, mode)
    else:
        trawl_base_terms(args.inputs, args.output, mode)