import csv

from itertools import chain, islice
from json import JSONDecodeError

from wikidata_api_calls import run_sparql_query
class LabelData:
    """One (entity, label, language, is_alt) record parsed from a Wikidata
    SPARQL result binding, in CSV-row order.

    NOTE(review): the source listing was truncated — the ``class`` header,
    the ``is_alt`` assignment, and the tail of ``to_list`` were not visible
    and have been reconstructed from ``__slots__``; confirm against the
    full file.
    """

    __slots__ = ['entityid', 'label', 'langcode', 'is_alt']

    def __init__(self, wd_res, entityid, is_alt):
        # wd_res is one SPARQL binding, expected shape:
        # {'label': {'value': <text>, 'xml:lang': <code>}} — TODO confirm.
        obj = wd_res.get('label', None)
        if obj is not None:
            self.label = obj.get('value', None)
            self.langcode = obj.get('xml:lang', None)
        else:
            # Robustness fix: the original dereferenced the result of
            # .get('label', None) unconditionally and crashed with
            # AttributeError when the binding had no 'label' key.
            self.label = None
            self.langcode = None
        self.entityid = entityid
        self.is_alt = is_alt

    def to_list(self):
        """Return the fields in the same order as ``__slots__``."""
        return [self.entityid,
                self.label,
                self.langcode,
                self.is_alt]
def GetAllLabels(in_csv, outfile, topN):
    """Fetch Wikidata labels for top-ranked search hits and write them as CSV.

    Reads ``in_csv`` (must have 'search_position' and an entity-id column),
    keeps rows ranked strictly below ``topN``, fetches every label/altLabel
    for each distinct entity via ``GetEntityLabels``, and writes one CSV row
    per label to ``outfile`` with a header taken from ``LabelData.__slots__``.

    NOTE(review): the loop body of ``load_entity_ids`` was missing from the
    truncated listing; the iteration and the yielded column name ('entity')
    were reconstructed — verify the column name against the input CSV schema.
    """

    def load_entity_ids(in_csv, topN=5):
        # Yield the entity id of every row ranked above the topN cutoff.
        with open(in_csv, 'r', newline='') as infile:
            reader = csv.DictReader(infile)
            for row in reader:
                if int(row['search_position']) < topN:
                    yield row['entity']

    # set() both deduplicates ids and fully drains the generator before
    # any network calls start.
    ids = set(load_entity_ids(in_csv, topN))

    # One flat, lazy stream of LabelData across all entities.
    labeldata = chain(*map(GetEntityLabels, ids))

    with open(outfile, 'w', newline='') as of:
        writer = csv.writer(of)
        writer.writerow(LabelData.__slots__)
        writer.writerows(map(LabelData.to_list, labeldata))
def GetEntityLabels(entityid):
    """Yield a LabelData for every rdfs:label and skos:altLabel of entityid.

    Runs two SPARQL queries against Wikidata (one for canonical labels,
    one for alternative labels) and chains their parsed results lazily.

    NOTE(review): several lines of the original were missing from the
    truncated listing (the .json() call, the try/for structure, and the
    tails of both query strings — the trailing ';' on the visible triple
    suggests a further clause, e.g. a language FILTER). Reconstructed
    minimally; confirm against the full file.
    """

    def run_query_and_parse(query, entityid, is_alt):
        # run_sparql_query is a project helper; presumably returns a
        # requests-style response object — TODO confirm.
        results = run_sparql_query(query % entityid)
        try:
            jobj = results.json()
            res = jobj.get('results', None)
            if res is not None:
                res = res.get('bindings', None)
            if res is None:
                # BUG FIX: the original raised requests.APIError, which does
                # not exist in the requests package — at runtime that line
                # raised AttributeError instead of the intended error.
                raise RuntimeError(f"got invalid response from wikidata for {query % entityid}")
            for info in res:
                yield LabelData(info, entityid, is_alt)
        except JSONDecodeError as e:
            # Best-effort: log the failure and the offending query, then
            # yield nothing for this entity (preserves the original's
            # non-fatal handling of malformed responses).
            print(e)
            print(query % entityid)

    label_base_query = """
SELECT DISTINCT ?label WHERE {
    wd:%s rdfs:label ?label .
}"""

    altLabel_base_query = """
SELECT DISTINCT ?label WHERE {
    wd:%s skos:altLabel ?label .
}"""

    label_results = run_query_and_parse(label_base_query, entityid, is_alt=False)
    altLabel_results = run_query_and_parse(altLabel_base_query, entityid, is_alt=True)

    return chain(label_results, altLabel_results)
if __name__ == "__main__":
    # Script entry point: harvest labels for the top-20 search hits.
    search_results_csv = "../data/output/wikidata_search_results.csv"
    labels_out_csv = "../data/output/wikidata_entity_labels.csv"
    GetAllLabels(search_results_csv, labels_out_csv, topN=20)