X-Git-Url: https://code.communitydata.science/covid19.git/blobdiff_plain/0fb8ac2ed9b10c75cc8a2e0a6ecb5f08d3604e08..6493361fbd95f44a3b27131f4f79329d40e61c90:/transliterations/src/wikidata_transliterations.py?ds=inline

diff --git a/transliterations/src/wikidata_transliterations.py b/transliterations/src/wikidata_transliterations.py
index 0856c9c..1ac956c 100644
--- a/transliterations/src/wikidata_transliterations.py
+++ b/transliterations/src/wikidata_transliterations.py
@@ -2,6 +2,7 @@ from wikidata_api_calls import run_sparql_query
 from itertools import chain, islice
 import csv
 from json import JSONDecodeError
+from os import path
 
 class LabelData:
     __slots__ = ['entityid','label','langcode','is_alt']
@@ -84,6 +85,14 @@ def GetEntityLabels(entityids):
 
     return chain(*calls)
 
+def find_new_output_file(output, i = 1):
+    if path.exists(output):
+        name, ext = path.splitext(output)
+
+        return find_new_output_file(f"{name}_{i}{ext}", i+1)
+    else:
+        return output
+
 if __name__ == "__main__":
     import argparse
     parser = argparse.ArgumentParser("Use wikidata to find transliterations of terms")
@@ -93,4 +102,6 @@ if __name__ == "__main__":
 
     args = parser.parse_args()
 
-    GetAllLabels(args.inputs, args.output, topNs=args.topN)
+    output = find_new_output_file(args.output)
+
+    GetAllLabels(args.inputs, output, topNs=args.topN)