# Functions common to both scrapers
# Copyright (C) 2018 Nathan TeBlunthuis

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

import pandas as pd
from os import makedirs, path
from shutil import rmtree
from itertools import islice


def _add_wikitype(tpl):
    # Classify the wiki by its URL and return a (wiki, url, wikitype) tuple.
    wiki, url = tpl[:2]
    wikitype = "NA"
    if "wikipedia.org" in url:
        wikitype = "wikipedia"
        # Wikipedia serves MediaWiki under the /w/ prefix.
        url = url + '/w/'
    elif "wikia.com" in url:
        wikitype = "wikia"
    tpl = (wiki, url, wikitype)
    return tpl
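
# For example:
#   _add_wikitype(("enwiki", "https://en.wikipedia.org"))
#   => ("enwiki", "https://en.wikipedia.org/w/", "wikipedia")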


# returns an iterator of (wiki, url, wikitype) tuples
def read_wikilist(args):
    # Accept a few spellings of "tab" for the delimiter.
    if args.sep in ['\\t', '\t', 'tab', 't']:
        sep = '\t'
    else:
        sep = args.sep

    if not args.no_header:
        # With a header row, pick out the columns by name.
        wikilist = pd.read_table(args.wikilist, sep=sep)
        wikilist = ((t.dbname, t.url)
                    for t in wikilist.loc[:, ['dbname', 'url']].itertuples())
    else:
        # Otherwise take the wiki and url column indices from args.i ("j,k").
        j, k = [int(i) for i in args.i.split(',')[0:2]]
        wikilist = open(args.wikilist)
        wikilist = (line.split(sep) for line in wikilist)
        wikilist = ((fields[j], fields[k]) for fields in wikilist)
        # NOTE: this skips the first line of the file even with --no-header.
        wikilist = islice(wikilist, 1, None)

    wikilist = (_add_wikitype(t) for t in wikilist)
    return wikilist
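
# Example (hypothetical file name; parse_args builds the equivalent object
# from the arguments registered below):
#   args = argparse.Namespace(wikilist='wikis.tsv', sep='tab',
#                             no_header=False, i='0,1')
#   for wiki, url, wikitype in read_wikilist(args):
#       ...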


def add_parser_arguments(parser):
    parser.add_argument('--no-header', action='store_true',
                        help='does the wikilist have no header?')

    parser.add_argument('--nuke-old', action='store_true',
                        help='remove old files.')

    parser.add_argument('--sep', type=str,
                        help='input table delimiter', default=',')

    parser.add_argument('wikilist', type=str,
                        help='path to the input file: a wiki list with wiki\\turl\\tfilename')

    parser.add_argument('output', type=str,
                        help='path to put the logs we scrape e.g. /com/projects/messagewalls/allusers/')

    parser.add_argument('-i', type=str,
                        help='<j,k> two 0-based indices for wiki and url in the csv, default=0,1',
                        default='0,1')

    return parser


def prepare_output(output_path, nuke_old):
    if not path.exists(output_path):
        makedirs(output_path)
    # With --nuke-old, clear out any previous scrape and start fresh.
    if nuke_old:
        rmtree(output_path)
    if not path.exists(output_path):
        makedirs(output_path)
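

# A minimal sketch of how the scrapers wire these helpers together; the
# entry point below is illustrative only, not part of the shared interface.
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='Scrape a list of wikis.')
    parser = add_parser_arguments(parser)
    args = parser.parse_args()
    prepare_output(args.output, args.nuke_old)
    for wiki, url, wikitype in read_wikilist(args):
        print(wiki, url, wikitype)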