X-Git-Url: https://code.communitydata.science/mediawiki_dump_tools.git/blobdiff_plain/19eda6dd0e3d342e3785f4d09258f287d0f43d5f..refs/heads/tests:/wikiq?ds=sidebyside

diff --git a/wikiq b/wikiq
index 731f59a..334f195 100755
--- a/wikiq
+++ b/wikiq
@@ -130,9 +130,8 @@ class WikiqPage(Page):
     # 2 B A True
     # 3 A B True
     # 4 A A False
-    # Post-loop A Always
+    # Post-loop A Always
     def __find_next_revision(self):
-
         if self.prev_rev is None:
             prev_rev = WikiqPage._correct_sha(next(self.revisions))
             self.prev_rev = prev_rev
@@ -193,7 +192,7 @@ class WikiqPage(Page):


 class WikiqParser():
-    def __init__(self, input_file, output_file, collapse_user=False, persist=None, urlencode=False, namespaces = None):
+    def __init__(self, input_file, output_file, collapse_user=False, persist=None, urlencode=False, namespaces = None, exclude_punct = False, exclude_ws = False):
         """
         Parameters:
            persist : what persistence method to use. Takes a PersistMethod value
@@ -210,11 +209,9 @@ class WikiqParser():
         else:
             self.namespace_filter = None

-        # create a regex that creates the output filename
-        # output_filename = re.sub(r'^.*/(enwiki\-\d+)\-.*p(\d+)p.*$',
-        #                          r'output/wikiq-\1-\2.tsv',
-        #                          input_filename)
-
+        self.exclude_punct = exclude_punct
+        self.exclude_ws = exclude_ws
+
         # Construct dump file iterator
         self.dump = WikiqIterator.from_file(self.input_file, self.collapse_user)

@@ -226,29 +223,6 @@ class WikiqParser():
         if self.persist == PersistMethod.segment:
             self.diff_engine = SegmentMatcher(tokenizer = wikitext_split)

-    # def __get_namespace_from_title(self, title):
-    #     default_ns = None
-
-    #     for ns in self.namespaces:
-    #         # skip if the namespace is not defined
-    #         if ns == None:
-    #             default_ns = self.namespaces[ns]
-    #             continue
-
-    #         if title.startswith(ns + ":"):
-    #             return self.namespaces[ns]
-
-    #     # if we've made it this far with no matches, we return the default namespace
-    #     return default_ns
-
-    # def _set_namespace(self, rev_docs):
-
-    #     for rev_data in rev_docs:
-    #         if 'namespace' not in rev_data['page']:
-    #             namespace = self.__get_namespace_from_title(page['title'])
-    #             rev_data['page']['namespace'] = namespace
-    #         yield rev_data
-
     def process(self):
         page_count = 0
         rev_count = 0
@@ -332,16 +306,11 @@ class WikiqParser():
                     rev_data['anon'] = ""
                     rev_data['editor'] = ""

-                #if re.match(r'^#redirect \[\[.*\]\]', rev.text, re.I):
-                #    redirect = True
-                #else:
-                #    redirect = False
-
-                #TODO missing: additions_size deletions_size
+                # we can easily add redirect info
+                # rev_data['redirect'] = rev.page.redirect

-                # if collapse user was on, lets run that
-                # if self.collapse_user:
-                #     rev_data.collapsed_revs = rev.collapsed_revs
+                if self.collapse_user:
+                    rev_data['collapsed_revs'] = rev.collapsed_revs

                 if self.persist != PersistMethod.none:
                     if rev.deleted.text:
@@ -360,8 +329,16 @@ class WikiqParser():

                     if len(window) == PERSISTENCE_RADIUS:
                         old_rev_id, old_rev_data, old_tokens_added, old_tokens_removed = window[0]
-                        num_token_revs, num_tokens_added, num_tokens_removed = calculate_persistence(old_tokens_added, old_tokens_removed, legacy = self.persist == PersistMethod.legacy)
-
+                        num_token_revs, \
+                        num_tokens_added, \
+                        num_tokens_removed = \
+                            calculate_persistence(
+                                old_tokens_added,
+                                old_tokens_removed,
+                                exclude_ws = self.exclude_ws,
+                                exclude_punct = self.exclude_punct,
+                                legacy = self.persist == PersistMethod.legacy)
+
                         old_rev_data["token_revs"] = num_token_revs
                         old_rev_data["tokens_added"] = num_tokens_added
                         old_rev_data["tokens_removed"] = num_tokens_removed
@@ -383,7 +360,15 @@ class WikiqParser():
                 rev_id,
                 rev_data, tokens_added, tokens_removed = item

-                num_token_revs, num_tokens_added, num_tokens_removed = calculate_persistence(tokens_added, tokens_removed, legacy = self.persist == PersistMethod.legacy)
+                num_token_revs, \
+                num_tokens_added, \
+                num_tokens_removed = calculate_persistence(
+                    tokens_added,
+                    tokens_removed,
+                    exclude_ws = self.exclude_ws,
+                    exclude_punct = self.exclude_punct,
+                    legacy = self.persist == PersistMethod.legacy)
+
                 rev_data["token_revs"] = num_token_revs
                 rev_data["tokens_added"] = num_tokens_added
@@ -458,7 +443,11 @@ parser.add_argument('-u', '--url-encode', dest="urlencode", action="store_true",
 parser.add_argument('-n', '--namespace-include', dest="namespace_filter", type=int, action='append',
                     help="Id number of namspace to include. Can be specified more than once.")

+parser.add_argument('--exclude-whitespace', dest="exclude_ws", action="store_true",
+                    help="Flag to remove whitespace from persistence measures.")
+parser.add_argument('--exclude-punctuation', dest="exclude_punct", action="store_true",
+                    help="Flag to remove punctuation from persistence measures.")

 args = parser.parse_args()
@@ -500,7 +489,9 @@ if len(args.dumpfiles) > 0:
                             collapse_user=args.collapse_user,
                             persist=persist,
                             urlencode=args.urlencode,
-                            namespaces = namespaces)
+                            namespaces = namespaces,
+                            exclude_punct = args.exclude_punct,
+                            exclude_ws = args.exclude_ws)

         wikiq.process()
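
Note on the new options: this diff only shows the call sites for exclude_ws and exclude_punct; the body of calculate_persistence lives elsewhere in wikiq and is not part of this changeset. As a rough, hypothetical sketch of the intended behavior (an assumption, not the repository's implementation), filtering along these lines would drop whitespace-only and punctuation-only tokens before the persistence counts are computed:

# Hypothetical sketch only -- not the actual calculate_persistence from wikiq.
# Shows how whitespace-only and punctuation-only tokens could be excluded
# from persistence counts when the new flags are set.
import re
import string

WS_ONLY = re.compile(r'^\s+$')
PUNCT_ONLY = re.compile('^[' + re.escape(string.punctuation) + ']+$')

def filter_persistence_tokens(tokens, exclude_ws=False, exclude_punct=False):
    """Drop tokens that are pure whitespace and/or pure punctuation."""
    kept = []
    for tok in tokens:
        # mwpersistence-style tokens expose the token string as .text;
        # fall back to str() for plain strings.
        text = getattr(tok, "text", None) or str(tok)
        if exclude_ws and WS_ONLY.match(text):
            continue
        if exclude_punct and PUNCT_ONLY.match(text):
            continue
        kept.append(tok)
    return kept

On the command line, --exclude-whitespace and --exclude-punctuation set args.exclude_ws and args.exclude_punct, which WikiqParser forwards to every calculate_persistence call shown above.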