mediawiki_dump_tools/Mediawiki-Utilities/mw/lib/persistence/tests/test_tokenization.py
from nose.tools import eq_

from .. import tokenization


def test_wikitext_split():
    eq_(
        list(tokenization.wikitext_split("foo bar herp {{derp}}")),
        ["foo", " ", "bar", " ", "herp", " ", "{{", "derp", "}}"]
    )
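

# A small companion check (a sketch, not part of the original file). It only
# re-exercises the whitespace splitting already demonstrated by the test above,
# so the expected token list is extrapolated from that example rather than
# taken from documented wikitext_split output.
def test_wikitext_split_whitespace_only():
    eq_(
        list(tokenization.wikitext_split("foo bar")),
        ["foo", " ", "bar"]
    )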
