-
Notifications
You must be signed in to change notification settings - Fork 9
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Adds token_class option to tokenization and propagates the param to diffengines.
- Loading branch information
Showing
8 changed files
with
73 additions
and
34 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,10 +1,38 @@ | ||
from nose.tools import eq_ | ||
|
||
from ...apply import apply | ||
from ...operations import Delete, Equal, Insert | ||
from ...tests.diff_and_replay import diff_and_replay | ||
from ...tests.diff_sequence import diff_sequence | ||
from ...tokenizers import text_split, wikitext_split | ||
from ..segment_matcher import diff, process | ||
from ...tokenizers import text_split | ||
|
||
|
||
def test_diff_and_replay():
    """Run the shared diff-and-replay harness against this module's diff()."""
    # diff_and_replay applies the produced operations and checks round-tripping;
    # delegating keeps this test in sync with the other diff engines' suites.
    outcome = diff_and_replay(diff)
    return outcome
|
||
|
||
def test_engine():
    """Exercise the segment-matcher processor via the shared diff_sequence harness."""
    # Shared harness feeds a sequence of revisions through process();
    # returning its result mirrors the sibling engine test modules.
    outcome = diff_sequence(process)
    return outcome
|
||
|
||
def test_easy_diff():
    """Diff two short sentences and check the operations for the second revision."""
    first = "Apples are red."
    second = "Apples are tasty and red."

    revision_stream = process([first, second], tokenizer=wikitext_split)

    # Consume the first yield ("Apples are red."); only the second
    # revision's operations are asserted on below.
    next(revision_stream)

    # "Apples are tasty and red."
    operations, tokens_a, tokens_b = next(revision_stream)

    # Token offsets: "tasty and " inserted between "Apples are " and "red."
    expected = [
        Equal(0, 4, 0, 4),
        Insert(4, 4, 4, 8),
        Equal(4, 6, 8, 10),
    ]
    eq_(list(operations), expected)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters