Commit

more explicit import
SoluMilken committed Jan 24, 2019
1 parent 56a5ef0 commit 2d91ec6
Showing 3 changed files with 6 additions and 6 deletions.
4 changes: 2 additions & 2 deletions uttut/pipeline/bert/tests/test_basic.py
@@ -1,6 +1,6 @@
 import pytest
 
-from .tokenization import BasicTokenizer
+from .tokenization import BasicTokenizer as BertBasicTokenizer
 from ..basic import basic_pipe
 
 from uttut.elements import Datum
@@ -28,7 +28,7 @@

 @pytest.fixture
 def tokenizer():
-    yield BasicTokenizer()
+    yield BertBasicTokenizer()
 
 
 @pytest.mark.parametrize("input_str", test_cases)
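The test bodies themselves are not part of this diff; only the imports, the fixture, and the parametrize decorator appear. As a sketch of the pattern the rename supports (how basic_pipe is invoked, its transform interface, and the sample inputs below are assumptions, not taken from the commit), a comparison test against the reference tokenizer might look like this:

import pytest

from .tokenization import BasicTokenizer as BertBasicTokenizer
from ..basic import basic_pipe

# Hypothetical inputs; the real test_cases list is not shown in the diff.
test_cases = ["how are you", "BERT basic tokenization splits on punctuation."]


@pytest.fixture
def tokenizer():
    # The alias makes it explicit that this is the reference BERT tokenizer,
    # not a tokenizer defined inside uttut's own pipeline package.
    yield BertBasicTokenizer()


@pytest.mark.parametrize("input_str", test_cases)
def test_agrees_with_reference(tokenizer, input_str):
    expected = tokenizer.tokenize(input_str)   # reference BERT behaviour
    output = basic_pipe.transform(input_str)   # assumed pipe interface: returns the token list
    assert output == expected

The point of the alias is simply that, inside a package shipping its own tokenization pipes, a bare BasicTokenizer no longer says which implementation the fixture wraps; BertBasicTokenizer does.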
4 changes: 2 additions & 2 deletions uttut/pipeline/bert/tests/test_full.py
@@ -4,7 +4,7 @@

 import pytest
 
-from .tokenization import FullTokenizer
+from .tokenization import FullTokenizer as BertFullTokenizer
 from ..full import full_pipe, vocab_tokens
 from uttut.elements import Datum

@@ -28,7 +28,7 @@ def tokenizer():

     vocab_file = vocab_writer.name
 
-    tokenizer = FullTokenizer(vocab_file)
+    tokenizer = BertFullTokenizer(vocab_file)
     yield tokenizer
     os.unlink(vocab_file)

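The hunk above shows only fragments of the tokenizer fixture in test_full.py. A sketch of how such a fixture is commonly assembled, assuming a tempfile-backed vocab file (the writer setup and the contents of vocab_tokens are not shown in the diff):

import os
import tempfile

import pytest

from .tokenization import FullTokenizer as BertFullTokenizer
from ..full import full_pipe, vocab_tokens  # vocab_tokens: list of vocab entries


@pytest.fixture
def tokenizer():
    # Write the vocab to a temporary file so the reference FullTokenizer
    # can load it the same way it would load a real BERT vocab file.
    with tempfile.NamedTemporaryFile(mode="w", delete=False) as vocab_writer:
        vocab_writer.write("".join(token + "\n" for token in vocab_tokens))
        vocab_file = vocab_writer.name

    tokenizer = BertFullTokenizer(vocab_file)
    yield tokenizer
    os.unlink(vocab_file)  # remove the temporary vocab file after the test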
4 changes: 2 additions & 2 deletions uttut/pipeline/bert/tests/test_word_piece.py
@@ -1,6 +1,6 @@
 import pytest
 
-from .tokenization import WordpieceTokenizer
+from .tokenization import WordpieceTokenizer as BertWordpieceTokenizer
 from ..word_piece import word_piece_pipe, vocab
 
 from uttut.elements import Datum
@@ -20,7 +20,7 @@

 @pytest.fixture
 def tokenizer():
-    yield WordpieceTokenizer(vocab)
+    yield BertWordpieceTokenizer(vocab)
 
 
 @pytest.mark.parametrize("input_str", test_cases)
