diff --git a/tensorflow_text/python/ops/sentencepiece_tokenizer_test.py b/tensorflow_text/python/ops/sentencepiece_tokenizer_test.py
index 42a8e49b0..14b917420 100644
--- a/tensorflow_text/python/ops/sentencepiece_tokenizer_test.py
+++ b/tensorflow_text/python/ops/sentencepiece_tokenizer_test.py
@@ -98,7 +98,7 @@ def setUp(self):
     super(SentencepieceTokenizerOpTest, self).setUp()
    sentencepiece_model_file = (
        'tensorflow_text/python/ops/test_data/test_oss_model.model')
-    self.model = gfile.GFile(sentencepiece_model_file, 'r').read()
+    self.model = gfile.GFile(sentencepiece_model_file, 'rb').read()
 
   def testGetVocabSize(self):
     sp = SentencepieceTokenizer(self.model)
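
For context, the SentencePiece model file is a serialized binary protobuf, so it should be read as bytes; opening it in text mode (`'r'`) makes GFile attempt UTF-8 decoding under Python 3, which can fail or corrupt the data before it reaches SentencepieceTokenizer. A minimal sketch of the distinction, assuming the bundled test model path shown in the diff:

```python
from tensorflow.python.platform import gfile
from tensorflow_text import SentencepieceTokenizer

# Path taken from the test above; any serialized SentencePiece model works.
model_path = 'tensorflow_text/python/ops/test_data/test_oss_model.model'

# 'rb' returns the raw serialized proto as bytes, which is what
# SentencepieceTokenizer expects for its model argument.
model_bytes = gfile.GFile(model_path, 'rb').read()
assert isinstance(model_bytes, bytes)
sp = SentencepieceTokenizer(model_bytes)

# By contrast, mode 'r' would try to decode the binary proto as UTF-8 and
# typically raises UnicodeDecodeError under Python 3.
```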