diff --git a/tests/test_bert_embedder.py b/tests/test_bert_embedder.py
index b43c27b..7732a47 100644
--- a/tests/test_bert_embedder.py
+++ b/tests/test_bert_embedder.py
@@ -181,7 +181,7 @@ def test_end_to_end(self):
         assert list(bert_vectors.shape) == [2, 10, 768]
 
     def test_max_length(self):
-        """Test that max input length works."""
+        """Test that max input length works (default max len = 512)."""
 
         token_embedder = PretrainedBertEmbedder(
             self.model_name,
@@ -206,7 +206,7 @@ def test_max_length(self):
         token_embedder(tokens["bert"], tokens["bert-offsets"])
 
     def test_max_length_raise_error(self):
-        """Test that input greater than max length raises error."""
+        """Test that input greater than max length (default = 512) raises error."""
 
         token_embedder = PretrainedBertEmbedder(
             self.model_name,
diff --git a/tests/test_roberta_embedder.py b/tests/test_roberta_embedder.py
index c0faf00..95899ee 100644
--- a/tests/test_roberta_embedder.py
+++ b/tests/test_roberta_embedder.py
@@ -166,7 +166,7 @@ def test_end_to_end(self):
         assert list(bert_vectors.shape) == [2, 10, 768]
 
     def test_max_length(self):
-        """Test that max input length works."""
+        """Test that max input length works (default max len = 512)."""
 
         token_embedder = PretrainedBertEmbedder(
             self.model_name,
@@ -191,7 +191,7 @@ def test_max_length(self):
         token_embedder(tokens["bert"], tokens["bert-offsets"])
 
     def test_max_length_raise_error(self):
-        """Test that input greater than max length raises error."""
+        """Test that input greater than max length (default = 512) raises error."""
 
         token_embedder = PretrainedBertEmbedder(
             self.model_name,