diff --git a/server/tests/utils/test_tokens.py b/server/tests/utils/test_tokens.py
index ee080a28..7f78c982 100644
--- a/server/tests/utils/test_tokens.py
+++ b/server/tests/utils/test_tokens.py
@@ -10,6 +10,16 @@ from text_generation_server.utils.tokens import (
 )
 from transformers import AutoTokenizer
 
 
+
+import pytest
+@pytest.fixture
+def skip_tokenizer_env_var():
+    import os
+    os.environ["SKIP_TOKENIZER_IN_TGI"] = "true"
+    yield
+    del os.environ['SKIP_TOKENIZER_IN_TGI']
+
+
 def test_stop_sequence_criteria():
     criteria = StopSequenceCriteria("/test;")
@@ -71,8 +81,7 @@ def test_batch_top_tokens():
     assert topn_tok_logprobs[4] == [-1, -2, -3, -3, -4]
 
-
-def test_pass_through_tokenizer():
+def test_pass_through_tokenizer(skip_tokenizer_env_var):
    tokenizer = AutoTokenizer.from_pretrained(
        'meta-llama/Llama-2-7b-chat-hf',
        revision=None,
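
Note (not part of the patch): the fixture above exports SKIP_TOKENIZER_IN_TGI before test_pass_through_tokenizer runs and deletes it afterwards. A minimal sketch of an equivalent fixture using pytest's built-in monkeypatch fixture, which restores the environment automatically at teardown, is shown below; the fixture name and variable name are taken from the diff, the monkeypatch variant itself is only an illustration, not what the patch applies.

import pytest

@pytest.fixture
def skip_tokenizer_env_var(monkeypatch):
    # monkeypatch.setenv is reverted by pytest when the test finishes,
    # so no explicit del os.environ[...] cleanup is needed.
    monkeypatch.setenv("SKIP_TOKENIZER_IN_TGI", "true")
    yield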