mirror of https://github.com/huggingface/text-generation-inference.git
synced 2025-09-11 12:24:53 +00:00

fix: adjust assert typo

This commit is contained in:
parent f2080c4114
commit 0b82080849
@@ -39,7 +39,7 @@ def test_flash_llama_completion_single_prompt(
     response = response.json()
     assert len(response["choices"]) == 1
 
-    return response == response_snapshot
+    assert response == response_snapshot
 
 
 def test_flash_llama_completion_many_prompts(flash_llama_completion, response_snapshot):
@@ -61,7 +61,7 @@ def test_flash_llama_completion_many_prompts(flash_llama_completion, response_sn
     all_indexes.sort()
     assert all_indexes == [0, 1, 2, 3]
 
-    return response == response_snapshot
+    assert response == response_snapshot
 
 
 async def test_flash_llama_completion_many_prompts_stream(
@@ -100,4 +100,4 @@ async def test_flash_llama_completion_many_prompts_stream(
                     assert 0 <= c["choices"][0]["index"] <= 4
 
     assert response.status == 200
-    return response == response_snapshot
+    assert response == response_snapshot
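The change from return to assert matters because pytest ignores a test function's return value: returning response == response_snapshot never fails the test even when the snapshot comparison is False, so the final check was silently a no-op. A minimal sketch (hypothetical test names, not part of this repository) of the difference under a standard pytest setup:

# Hypothetical standalone example; run with pytest to see the difference.

def test_with_return():
    response = {"choices": ["a", "b"]}      # pretend the server returned the wrong payload
    response_snapshot = {"choices": ["a"]}
    # pytest discards the return value, so this test passes despite the mismatch
    # (recent pytest versions only emit a PytestReturnNotNoneWarning).
    return response == response_snapshot


def test_with_assert():
    response = {"choices": ["a", "b"]}
    response_snapshot = {"choices": ["a"]}
    # assert raises AssertionError when the comparison is False, failing the test.
    assert response == response_snapshot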