mirror of
https://github.com/huggingface/text-generation-inference.git
synced 2025-04-24 00:12:08 +00:00
Update the flaky mllama test.
This commit is contained in:
parent
571ac9b507
commit
8a870b31b9
@ -6,7 +6,7 @@
|
||||
"index": 0,
|
||||
"logprobs": null,
|
||||
"message": {
|
||||
"content": "In a small town, a chicken named Cluck",
|
||||
"content": "A chicken sits on a pile of money, looking",
|
||||
"name": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
@ -14,15 +14,15 @@
|
||||
"usage": null
|
||||
}
|
||||
],
|
||||
"created": 1738753835,
|
||||
"created": 1739290197,
|
||||
"id": "",
|
||||
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
|
||||
"object": "chat.completion",
|
||||
"system_fingerprint": "3.1.1-dev0-native",
|
||||
"usage": {
|
||||
"completion_tokens": 10,
|
||||
"prompt_tokens": 50,
|
||||
"total_tokens": 60
|
||||
"prompt_tokens": 45,
|
||||
"total_tokens": 55
|
||||
}
|
||||
},
|
||||
{
|
||||
@ -32,7 +32,7 @@
|
||||
"index": 0,
|
||||
"logprobs": null,
|
||||
"message": {
|
||||
"content": "In a small town, a chicken named Cluck",
|
||||
"content": "A chicken sits on a pile of money, looking",
|
||||
"name": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
@ -40,15 +40,15 @@
|
||||
"usage": null
|
||||
}
|
||||
],
|
||||
"created": 1738753835,
|
||||
"created": 1739290197,
|
||||
"id": "",
|
||||
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
|
||||
"object": "chat.completion",
|
||||
"system_fingerprint": "3.1.1-dev0-native",
|
||||
"usage": {
|
||||
"completion_tokens": 10,
|
||||
"prompt_tokens": 50,
|
||||
"total_tokens": 60
|
||||
"prompt_tokens": 45,
|
||||
"total_tokens": 55
|
||||
}
|
||||
}
|
||||
]
|
||||
|
@ -5,7 +5,7 @@
|
||||
"index": 0,
|
||||
"logprobs": null,
|
||||
"message": {
|
||||
"content": "In a small town, a chicken named Cluck",
|
||||
"content": "A chicken sits on a pile of money, looking",
|
||||
"name": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
@ -13,14 +13,14 @@
|
||||
"usage": null
|
||||
}
|
||||
],
|
||||
"created": 1738753833,
|
||||
"created": 1739290152,
|
||||
"id": "",
|
||||
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
|
||||
"object": "chat.completion",
|
||||
"system_fingerprint": "3.1.1-dev0-native",
|
||||
"usage": {
|
||||
"completion_tokens": 10,
|
||||
"prompt_tokens": 50,
|
||||
"total_tokens": 60
|
||||
"prompt_tokens": 45,
|
||||
"total_tokens": 55
|
||||
}
|
||||
}
|
||||
|
@ -28,7 +28,7 @@ async def test_mllama_simpl(mllama, response_snapshot):
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "Can you tell me a very short story based on the image?",
|
||||
"text": "Describe the image in 10 words.",
|
||||
},
|
||||
{
|
||||
"type": "image_url",
|
||||
@ -43,11 +43,12 @@ async def test_mllama_simpl(mllama, response_snapshot):
|
||||
|
||||
assert response.usage == {
|
||||
"completion_tokens": 10,
|
||||
"prompt_tokens": 50,
|
||||
"total_tokens": 60,
|
||||
"prompt_tokens": 45,
|
||||
"total_tokens": 55,
|
||||
}
|
||||
assert (
|
||||
response.choices[0].message.content == "In a small town, a chicken named Cluck"
|
||||
response.choices[0].message.content
|
||||
== "A chicken sits on a pile of money, looking"
|
||||
)
|
||||
assert response == response_snapshot
|
||||
|
||||
@ -65,7 +66,7 @@ async def test_mllama_load(mllama, generate_load, response_snapshot):
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "Can you tell me a very short story based on the image?",
|
||||
"text": "Describe the image in 10 words.",
|
||||
},
|
||||
{
|
||||
"type": "image_url",
|
||||
@ -86,7 +87,7 @@ async def test_mllama_load(mllama, generate_load, response_snapshot):
|
||||
generated_texts = [response.choices[0].message.content for response in responses]
|
||||
|
||||
# XXX: TODO: Fix this test.
|
||||
assert generated_texts[0] == "In a small town, a chicken named Cluck"
|
||||
assert generated_texts[0] == "A chicken sits on a pile of money, looking"
|
||||
assert len(generated_texts) == 2
|
||||
assert generated_texts, all(
|
||||
[text == generated_texts[0] for text in generated_texts]
|
||||
|
Loading…
Reference in New Issue
Block a user