Updating mllama after strftime.

This commit is contained in:
Nicolas Patry 2025-02-05 12:11:18 +01:00
parent 36223f834e
commit a91127c24b
No known key found for this signature in database
GPG Key ID: D2920555C90F704C
3 changed files with 17 additions and 69 deletions

View File

@@ -6,7 +6,7 @@
"index": 0,
"logprobs": null,
"message": {
"content": "In a bustling city, a chicken named Cluck",
"content": "In a small town, a chicken named Cluck",
"name": null,
"role": "assistant",
"tool_calls": null
@@ -14,11 +14,11 @@
"usage": null
}
],
"created": 1727773835,
"created": 1738753835,
"id": "",
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
"object": "chat.completion",
"system_fingerprint": "2.4.2-dev0-native",
"system_fingerprint": "3.1.1-dev0-native",
"usage": {
"completion_tokens": 10,
"prompt_tokens": 50,
@@ -32,7 +32,7 @@
"index": 0,
"logprobs": null,
"message": {
"content": "In a world where even chickens could dream big,",
"content": "In a small town, a chicken named Cluck",
"name": null,
"role": "assistant",
"tool_calls": null
@@ -40,63 +40,11 @@
"usage": null
}
],
"created": 1727773835,
"created": 1738753835,
"id": "",
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
"object": "chat.completion",
"system_fingerprint": "2.4.2-dev0-native",
"usage": {
"completion_tokens": 10,
"prompt_tokens": 50,
"total_tokens": 60
}
},
{
"choices": [
{
"finish_reason": "length",
"index": 0,
"logprobs": null,
"message": {
"content": "In a world where even chickens could dream big,",
"name": null,
"role": "assistant",
"tool_calls": null
},
"usage": null
}
],
"created": 1727773835,
"id": "",
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
"object": "chat.completion",
"system_fingerprint": "2.4.2-dev0-native",
"usage": {
"completion_tokens": 10,
"prompt_tokens": 50,
"total_tokens": 60
}
},
{
"choices": [
{
"finish_reason": "length",
"index": 0,
"logprobs": null,
"message": {
"content": "In a world where even chickens could dream big,",
"name": null,
"role": "assistant",
"tool_calls": null
},
"usage": null
}
],
"created": 1727773835,
"id": "",
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
"object": "chat.completion",
"system_fingerprint": "2.4.2-dev0-native",
"system_fingerprint": "3.1.1-dev0-native",
"usage": {
"completion_tokens": 10,
"prompt_tokens": 50,

View File

@@ -5,7 +5,7 @@
"index": 0,
"logprobs": null,
"message": {
"content": "In a bustling city, a chicken named Cluck",
"content": "In a small village, a chicken named Cluck",
"name": null,
"role": "assistant",
"tool_calls": null
@@ -13,11 +13,11 @@
"usage": null
}
],
"created": 1727556016,
"created": 1738753833,
"id": "",
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
"object": "chat.completion",
"system_fingerprint": "2.4.2-dev0-native",
"system_fingerprint": "3.1.1-dev0-native",
"usage": {
"completion_tokens": 10,
"prompt_tokens": 50,

View File

@@ -48,7 +48,7 @@ async def test_mllama_simpl(mllama, response_snapshot):
}
assert (
response.choices[0].message.content
== "In a bustling city, a chicken named Cluck"
== "In a small village, a chicken named Cluck"
)
assert response == response_snapshot
@@ -84,12 +84,12 @@ async def test_mllama_load(mllama, generate_load, response_snapshot):
]
responses = await asyncio.gather(*futures)
_ = [response.choices[0].message.content for response in responses]
generated_texts = [response.choices[0].message.content for response in responses]
# XXX: TODO: Fix this test.
# assert generated_texts[0] == "In a bustling city, a chicken named Cluck"
# assert len(generated_texts) == 4
# assert generated_texts, all(
# [text == generated_texts[0] for text in generated_texts]
# )
# assert responses == response_snapshot
assert generated_texts[0] == "In a small town, a chicken named Cluck"
assert len(generated_texts) == 2
assert generated_texts, all(
[text == generated_texts[0] for text in generated_texts]
)
assert responses == response_snapshot