Fixup mllama

Also switch to `unsloth/Llama-3.2-11B-Vision-Instruct` for testing
from the EU :).
This commit is contained in:
Daniël de Kok 2025-05-14 13:54:06 +00:00
parent c9b6478b14
commit d859dd36b7
3 changed files with 9 additions and 9 deletions

View File

@@ -14,9 +14,9 @@
"usage": null
}
],
"created": 1746054921,
"created": 1747230173,
"id": "",
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
"model": "unsloth/Llama-3.2-11B-Vision-Instruct",
"object": "chat.completion",
"system_fingerprint": "3.3.0-dev0-native",
"usage": {
@@ -40,9 +40,9 @@
"usage": null
}
],
"created": 1746054921,
"created": 1747230173,
"id": "",
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
"model": "unsloth/Llama-3.2-11B-Vision-Instruct",
"object": "chat.completion",
"system_fingerprint": "3.3.0-dev0-native",
"usage": {

View File

@@ -5,7 +5,7 @@
"index": 0,
"logprobs": null,
"message": {
"content": "A chicken stands on a pile of money, looking",
"content": "A chicken sits on a pile of money, looking",
"name": null,
"role": "assistant",
"tool_calls": null
@@ -13,9 +13,9 @@
"usage": null
}
],
"created": 1746054919,
"created": 1747230171,
"id": "",
"model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
"model": "unsloth/Llama-3.2-11B-Vision-Instruct",
"object": "chat.completion",
"system_fingerprint": "3.3.0-dev0-native",
"usage": {

View File

@@ -5,7 +5,7 @@ import asyncio
@pytest.fixture(scope="module")
def mllama_handle(launcher):
with launcher(
"meta-llama/Llama-3.2-11B-Vision-Instruct",
"unsloth/Llama-3.2-11B-Vision-Instruct",
num_shard=2,
) as handle:
yield handle
@@ -48,7 +48,7 @@ async def test_mllama_simpl(mllama, response_snapshot):
}
assert (
response.choices[0].message.content
== "A chicken stands on a pile of money, looking"
== "A chicken sits on a pile of money, looking"
)
assert response == response_snapshot