Mirror of https://github.com/huggingface/text-generation-inference.git, synced 2025-09-09 11:24:53 +00:00
Fixup mllama
Also switch to `unsloth/Llama-3.2-11B-Vision-Instruct` for testing from the EU :).
This commit is contained in:
parent c9b6478b14
commit d859dd36b7
@@ -14,9 +14,9 @@
       "usage": null
     }
   ],
-  "created": 1746054921,
+  "created": 1747230173,
   "id": "",
-  "model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
+  "model": "unsloth/Llama-3.2-11B-Vision-Instruct",
   "object": "chat.completion",
   "system_fingerprint": "3.3.0-dev0-native",
   "usage": {
@@ -40,9 +40,9 @@
       "usage": null
     }
   ],
-  "created": 1746054921,
+  "created": 1747230173,
   "id": "",
-  "model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
+  "model": "unsloth/Llama-3.2-11B-Vision-Instruct",
   "object": "chat.completion",
   "system_fingerprint": "3.3.0-dev0-native",
   "usage": {

@@ -5,7 +5,7 @@
       "index": 0,
       "logprobs": null,
       "message": {
-        "content": "A chicken stands on a pile of money, looking",
+        "content": "A chicken sits on a pile of money, looking",
         "name": null,
         "role": "assistant",
         "tool_calls": null
@@ -13,9 +13,9 @@
       "usage": null
     }
   ],
-  "created": 1746054919,
+  "created": 1747230171,
   "id": "",
-  "model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
+  "model": "unsloth/Llama-3.2-11B-Vision-Instruct",
   "object": "chat.completion",
   "system_fingerprint": "3.3.0-dev0-native",
   "usage": {

@@ -5,7 +5,7 @@ import asyncio
 @pytest.fixture(scope="module")
 def mllama_handle(launcher):
     with launcher(
-        "meta-llama/Llama-3.2-11B-Vision-Instruct",
+        "unsloth/Llama-3.2-11B-Vision-Instruct",
         num_shard=2,
     ) as handle:
         yield handle
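For context, downstream tests consume `mllama_handle` through a second module-scoped fixture that waits for the launched server and hands back its client. A minimal sketch, assuming the `health()` coroutine and `client` attribute that TGI's shared integration-test conftest exposes on launcher handles:

import pytest


@pytest.fixture(scope="module")
async def mllama(mllama_handle):
    # Wait for the freshly launched server to report healthy (300 s is an assumed timeout).
    await mllama_handle.health(300)
    # Return the ready-to-use async client; the tests below only see this object.
    return mllama_handle.client

Only the model id passed to `launcher(...)` changes in this commit; the fixture wiring around it is untouched.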
@@ -48,7 +48,7 @@ async def test_mllama_simpl(mllama, response_snapshot):
     }
     assert (
         response.choices[0].message.content
-        == "A chicken stands on a pile of money, looking"
+        == "A chicken sits on a pile of money, looking"
     )
     assert response == response_snapshot
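The string asserted above comes from a chat-completions request issued through the `mllama` client fixture. A minimal sketch of what such a call might look like with the `text_generation` AsyncClient's `chat` method; the test name, prompt wording, and image URL below are illustrative placeholders, not the exact inputs used by the real test:

import pytest


@pytest.mark.asyncio
async def test_mllama_chat_sketch(mllama, response_snapshot):
    response = await mllama.chat(
        max_tokens=10,    # assumed small budget, matching the truncated snapshot text
        temperature=0.0,  # greedy decoding keeps the exact-string and snapshot asserts deterministic
        messages=[
            {
                "role": "user",
                "content": [
                    # Placeholder prompt and image URL for illustration only.
                    {"type": "text", "text": "Describe this image in one sentence."},
                    {
                        "type": "image_url",
                        "image_url": {"url": "https://example.com/chicken_on_money.png"},
                    },
                ],
            }
        ],
    )
    # With the real test inputs, greedy decoding yields the snapshot text above.
    assert response.choices[0].message.content == "A chicken sits on a pile of money, looking"
    assert response == response_snapshot

Setting temperature to 0.0 is what makes the exact-string comparison and the `response_snapshot` match reproducible across runs.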