diff --git a/integration-tests/models/__snapshots__/test_mllama/test_mllama_load.json b/integration-tests/models/__snapshots__/test_mllama/test_mllama_load.json
index 9d4c98ef..37c8ef8e 100644
--- a/integration-tests/models/__snapshots__/test_mllama/test_mllama_load.json
+++ b/integration-tests/models/__snapshots__/test_mllama/test_mllama_load.json
@@ -14,9 +14,9 @@
         "usage": null
       }
     ],
-    "created": 1746054921,
+    "created": 1747230173,
     "id": "",
-    "model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
+    "model": "unsloth/Llama-3.2-11B-Vision-Instruct",
     "object": "chat.completion",
     "system_fingerprint": "3.3.0-dev0-native",
     "usage": {
@@ -40,9 +40,9 @@
         "usage": null
       }
     ],
-    "created": 1746054921,
+    "created": 1747230173,
     "id": "",
-    "model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
+    "model": "unsloth/Llama-3.2-11B-Vision-Instruct",
     "object": "chat.completion",
     "system_fingerprint": "3.3.0-dev0-native",
     "usage": {
diff --git a/integration-tests/models/__snapshots__/test_mllama/test_mllama_simpl.json b/integration-tests/models/__snapshots__/test_mllama/test_mllama_simpl.json
index d1049ead..75dc0ddf 100644
--- a/integration-tests/models/__snapshots__/test_mllama/test_mllama_simpl.json
+++ b/integration-tests/models/__snapshots__/test_mllama/test_mllama_simpl.json
@@ -5,7 +5,7 @@
       "index": 0,
       "logprobs": null,
       "message": {
-        "content": "A chicken stands on a pile of money, looking",
+        "content": "A chicken sits on a pile of money, looking",
         "name": null,
         "role": "assistant",
         "tool_calls": null
@@ -13,9 +13,9 @@
       "usage": null
     }
   ],
-  "created": 1746054919,
+  "created": 1747230171,
   "id": "",
-  "model": "meta-llama/Llama-3.2-11B-Vision-Instruct",
+  "model": "unsloth/Llama-3.2-11B-Vision-Instruct",
   "object": "chat.completion",
   "system_fingerprint": "3.3.0-dev0-native",
   "usage": {
diff --git a/integration-tests/models/test_mllama.py b/integration-tests/models/test_mllama.py
index 95ec1d99..fd58e4bf 100644
--- a/integration-tests/models/test_mllama.py
+++ b/integration-tests/models/test_mllama.py
@@ -5,7 +5,7 @@ import asyncio
 @pytest.fixture(scope="module")
 def mllama_handle(launcher):
     with launcher(
-        "meta-llama/Llama-3.2-11B-Vision-Instruct",
+        "unsloth/Llama-3.2-11B-Vision-Instruct",
         num_shard=2,
     ) as handle:
         yield handle
@@ -48,7 +48,7 @@ async def test_mllama_simpl(mllama, response_snapshot):
     }
     assert (
         response.choices[0].message.content
-        == "A chicken stands on a pile of money, looking"
+        == "A chicken sits on a pile of money, looking"
    )
    assert response == response_snapshot