Upgrade other tests.

This commit is contained in:
Nicolas Patry 2025-03-06 16:19:37 +01:00
parent ad904be5fc
commit 0e0844ce00
No known key found for this signature in database
GPG Key ID: 4242CEF24CB6DBF9
4 changed files with 48 additions and 33 deletions

View File

@@ -42,6 +42,7 @@ from syrupy.extensions.json import JSONSnapshotExtension
from text_generation import AsyncClient
from text_generation.types import (
Completion,
BestOfSequence,
Message,
ChatComplete,
@@ -131,6 +132,7 @@ class ResponseComparator(JSONSnapshotExtension):
or isinstance(data, ChatComplete)
or isinstance(data, ChatCompletionChunk)
or isinstance(data, ChatCompletionComplete)
or isinstance(data, Completion)
or isinstance(data, OAIChatCompletionChunk)
):
data = data.model_dump()
@@ -140,6 +142,8 @@ class ResponseComparator(JSONSnapshotExtension):
data = dict(data)
elif isinstance(data, List):
data = [self._serialize(d) for d in data]
elif isinstance(data, dict):
return data
else:
raise RuntimeError(f"Unexpected data {type(data)} : {data}")
return data

View File

@ -12,11 +12,11 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656043, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null "usage": null
}, },
{ {
@ -32,11 +32,11 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656043, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null "usage": null
}, },
{ {
@ -52,11 +52,11 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656043, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null "usage": null
}, },
{ {
@ -72,11 +72,11 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656043, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null "usage": null
}, },
{ {
@ -92,11 +92,11 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656043, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null "usage": null
}, },
{ {
@ -112,11 +112,11 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656043, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null "usage": null
}, },
{ {
@ -132,11 +132,11 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656044, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null "usage": null
}, },
{ {
@ -152,11 +152,11 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656044, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null "usage": null
}, },
{ {
@ -172,11 +172,11 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656044, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null "usage": null
}, },
{ {
@ -192,11 +192,20 @@
"logprobs": null "logprobs": null
} }
], ],
"created": 1726656044, "created": 1741274364,
"id": "", "id": "",
"model": "meta-llama/Meta-Llama-3.1-8B-Instruct", "model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk", "object": "chat.completion.chunk",
"system_fingerprint": "2.2.1-dev0-native", "system_fingerprint": "3.1.2-dev0-native",
"usage": null
},
{
"choices": [],
"created": 1741274364,
"id": "",
"model": "meta-llama/Llama-3.1-8B-Instruct",
"object": "chat.completion.chunk",
"system_fingerprint": "3.1.2-dev0-native",
"usage": { "usage": {
"completion_tokens": 10, "completion_tokens": 10,
"prompt_tokens": 40, "prompt_tokens": 40,

View File

@@ -96,8 +96,10 @@ async def test_flash_llama_completion_stream_usage(
assert not had_usage
if has_usage:
had_usage = True
elif c["usage"]:
had_usage = True
else:
raise RuntimeError("Expected different payload") raise RuntimeError(f"Expected different payload: {c}")
assert had_usage
assert (
string
@@ -147,6 +149,8 @@ async def test_flash_llama_completion_stream_usage(
assert not had_usage
if has_usage:
had_usage = True
elif c["usage"]:
had_usage = True
else:
raise RuntimeError("Expected different payload")
assert not had_usage

View File

@@ -1,6 +1,4 @@
import pytest
import requests
import json
from openai import OpenAI
from huggingface_hub import InferenceClient
from huggingface_hub.inference._generated.types.chat_completion import (