mirror of
https://github.com/huggingface/text-generation-inference.git
synced 2025-09-10 20:04:52 +00:00
Adding (failing) integration tests.
This commit is contained in:
parent
c62527a542
commit
24c0f1cc7a
48
integration-tests/models/test_mpt.py
Normal file
48
integration-tests/models/test_mpt.py
Normal file
@ -0,0 +1,48 @@
|
|||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
def mpt_sharded_handle(launcher):
    """Launch a 2-shard mosaicml/mpt-7b server once for the whole test module.

    Yields the launcher handle; the server is torn down when the module's
    tests finish (the ``with`` block exits after the yield resumes).
    """
    server = launcher("mosaicml/mpt-7b", num_shard=2)
    with server as running_handle:
        yield running_handle
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope="module")
async def mpt_sharded(mpt_sharded_handle):
    """Wait for the sharded MPT server to become healthy, then expose its client.

    Blocks for up to 300 seconds on the health check before handing the
    client to the tests.
    """
    handle = mpt_sharded_handle
    await handle.health(300)
    return handle.client
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_mpt(mpt_sharded, response_snapshot):
    """A single greedy generation must produce the known MPT continuation.

    Checks token count, the exact generated text, and the full response
    snapshot (which also covers decoder input details).
    """
    expected_text = (
        " - Deep Learning\nDeep Learning is a subfield of machine learning"
        " that uses artificial neural"
    )

    response = await mpt_sharded.generate(
        "What is Deep Learning?",
        max_new_tokens=17,
        decoder_input_details=True,
    )

    assert response.details.generated_tokens == 17
    assert response.generated_text == expected_text
    assert response == response_snapshot
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_mpt_load(mpt_sharded, generate_load, response_snapshot):
    """Four concurrent generations must all agree and match the snapshot.

    Greedy decoding is deterministic, so every response should carry the
    same generated text as the first one.
    """
    responses = await generate_load(
        mpt_sharded,
        "What is Deep Learning?",
        max_new_tokens=17,
        n=4,
    )

    assert len(responses) == 4
    texts = [r.generated_text for r in responses]
    # Every response must carry the identical continuation.
    assert texts.count(texts[0]) == len(texts)
    assert texts[0] == (
        " - Deep Learning\nDeep Learning is a subfield of machine learning"
        " that uses artificial neural"
    )
    assert responses == response_snapshot
|
@ -66,7 +66,7 @@ class MPTSharded(CausalLM):
|
|||||||
super(CausalLM, self).__init__(
|
super(CausalLM, self).__init__(
|
||||||
model=model,
|
model=model,
|
||||||
tokenizer=tokenizer,
|
tokenizer=tokenizer,
|
||||||
requires_padding=True,
|
requires_padding=False,
|
||||||
dtype=dtype,
|
dtype=dtype,
|
||||||
device=device,
|
device=device,
|
||||||
rank=rank,
|
rank=rank,
|
||||||
|
Loading…
Reference in New Issue
Block a user