Mirror of https://github.com/huggingface/text-generation-inference.git (synced 2025-09-11 12:24:53 +00:00)
update comment
This commit is contained in:
parent 6e0f37c0ca
commit 52afdcc281
@@ -243,10 +243,8 @@ class TransformersFlashCausalLM(FlashCausalLM):
         adapter_data=None,  # not supported, but passed to match original signature
     ):
         hidden_states = self.model.model.forward(
-            input_ids=input_ids.unsqueeze(0),  # expand dim to easily fit transformers
-            position_ids=position_ids.unsqueeze(
-                0
-            ),  # expand dim to easily fit transformers
+            input_ids=input_ids.unsqueeze(0),  # expand dim to fit Transformers
+            position_ids=position_ids.unsqueeze(0),  # expand dim to fit Transformers
             past_key_values=None,  # we use self.kv_cache instead of transformers cache object
             use_cache=False,  # we use self.kv_cache instead of transformers cache object
             return_dict=True,
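For context, the comments being edited describe how TGI hands its flattened tensors to a Hugging Face Transformers model: the 1-D input_ids and position_ids get a leading batch dimension via unsqueeze(0), while past_key_values=None and use_cache=False keep the Transformers-side cache out of the picture because TGI manages its own kv_cache. Below is a minimal sketch of that calling convention only, assuming a tiny randomly initialized GPT-2 as a stand-in for TGI's actual model wrapper; the config values and token ids are invented for illustration, and the real path also supplies TGI's flash-attention metadata, which is omitted here.

# Minimal sketch of the calling convention the updated comments describe.
# This is NOT TGI code: the tiny randomly initialized GPT-2 config and the
# token values below are made up so the example runs without any downloads,
# and the flash-attention metadata TGI normally passes is omitted.
import torch
from transformers import GPT2Config, GPT2LMHeadModel

model = GPT2LMHeadModel(
    GPT2Config(vocab_size=128, n_positions=32, n_embd=32, n_layer=2, n_head=2)
)

# TGI keeps all tokens of a batch flattened into 1-D tensors
# (here: two sequences of lengths 3 and 2, concatenated).
input_ids = torch.tensor([3, 17, 42, 7, 99])   # shape: (total_tokens,)
position_ids = torch.tensor([0, 1, 2, 0, 1])   # shape: (total_tokens,)

out = model(
    input_ids=input_ids.unsqueeze(0),        # expand dim to fit Transformers: (1, total_tokens)
    position_ids=position_ids.unsqueeze(0),  # expand dim to fit Transformers: (1, total_tokens)
    past_key_values=None,                    # the KV cache lives outside the Transformers model
    use_cache=False,                         # so the Transformers cache object is never built
    return_dict=True,
)
print(out.logits.shape)  # torch.Size([1, 5, 128])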