From c56df3b167e0d9e021f67e82b97fad6cd2cbd603 Mon Sep 17 00:00:00 2001
From: Nicolas Patry
Date: Mon, 6 May 2024 19:55:39 +0200
Subject: [PATCH] Too many removal.

---
 server/text_generation_server/models/flash_causal_lm.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/server/text_generation_server/models/flash_causal_lm.py b/server/text_generation_server/models/flash_causal_lm.py
index 62df1f59..f567bea9 100644
--- a/server/text_generation_server/models/flash_causal_lm.py
+++ b/server/text_generation_server/models/flash_causal_lm.py
@@ -12,6 +12,9 @@ from dataclasses import dataclass
 from opentelemetry import trace
 from transformers import PreTrainedTokenizerBase
 from typing import Optional, Tuple, List, Type, Dict
+from text_generation_server.models import Model
+from text_generation_server.utils.tokens import batch_top_tokens
+from text_generation_server.utils.speculate import get_speculate
 from text_generation_server.models.types import (
     Batch,
     Tokens,