Fix after rebase.

Nicolas Patry 2023-08-17 15:18:14 +00:00
parent 25d6ddf6ea
commit e0b197ea09
2 changed files with 0 additions and 2 deletions


@@ -382,7 +382,6 @@ async fn generate_stream(
         yield Ok(Event::from(err));
     } else {
         match infer.generate_stream(req).instrument(info_span!(parent: &span, "async_stream")).await {
-            let top_n_tokens = req.parameters.top_n_tokens;
             // Keep permit as long as generate_stream lives
             Ok((_permit, mut response_stream)) => {
                 // Server-Sent Event stream


@@ -6,7 +6,6 @@ from typing import List, Tuple, Optional, TypeVar, Type
 from transformers import PreTrainedTokenizerBase, PretrainedConfig
 from text_generation_server.models.types import Batch, Generation
->>>>>>> 8471e18 (Defer building top-token objects to Rust)
 from text_generation_server.pb.generate_pb2 import InfoResponse
 B = TypeVar("B", bound=Batch)