text-generation-inference/router/src/server.rs

use std::net::SocketAddr;
use axum::{Router, Json};
use axum::http::StatusCode;
use axum::extract::Extension;
use axum::routing::post;
use crate::{Batcher, ShardedClient, Validation};
use serde::Deserialize;
use tokenizers::Tokenizer;
use tokio::time::Instant;
use tracing::instrument;
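
/// Generation parameters accepted by the `/generate` route.
///
/// Every field is optional in the request body; missing fields fall back to
/// the `default_*` helpers below (no sampling, 20 new tokens).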
#[derive(Clone, Debug, Deserialize)]
pub(crate) struct GenerateParameters {
#[serde(default = "default_temperature")]
pub temperature: f32,
#[serde(default = "default_top_k")]
pub top_k: u32,
#[serde(default = "default_top_p")]
pub top_p: f32,
#[serde(default = "default_do_sample")]
pub do_sample: bool,
#[serde(default = "default_max_new_tokens")]
pub max_new_tokens: u32,
}
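
// Serde default helpers: values used when a field is omitted from the request JSON.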
fn default_temperature() -> f32 {
    1.0
}

fn default_top_k() -> u32 {
    0
}

fn default_top_p() -> f32 {
    1.0
}

fn default_do_sample() -> bool {
    false
}

fn default_max_new_tokens() -> u32 {
    20
}

fn default_parameters() -> GenerateParameters {
    GenerateParameters {
        temperature: default_temperature(),
        top_k: default_top_k(),
        top_p: default_top_p(),
        do_sample: default_do_sample(),
        max_new_tokens: default_max_new_tokens(),
    }
}
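
/// Payload of the `/generate` route: the input prompt plus optional
/// generation parameters.
///
/// A minimal body only needs `inputs`, e.g. `{"inputs": "Hello"}`
/// (the prompt string here is only illustrative).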
#[derive(Clone, Debug, Deserialize)]
pub(crate) struct GenerateRequest {
    pub inputs: String,
    #[serde(default = "default_parameters")]
    pub parameters: GenerateParameters,
}
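
/// `POST /generate` handler: validates the request, hands it to the batcher
/// and returns the generated text as JSON, or a 500 if validation or
/// inference fails.
///
/// A rough client-side sketch, assuming the router was bound to
/// 127.0.0.1:3000 (the address is whatever the caller passes to `run`):
///
/// ```text
/// curl 127.0.0.1:3000/generate \
///     -X POST \
///     -H 'Content-Type: application/json' \
///     -d '{"inputs": "Hello", "parameters": {"max_new_tokens": 20}}'
/// ```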
#[instrument(skip(state), fields(time, time_per_token))]
async fn generate(
    state: Extension<ServerState>,
    req: Json<GenerateRequest>,
) -> Result<Json<serde_json::Value>, StatusCode> {
    let start = Instant::now();

    // Validate the request; validation also tokenizes the inputs to compute `input_length`
    let (input_length, validated_request) = match state
        .validation
        .validate(GenerateRequest {
            inputs: req.inputs.clone(),
            parameters: req.parameters.clone(),
        })
        .await
    {
        Ok(result) => result,
        Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
    };

    // Queue the validated request on the batcher and wait for the generated text
    let output = state.infer.infer(input_length, validated_request).await;

    match output {
        Ok(generated_text) => {
            // Record end-to-end latency and latency per requested token on the tracing span
            tracing::Span::current().record("time", format!("{:?}", start.elapsed()));
            tracing::Span::current().record(
                "time_per_token",
                format!("{:?}", start.elapsed() / req.parameters.max_new_tokens),
            );
            tracing::info!("response: {}", generated_text);

            Ok(Json(serde_json::json!({
                "generated_text": generated_text,
            })))
        }
        Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
    }
}
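
/// State shared by every request handler, injected through axum's `Extension` layer.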
#[derive(Clone)]
struct ServerState {
    validation: Validation,
    infer: Batcher,
}
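
/// Build the router, attach the shared state and serve `/generate` on `addr`
/// until the process is stopped.
///
/// A rough usage sketch; constructing the `ShardedClient` happens elsewhere
/// in the crate, so that part is only a placeholder:
///
/// ```ignore
/// let client: ShardedClient = /* connect to the model shards */;
/// // `from_file` is part of the `tokenizers` crate; the path is illustrative.
/// let tokenizer = Tokenizer::from_file("tokenizer.json").unwrap();
/// run(client, tokenizer, "127.0.0.1:3000".parse().unwrap()).await;
/// ```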
pub async fn run(
    client: ShardedClient,
    tokenizer: Tokenizer,
    addr: SocketAddr,
) {
    // Make sure the model shards start from an empty cache
    client.clear_cache().await.expect("Unable to clear cache");
    tracing::info!("Connected");

    let infer = Batcher::new(client);
    let validation = Validation::new(tokenizer);
    let shared_state = ServerState { validation, infer };

    let app = Router::new()
        .route("/generate", post(generate))
        .layer(Extension(shared_state));

    axum::Server::bind(&addr)
        .serve(app.into_make_service())
        .await
        .unwrap();
}