From 70ac5c3e101225ac4ea13d67ded220783bfbbfce Mon Sep 17 00:00:00 2001
From: drbh
Date: Thu, 22 Feb 2024 08:56:42 -0500
Subject: [PATCH] fix: avoid default message (#1579)

This PR avoids setting a default message in order to avoid unexpected
generations
---
 router/src/lib.rs | 16 ++++------------
 1 file changed, 4 insertions(+), 12 deletions(-)

diff --git a/router/src/lib.rs b/router/src/lib.rs
index 113e2642..1c06eb8a 100644
--- a/router/src/lib.rs
+++ b/router/src/lib.rs
@@ -446,23 +446,14 @@ impl ChatCompletionChunk {
     }
 }
 
-fn default_request_messages() -> Vec<Message> {
-    vec![Message {
-        role: "user".to_string(),
-        content: "My name is David and I".to_string(),
-        name: None,
-    }]
-}
-
 #[derive(Clone, Deserialize, ToSchema, Serialize)]
 pub(crate) struct ChatRequest {
-    /// UNUSED
     #[schema(example = "mistralai/Mistral-7B-Instruct-v0.2")]
-    /// ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API.
+    /// [UNUSED] ID of the model to use. See the model endpoint compatibility table for details on which models work with the Chat API.
     pub model: String,
-    /* NOTE: UNUSED */
+
     /// A list of messages comprising the conversation so far.
-    #[serde(default = "default_request_messages")]
+    #[schema(example = "[{\"role\": \"user\", \"content\": \"What is Deep Learning?\"}]")]
     pub messages: Vec<Message>,
 
     /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far,
@@ -545,6 +536,7 @@ pub(crate) struct Message {
     pub role: String,
     #[schema(example = "My name is David and I")]
     pub content: String,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
     #[schema(example = "\"David\"")]
     pub name: Option<String>,
 }
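
Below is a minimal sketch (not the router's actual code) of why dropping the `default_request_messages` default matters. It assumes `serde`/`serde_json` with the derive feature, and the structs are trimmed stand-ins for the definitions in router/src/lib.rs: once `messages` has no serde default, a request body that omits it fails to deserialize instead of silently generating from the hard-coded placeholder prompt.

```rust
use serde::Deserialize;

// Trimmed stand-in for router/src/lib.rs::Message (assumption, not the full struct).
#[derive(Debug, Deserialize)]
struct Message {
    role: String,
    content: String,
    // `default` keeps `name` optional; the patch additionally adds
    // `skip_serializing_if = "Option::is_none"` so an absent name is
    // omitted when the message is serialized back out.
    #[serde(default)]
    name: Option<String>,
}

// Trimmed stand-in for router/src/lib.rs::ChatRequest (assumption).
#[derive(Debug, Deserialize)]
struct ChatRequest {
    model: String,
    // Before #1579 this field carried `#[serde(default = "default_request_messages")]`,
    // so an omitted `messages` array was silently replaced with
    // `[{"role": "user", "content": "My name is David and I"}]`.
    messages: Vec<Message>,
}

fn main() {
    // Well-formed request: deserializes as expected.
    let ok: Result<ChatRequest, _> = serde_json::from_str(
        r#"{"model": "mistralai/Mistral-7B-Instruct-v0.2",
            "messages": [{"role": "user", "content": "What is Deep Learning?"}]}"#,
    );
    assert!(ok.is_ok());

    // Request missing `messages`: with the default removed this is now a
    // deserialization error ("missing field `messages`") rather than an
    // unexpected generation from the placeholder prompt.
    let missing: Result<ChatRequest, _> = serde_json::from_str(
        r#"{"model": "mistralai/Mistral-7B-Instruct-v0.2"}"#,
    );
    assert!(missing.is_err());
}
```

The design trade-off is explicit failure over implicit behavior: a caller who forgets `messages` now gets a clear 4xx-style validation error from the router instead of a completion seeded by a prompt they never sent.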