diff --git a/router/src/server.rs b/router/src/server.rs
index 9e57af27..e9d2fcf4 100644
--- a/router/src/server.rs
+++ b/router/src/server.rs
@@ -1877,6 +1877,8 @@ pub async fn run(
     // Only send usage stats when TGI is run in container and the function returns Some
     let is_container = matches!(usage_stats::is_container(), Ok(true));
+    // retrieve the huggingface_hub user agent origin if set, and add the origin to telemetry
+    let origin = std::env::var("HF_HUB_USER_AGENT_ORIGIN").ok();
 
     let user_agent = match (usage_stats_level, is_container) {
         (usage_stats::UsageStatsLevel::On | usage_stats::UsageStatsLevel::NoStack, true) => {
             let reduced_args = usage_stats::Args::new(
@@ -1899,6 +1901,7 @@ pub async fn run(
                 max_client_batch_size,
                 usage_stats_level,
                 backend.name(),
+                origin,
             );
             Some(usage_stats::UserAgent::new(reduced_args))
         }
diff --git a/router/src/usage_stats.rs b/router/src/usage_stats.rs
index c3df0c80..353e9e37 100644
--- a/router/src/usage_stats.rs
+++ b/router/src/usage_stats.rs
@@ -98,6 +98,7 @@ pub struct Args {
     max_client_batch_size: usize,
     usage_stats_level: UsageStatsLevel,
     backend_name: &'static str,
+    origin: Option<String>,
 }
 
 impl Args {
@@ -122,6 +123,7 @@ impl Args {
         max_client_batch_size: usize,
         usage_stats_level: UsageStatsLevel,
         backend_name: &'static str,
+        origin: Option<String>,
     ) -> Self {
         Self {
             model_config,
@@ -143,6 +145,7 @@ impl Args {
             max_client_batch_size,
             usage_stats_level,
             backend_name,
+            origin,
         }
     }
 }