diff --git a/crates/meilisearch/src/metrics.rs b/crates/meilisearch/src/metrics.rs
index 29c1aeae8..d52e04cc6 100644
--- a/crates/meilisearch/src/metrics.rs
+++ b/crates/meilisearch/src/metrics.rs
@@ -15,6 +15,33 @@ lazy_static! {
         "Meilisearch number of degraded search requests"
     ))
     .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_SEARCH_REQUESTS: IntCounterVec = register_int_counter_vec!(
+        opts!(
+            "meilisearch_chat_search_requests",
+            "Meilisearch number of search requests performed by the chat route itself"
+        ),
+        &["type"]
+    )
+    .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
+        opts!("meilisearch_chat_prompt_tokens_usage", "Meilisearch Chat Prompt Tokens Usage"),
+        &["workspace", "model"]
+    )
+    .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE: IntCounterVec =
+        register_int_counter_vec!(
+            opts!(
+                "meilisearch_chat_completion_tokens_usage",
+                "Meilisearch Chat Completion Tokens Usage"
+            ),
+            &["workspace", "model"]
+        )
+        .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
+        opts!("meilisearch_chat_total_tokens_usage", "Meilisearch Chat Total Tokens Usage"),
+        &["workspace", "model"]
+    )
+    .expect("Can't create a metric");
     pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
         register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch DB Size In Bytes"))
             .expect("Can't create a metric");
diff --git a/crates/meilisearch/src/routes/chats/chat_completions.rs b/crates/meilisearch/src/routes/chats/chat_completions.rs
index ccbdccbbc..4f7087ae8 100644
--- a/crates/meilisearch/src/routes/chats/chat_completions.rs
+++ b/crates/meilisearch/src/routes/chats/chat_completions.rs
@@ -13,9 +13,9 @@ use async_openai::types::{
     ChatCompletionRequestDeveloperMessageContent, ChatCompletionRequestMessage,
     ChatCompletionRequestSystemMessage, ChatCompletionRequestSystemMessageContent,
     ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent,
-    ChatCompletionStreamResponseDelta, ChatCompletionToolArgs, ChatCompletionToolType,
-    CreateChatCompletionRequest, CreateChatCompletionStreamResponse, FinishReason, FunctionCall,
-    FunctionCallStream, FunctionObjectArgs,
+    ChatCompletionStreamOptions, ChatCompletionStreamResponseDelta, ChatCompletionToolArgs,
+    ChatCompletionToolType, CreateChatCompletionRequest, CreateChatCompletionStreamResponse,
+    FinishReason, FunctionCall, FunctionCallStream, FunctionObjectArgs,
 };
 use async_openai::Client;
 use bumpalo::Bump;
@@ -48,7 +48,11 @@ use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::policies::ActionPolicy;
 use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
-use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
+use crate::metrics::{
+    MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE, MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE,
+    MEILISEARCH_CHAT_SEARCH_REQUESTS, MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE,
+    MEILISEARCH_DEGRADED_SEARCH_REQUESTS,
+};
 use crate::routes::chats::utils::SseEventSender;
 use crate::routes::indexes::search::search_kind;
 use crate::search::{add_search_rules, prepare_search, search_from_kind, SearchQuery};
@@ -286,7 +290,7 @@ async fn process_search_request(
     let output = output?;
     let mut documents = Vec::new();
     if let Ok((ref rtxn, ref search_result)) = output {
-        // aggregate.succeed(search_result);
+        MEILISEARCH_CHAT_SEARCH_REQUESTS.with_label_values(&["internal"]).inc();
         if search_result.degraded {
             MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
         }
@@ -488,6 +492,7 @@ async fn streamed_chat(
     let (tx, rx) = tokio::sync::mpsc::channel(10);
     let tx = SseEventSender::new(tx);
+    let workspace_uid = workspace_uid.to_string();
     let _join_handle = Handle::current().spawn(async move {
         let client = Client::with_config(config.clone());
         let mut global_tool_calls = HashMap::<u32, Call>::new();
@@ -497,6 +502,7 @@ async fn run_conversation(
             let output = run_conversation(
                 &index_scheduler,
                 &auth_ctrl,
+                &workspace_uid,
                 &search_queue,
                 &auth_token,
                 &client,
@@ -534,6 +540,7 @@ async fn run_conversation(
         Data<IndexScheduler>,
     >,
     auth_ctrl: &web::Data<AuthController>,
+    workspace_uid: &str,
     search_queue: &web::Data<SearchQueue>,
     auth_token: &str,
     client: &Client,
@@ -543,13 +550,34 @@ async fn run_conversation(
     global_tool_calls: &mut HashMap<u32, Call>,
     function_support: FunctionSupport,
 ) -> Result<ControlFlow<Option<FinishReason>, ()>, SendError<Event>> {
+    use DbChatCompletionSource::*;
+    let mut finish_reason = None;
+    chat_completion.stream_options = match source {
+        OpenAi | AzureOpenAi => Some(ChatCompletionStreamOptions { include_usage: true }),
+        Mistral | VLlm => None,
+    };
+    // safety: unwrap: can only happens if `stream` was set to `false`
+    let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
     while let Some(result) = response.next().await {
         match result {
             Ok(resp) => {
-                let choice = &resp.choices[0];
+                if let Some(usage) = resp.usage.as_ref() {
+                    MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE
+                        .with_label_values(&[workspace_uid, &chat_completion.model])
+                        .inc_by(usage.prompt_tokens as u64);
+                    MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE
+                        .with_label_values(&[workspace_uid, &chat_completion.model])
+                        .inc_by(usage.completion_tokens as u64);
+                    MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE
+                        .with_label_values(&[workspace_uid, &chat_completion.model])
+                        .inc_by(usage.total_tokens as u64);
+                }
+                let choice = match resp.choices.first() {
+                    Some(choice) => choice,
+                    None => break,
+                };
                 finish_reason = choice.finish_reason;
                 let ChatCompletionStreamResponseDelta { ref tool_calls, .. } = &choice.delta;
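
For context on the `stream_options` change: with `include_usage: true`, OpenAI-compatible providers send one final stream chunk that carries a populated `usage` object and an empty `choices` array, which is exactly what the handler above reads and why it now falls back to `break` instead of indexing `choices[0]`. The following standalone sketch (not Meilisearch code; the model name and prompt are placeholders) shows the same mechanism with `async-openai`:

```rust
use async_openai::types::{
    ChatCompletionRequestUserMessageArgs, ChatCompletionStreamOptions,
    CreateChatCompletionRequestArgs,
};
use async_openai::Client;
use futures::StreamExt;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Reads OPENAI_API_KEY from the environment.
    let client = Client::new();

    let request = CreateChatCompletionRequestArgs::default()
        .model("gpt-4o-mini")
        .messages([ChatCompletionRequestUserMessageArgs::default()
            .content("Say hello")
            .build()?
            .into()])
        .stream(true)
        // Ask the provider to append a final chunk that only carries usage data.
        .stream_options(ChatCompletionStreamOptions { include_usage: true })
        .build()?;

    let mut stream = client.chat().create_stream(request).await?;
    while let Some(chunk) = stream.next().await {
        let chunk = chunk?;
        // With include_usage, the last chunk has no choices but a populated `usage`.
        if let Some(usage) = chunk.usage {
            println!(
                "prompt: {}, completion: {}, total: {}",
                usage.prompt_tokens, usage.completion_tokens, usage.total_tokens
            );
        }
    }
    Ok(())
}
```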
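
The new counters follow the `prometheus` crate pattern already used in `metrics.rs`: a lazily registered `IntCounterVec` where each distinct label combination (here `workspace` and `model`) becomes its own time series on the metrics endpoint. A minimal, self-contained sketch with a hypothetical metric name, not part of the patch:

```rust
use lazy_static::lazy_static;
use prometheus::{opts, register_int_counter_vec, Encoder, IntCounterVec, TextEncoder};

lazy_static! {
    // Hypothetical counter mirroring the ["workspace", "model"] label scheme above.
    static ref EXAMPLE_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
        opts!("example_tokens_usage", "Example token usage counter"),
        &["workspace", "model"]
    )
    .expect("Can't create a metric");
}

fn main() {
    // Each distinct (workspace, model) pair gets its own time series.
    EXAMPLE_TOKENS_USAGE.with_label_values(&["my-workspace", "gpt-4o-mini"]).inc_by(42);
    EXAMPLE_TOKENS_USAGE.with_label_values(&["my-workspace", "gpt-4o-mini"]).inc_by(8);

    // Render the default registry in the Prometheus text format, as a /metrics
    // route would before returning it to the scraper.
    let mut buffer = Vec::new();
    TextEncoder::new().encode(&prometheus::gather(), &mut buffer).unwrap();
    println!("{}", String::from_utf8(buffer).unwrap());
}
```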