Mirror of https://github.com/meilisearch/MeiliSearch, synced 2025-07-03 11:57:07 +02:00
Merge #1910

1910: After v0.24.0: import `stable` in `main` r=MarinPostma a=curquiza

Co-authored-by: Tamo <tamo@meilisearch.com>
Co-authored-by: many <maxime@meilisearch.com>
Co-authored-by: bors[bot] <26634292+bors[bot]@users.noreply.github.com>
Co-authored-by: Guillaume Mourier <guillaume@meilisearch.com>
Co-authored-by: Irevoire <tamo@meilisearch.com>
Co-authored-by: Clémentine Urquizar <clementine@meilisearch.com>

commit 8363200fd7

22 changed files with 320 additions and 157 deletions
@@ -18,7 +18,7 @@ impl SearchAggregator {
         Self::default()
     }
 
-    pub fn finish(&mut self, _: &dyn Any) {}
+    pub fn succeed(&mut self, _: &dyn Any) {}
 }
 
 impl MockAnalytics {
@@ -1,4 +1,4 @@
-use std::collections::{HashMap, HashSet};
+use std::collections::{BinaryHeap, HashMap, HashSet};
 use std::fs;
 use std::path::Path;
 use std::sync::Arc;
@@ -38,7 +38,7 @@ fn write_user_id(db_path: &Path, user_id: &str) {
     }
 }
 
-const SEGMENT_API_KEY: &str = "vHi89WrNDckHSQssyUJqLvIyp2QFITSC";
+const SEGMENT_API_KEY: &str = "P3FWhhEsJiEDCuEHpmcN9DHcK4hVfBvb";
 
 pub fn extract_user_agents(request: &HttpRequest) -> Vec<String> {
     request
@@ -187,7 +187,7 @@ impl Segment {
             "kernel_version": kernel_version,
             "cores": sys.processors().len(),
             "ram_size": sys.total_memory(),
-            "disk_size": sys.disks().iter().map(|disk| disk.available_space()).max(),
+            "disk_size": sys.disks().iter().map(|disk| disk.total_space()).max(),
             "server_provider": std::env::var("MEILI_SERVER_PROVIDER").ok(),
         })
     });
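For context on the telemetry change above: available_space reports whatever is free at call time, which varies from run to run, while total_space reports fixed disk capacity, a stable metric. A minimal standalone sketch, assuming the sysinfo API of that era (~0.20, where disks(), available_space(), and total_space() come from the SystemExt and DiskExt traits):

    use sysinfo::{DiskExt, System, SystemExt};

    fn main() {
        let sys = System::new_all();
        // Before: free space right now, noisy across reports.
        let available = sys.disks().iter().map(|d| d.available_space()).max();
        // After: the capacity of the largest disk, stable across reports.
        let total = sys.disks().iter().map(|d| d.total_space()).max();
        println!("available: {:?}, total: {:?}", available, total);
    }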
@@ -254,9 +254,9 @@ impl Segment {
                 .await;
         }
         let get_search = std::mem::take(&mut self.get_search_aggregator)
-            .into_event(&self.user, "Document Searched GET");
+            .into_event(&self.user, "Documents Searched GET");
         let post_search = std::mem::take(&mut self.post_search_aggregator)
-            .into_event(&self.user, "Document Searched POST");
+            .into_event(&self.user, "Documents Searched POST");
         let add_documents = std::mem::take(&mut self.add_documents_aggregator)
             .into_event(&self.user, "Documents Added");
         let update_documents = std::mem::take(&mut self.update_documents_aggregator)
@@ -286,7 +286,7 @@ pub struct SearchAggregator {
     // requests
     total_received: usize,
    total_succeeded: usize,
-    time_spent: Vec<usize>,
+    time_spent: BinaryHeap<usize>,
 
     // sort
     sort_with_geo_point: bool,
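Switching time_spent from Vec to BinaryHeap means response times can be pushed in any order and later consumed in ascending order via into_sorted_vec, which the percentile lookup further down relies on. A quick self-contained illustration of that standard-library behaviour:

    use std::collections::BinaryHeap;

    fn main() {
        // A max-heap accepts pushes in arrival order...
        let mut time_spent = BinaryHeap::new();
        for ms in [42_usize, 7, 130, 12] {
            time_spent.push(ms);
        }
        // ...and drains into an ascending Vec, exactly what the
        // percentile lookup in into_event needs.
        assert_eq!(time_spent.into_sorted_vec(), vec![7, 12, 42, 130]);
    }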
@@ -364,7 +364,7 @@ impl SearchAggregator {
         ret
     }
 
-    pub fn finish(&mut self, result: &SearchResult) {
+    pub fn succeed(&mut self, result: &SearchResult) {
         self.total_succeeded += 1;
         self.time_spent.push(result.processing_time_ms as usize);
     }
@@ -398,17 +398,21 @@ impl SearchAggregator {
         self.max_offset = self.max_offset.max(other.max_offset);
     }
 
-    pub fn into_event(mut self, user: &User, event_name: &str) -> Option<Track> {
+    pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
         if self.total_received == 0 {
             None
         } else {
             // the index of the 99th percentile value
             let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.;
-            self.time_spent.drain(percentile_99th as usize..);
+            // we get all the values in sorted order
+            let time_spent = self.time_spent.into_sorted_vec();
+            // we only keep the slowest value among the 99% fastest results
+            let time_spent = time_spent[percentile_99th as usize];
 
             let properties = json!({
                 "user-agent": self.user_agents,
                 "requests": {
-                    "99th_response_time": format!("{:.2}", self.time_spent.iter().sum::<usize>() as f64 / self.time_spent.len() as f64),
+                    "99th_response_time": format!("{:.2}", time_spent),
                     "total_succeeded": self.total_succeeded,
                     "total_failed": self.total_received.saturating_sub(self.total_succeeded), // just to be sure we never panic
                     "total_received": self.total_received,
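To make the percentile arithmetic concrete: for n succeeded requests, 0.99 * (n - 1) + 1 lands on an index near the end of the ascending vec, and the value there is the slowest time among the fastest 99%. A hedged re-derivation with made-up timings (it assumes, as the real code appears to, one heap entry per succeeded request):

    use std::collections::BinaryHeap;

    // Names follow the diff; the data is invented for illustration.
    fn percentile_99th(time_spent: BinaryHeap<usize>, total_succeeded: usize) -> usize {
        let idx = 0.99 * (total_succeeded as f64 - 1.) + 1.;
        let sorted = time_spent.into_sorted_vec();
        sorted[idx as usize]
    }

    fn main() {
        // 101 fake timings: 0ms, 1ms, ..., 100ms.
        let heap: BinaryHeap<usize> = (0..=100).collect();
        // 0.99 * 100 + 1 = 100.0, so we read the last (slowest) entry.
        assert_eq!(percentile_99th(heap, 101), 100);
    }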
@@ -2,14 +2,14 @@ use meilisearch_error::{Code, ErrorCode};
 
 #[derive(Debug, thiserror::Error)]
 pub enum AuthenticationError {
-    #[error("You must have an authorization token")]
+    #[error("The X-MEILI-API-KEY header is missing.")]
     MissingAuthorizationHeader,
-    #[error("Invalid API key")]
+    #[error("The provided API key is invalid.")]
     InvalidToken(String),
     // Triggered on configuration error.
-    #[error("Irretrievable state")]
+    #[error("An internal error has occurred. `Irretrievable state`.")]
     IrretrievableState,
-    #[error("Unknown authentication policy")]
+    #[error("An internal error has occurred. `Unknown authentication policy`.")]
     UnknownPolicy,
 }
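The strings above are plain thiserror #[error] attributes, so each one becomes the variant's Display output verbatim. A minimal sketch using one variant from the diff:

    use thiserror::Error;

    #[derive(Debug, Error)]
    pub enum AuthenticationError {
        #[error("The X-MEILI-API-KEY header is missing.")]
        MissingAuthorizationHeader,
    }

    fn main() {
        let err = AuthenticationError::MissingAuthorizationHeader;
        // Display yields the exact user-facing message.
        assert_eq!(err.to_string(), "The X-MEILI-API-KEY header is missing.");
    }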
@@ -118,17 +118,18 @@ pub async fn search_with_url_query(
 
     let mut aggregate = SearchAggregator::from_query(&query, &req);
 
-    let search_result = meilisearch
-        .search(path.into_inner().index_uid, query)
-        .await?;
+    let search_result = meilisearch.search(path.into_inner().index_uid, query).await;
+    if let Ok(ref search_result) = search_result {
+        aggregate.succeed(search_result);
+    }
+    analytics.get_search(aggregate);
+
+    let search_result = search_result?;
 
     // Tests that exhaustive_nb_hits is always set to false
     #[cfg(test)]
     assert!(!search_result.exhaustive_nb_hits);
 
-    aggregate.finish(&search_result);
-    analytics.get_search(aggregate);
-
     debug!("returns: {:?}", search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
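This reshaping is the heart of the handler change: binding the Result instead of applying ? immediately lets the route record analytics on both the success and failure paths before any error propagates. A stripped-down sketch of the pattern with stand-in types (SearchError, Aggregate, and run_search are hypothetical, not Meilisearch's real API):

    #[derive(Debug)]
    struct SearchError;

    #[derive(Default)]
    struct Aggregate { succeeded: bool }

    fn run_search(ok: bool) -> Result<u32, SearchError> {
        if ok { Ok(7) } else { Err(SearchError) }
    }

    fn handler(ok: bool) -> Result<u32, SearchError> {
        let mut aggregate = Aggregate::default();
        // Bind the Result instead of applying `?` right away...
        let search_result = run_search(ok);
        if let Ok(ref _hits) = search_result {
            aggregate.succeeded = true;
        }
        // ...send the aggregate unconditionally (the analytics sink is elided)...
        println!("sent aggregate, succeeded = {}", aggregate.succeeded);
        // ...and only then propagate any error to the caller.
        Ok(search_result?)
    }

    fn main() {
        assert_eq!(handler(true).unwrap(), 7);
        assert!(handler(false).is_err());
    }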
@@ -145,17 +146,18 @@ pub async fn search_with_post(
 
     let mut aggregate = SearchAggregator::from_query(&query, &req);
 
-    let search_result = meilisearch
-        .search(path.into_inner().index_uid, query)
-        .await?;
+    let search_result = meilisearch.search(path.into_inner().index_uid, query).await;
+    if let Ok(ref search_result) = search_result {
+        aggregate.succeed(search_result);
+    }
+    analytics.post_search(aggregate);
+
+    let search_result = search_result?;
 
     // Tests that exhaustive_nb_hits is always set to false
     #[cfg(test)]
     assert!(!search_result.exhaustive_nb_hits);
 
-    aggregate.finish(&search_result);
-    analytics.post_search(aggregate);
-
     debug!("returns: {:?}", search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
@@ -97,8 +97,7 @@ pub struct FailedUpdateResult {
     pub update_id: u64,
     #[serde(rename = "type")]
     pub update_type: UpdateType,
-    #[serde(flatten)]
-    pub response: ResponseError,
+    pub error: ResponseError,
     pub duration: f64, // in seconds
     pub enqueued_at: DateTime<Utc>,
     pub processed_at: DateTime<Utc>,
@@ -190,12 +189,12 @@ impl From<UpdateStatus> for UpdateStatusResponse {
         let update_id = failed.id();
         let processed_at = failed.failed_at;
         let enqueued_at = failed.from.from.enqueued_at;
-        let response = failed.into();
+        let error = failed.into();
 
         let content = FailedUpdateResult {
             update_id,
             update_type,
-            response,
+            error,
             duration,
             enqueued_at,
             processed_at,
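The two hunks above rename response to error and drop #[serde(flatten)], which also changes the JSON shape: the error's fields now nest under an "error" key instead of being inlined into the update object. A sketch of the difference, using a made-up two-field error type standing in for ResponseError (requires serde with the derive feature, plus serde_json):

    use serde::Serialize;

    #[derive(Serialize)]
    struct ResponseErrorish { message: String, code: String }

    #[derive(Serialize)]
    struct Flattened {
        update_id: u64,
        #[serde(flatten)]
        response: ResponseErrorish,
    }

    #[derive(Serialize)]
    struct Named {
        update_id: u64,
        error: ResponseErrorish,
    }

    fn main() {
        let e = || ResponseErrorish { message: "boom".into(), code: "internal".into() };
        // Before: {"update_id":1,"message":"boom","code":"internal"}
        println!("{}", serde_json::to_string(&Flattened { update_id: 1, response: e() }).unwrap());
        // After:  {"update_id":1,"error":{"message":"boom","code":"internal"}}
        println!("{}", serde_json::to_string(&Named { update_id: 1, error: e() }).unwrap());
    }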