Mirror of https://github.com/meilisearch/MeiliSearch, synced 2024-12-22 20:50:04 +01:00
Merge #5011
5011: Revamp analytics r=ManyTheFish a=irevoire

# Pull Request

## Related issue
Fixes https://github.com/meilisearch/meilisearch/issues/5009

## What does this PR do?
- Force every analytics event to go through a trait that makes you handle aggregation correctly
- Share the code that retrieves the `user-agent`, `timestamp` and `requests.total_received` between all aggregates, so there is no room for mistakes
- Get rid of the separate channel for each kind of event in favor of a single any-map
- Ensure that we never [send empty events ever again](https://github.com/meilisearch/meilisearch/pull/5001)
- Merge all the sub-settings routes into a single « Settings Updated » event
- Fix: when one of the three following features was set from the global settings route, no analytics were sent at all
  - /non-separator-tokens
  - /separator-tokens
  - /dictionary

Co-authored-by: Tamo <tamo@meilisearch.com>
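To illustrate the pattern this PR introduces, here is a minimal sketch: the `QuickFilterAggregate` event, its fields, and the `record` handler are hypothetical and not part of the diff, while the `Aggregate` trait signatures and `Analytics::publish` are taken from the changes below.

```rust
use actix_web::{web, HttpRequest};
use serde::Serialize;

use crate::analytics::{Aggregate, Analytics};

/// Hypothetical event; every analytics event now implements `Aggregate`.
#[derive(Default, Serialize)]
struct QuickFilterAggregate {
    // Booleans are OR-ed and counters are max-ed when two events are merged.
    used_filter: bool,
    max_limit: usize,
}

impl Aggregate for QuickFilterAggregate {
    fn event_name(&self) -> &'static str {
        "Quick Filter Used"
    }

    // Called whenever two events of the same type are queued before Segment
    // flushes its buffer; the merged event replaces both of them.
    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
        Box::new(Self {
            used_filter: self.used_filter | new.used_filter,
            max_limit: self.max_limit.max(new.max_limit),
        })
    }

    // The final payload; `user-agent`, `timestamp` and `requests.total_received`
    // are filled in by the shared machinery, not by each event.
    fn into_event(self: Box<Self>) -> serde_json::Value {
        serde_json::to_value(*self).unwrap_or_default()
    }
}

// In a route handler, a single call replaces the old per-event channels:
fn record(analytics: web::Data<Analytics>, req: HttpRequest) {
    analytics.publish(QuickFilterAggregate { used_filter: true, max_limit: 20 }, &req);
}
```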
This commit is contained in commit 3753f87fd8.

Cargo.lock (generated file, 7 changed lines):
@@ -3415,6 +3415,7 @@ dependencies = [
"meilisearch-types",
"mimalloc",
"mime",
"mopa-maintained",
"num_cpus",
"obkv",
"once_cell",
@@ -3681,6 +3682,12 @@ dependencies = [
"syn 2.0.60",
]

[[package]]
name = "mopa-maintained"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79b7f3e22167862cc7c95b21a6f326c22e4bf40da59cbf000b368a310173ba11"

[[package]]
name = "mutually_exclusive_features"
version = "0.0.3"
@@ -75,7 +75,7 @@ reqwest = { version = "0.12.5", features = [
rustls = { version = "0.23.11", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.7.0", features = ["alloc"] }
rustls-pemfile = "2.1.2"
segment = { version = "0.2.4", optional = true }
segment = { version = "0.2.4" }
serde = { version = "1.0.204", features = ["derive"] }
serde_json = { version = "1.0.120", features = ["preserve_order"] }
sha2 = "0.10.8"
@@ -104,6 +104,7 @@ tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
tracing-actix-web = "0.7.11"
build-info = { version = "1.7.0", path = "../build-info" }
roaring = "0.10.2"
mopa-maintained = "0.2.3"

[dev-dependencies]
actix-rt = "2.10.0"
@@ -131,8 +132,7 @@ tempfile = { version = "3.10.1", optional = true }
zip = { version = "2.1.3", optional = true }

[features]
default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]
analytics = ["segment"]
default = ["meilisearch-types/all-tokenizations", "mini-dashboard"]
mini-dashboard = [
"static-files",
"anyhow",
@ -1,109 +0,0 @@
|
||||
use std::any::Any;
|
||||
use std::sync::Arc;
|
||||
|
||||
use actix_web::HttpRequest;
|
||||
use meilisearch_types::InstanceUid;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::{find_user_id, Analytics, DocumentDeletionKind, DocumentFetchKind};
|
||||
use crate::routes::indexes::documents::{DocumentEditionByFunction, UpdateDocumentsQuery};
|
||||
use crate::Opt;
|
||||
|
||||
pub struct MockAnalytics {
|
||||
instance_uid: Option<InstanceUid>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct SearchAggregator;
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl SearchAggregator {
|
||||
pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
pub fn succeed(&mut self, _: &dyn Any) {}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct SimilarAggregator;
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl SimilarAggregator {
|
||||
pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
pub fn succeed(&mut self, _: &dyn Any) {}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct MultiSearchAggregator;
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl MultiSearchAggregator {
|
||||
pub fn from_federated_search(_: &dyn Any, _: &dyn Any) -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
pub fn succeed(&mut self) {}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct FacetSearchAggregator;
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl FacetSearchAggregator {
|
||||
pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
pub fn succeed(&mut self, _: &dyn Any) {}
|
||||
}
|
||||
|
||||
impl MockAnalytics {
|
||||
#[allow(clippy::new_ret_no_self)]
|
||||
pub fn new(opt: &Opt) -> Arc<dyn Analytics> {
|
||||
let instance_uid = find_user_id(&opt.db_path);
|
||||
Arc::new(Self { instance_uid })
|
||||
}
|
||||
}
|
||||
|
||||
impl Analytics for MockAnalytics {
|
||||
fn instance_uid(&self) -> Option<&meilisearch_types::InstanceUid> {
|
||||
self.instance_uid.as_ref()
|
||||
}
|
||||
|
||||
// These methods are noop and should be optimized out
|
||||
fn publish(&self, _event_name: String, _send: Value, _request: Option<&HttpRequest>) {}
|
||||
fn get_search(&self, _aggregate: super::SearchAggregator) {}
|
||||
fn post_search(&self, _aggregate: super::SearchAggregator) {}
|
||||
fn get_similar(&self, _aggregate: super::SimilarAggregator) {}
|
||||
fn post_similar(&self, _aggregate: super::SimilarAggregator) {}
|
||||
fn post_multi_search(&self, _aggregate: super::MultiSearchAggregator) {}
|
||||
fn post_facet_search(&self, _aggregate: super::FacetSearchAggregator) {}
|
||||
fn add_documents(
|
||||
&self,
|
||||
_documents_query: &UpdateDocumentsQuery,
|
||||
_index_creation: bool,
|
||||
_request: &HttpRequest,
|
||||
) {
|
||||
}
|
||||
fn delete_documents(&self, _kind: DocumentDeletionKind, _request: &HttpRequest) {}
|
||||
fn update_documents(
|
||||
&self,
|
||||
_documents_query: &UpdateDocumentsQuery,
|
||||
_index_creation: bool,
|
||||
_request: &HttpRequest,
|
||||
) {
|
||||
}
|
||||
fn update_documents_by_function(
|
||||
&self,
|
||||
_documents_query: &DocumentEditionByFunction,
|
||||
_index_creation: bool,
|
||||
_request: &HttpRequest,
|
||||
) {
|
||||
}
|
||||
fn get_fetch_documents(&self, _documents_query: &DocumentFetchKind, _request: &HttpRequest) {}
|
||||
fn post_fetch_documents(&self, _documents_query: &DocumentFetchKind, _request: &HttpRequest) {}
|
||||
}
|
@@ -1,44 +1,45 @@
mod mock_analytics;
#[cfg(feature = "analytics")]
mod segment_analytics;
pub mod segment_analytics;

use std::fs;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;

use actix_web::HttpRequest;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::InstanceUid;
pub use mock_analytics::MockAnalytics;
use mopa::mopafy;
use once_cell::sync::Lazy;
use platform_dirs::AppDirs;
use serde_json::Value;

use crate::routes::indexes::documents::{DocumentEditionByFunction, UpdateDocumentsQuery};

// if the analytics feature is disabled
// the `SegmentAnalytics` point to the mock instead of the real analytics
#[cfg(not(feature = "analytics"))]
pub type SegmentAnalytics = mock_analytics::MockAnalytics;
#[cfg(not(feature = "analytics"))]
pub type SearchAggregator = mock_analytics::SearchAggregator;
#[cfg(not(feature = "analytics"))]
pub type SimilarAggregator = mock_analytics::SimilarAggregator;
#[cfg(not(feature = "analytics"))]
pub type MultiSearchAggregator = mock_analytics::MultiSearchAggregator;
#[cfg(not(feature = "analytics"))]
pub type FacetSearchAggregator = mock_analytics::FacetSearchAggregator;

// if the feature analytics is enabled we use the real analytics
#[cfg(feature = "analytics")]
pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
#[cfg(feature = "analytics")]
pub type SearchAggregator = segment_analytics::SearchAggregator;
#[cfg(feature = "analytics")]
pub type SimilarAggregator = segment_analytics::SimilarAggregator;
#[cfg(feature = "analytics")]
pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator;
#[cfg(feature = "analytics")]
pub type FacetSearchAggregator = segment_analytics::FacetSearchAggregator;

use crate::Opt;

/// A macro used to quickly define events that don't aggregate or send anything besides an empty event with its name.
#[macro_export]
macro_rules! empty_analytics {
    ($struct_name:ident, $event_name:literal) => {
        #[derive(Default)]
        struct $struct_name {}

        impl $crate::analytics::Aggregate for $struct_name {
            fn event_name(&self) -> &'static str {
                $event_name
            }

            fn aggregate(self: Box<Self>, _other: Box<Self>) -> Box<Self> {
                self
            }

            fn into_event(self: Box<Self>) -> serde_json::Value {
                serde_json::json!({})
            }
        }
    };
}

/// The Meilisearch config dir:
/// `~/.config/Meilisearch` on *NIX or *BSD.
@@ -78,60 +79,88 @@ pub enum DocumentFetchKind {
    Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
}

pub trait Analytics: Sync + Send {
    fn instance_uid(&self) -> Option<&InstanceUid>;
/// To send an event to segment, your event must be able to aggregate itself with another event of the same type.
pub trait Aggregate: 'static + mopa::Any + Send {
    /// The name of the event that will be sent to segment.
    fn event_name(&self) -> &'static str;

    /// Will be called every time an event has been used twice before segment flushed its buffer.
    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self>
    where
        Self: Sized;

    /// Converts your structure to the final event that'll be sent to segment.
    fn into_event(self: Box<Self>) -> serde_json::Value;
}

mopafy!(Aggregate);

/// Helper trait to define multiple aggregates with the same content but a different name.
/// Commonly used when you must aggregate a search with POST or with GET, for example.
pub trait AggregateMethod: 'static + Default + Send {
    fn event_name() -> &'static str;
}

/// A macro used to quickly define multiple aggregate method with their name
/// Usage:
/// ```rust
/// use meilisearch::aggregate_methods;
///
/// aggregate_methods!(
///     SearchGET => "Documents Searched GET",
///     SearchPOST => "Documents Searched POST",
/// );
/// ```
#[macro_export]
macro_rules! aggregate_methods {
    ($method:ident => $event_name:literal) => {
        #[derive(Default)]
        pub struct $method {}

        impl $crate::analytics::AggregateMethod for $method {
            fn event_name() -> &'static str {
                $event_name
            }
        }
    };
    ($($method:ident => $event_name:literal,)+) => {
        $(
            aggregate_methods!($method => $event_name);
        )+

    };
}

#[derive(Clone)]
pub struct Analytics {
    segment: Option<Arc<SegmentAnalytics>>,
}

impl Analytics {
    pub async fn new(
        opt: &Opt,
        index_scheduler: Arc<IndexScheduler>,
        auth_controller: Arc<AuthController>,
    ) -> Self {
        if opt.no_analytics {
            Self { segment: None }
        } else {
            Self { segment: SegmentAnalytics::new(opt, index_scheduler, auth_controller).await }
        }
    }

    pub fn no_analytics() -> Self {
        Self { segment: None }
    }

    pub fn instance_uid(&self) -> Option<&InstanceUid> {
        self.segment.as_ref().map(|segment| segment.instance_uid.as_ref())
    }

    /// The method used to publish most analytics that do not need to be batched every hours
    fn publish(&self, event_name: String, send: Value, request: Option<&HttpRequest>);

    /// This method should be called to aggregate a get search
    fn get_search(&self, aggregate: SearchAggregator);

    /// This method should be called to aggregate a post search
    fn post_search(&self, aggregate: SearchAggregator);

    /// This method should be called to aggregate a get similar request
    fn get_similar(&self, aggregate: SimilarAggregator);

    /// This method should be called to aggregate a post similar request
    fn post_similar(&self, aggregate: SimilarAggregator);

    /// This method should be called to aggregate a post array of searches
    fn post_multi_search(&self, aggregate: MultiSearchAggregator);

    /// This method should be called to aggregate post facet values searches
    fn post_facet_search(&self, aggregate: FacetSearchAggregator);

    // this method should be called to aggregate an add documents request
    fn add_documents(
        &self,
        documents_query: &UpdateDocumentsQuery,
        index_creation: bool,
        request: &HttpRequest,
    );

    // this method should be called to aggregate a fetch documents request
    fn get_fetch_documents(&self, documents_query: &DocumentFetchKind, request: &HttpRequest);

    // this method should be called to aggregate a fetch documents request
    fn post_fetch_documents(&self, documents_query: &DocumentFetchKind, request: &HttpRequest);

    // this method should be called to aggregate a add documents request
    fn delete_documents(&self, kind: DocumentDeletionKind, request: &HttpRequest);

    // this method should be called to batch an update documents request
    fn update_documents(
        &self,
        documents_query: &UpdateDocumentsQuery,
        index_creation: bool,
        request: &HttpRequest,
    );

    // this method should be called to batch an update documents by function request
    fn update_documents_by_function(
        &self,
        documents_query: &DocumentEditionByFunction,
        index_creation: bool,
        request: &HttpRequest,
    );
    pub fn publish<T: Aggregate>(&self, event: T, request: &HttpRequest) {
        if let Some(ref segment) = self.segment {
            let _ = segment.sender.try_send(segment_analytics::Message::new(event, request));
        }
    }
}
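As a usage sketch of the `empty_analytics!` helper above (the route and event name here are hypothetical; only the macro, `Analytics`, and `publish` come from this diff, and the dumps route further down shows the same pattern in the real code):

```rust
use actix_web::{web, HttpRequest, HttpResponse};

use crate::analytics::Analytics;

// Declares an event that carries nothing but its name.
crate::empty_analytics!(CacheClearedAnalytics, "Cache Cleared");

async fn clear_cache(analytics: web::Data<Analytics>, req: HttpRequest) -> HttpResponse {
    // Replaces the old `analytics.publish("Cache Cleared".to_string(), json!({}), Some(&req))`.
    analytics.publish(CacheClearedAnalytics::default(), &req);
    HttpResponse::Accepted().finish()
}
```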
File diff suppressed because it is too large
@@ -120,7 +120,7 @@ pub fn create_app(
    search_queue: Data<SearchQueue>,
    opt: Opt,
    logs: (LogRouteHandle, LogStderrHandle),
    analytics: Arc<dyn Analytics>,
    analytics: Data<Analytics>,
    enable_dashboard: bool,
) -> actix_web::App<
    impl ServiceFactory<
@@ -473,14 +473,14 @@ pub fn configure_data(
    search_queue: Data<SearchQueue>,
    opt: &Opt,
    (logs_route, logs_stderr): (LogRouteHandle, LogStderrHandle),
    analytics: Arc<dyn Analytics>,
    analytics: Data<Analytics>,
) {
    let http_payload_size_limit = opt.http_payload_size_limit.as_u64() as usize;
    config
        .app_data(index_scheduler)
        .app_data(auth)
        .app_data(search_queue)
        .app_data(web::Data::from(analytics))
        .app_data(analytics)
        .app_data(web::Data::new(logs_route))
        .app_data(web::Data::new(logs_stderr))
        .app_data(web::Data::new(opt.clone()))
@ -124,19 +124,12 @@ async fn try_main() -> anyhow::Result<()> {
|
||||
|
||||
let (index_scheduler, auth_controller) = setup_meilisearch(&opt)?;
|
||||
|
||||
#[cfg(all(not(debug_assertions), feature = "analytics"))]
|
||||
let analytics = if !opt.no_analytics {
|
||||
analytics::SegmentAnalytics::new(&opt, index_scheduler.clone(), auth_controller.clone())
|
||||
.await
|
||||
} else {
|
||||
analytics::MockAnalytics::new(&opt)
|
||||
};
|
||||
#[cfg(any(debug_assertions, not(feature = "analytics")))]
|
||||
let analytics = analytics::MockAnalytics::new(&opt);
|
||||
let analytics =
|
||||
analytics::Analytics::new(&opt, index_scheduler.clone(), auth_controller.clone()).await;
|
||||
|
||||
print_launch_resume(&opt, analytics.clone(), config_read_from);
|
||||
|
||||
run_http(index_scheduler, auth_controller, opt, log_handle, analytics).await?;
|
||||
run_http(index_scheduler, auth_controller, opt, log_handle, Arc::new(analytics)).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -146,12 +139,13 @@ async fn run_http(
|
||||
auth_controller: Arc<AuthController>,
|
||||
opt: Opt,
|
||||
logs: (LogRouteHandle, LogStderrHandle),
|
||||
analytics: Arc<dyn Analytics>,
|
||||
analytics: Arc<Analytics>,
|
||||
) -> anyhow::Result<()> {
|
||||
let enable_dashboard = &opt.env == "development";
|
||||
let opt_clone = opt.clone();
|
||||
let index_scheduler = Data::from(index_scheduler);
|
||||
let auth_controller = Data::from(auth_controller);
|
||||
let analytics = Data::from(analytics);
|
||||
let search_queue = SearchQueue::new(
|
||||
opt.experimental_search_queue_size,
|
||||
available_parallelism()
|
||||
@ -187,11 +181,7 @@ async fn run_http(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn print_launch_resume(
|
||||
opt: &Opt,
|
||||
analytics: Arc<dyn Analytics>,
|
||||
config_read_from: Option<PathBuf>,
|
||||
) {
|
||||
pub fn print_launch_resume(opt: &Opt, analytics: Analytics, config_read_from: Option<PathBuf>) {
|
||||
let build_info = build_info::BuildInfo::from_build();
|
||||
|
||||
let protocol =
|
||||
@ -233,7 +223,6 @@ pub fn print_launch_resume(
|
||||
eprintln!("Prototype:\t\t{:?}", prototype);
|
||||
}
|
||||
|
||||
#[cfg(all(not(debug_assertions), feature = "analytics"))]
|
||||
{
|
||||
if !opt.no_analytics {
|
||||
eprintln!(
|
||||
|
@ -29,7 +29,6 @@ const MEILI_MASTER_KEY: &str = "MEILI_MASTER_KEY";
|
||||
const MEILI_ENV: &str = "MEILI_ENV";
|
||||
const MEILI_TASK_WEBHOOK_URL: &str = "MEILI_TASK_WEBHOOK_URL";
|
||||
const MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER: &str = "MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER";
|
||||
#[cfg(feature = "analytics")]
|
||||
const MEILI_NO_ANALYTICS: &str = "MEILI_NO_ANALYTICS";
|
||||
const MEILI_HTTP_PAYLOAD_SIZE_LIMIT: &str = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT";
|
||||
const MEILI_SSL_CERT_PATH: &str = "MEILI_SSL_CERT_PATH";
|
||||
@ -210,7 +209,6 @@ pub struct Opt {
|
||||
/// Meilisearch automatically collects data from all instances that do not opt out using this flag.
|
||||
/// All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted
|
||||
/// at any time.
|
||||
#[cfg(feature = "analytics")]
|
||||
#[serde(default)] // we can't send true
|
||||
#[clap(long, env = MEILI_NO_ANALYTICS)]
|
||||
pub no_analytics: bool,
|
||||
@ -425,7 +423,6 @@ pub struct Opt {
|
||||
|
||||
impl Opt {
|
||||
/// Whether analytics should be enabled or not.
|
||||
#[cfg(all(not(debug_assertions), feature = "analytics"))]
|
||||
pub fn analytics(&self) -> bool {
|
||||
!self.no_analytics
|
||||
}
|
||||
@ -505,7 +502,6 @@ impl Opt {
|
||||
ignore_missing_dump: _,
|
||||
ignore_dump_if_db_exists: _,
|
||||
config_file_path: _,
|
||||
#[cfg(feature = "analytics")]
|
||||
no_analytics,
|
||||
experimental_contains_filter,
|
||||
experimental_enable_metrics,
|
||||
@ -533,10 +529,7 @@ impl Opt {
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(feature = "analytics")]
|
||||
{
|
||||
export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
|
||||
}
|
||||
export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
|
||||
export_to_env_if_not_present(
|
||||
MEILI_HTTP_PAYLOAD_SIZE_LIMIT,
|
||||
http_payload_size_limit.to_string(),
|
||||
|
@ -4,7 +4,6 @@ use index_scheduler::IndexScheduler;
|
||||
use meilisearch_auth::AuthController;
|
||||
use meilisearch_types::error::ResponseError;
|
||||
use meilisearch_types::tasks::KindWithContent;
|
||||
use serde_json::json;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::analytics::Analytics;
|
||||
@ -18,14 +17,16 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));
|
||||
}
|
||||
|
||||
crate::empty_analytics!(DumpAnalytics, "Dump Created");
|
||||
|
||||
pub async fn create_dump(
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, Data<IndexScheduler>>,
|
||||
auth_controller: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, Data<AuthController>>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
analytics.publish("Dump Created".to_string(), json!({}), Some(&req));
|
||||
analytics.publish(DumpAnalytics::default(), &req);
|
||||
|
||||
let task = KindWithContent::DumpCreation {
|
||||
keys: auth_controller.list_keys()?,
|
||||
|
@ -6,10 +6,10 @@ use index_scheduler::IndexScheduler;
|
||||
use meilisearch_types::deserr::DeserrJsonError;
|
||||
use meilisearch_types::error::ResponseError;
|
||||
use meilisearch_types::keys::actions;
|
||||
use serde_json::json;
|
||||
use serde::Serialize;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::analytics::Analytics;
|
||||
use crate::analytics::{Aggregate, Analytics};
|
||||
use crate::extractors::authentication::policies::ActionPolicy;
|
||||
use crate::extractors::authentication::GuardedData;
|
||||
use crate::extractors::sequential_extractor::SeqHandler;
|
||||
@ -17,7 +17,7 @@ use crate::extractors::sequential_extractor::SeqHandler;
|
||||
pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
web::resource("")
|
||||
.route(web::get().to(SeqHandler(get_features)))
|
||||
.route(web::get().to(get_features))
|
||||
.route(web::patch().to(SeqHandler(patch_features))),
|
||||
);
|
||||
}
|
||||
@ -27,12 +27,9 @@ async fn get_features(
|
||||
ActionPolicy<{ actions::EXPERIMENTAL_FEATURES_GET }>,
|
||||
Data<IndexScheduler>,
|
||||
>,
|
||||
req: HttpRequest,
|
||||
analytics: Data<dyn Analytics>,
|
||||
) -> HttpResponse {
|
||||
let features = index_scheduler.features();
|
||||
|
||||
analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
|
||||
let features = features.runtime_features();
|
||||
debug!(returns = ?features, "Get features");
|
||||
HttpResponse::Ok().json(features)
|
||||
@ -53,6 +50,35 @@ pub struct RuntimeTogglableFeatures {
|
||||
pub contains_filter: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct PatchExperimentalFeatureAnalytics {
|
||||
vector_store: bool,
|
||||
metrics: bool,
|
||||
logs_route: bool,
|
||||
edit_documents_by_function: bool,
|
||||
contains_filter: bool,
|
||||
}
|
||||
|
||||
impl Aggregate for PatchExperimentalFeatureAnalytics {
|
||||
fn event_name(&self) -> &'static str {
|
||||
"Experimental features Updated"
|
||||
}
|
||||
|
||||
fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
Box::new(Self {
|
||||
vector_store: new.vector_store,
|
||||
metrics: new.metrics,
|
||||
logs_route: new.logs_route,
|
||||
edit_documents_by_function: new.edit_documents_by_function,
|
||||
contains_filter: new.contains_filter,
|
||||
})
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
serde_json::to_value(*self).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
async fn patch_features(
|
||||
index_scheduler: GuardedData<
|
||||
ActionPolicy<{ actions::EXPERIMENTAL_FEATURES_UPDATE }>,
|
||||
@ -60,7 +86,7 @@ async fn patch_features(
|
||||
>,
|
||||
new_features: AwebJson<RuntimeTogglableFeatures, DeserrJsonError>,
|
||||
req: HttpRequest,
|
||||
analytics: Data<dyn Analytics>,
|
||||
analytics: Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let features = index_scheduler.features();
|
||||
debug!(parameters = ?new_features, "Patch features");
|
||||
@ -89,15 +115,14 @@ async fn patch_features(
|
||||
} = new_features;
|
||||
|
||||
analytics.publish(
|
||||
"Experimental features Updated".to_string(),
|
||||
json!({
|
||||
"vector_store": vector_store,
|
||||
"metrics": metrics,
|
||||
"logs_route": logs_route,
|
||||
"edit_documents_by_function": edit_documents_by_function,
|
||||
"contains_filter": contains_filter,
|
||||
}),
|
||||
Some(&req),
|
||||
PatchExperimentalFeatureAnalytics {
|
||||
vector_store,
|
||||
metrics,
|
||||
logs_route,
|
||||
edit_documents_by_function,
|
||||
contains_filter,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
index_scheduler.put_runtime_features(new_features)?;
|
||||
debug!(returns = ?new_features, "Patch features");
|
||||
|
@ -1,4 +1,6 @@
|
||||
use std::collections::HashSet;
|
||||
use std::io::ErrorKind;
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use actix_web::http::header::CONTENT_TYPE;
|
||||
use actix_web::web::Data;
|
||||
@ -23,14 +25,14 @@ use meilisearch_types::tasks::KindWithContent;
|
||||
use meilisearch_types::{milli, Document, Index};
|
||||
use mime::Mime;
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::Deserialize;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use tempfile::tempfile;
|
||||
use tokio::fs::File;
|
||||
use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
|
||||
use tracing::debug;
|
||||
|
||||
use crate::analytics::{Analytics, DocumentDeletionKind, DocumentFetchKind};
|
||||
use crate::analytics::{Aggregate, AggregateMethod, Analytics};
|
||||
use crate::error::MeilisearchHttpError;
|
||||
use crate::error::PayloadError::ReceivePayload;
|
||||
use crate::extractors::authentication::policies::*;
|
||||
@ -41,7 +43,7 @@ use crate::routes::{
|
||||
get_task_id, is_dry_run, PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT,
|
||||
};
|
||||
use crate::search::{parse_filter, RetrieveVectors};
|
||||
use crate::Opt;
|
||||
use crate::{aggregate_methods, Opt};
|
||||
|
||||
static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
|
||||
vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()]
|
||||
@ -100,12 +102,84 @@ pub struct GetDocument {
|
||||
retrieve_vectors: Param<bool>,
|
||||
}
|
||||
|
||||
aggregate_methods!(
|
||||
DocumentsGET => "Documents Fetched GET",
|
||||
DocumentsPOST => "Documents Fetched POST",
|
||||
);
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct DocumentsFetchAggregator<Method: AggregateMethod> {
|
||||
// a call on ../documents/:doc_id
|
||||
per_document_id: bool,
|
||||
// if a filter was used
|
||||
per_filter: bool,
|
||||
|
||||
#[serde(rename = "vector.retrieve_vectors")]
|
||||
retrieve_vectors: bool,
|
||||
|
||||
// pagination
|
||||
#[serde(rename = "pagination.max_limit")]
|
||||
max_limit: usize,
|
||||
#[serde(rename = "pagination.max_offset")]
|
||||
max_offset: usize,
|
||||
|
||||
marker: std::marker::PhantomData<Method>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum DocumentFetchKind {
|
||||
PerDocumentId { retrieve_vectors: bool },
|
||||
Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
|
||||
}
|
||||
|
||||
impl<Method: AggregateMethod> DocumentsFetchAggregator<Method> {
|
||||
pub fn from_query(query: &DocumentFetchKind) -> Self {
|
||||
let (limit, offset, retrieve_vectors) = match query {
|
||||
DocumentFetchKind::PerDocumentId { retrieve_vectors } => (1, 0, *retrieve_vectors),
|
||||
DocumentFetchKind::Normal { limit, offset, retrieve_vectors, .. } => {
|
||||
(*limit, *offset, *retrieve_vectors)
|
||||
}
|
||||
};
|
||||
|
||||
Self {
|
||||
per_document_id: matches!(query, DocumentFetchKind::PerDocumentId { .. }),
|
||||
per_filter: matches!(query, DocumentFetchKind::Normal { with_filter, .. } if *with_filter),
|
||||
max_limit: limit,
|
||||
max_offset: offset,
|
||||
retrieve_vectors,
|
||||
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Method: AggregateMethod> Aggregate for DocumentsFetchAggregator<Method> {
|
||||
fn event_name(&self) -> &'static str {
|
||||
Method::event_name()
|
||||
}
|
||||
|
||||
fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
Box::new(Self {
|
||||
per_document_id: self.per_document_id | new.per_document_id,
|
||||
per_filter: self.per_filter | new.per_filter,
|
||||
retrieve_vectors: self.retrieve_vectors | new.retrieve_vectors,
|
||||
max_limit: self.max_limit.max(new.max_limit),
|
||||
max_offset: self.max_offset.max(new.max_offset),
|
||||
marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
serde_json::to_value(*self).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_document(
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
|
||||
document_param: web::Path<DocumentParam>,
|
||||
params: AwebQueryParameter<GetDocument, DeserrQueryParamError>,
|
||||
req: HttpRequest,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let DocumentParam { index_uid, document_id } = document_param.into_inner();
|
||||
debug!(parameters = ?params, "Get document");
|
||||
@ -117,8 +191,15 @@ pub async fn get_document(
|
||||
let features = index_scheduler.features();
|
||||
let retrieve_vectors = RetrieveVectors::new(param_retrieve_vectors.0, features)?;
|
||||
|
||||
analytics.get_fetch_documents(
|
||||
&DocumentFetchKind::PerDocumentId { retrieve_vectors: param_retrieve_vectors.0 },
|
||||
analytics.publish(
|
||||
DocumentsFetchAggregator::<DocumentsGET> {
|
||||
retrieve_vectors: param_retrieve_vectors.0,
|
||||
per_document_id: true,
|
||||
per_filter: false,
|
||||
max_limit: 0,
|
||||
max_offset: 0,
|
||||
marker: PhantomData,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
|
||||
@ -129,17 +210,52 @@ pub async fn get_document(
|
||||
Ok(HttpResponse::Ok().json(document))
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct DocumentsDeletionAggregator {
|
||||
per_document_id: bool,
|
||||
clear_all: bool,
|
||||
per_batch: bool,
|
||||
per_filter: bool,
|
||||
}
|
||||
|
||||
impl Aggregate for DocumentsDeletionAggregator {
|
||||
fn event_name(&self) -> &'static str {
|
||||
"Documents Deleted"
|
||||
}
|
||||
|
||||
fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
Box::new(Self {
|
||||
per_document_id: self.per_document_id | new.per_document_id,
|
||||
clear_all: self.clear_all | new.clear_all,
|
||||
per_batch: self.per_batch | new.per_batch,
|
||||
per_filter: self.per_filter | new.per_filter,
|
||||
})
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
serde_json::to_value(*self).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn delete_document(
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
|
||||
path: web::Path<DocumentParam>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let DocumentParam { index_uid, document_id } = path.into_inner();
|
||||
let index_uid = IndexUid::try_from(index_uid)?;
|
||||
|
||||
analytics.delete_documents(DocumentDeletionKind::PerDocumentId, &req);
|
||||
analytics.publish(
|
||||
DocumentsDeletionAggregator {
|
||||
per_document_id: true,
|
||||
clear_all: false,
|
||||
per_batch: false,
|
||||
per_filter: false,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
|
||||
let task = KindWithContent::DocumentDeletion {
|
||||
index_uid: index_uid.to_string(),
|
||||
@ -190,17 +306,19 @@ pub async fn documents_by_query_post(
|
||||
index_uid: web::Path<String>,
|
||||
body: AwebJson<BrowseQuery, DeserrJsonError>,
|
||||
req: HttpRequest,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let body = body.into_inner();
|
||||
debug!(parameters = ?body, "Get documents POST");
|
||||
|
||||
analytics.post_fetch_documents(
|
||||
&DocumentFetchKind::Normal {
|
||||
with_filter: body.filter.is_some(),
|
||||
limit: body.limit,
|
||||
offset: body.offset,
|
||||
analytics.publish(
|
||||
DocumentsFetchAggregator::<DocumentsPOST> {
|
||||
per_filter: body.filter.is_some(),
|
||||
retrieve_vectors: body.retrieve_vectors,
|
||||
max_limit: body.limit,
|
||||
max_offset: body.offset,
|
||||
per_document_id: false,
|
||||
marker: PhantomData,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
@ -213,7 +331,7 @@ pub async fn get_documents(
|
||||
index_uid: web::Path<String>,
|
||||
params: AwebQueryParameter<BrowseQueryGet, DeserrQueryParamError>,
|
||||
req: HttpRequest,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
debug!(parameters = ?params, "Get documents GET");
|
||||
|
||||
@ -235,12 +353,14 @@ pub async fn get_documents(
|
||||
filter,
|
||||
};
|
||||
|
||||
analytics.get_fetch_documents(
|
||||
&DocumentFetchKind::Normal {
|
||||
with_filter: query.filter.is_some(),
|
||||
limit: query.limit,
|
||||
offset: query.offset,
|
||||
analytics.publish(
|
||||
DocumentsFetchAggregator::<DocumentsGET> {
|
||||
per_filter: query.filter.is_some(),
|
||||
retrieve_vectors: query.retrieve_vectors,
|
||||
max_limit: query.limit,
|
||||
max_offset: query.offset,
|
||||
per_document_id: false,
|
||||
marker: PhantomData,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
@ -298,6 +418,39 @@ fn from_char_csv_delimiter(
|
||||
}
|
||||
}
|
||||
|
||||
aggregate_methods!(
|
||||
Replaced => "Documents Added",
|
||||
Updated => "Documents Updated",
|
||||
);
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct DocumentsAggregator<T: AggregateMethod> {
|
||||
payload_types: HashSet<String>,
|
||||
primary_key: HashSet<String>,
|
||||
index_creation: bool,
|
||||
#[serde(skip)]
|
||||
method: PhantomData<T>,
|
||||
}
|
||||
|
||||
impl<Method: AggregateMethod> Aggregate for DocumentsAggregator<Method> {
|
||||
fn event_name(&self) -> &'static str {
|
||||
Method::event_name()
|
||||
}
|
||||
|
||||
fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
Box::new(Self {
|
||||
payload_types: self.payload_types.union(&new.payload_types).cloned().collect(),
|
||||
primary_key: self.primary_key.union(&new.primary_key).cloned().collect(),
|
||||
index_creation: self.index_creation | new.index_creation,
|
||||
method: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
serde_json::to_value(self).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn replace_documents(
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
|
||||
index_uid: web::Path<String>,
|
||||
@ -305,16 +458,32 @@ pub async fn replace_documents(
|
||||
body: Payload,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
|
||||
debug!(parameters = ?params, "Replace documents");
|
||||
let params = params.into_inner();
|
||||
|
||||
analytics.add_documents(
|
||||
¶ms,
|
||||
index_scheduler.index_exists(&index_uid).map_or(true, |x| !x),
|
||||
let mut content_types = HashSet::new();
|
||||
let content_type = req
|
||||
.headers()
|
||||
.get(CONTENT_TYPE)
|
||||
.and_then(|s| s.to_str().ok())
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
content_types.insert(content_type);
|
||||
let mut primary_keys = HashSet::new();
|
||||
if let Some(primary_key) = params.primary_key.clone() {
|
||||
primary_keys.insert(primary_key);
|
||||
}
|
||||
analytics.publish(
|
||||
DocumentsAggregator::<Replaced> {
|
||||
payload_types: content_types,
|
||||
primary_key: primary_keys,
|
||||
index_creation: index_scheduler.index_exists(&index_uid).map_or(true, |x| !x),
|
||||
method: PhantomData,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
|
||||
@ -346,16 +515,32 @@ pub async fn update_documents(
|
||||
body: Payload,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
|
||||
let params = params.into_inner();
|
||||
debug!(parameters = ?params, "Update documents");
|
||||
|
||||
analytics.add_documents(
|
||||
¶ms,
|
||||
index_scheduler.index_exists(&index_uid).map_or(true, |x| !x),
|
||||
let mut content_types = HashSet::new();
|
||||
let content_type = req
|
||||
.headers()
|
||||
.get(CONTENT_TYPE)
|
||||
.and_then(|s| s.to_str().ok())
|
||||
.unwrap_or("unknown")
|
||||
.to_string();
|
||||
content_types.insert(content_type);
|
||||
let mut primary_keys = HashSet::new();
|
||||
if let Some(primary_key) = params.primary_key.clone() {
|
||||
primary_keys.insert(primary_key);
|
||||
}
|
||||
analytics.publish(
|
||||
DocumentsAggregator::<Updated> {
|
||||
payload_types: content_types,
|
||||
primary_key: primary_keys,
|
||||
index_creation: index_scheduler.index_exists(&index_uid).map_or(true, |x| !x),
|
||||
method: PhantomData,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
|
||||
@ -524,12 +709,20 @@ pub async fn delete_documents_batch(
|
||||
body: web::Json<Vec<Value>>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
debug!(parameters = ?body, "Delete documents by batch");
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
|
||||
analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
|
||||
analytics.publish(
|
||||
DocumentsDeletionAggregator {
|
||||
per_batch: true,
|
||||
per_document_id: false,
|
||||
clear_all: false,
|
||||
per_filter: false,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
|
||||
let ids = body
|
||||
.iter()
|
||||
@ -562,14 +755,22 @@ pub async fn delete_documents_by_filter(
|
||||
body: AwebJson<DocumentDeletionByFilter, DeserrJsonError>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
debug!(parameters = ?body, "Delete documents by filter");
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
let index_uid = index_uid.into_inner();
|
||||
let filter = body.into_inner().filter;
|
||||
|
||||
analytics.delete_documents(DocumentDeletionKind::PerFilter, &req);
|
||||
analytics.publish(
|
||||
DocumentsDeletionAggregator {
|
||||
per_filter: true,
|
||||
per_document_id: false,
|
||||
clear_all: false,
|
||||
per_batch: false,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
|
||||
// we ensure the filter is well formed before enqueuing it
|
||||
crate::search::parse_filter(&filter, Code::InvalidDocumentFilter, index_scheduler.features())?
|
||||
@ -599,13 +800,41 @@ pub struct DocumentEditionByFunction {
|
||||
pub function: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct EditDocumentsByFunctionAggregator {
|
||||
// Set to true if at least one request was filtered
|
||||
filtered: bool,
|
||||
// Set to true if at least one request contained a context
|
||||
with_context: bool,
|
||||
|
||||
index_creation: bool,
|
||||
}
|
||||
|
||||
impl Aggregate for EditDocumentsByFunctionAggregator {
|
||||
fn event_name(&self) -> &'static str {
|
||||
"Documents Edited By Function"
|
||||
}
|
||||
|
||||
fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
Box::new(Self {
|
||||
filtered: self.filtered | new.filtered,
|
||||
with_context: self.with_context | new.with_context,
|
||||
index_creation: self.index_creation | new.index_creation,
|
||||
})
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
serde_json::to_value(*self).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn edit_documents_by_function(
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ALL }>, Data<IndexScheduler>>,
|
||||
index_uid: web::Path<String>,
|
||||
params: AwebJson<DocumentEditionByFunction, DeserrJsonError>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
debug!(parameters = ?params, "Edit documents by function");
|
||||
|
||||
@ -617,9 +846,12 @@ pub async fn edit_documents_by_function(
|
||||
let index_uid = index_uid.into_inner();
|
||||
let params = params.into_inner();
|
||||
|
||||
analytics.update_documents_by_function(
|
||||
¶ms,
|
||||
index_scheduler.index(&index_uid).is_err(),
|
||||
analytics.publish(
|
||||
EditDocumentsByFunctionAggregator {
|
||||
filtered: params.filter.is_some(),
|
||||
with_context: params.context.is_some(),
|
||||
index_creation: index_scheduler.index(&index_uid).is_err(),
|
||||
},
|
||||
&req,
|
||||
);
|
||||
|
||||
@ -670,10 +902,18 @@ pub async fn clear_all_documents(
|
||||
index_uid: web::Path<String>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
analytics.delete_documents(DocumentDeletionKind::ClearAll, &req);
|
||||
analytics.publish(
|
||||
DocumentsDeletionAggregator {
|
||||
clear_all: true,
|
||||
per_document_id: false,
|
||||
per_batch: false,
|
||||
per_filter: false,
|
||||
},
|
||||
&req,
|
||||
);
|
||||
|
||||
let task = KindWithContent::DocumentClear { index_uid: index_uid.to_string() };
|
||||
let uid = get_task_id(&req, &opt)?;
|
||||
|
@ -1,3 +1,5 @@
|
||||
use std::collections::{BinaryHeap, HashSet};
|
||||
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use deserr::actix_web::AwebJson;
|
||||
@ -10,14 +12,15 @@ use meilisearch_types::locales::Locale;
|
||||
use serde_json::Value;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::analytics::{Analytics, FacetSearchAggregator};
|
||||
use crate::analytics::{Aggregate, Analytics};
|
||||
use crate::extractors::authentication::policies::*;
|
||||
use crate::extractors::authentication::GuardedData;
|
||||
use crate::routes::indexes::search::search_kind;
|
||||
use crate::search::{
|
||||
add_search_rules, perform_facet_search, HybridQuery, MatchingStrategy, RankingScoreThreshold,
|
||||
SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG,
|
||||
DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
|
||||
add_search_rules, perform_facet_search, FacetSearchResult, HybridQuery, MatchingStrategy,
|
||||
RankingScoreThreshold, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
|
||||
DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
|
||||
DEFAULT_SEARCH_OFFSET,
|
||||
};
|
||||
use crate::search_queue::SearchQueue;
|
||||
|
||||
@ -53,20 +56,122 @@ pub struct FacetSearchQuery {
|
||||
pub locales: Option<Vec<Locale>>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct FacetSearchAggregator {
|
||||
// requests
|
||||
total_received: usize,
|
||||
total_succeeded: usize,
|
||||
time_spent: BinaryHeap<usize>,
|
||||
|
||||
// The set of all facetNames that were used
|
||||
facet_names: HashSet<String>,
|
||||
|
||||
// As there been any other parameter than the facetName or facetQuery ones?
|
||||
additional_search_parameters_provided: bool,
|
||||
}
|
||||
|
||||
impl FacetSearchAggregator {
|
||||
#[allow(clippy::field_reassign_with_default)]
|
||||
pub fn from_query(query: &FacetSearchQuery) -> Self {
|
||||
let FacetSearchQuery {
|
||||
facet_query: _,
|
||||
facet_name,
|
||||
vector,
|
||||
q,
|
||||
filter,
|
||||
matching_strategy,
|
||||
attributes_to_search_on,
|
||||
hybrid,
|
||||
ranking_score_threshold,
|
||||
locales,
|
||||
} = query;
|
||||
|
||||
Self {
|
||||
total_received: 1,
|
||||
facet_names: Some(facet_name.clone()).into_iter().collect(),
|
||||
additional_search_parameters_provided: q.is_some()
|
||||
|| vector.is_some()
|
||||
|| filter.is_some()
|
||||
|| *matching_strategy != MatchingStrategy::default()
|
||||
|| attributes_to_search_on.is_some()
|
||||
|| hybrid.is_some()
|
||||
|| ranking_score_threshold.is_some()
|
||||
|| locales.is_some(),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn succeed(&mut self, result: &FacetSearchResult) {
|
||||
let FacetSearchResult { facet_hits: _, facet_query: _, processing_time_ms } = result;
|
||||
self.total_succeeded = 1;
|
||||
self.time_spent.push(*processing_time_ms as usize);
|
||||
}
|
||||
}
|
||||
|
||||
impl Aggregate for FacetSearchAggregator {
|
||||
fn event_name(&self) -> &'static str {
|
||||
"Facet Searched POST"
|
||||
}
|
||||
|
||||
fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
for time in new.time_spent {
|
||||
self.time_spent.push(time);
|
||||
}
|
||||
|
||||
Box::new(Self {
|
||||
total_received: self.total_received.saturating_add(new.total_received),
|
||||
total_succeeded: self.total_succeeded.saturating_add(new.total_succeeded),
|
||||
time_spent: self.time_spent,
|
||||
facet_names: self.facet_names.union(&new.facet_names).cloned().collect(),
|
||||
additional_search_parameters_provided: self.additional_search_parameters_provided
|
||||
| new.additional_search_parameters_provided,
|
||||
})
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
let Self {
|
||||
total_received,
|
||||
total_succeeded,
|
||||
time_spent,
|
||||
facet_names,
|
||||
additional_search_parameters_provided,
|
||||
} = *self;
|
||||
// the index of the 99th percentage of value
|
||||
let percentile_99th = 0.99 * (total_succeeded as f64 - 1.) + 1.;
|
||||
// we get all the values in a sorted manner
|
||||
let time_spent = time_spent.into_sorted_vec();
|
||||
// We are only interested by the slowest value of the 99th fastest results
|
||||
let time_spent = time_spent.get(percentile_99th as usize);
|
||||
|
||||
serde_json::json!({
|
||||
"requests": {
|
||||
"99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
|
||||
"total_succeeded": total_succeeded,
|
||||
"total_failed": total_received.saturating_sub(total_succeeded), // just to be sure we never panics
|
||||
"total_received": total_received,
|
||||
},
|
||||
"facets": {
|
||||
"total_distinct_facet_count": facet_names.len(),
|
||||
"additional_search_parameters_provided": additional_search_parameters_provided,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn search(
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
|
||||
search_queue: Data<SearchQueue>,
|
||||
index_uid: web::Path<String>,
|
||||
params: AwebJson<FacetSearchQuery, DeserrJsonError>,
|
||||
req: HttpRequest,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
|
||||
let query = params.into_inner();
|
||||
debug!(parameters = ?query, "Facet search");
|
||||
|
||||
let mut aggregate = FacetSearchAggregator::from_query(&query, &req);
|
||||
let mut aggregate = FacetSearchAggregator::from_query(&query);
|
||||
|
||||
let facet_query = query.facet_query.clone();
|
||||
let facet_name = query.facet_name.clone();
|
||||
@ -100,7 +205,7 @@ pub async fn search(
|
||||
if let Ok(ref search_result) = search_result {
|
||||
aggregate.succeed(search_result);
|
||||
}
|
||||
analytics.post_facet_search(aggregate);
|
||||
analytics.publish(aggregate, &req);
|
||||
|
||||
let search_result = search_result?;
|
||||
|
||||
|
@ -1,3 +1,4 @@
|
||||
use std::collections::BTreeSet;
|
||||
use std::convert::Infallible;
|
||||
|
||||
use actix_web::web::Data;
|
||||
@ -13,12 +14,11 @@ use meilisearch_types::index_uid::IndexUid;
|
||||
use meilisearch_types::milli::{self, FieldDistribution, Index};
|
||||
use meilisearch_types::tasks::KindWithContent;
|
||||
use serde::Serialize;
|
||||
use serde_json::json;
|
||||
use time::OffsetDateTime;
|
||||
use tracing::debug;
|
||||
|
||||
use super::{get_task_id, Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
|
||||
use crate::analytics::Analytics;
|
||||
use crate::analytics::{Aggregate, Analytics};
|
||||
use crate::extractors::authentication::policies::*;
|
||||
use crate::extractors::authentication::{AuthenticationError, GuardedData};
|
||||
use crate::extractors::sequential_extractor::SeqHandler;
|
||||
@ -28,8 +28,11 @@ use crate::Opt;
|
||||
pub mod documents;
|
||||
pub mod facet_search;
|
||||
pub mod search;
|
||||
mod search_analytics;
|
||||
pub mod settings;
|
||||
mod settings_analytics;
|
||||
pub mod similar;
|
||||
mod similar_analytics;
|
||||
|
||||
pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
@ -123,12 +126,31 @@ pub struct IndexCreateRequest {
|
||||
primary_key: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct IndexCreatedAggregate {
|
||||
primary_key: BTreeSet<String>,
|
||||
}
|
||||
|
||||
impl Aggregate for IndexCreatedAggregate {
|
||||
fn event_name(&self) -> &'static str {
|
||||
"Index Created"
|
||||
}
|
||||
|
||||
fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
Box::new(Self { primary_key: self.primary_key.union(&new.primary_key).cloned().collect() })
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
serde_json::to_value(*self).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn create_index(
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
|
||||
body: AwebJson<IndexCreateRequest, DeserrJsonError>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
debug!(parameters = ?body, "Create index");
|
||||
let IndexCreateRequest { primary_key, uid } = body.into_inner();
|
||||
@ -136,9 +158,8 @@ pub async fn create_index(
|
||||
let allow_index_creation = index_scheduler.filters().allow_index_creation(&uid);
|
||||
if allow_index_creation {
|
||||
analytics.publish(
|
||||
"Index Created".to_string(),
|
||||
json!({ "primary_key": primary_key }),
|
||||
Some(&req),
|
||||
IndexCreatedAggregate { primary_key: primary_key.iter().cloned().collect() },
|
||||
&req,
|
||||
);
|
||||
|
||||
let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
|
||||
@ -194,21 +215,38 @@ pub async fn get_index(
|
||||
Ok(HttpResponse::Ok().json(index_view))
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct IndexUpdatedAggregate {
|
||||
primary_key: BTreeSet<String>,
|
||||
}
|
||||
|
||||
impl Aggregate for IndexUpdatedAggregate {
|
||||
fn event_name(&self) -> &'static str {
|
||||
"Index Updated"
|
||||
}
|
||||
|
||||
fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
Box::new(Self { primary_key: self.primary_key.union(&new.primary_key).cloned().collect() })
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
serde_json::to_value(*self).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
pub async fn update_index(
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
|
||||
index_uid: web::Path<String>,
|
||||
body: AwebJson<UpdateIndexRequest, DeserrJsonError>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
debug!(parameters = ?body, "Update index");
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
let body = body.into_inner();
|
||||
analytics.publish(
|
||||
"Index Updated".to_string(),
|
||||
json!({ "primary_key": body.primary_key }),
|
||||
Some(&req),
|
||||
IndexUpdatedAggregate { primary_key: body.primary_key.iter().cloned().collect() },
|
||||
&req,
|
||||
);
|
||||
|
||||
let task = KindWithContent::IndexUpdate {
|
||||
|
@ -13,12 +13,13 @@ use meilisearch_types::serde_cs::vec::CS;
|
||||
use serde_json::Value;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::analytics::{Analytics, SearchAggregator};
|
||||
use crate::analytics::Analytics;
|
||||
use crate::error::MeilisearchHttpError;
|
||||
use crate::extractors::authentication::policies::*;
|
||||
use crate::extractors::authentication::GuardedData;
|
||||
use crate::extractors::sequential_extractor::SeqHandler;
|
||||
use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
|
||||
use crate::routes::indexes::search_analytics::{SearchAggregator, SearchGET, SearchPOST};
|
||||
use crate::search::{
|
||||
add_search_rules, perform_search, HybridQuery, MatchingStrategy, RankingScoreThreshold,
|
||||
RetrieveVectors, SearchKind, SearchQuery, SemanticRatio, DEFAULT_CROP_LENGTH,
|
||||
@ -225,7 +226,7 @@ pub async fn search_with_url_query(
|
||||
index_uid: web::Path<String>,
|
||||
params: AwebQueryParameter<SearchQueryGet, DeserrQueryParamError>,
|
||||
req: HttpRequest,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
debug!(parameters = ?params, "Search get");
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
@ -237,7 +238,7 @@ pub async fn search_with_url_query(
|
||||
add_search_rules(&mut query.filter, search_rules);
|
||||
}
|
||||
|
||||
let mut aggregate = SearchAggregator::from_query(&query, &req);
|
||||
let mut aggregate = SearchAggregator::<SearchGET>::from_query(&query);
|
||||
|
||||
let index = index_scheduler.index(&index_uid)?;
|
||||
let features = index_scheduler.features();
|
||||
@ -254,7 +255,7 @@ pub async fn search_with_url_query(
|
||||
if let Ok(ref search_result) = search_result {
|
||||
aggregate.succeed(search_result);
|
||||
}
|
||||
analytics.get_search(aggregate);
|
||||
analytics.publish(aggregate, &req);
|
||||
|
||||
let search_result = search_result?;
|
||||
|
||||
@ -268,7 +269,7 @@ pub async fn search_with_post(
|
||||
index_uid: web::Path<String>,
|
||||
params: AwebJson<SearchQuery, DeserrJsonError>,
|
||||
req: HttpRequest,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
|
||||
@ -280,7 +281,7 @@ pub async fn search_with_post(
|
||||
add_search_rules(&mut query.filter, search_rules);
|
||||
}
|
||||
|
||||
let mut aggregate = SearchAggregator::from_query(&query, &req);
|
||||
let mut aggregate = SearchAggregator::<SearchPOST>::from_query(&query);
|
||||
|
||||
let index = index_scheduler.index(&index_uid)?;
|
||||
|
||||
@ -302,7 +303,7 @@ pub async fn search_with_post(
|
||||
MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
|
||||
}
|
||||
}
|
||||
analytics.post_search(aggregate);
|
||||
analytics.publish(aggregate, &req);
|
||||
|
||||
let search_result = search_result?;
|
||||
|
||||
|
meilisearch/src/routes/indexes/search_analytics.rs (new file, 485 lines)
@@ -0,0 +1,485 @@
use once_cell::sync::Lazy;
|
||||
use regex::Regex;
|
||||
use serde_json::{json, Value};
|
||||
use std::collections::{BTreeSet, BinaryHeap, HashMap};
|
||||
|
||||
use meilisearch_types::locales::Locale;
|
||||
|
||||
use crate::{
|
||||
aggregate_methods,
|
||||
analytics::{Aggregate, AggregateMethod},
|
||||
search::{
|
||||
SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
|
||||
DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
|
||||
DEFAULT_SEMANTIC_RATIO,
|
||||
},
|
||||
};
|
||||
|
||||
aggregate_methods!(
|
||||
SearchGET => "Documents Searched GET",
|
||||
SearchPOST => "Documents Searched POST",
|
||||
);
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct SearchAggregator<Method: AggregateMethod> {
|
||||
// requests
|
||||
total_received: usize,
|
||||
total_succeeded: usize,
|
||||
total_degraded: usize,
|
||||
total_used_negative_operator: usize,
|
||||
time_spent: BinaryHeap<usize>,
|
||||
|
||||
// sort
|
||||
sort_with_geo_point: bool,
|
||||
// every time a request has a sort, this field must be incremented by the number of sort criteria it contains
sort_sum_of_criteria_terms: usize,
// every time a request has a sort, this field must be incremented by one
sort_total_number_of_criteria: usize,
|
||||
|
||||
// distinct
|
||||
distinct: bool,
|
||||
|
||||
// filter
|
||||
filter_with_geo_radius: bool,
|
||||
filter_with_geo_bounding_box: bool,
|
||||
// every time a request has a filter, this field must be incremented by the number of terms it contains
|
||||
filter_sum_of_criteria_terms: usize,
|
||||
// every time a request has a filter, this field must be incremented by one
|
||||
filter_total_number_of_criteria: usize,
|
||||
used_syntax: HashMap<String, usize>,
|
||||
|
||||
// attributes_to_search_on
|
||||
// every time a search is done using attributes_to_search_on
|
||||
attributes_to_search_on_total_number_of_uses: usize,
|
||||
|
||||
// q
|
||||
// The maximum number of terms in a q request
|
||||
max_terms_number: usize,
|
||||
|
||||
// vector
|
||||
// The maximum number of floats in a vector request
|
||||
max_vector_size: usize,
|
||||
// Whether the semantic ratio passed to a hybrid search differs from the default ratio.
|
||||
semantic_ratio: bool,
|
||||
hybrid: bool,
|
||||
retrieve_vectors: bool,
|
||||
|
||||
// every time a search is done, we increment the counter linked to the used settings
|
||||
matching_strategy: HashMap<String, usize>,
|
||||
|
||||
// List of the unique Locales passed as parameter
|
||||
locales: BTreeSet<Locale>,
|
||||
|
||||
// pagination
|
||||
max_limit: usize,
|
||||
max_offset: usize,
|
||||
finite_pagination: usize,
|
||||
|
||||
// formatting
|
||||
max_attributes_to_retrieve: usize,
|
||||
max_attributes_to_highlight: usize,
|
||||
highlight_pre_tag: bool,
|
||||
highlight_post_tag: bool,
|
||||
max_attributes_to_crop: usize,
|
||||
crop_marker: bool,
|
||||
show_matches_position: bool,
|
||||
crop_length: bool,
|
||||
|
||||
// facets
|
||||
facets_sum_of_terms: usize,
|
||||
facets_total_number_of_facets: usize,
|
||||
|
||||
// scoring
|
||||
show_ranking_score: bool,
|
||||
show_ranking_score_details: bool,
|
||||
ranking_score_threshold: bool,
|
||||
|
||||
marker: std::marker::PhantomData<Method>,
|
||||
}
|
||||
|
||||
impl<Method: AggregateMethod> SearchAggregator<Method> {
|
||||
#[allow(clippy::field_reassign_with_default)]
|
||||
pub fn from_query(query: &SearchQuery) -> Self {
|
||||
let SearchQuery {
|
||||
q,
|
||||
vector,
|
||||
offset,
|
||||
limit,
|
||||
page,
|
||||
hits_per_page,
|
||||
attributes_to_retrieve: _,
|
||||
retrieve_vectors,
|
||||
attributes_to_crop: _,
|
||||
crop_length,
|
||||
attributes_to_highlight: _,
|
||||
show_matches_position,
|
||||
show_ranking_score,
|
||||
show_ranking_score_details,
|
||||
filter,
|
||||
sort,
|
||||
distinct,
|
||||
facets: _,
|
||||
highlight_pre_tag,
|
||||
highlight_post_tag,
|
||||
crop_marker,
|
||||
matching_strategy,
|
||||
attributes_to_search_on,
|
||||
hybrid,
|
||||
ranking_score_threshold,
|
||||
locales,
|
||||
} = query;
|
||||
|
||||
let mut ret = Self::default();
|
||||
|
||||
ret.total_received = 1;
|
||||
|
||||
if let Some(ref sort) = sort {
|
||||
ret.sort_total_number_of_criteria = 1;
|
||||
ret.sort_with_geo_point = sort.iter().any(|s| s.contains("_geoPoint("));
|
||||
ret.sort_sum_of_criteria_terms = sort.len();
|
||||
}
|
||||
|
||||
ret.distinct = distinct.is_some();
|
||||
|
||||
if let Some(ref filter) = filter {
|
||||
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
|
||||
ret.filter_total_number_of_criteria = 1;
|
||||
|
||||
let syntax = match filter {
|
||||
Value::String(_) => "string".to_string(),
|
||||
Value::Array(values) => {
|
||||
if values.iter().map(|v| v.to_string()).any(|s| RE.is_match(&s)) {
|
||||
"mixed".to_string()
|
||||
} else {
|
||||
"array".to_string()
|
||||
}
|
||||
}
|
||||
_ => "none".to_string(),
|
||||
};
|
||||
// record which filter syntax was used in the HashMap
|
||||
ret.used_syntax.insert(syntax, 1);
|
||||
|
||||
let stringified_filters = filter.to_string();
|
||||
ret.filter_with_geo_radius = stringified_filters.contains("_geoRadius(");
|
||||
ret.filter_with_geo_bounding_box = stringified_filters.contains("_geoBoundingBox(");
|
||||
ret.filter_sum_of_criteria_terms = RE.split(&stringified_filters).count();
|
||||
}
|
||||
|
||||
// attributes_to_search_on
|
||||
if attributes_to_search_on.is_some() {
|
||||
ret.attributes_to_search_on_total_number_of_uses = 1;
|
||||
}
|
||||
|
||||
if let Some(ref q) = q {
|
||||
ret.max_terms_number = q.split_whitespace().count();
|
||||
}
|
||||
|
||||
if let Some(ref vector) = vector {
|
||||
ret.max_vector_size = vector.len();
|
||||
}
|
||||
ret.retrieve_vectors |= retrieve_vectors;
|
||||
|
||||
if query.is_finite_pagination() {
|
||||
let limit = hits_per_page.unwrap_or_else(DEFAULT_SEARCH_LIMIT);
|
||||
ret.max_limit = limit;
|
||||
ret.max_offset = page.unwrap_or(1).saturating_sub(1) * limit;
|
||||
ret.finite_pagination = 1;
|
||||
} else {
|
||||
ret.max_limit = *limit;
|
||||
ret.max_offset = *offset;
|
||||
ret.finite_pagination = 0;
|
||||
}
|
||||
|
||||
ret.matching_strategy.insert(format!("{:?}", matching_strategy), 1);
|
||||
|
||||
if let Some(locales) = locales {
|
||||
ret.locales = locales.iter().copied().collect();
|
||||
}
|
||||
|
||||
ret.highlight_pre_tag = *highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG();
|
||||
ret.highlight_post_tag = *highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG();
|
||||
ret.crop_marker = *crop_marker != DEFAULT_CROP_MARKER();
|
||||
ret.crop_length = *crop_length != DEFAULT_CROP_LENGTH();
|
||||
ret.show_matches_position = *show_matches_position;
|
||||
|
||||
ret.show_ranking_score = *show_ranking_score;
|
||||
ret.show_ranking_score_details = *show_ranking_score_details;
|
||||
ret.ranking_score_threshold = ranking_score_threshold.is_some();
|
||||
|
||||
if let Some(hybrid) = hybrid {
|
||||
ret.semantic_ratio = hybrid.semantic_ratio != DEFAULT_SEMANTIC_RATIO();
|
||||
ret.hybrid = true;
|
||||
}
|
||||
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn succeed(&mut self, result: &SearchResult) {
|
||||
let SearchResult {
|
||||
hits: _,
|
||||
query: _,
|
||||
processing_time_ms,
|
||||
hits_info: _,
|
||||
semantic_hit_count: _,
|
||||
facet_distribution: _,
|
||||
facet_stats: _,
|
||||
degraded,
|
||||
used_negative_operator,
|
||||
} = result;
|
||||
|
||||
self.total_succeeded = self.total_succeeded.saturating_add(1);
|
||||
if *degraded {
|
||||
self.total_degraded = self.total_degraded.saturating_add(1);
|
||||
}
|
||||
if *used_negative_operator {
|
||||
self.total_used_negative_operator = self.total_used_negative_operator.saturating_add(1);
|
||||
}
|
||||
self.time_spent.push(*processing_time_ms as usize);
|
||||
}
|
||||
}
|
||||
|
||||
impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
|
||||
fn event_name(&self) -> &'static str {
|
||||
Method::event_name()
|
||||
}
|
||||
|
||||
fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
let Self {
|
||||
total_received,
|
||||
total_succeeded,
|
||||
mut time_spent,
|
||||
sort_with_geo_point,
|
||||
sort_sum_of_criteria_terms,
|
||||
sort_total_number_of_criteria,
|
||||
distinct,
|
||||
filter_with_geo_radius,
|
||||
filter_with_geo_bounding_box,
|
||||
filter_sum_of_criteria_terms,
|
||||
filter_total_number_of_criteria,
|
||||
used_syntax,
|
||||
attributes_to_search_on_total_number_of_uses,
|
||||
max_terms_number,
|
||||
max_vector_size,
|
||||
retrieve_vectors,
|
||||
matching_strategy,
|
||||
max_limit,
|
||||
max_offset,
|
||||
finite_pagination,
|
||||
max_attributes_to_retrieve,
|
||||
max_attributes_to_highlight,
|
||||
highlight_pre_tag,
|
||||
highlight_post_tag,
|
||||
max_attributes_to_crop,
|
||||
crop_marker,
|
||||
show_matches_position,
|
||||
crop_length,
|
||||
facets_sum_of_terms,
|
||||
facets_total_number_of_facets,
|
||||
show_ranking_score,
|
||||
show_ranking_score_details,
|
||||
semantic_ratio,
|
||||
hybrid,
|
||||
total_degraded,
|
||||
total_used_negative_operator,
|
||||
ranking_score_threshold,
|
||||
mut locales,
|
||||
marker: _,
|
||||
} = *new;
|
||||
|
||||
// request
|
||||
self.total_received = self.total_received.saturating_add(total_received);
|
||||
self.total_succeeded = self.total_succeeded.saturating_add(total_succeeded);
|
||||
self.total_degraded = self.total_degraded.saturating_add(total_degraded);
|
||||
self.total_used_negative_operator =
|
||||
self.total_used_negative_operator.saturating_add(total_used_negative_operator);
|
||||
self.time_spent.append(&mut time_spent);
|
||||
|
||||
// sort
|
||||
self.sort_with_geo_point |= sort_with_geo_point;
|
||||
self.sort_sum_of_criteria_terms =
|
||||
self.sort_sum_of_criteria_terms.saturating_add(sort_sum_of_criteria_terms);
|
||||
self.sort_total_number_of_criteria =
|
||||
self.sort_total_number_of_criteria.saturating_add(sort_total_number_of_criteria);
|
||||
|
||||
// distinct
|
||||
self.distinct |= distinct;
|
||||
|
||||
// filter
|
||||
self.filter_with_geo_radius |= filter_with_geo_radius;
|
||||
self.filter_with_geo_bounding_box |= filter_with_geo_bounding_box;
|
||||
self.filter_sum_of_criteria_terms =
|
||||
self.filter_sum_of_criteria_terms.saturating_add(filter_sum_of_criteria_terms);
|
||||
self.filter_total_number_of_criteria =
|
||||
self.filter_total_number_of_criteria.saturating_add(filter_total_number_of_criteria);
|
||||
for (key, value) in used_syntax.into_iter() {
|
||||
let used_syntax = self.used_syntax.entry(key).or_insert(0);
|
||||
*used_syntax = used_syntax.saturating_add(value);
|
||||
}
|
||||
|
||||
// attributes_to_search_on
|
||||
self.attributes_to_search_on_total_number_of_uses = self
|
||||
.attributes_to_search_on_total_number_of_uses
|
||||
.saturating_add(attributes_to_search_on_total_number_of_uses);
|
||||
|
||||
// q
|
||||
self.max_terms_number = self.max_terms_number.max(max_terms_number);
|
||||
|
||||
// vector
|
||||
self.max_vector_size = self.max_vector_size.max(max_vector_size);
|
||||
self.retrieve_vectors |= retrieve_vectors;
|
||||
self.semantic_ratio |= semantic_ratio;
|
||||
self.hybrid |= hybrid;
|
||||
|
||||
// pagination
|
||||
self.max_limit = self.max_limit.max(max_limit);
|
||||
self.max_offset = self.max_offset.max(max_offset);
|
||||
self.finite_pagination += finite_pagination;
|
||||
|
||||
// formatting
|
||||
self.max_attributes_to_retrieve =
|
||||
self.max_attributes_to_retrieve.max(max_attributes_to_retrieve);
|
||||
self.max_attributes_to_highlight =
|
||||
self.max_attributes_to_highlight.max(max_attributes_to_highlight);
|
||||
self.highlight_pre_tag |= highlight_pre_tag;
|
||||
self.highlight_post_tag |= highlight_post_tag;
|
||||
self.max_attributes_to_crop = self.max_attributes_to_crop.max(max_attributes_to_crop);
|
||||
self.crop_marker |= crop_marker;
|
||||
self.show_matches_position |= show_matches_position;
|
||||
self.crop_length |= crop_length;
|
||||
|
||||
// facets
|
||||
self.facets_sum_of_terms = self.facets_sum_of_terms.saturating_add(facets_sum_of_terms);
|
||||
self.facets_total_number_of_facets =
|
||||
self.facets_total_number_of_facets.saturating_add(facets_total_number_of_facets);
|
||||
|
||||
// matching strategy
|
||||
for (key, value) in matching_strategy.into_iter() {
|
||||
let matching_strategy = self.matching_strategy.entry(key).or_insert(0);
|
||||
*matching_strategy = matching_strategy.saturating_add(value);
|
||||
}
|
||||
|
||||
// scoring
|
||||
self.show_ranking_score |= show_ranking_score;
|
||||
self.show_ranking_score_details |= show_ranking_score_details;
|
||||
self.ranking_score_threshold |= ranking_score_threshold;
|
||||
|
||||
// locales
|
||||
self.locales.append(&mut locales);
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
let Self {
|
||||
total_received,
|
||||
total_succeeded,
|
||||
time_spent,
|
||||
sort_with_geo_point,
|
||||
sort_sum_of_criteria_terms,
|
||||
sort_total_number_of_criteria,
|
||||
distinct,
|
||||
filter_with_geo_radius,
|
||||
filter_with_geo_bounding_box,
|
||||
filter_sum_of_criteria_terms,
|
||||
filter_total_number_of_criteria,
|
||||
used_syntax,
|
||||
attributes_to_search_on_total_number_of_uses,
|
||||
max_terms_number,
|
||||
max_vector_size,
|
||||
retrieve_vectors,
|
||||
matching_strategy,
|
||||
max_limit,
|
||||
max_offset,
|
||||
finite_pagination,
|
||||
max_attributes_to_retrieve,
|
||||
max_attributes_to_highlight,
|
||||
highlight_pre_tag,
|
||||
highlight_post_tag,
|
||||
max_attributes_to_crop,
|
||||
crop_marker,
|
||||
show_matches_position,
|
||||
crop_length,
|
||||
facets_sum_of_terms,
|
||||
facets_total_number_of_facets,
|
||||
show_ranking_score,
|
||||
show_ranking_score_details,
|
||||
semantic_ratio,
|
||||
hybrid,
|
||||
total_degraded,
|
||||
total_used_negative_operator,
|
||||
ranking_score_threshold,
|
||||
locales,
|
||||
marker: _,
|
||||
} = *self;
|
||||
|
||||
// we get all the values in a sorted manner
|
||||
let time_spent = time_spent.into_sorted_vec();
|
||||
// the index of the 99th percentile value
let percentile_99th = time_spent.len() * 99 / 100;
// We are only interested in the slowest value among the 99% fastest results
let time_spent = time_spent.get(percentile_99th);
|
||||
|
||||
json!({
|
||||
"requests": {
|
||||
"99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
|
||||
"total_succeeded": total_succeeded,
|
||||
"total_failed": total_received.saturating_sub(total_succeeded), // just to be sure we never panics
|
||||
"total_received": total_received,
|
||||
"total_degraded": total_degraded,
|
||||
"total_used_negative_operator": total_used_negative_operator,
|
||||
},
|
||||
"sort": {
|
||||
"with_geoPoint": sort_with_geo_point,
|
||||
"avg_criteria_number": format!("{:.2}", sort_sum_of_criteria_terms as f64 / sort_total_number_of_criteria as f64),
|
||||
},
|
||||
"distinct": distinct,
|
||||
"filter": {
|
||||
"with_geoRadius": filter_with_geo_radius,
|
||||
"with_geoBoundingBox": filter_with_geo_bounding_box,
|
||||
"avg_criteria_number": format!("{:.2}", filter_sum_of_criteria_terms as f64 / filter_total_number_of_criteria as f64),
|
||||
"most_used_syntax": used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
|
||||
},
|
||||
"attributes_to_search_on": {
|
||||
"total_number_of_uses": attributes_to_search_on_total_number_of_uses,
|
||||
},
|
||||
"q": {
|
||||
"max_terms_number": max_terms_number,
|
||||
},
|
||||
"vector": {
|
||||
"max_vector_size": max_vector_size,
|
||||
"retrieve_vectors": retrieve_vectors,
|
||||
},
|
||||
"hybrid": {
|
||||
"enabled": hybrid,
|
||||
"semantic_ratio": semantic_ratio,
|
||||
},
|
||||
"pagination": {
|
||||
"max_limit": max_limit,
|
||||
"max_offset": max_offset,
|
||||
"most_used_navigation": if finite_pagination > (total_received / 2) { "exhaustive" } else { "estimated" },
|
||||
},
|
||||
"formatting": {
|
||||
"max_attributes_to_retrieve": max_attributes_to_retrieve,
|
||||
"max_attributes_to_highlight": max_attributes_to_highlight,
|
||||
"highlight_pre_tag": highlight_pre_tag,
|
||||
"highlight_post_tag": highlight_post_tag,
|
||||
"max_attributes_to_crop": max_attributes_to_crop,
|
||||
"crop_marker": crop_marker,
|
||||
"show_matches_position": show_matches_position,
|
||||
"crop_length": crop_length,
|
||||
},
|
||||
"facets": {
|
||||
"avg_facets_number": format!("{:.2}", facets_sum_of_terms as f64 / facets_total_number_of_facets as f64),
|
||||
},
|
||||
"matching_strategy": {
|
||||
"most_used_strategy": matching_strategy.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
|
||||
},
|
||||
"locales": locales,
|
||||
"scoring": {
|
||||
"show_ranking_score": show_ranking_score,
|
||||
"show_ranking_score_details": show_ranking_score_details,
|
||||
"ranking_score_threshold": ranking_score_threshold,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
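Before the settings routes, a short usage sketch of how these search aggregates are meant to combine; `query_a` and `query_b` are hypothetical queries received by the POST route, and the batching machinery behind `publish` is not shown in this section.

// Illustrative sketch, assuming the `Aggregate` trait is in scope.
let a = Box::new(SearchAggregator::<SearchPOST>::from_query(&query_a));
let b = Box::new(SearchAggregator::<SearchPOST>::from_query(&query_b));

// When several events are batched before being flushed to Segment, they are folded together:
// counters such as `total_received` add up, `max_*` fields keep the largest value seen, and
// boolean flags are OR-ed, as implemented by `Aggregate::aggregate` above.
let merged = a.aggregate(b);
assert_eq!(merged.event_name(), "Documents Searched POST");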
@@ -1,15 +1,14 @@
|
||||
use super::settings_analytics::*;
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use deserr::actix_web::AwebJson;
|
||||
use index_scheduler::IndexScheduler;
|
||||
use meilisearch_types::deserr::DeserrJsonError;
|
||||
use meilisearch_types::error::ResponseError;
|
||||
use meilisearch_types::facet_values_sort::FacetValuesSort;
|
||||
use meilisearch_types::index_uid::IndexUid;
|
||||
use meilisearch_types::milli::update::Setting;
|
||||
use meilisearch_types::settings::{settings, RankingRuleView, SecretPolicy, Settings, Unchecked};
|
||||
use meilisearch_types::settings::{settings, SecretPolicy, Settings, Unchecked};
|
||||
use meilisearch_types::tasks::KindWithContent;
|
||||
use serde_json::json;
|
||||
use tracing::debug;
|
||||
|
||||
use crate::analytics::Analytics;
|
||||
@@ -20,7 +19,7 @@ use crate::Opt;
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! make_setting_route {
|
||||
($route:literal, $update_verb:ident, $type:ty, $err_ty:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
|
||||
($route:literal, $update_verb:ident, $type:ty, $err_ty:ty, $attr:ident, $camelcase_attr:literal, $analytics:ident) => {
|
||||
pub mod $attr {
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{web, HttpRequest, HttpResponse, Resource};
|
||||
@@ -80,7 +79,7 @@ macro_rules! make_setting_route {
|
||||
body: deserr::actix_web::AwebJson<Option<$type>, $err_ty>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
$analytics_var: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> std::result::Result<HttpResponse, ResponseError> {
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
|
||||
@@ -88,7 +87,10 @@ macro_rules! make_setting_route {
|
||||
debug!(parameters = ?body, "Update settings");
|
||||
|
||||
#[allow(clippy::redundant_closure_call)]
|
||||
$analytics(&body, &req);
|
||||
analytics.publish(
|
||||
$crate::routes::indexes::settings_analytics::$analytics::new(body.as_ref()).into_settings(),
|
||||
&req,
|
||||
);
|
||||
|
||||
let new_settings = Settings {
|
||||
$attr: match body {
|
||||
@@ -160,21 +162,7 @@ make_setting_route!(
|
||||
>,
|
||||
filterable_attributes,
|
||||
"filterableAttributes",
|
||||
analytics,
|
||||
|setting: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"FilterableAttributes Updated".to_string(),
|
||||
json!({
|
||||
"filterable_attributes": {
|
||||
"total": setting.as_ref().map(|filter| filter.len()).unwrap_or(0),
|
||||
"has_geo": setting.as_ref().map(|filter| filter.contains("_geo")).unwrap_or(false),
|
||||
}
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
FilterableAttributesAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -186,21 +174,7 @@ make_setting_route!(
|
||||
>,
|
||||
sortable_attributes,
|
||||
"sortableAttributes",
|
||||
analytics,
|
||||
|setting: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"SortableAttributes Updated".to_string(),
|
||||
json!({
|
||||
"sortable_attributes": {
|
||||
"total": setting.as_ref().map(|sort| sort.len()),
|
||||
"has_geo": setting.as_ref().map(|sort| sort.contains("_geo")),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
SortableAttributesAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -212,21 +186,7 @@ make_setting_route!(
|
||||
>,
|
||||
displayed_attributes,
|
||||
"displayedAttributes",
|
||||
analytics,
|
||||
|displayed: &Option<Vec<String>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"DisplayedAttributes Updated".to_string(),
|
||||
json!({
|
||||
"displayed_attributes": {
|
||||
"total": displayed.as_ref().map(|displayed| displayed.len()),
|
||||
"with_wildcard": displayed.as_ref().map(|displayed| displayed.iter().any(|displayed| displayed == "*")),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
DisplayedAttributesAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -238,40 +198,7 @@ make_setting_route!(
|
||||
>,
|
||||
typo_tolerance,
|
||||
"typoTolerance",
|
||||
analytics,
|
||||
|setting: &Option<meilisearch_types::settings::TypoSettings>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"TypoTolerance Updated".to_string(),
|
||||
json!({
|
||||
"typo_tolerance": {
|
||||
"enabled": setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
|
||||
"disable_on_attributes": setting
|
||||
.as_ref()
|
||||
.and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
|
||||
"disable_on_words": setting
|
||||
.as_ref()
|
||||
.and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
|
||||
"min_word_size_for_one_typo": setting
|
||||
.as_ref()
|
||||
.and_then(|s| s.min_word_size_for_typos
|
||||
.as_ref()
|
||||
.set()
|
||||
.map(|s| s.one_typo.set()))
|
||||
.flatten(),
|
||||
"min_word_size_for_two_typos": setting
|
||||
.as_ref()
|
||||
.and_then(|s| s.min_word_size_for_typos
|
||||
.as_ref()
|
||||
.set()
|
||||
.map(|s| s.two_typos.set()))
|
||||
.flatten(),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
TypoToleranceAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -283,21 +210,7 @@ make_setting_route!(
|
||||
>,
|
||||
searchable_attributes,
|
||||
"searchableAttributes",
|
||||
analytics,
|
||||
|setting: &Option<Vec<String>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"SearchableAttributes Updated".to_string(),
|
||||
json!({
|
||||
"searchable_attributes": {
|
||||
"total": setting.as_ref().map(|searchable| searchable.len()),
|
||||
"with_wildcard": setting.as_ref().map(|searchable| searchable.iter().any(|searchable| searchable == "*")),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
SearchableAttributesAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -309,20 +222,7 @@ make_setting_route!(
|
||||
>,
|
||||
stop_words,
|
||||
"stopWords",
|
||||
analytics,
|
||||
|stop_words: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"StopWords Updated".to_string(),
|
||||
json!({
|
||||
"stop_words": {
|
||||
"total": stop_words.as_ref().map(|stop_words| stop_words.len()),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
StopWordsAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -334,20 +234,7 @@ make_setting_route!(
|
||||
>,
|
||||
non_separator_tokens,
|
||||
"nonSeparatorTokens",
|
||||
analytics,
|
||||
|non_separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"nonSeparatorTokens Updated".to_string(),
|
||||
json!({
|
||||
"non_separator_tokens": {
|
||||
"total": non_separator_tokens.as_ref().map(|non_separator_tokens| non_separator_tokens.len()),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
NonSeparatorTokensAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -359,20 +246,7 @@ make_setting_route!(
|
||||
>,
|
||||
separator_tokens,
|
||||
"separatorTokens",
|
||||
analytics,
|
||||
|separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"separatorTokens Updated".to_string(),
|
||||
json!({
|
||||
"separator_tokens": {
|
||||
"total": separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
SeparatorTokensAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -384,20 +258,7 @@ make_setting_route!(
|
||||
>,
|
||||
dictionary,
|
||||
"dictionary",
|
||||
analytics,
|
||||
|dictionary: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"dictionary Updated".to_string(),
|
||||
json!({
|
||||
"dictionary": {
|
||||
"total": dictionary.as_ref().map(|dictionary| dictionary.len()),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
DictionaryAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -409,20 +270,7 @@ make_setting_route!(
|
||||
>,
|
||||
synonyms,
|
||||
"synonyms",
|
||||
analytics,
|
||||
|synonyms: &Option<std::collections::BTreeMap<String, Vec<String>>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"Synonyms Updated".to_string(),
|
||||
json!({
|
||||
"synonyms": {
|
||||
"total": synonyms.as_ref().map(|synonyms| synonyms.len()),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
SynonymsAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -434,19 +282,7 @@ make_setting_route!(
|
||||
>,
|
||||
distinct_attribute,
|
||||
"distinctAttribute",
|
||||
analytics,
|
||||
|distinct: &Option<String>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
analytics.publish(
|
||||
"DistinctAttribute Updated".to_string(),
|
||||
json!({
|
||||
"distinct_attribute": {
|
||||
"set": distinct.is_some(),
|
||||
}
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
DistinctAttributeAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -458,20 +294,7 @@ make_setting_route!(
|
||||
>,
|
||||
proximity_precision,
|
||||
"proximityPrecision",
|
||||
analytics,
|
||||
|precision: &Option<meilisearch_types::settings::ProximityPrecisionView>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
analytics.publish(
|
||||
"ProximityPrecision Updated".to_string(),
|
||||
json!({
|
||||
"proximity_precision": {
|
||||
"set": precision.is_some(),
|
||||
"value": precision.unwrap_or_default(),
|
||||
}
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
ProximityPrecisionAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -483,17 +306,7 @@ make_setting_route!(
|
||||
>,
|
||||
localized_attributes,
|
||||
"localizedAttributes",
|
||||
analytics,
|
||||
|rules: &Option<Vec<meilisearch_types::locales::LocalizedAttributesRuleView>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
analytics.publish(
|
||||
"LocalizedAttributesRules Updated".to_string(),
|
||||
json!({
|
||||
"locales": rules.as_ref().map(|rules| rules.iter().flat_map(|rule| rule.locales.iter().cloned()).collect::<std::collections::BTreeSet<_>>())
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
LocalesAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -505,26 +318,7 @@ make_setting_route!(
|
||||
>,
|
||||
ranking_rules,
|
||||
"rankingRules",
|
||||
analytics,
|
||||
|setting: &Option<Vec<meilisearch_types::settings::RankingRuleView>>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"RankingRules Updated".to_string(),
|
||||
json!({
|
||||
"ranking_rules": {
|
||||
"words_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Words))),
|
||||
"typo_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Typo))),
|
||||
"proximity_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Proximity))),
|
||||
"attribute_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Attribute))),
|
||||
"sort_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Sort))),
|
||||
"exactness_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Exactness))),
|
||||
"values": setting.as_ref().map(|rr| rr.iter().filter(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Asc(_) | meilisearch_types::settings::RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
|
||||
}
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
RankingRulesAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -536,25 +330,7 @@ make_setting_route!(
|
||||
>,
|
||||
faceting,
|
||||
"faceting",
|
||||
analytics,
|
||||
|setting: &Option<meilisearch_types::settings::FacetingSettings>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
use meilisearch_types::facet_values_sort::FacetValuesSort;
|
||||
|
||||
analytics.publish(
|
||||
"Faceting Updated".to_string(),
|
||||
json!({
|
||||
"faceting": {
|
||||
"max_values_per_facet": setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
|
||||
"sort_facet_values_by_star_count": setting.as_ref().and_then(|s| {
|
||||
s.sort_facet_values_by.as_ref().set().map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
|
||||
}),
|
||||
"sort_facet_values_by_total": setting.as_ref().and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
FacetingAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -566,20 +342,7 @@ make_setting_route!(
|
||||
>,
|
||||
pagination,
|
||||
"pagination",
|
||||
analytics,
|
||||
|setting: &Option<meilisearch_types::settings::PaginationSettings>, req: &HttpRequest| {
|
||||
use serde_json::json;
|
||||
|
||||
analytics.publish(
|
||||
"Pagination Updated".to_string(),
|
||||
json!({
|
||||
"pagination": {
|
||||
"max_total_hits": setting.as_ref().and_then(|s| s.max_total_hits.set()),
|
||||
},
|
||||
}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
PaginationAnalytics
|
||||
);
|
||||
|
||||
make_setting_route!(
|
||||
@@ -591,75 +354,9 @@ make_setting_route!(
|
||||
>,
|
||||
embedders,
|
||||
"embedders",
|
||||
analytics,
|
||||
|setting: &Option<std::collections::BTreeMap<String, Setting<meilisearch_types::milli::vector::settings::EmbeddingSettings>>>, req: &HttpRequest| {
|
||||
|
||||
|
||||
analytics.publish(
|
||||
"Embedders Updated".to_string(),
|
||||
serde_json::json!({"embedders": crate::routes::indexes::settings::embedder_analytics(setting.as_ref())}),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
EmbeddersAnalytics
|
||||
);
|
||||
|
||||
fn embedder_analytics(
|
||||
setting: Option<
|
||||
&std::collections::BTreeMap<
|
||||
String,
|
||||
Setting<meilisearch_types::milli::vector::settings::EmbeddingSettings>,
|
||||
>,
|
||||
>,
|
||||
) -> serde_json::Value {
|
||||
let mut sources = std::collections::HashSet::new();
|
||||
|
||||
if let Some(s) = &setting {
|
||||
for source in s
|
||||
.values()
|
||||
.filter_map(|config| config.clone().set())
|
||||
.filter_map(|config| config.source.set())
|
||||
{
|
||||
use meilisearch_types::milli::vector::settings::EmbedderSource;
|
||||
match source {
|
||||
EmbedderSource::OpenAi => sources.insert("openAi"),
|
||||
EmbedderSource::HuggingFace => sources.insert("huggingFace"),
|
||||
EmbedderSource::UserProvided => sources.insert("userProvided"),
|
||||
EmbedderSource::Ollama => sources.insert("ollama"),
|
||||
EmbedderSource::Rest => sources.insert("rest"),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
let document_template_used = setting.as_ref().map(|map| {
|
||||
map.values()
|
||||
.filter_map(|config| config.clone().set())
|
||||
.any(|config| config.document_template.set().is_some())
|
||||
});
|
||||
|
||||
let document_template_max_bytes = setting.as_ref().and_then(|map| {
|
||||
map.values()
|
||||
.filter_map(|config| config.clone().set())
|
||||
.filter_map(|config| config.document_template_max_bytes.set())
|
||||
.max()
|
||||
});
|
||||
|
||||
let binary_quantization_used = setting.as_ref().map(|map| {
|
||||
map.values()
|
||||
.filter_map(|config| config.clone().set())
|
||||
.any(|config| config.binary_quantized.set().is_some())
|
||||
});
|
||||
|
||||
json!(
|
||||
{
|
||||
"total": setting.as_ref().map(|s| s.len()),
|
||||
"sources": sources,
|
||||
"document_template_used": document_template_used,
|
||||
"document_template_max_bytes": document_template_max_bytes,
|
||||
"binary_quantization_used": binary_quantization_used,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
make_setting_route!(
|
||||
"/search-cutoff-ms",
|
||||
put,
|
||||
@@ -669,14 +366,7 @@ make_setting_route!(
|
||||
>,
|
||||
search_cutoff_ms,
|
||||
"searchCutoffMs",
|
||||
analytics,
|
||||
|setting: &Option<u64>, req: &HttpRequest| {
|
||||
analytics.publish(
|
||||
"Search Cutoff Updated".to_string(),
|
||||
serde_json::json!({"search_cutoff_ms": setting }),
|
||||
Some(req),
|
||||
);
|
||||
}
|
||||
SearchCutoffMsAnalytics
|
||||
);
|
||||
|
||||
macro_rules! generate_configure {
|
||||
@@ -720,7 +410,7 @@ pub async fn update_all(
|
||||
body: AwebJson<Settings<Unchecked>, DeserrJsonError>,
|
||||
req: HttpRequest,
|
||||
opt: web::Data<Opt>,
|
||||
analytics: web::Data<dyn Analytics>,
|
||||
analytics: web::Data<Analytics>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||
|
||||
@@ -729,104 +419,45 @@ pub async fn update_all(
|
||||
let new_settings = validate_settings(new_settings, &index_scheduler)?;
|
||||
|
||||
analytics.publish(
|
||||
"Settings Updated".to_string(),
|
||||
json!({
|
||||
"ranking_rules": {
|
||||
"words_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Words))),
|
||||
"typo_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Typo))),
|
||||
"proximity_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Proximity))),
|
||||
"attribute_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Attribute))),
|
||||
"sort_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Sort))),
|
||||
"exactness_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Exactness))),
|
||||
"values": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().filter(|s| !matches!(s, RankingRuleView::Asc(_) | RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
|
||||
},
|
||||
"searchable_attributes": {
|
||||
"total": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()),
|
||||
"with_wildcard": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.iter().any(|searchable| searchable == "*")),
|
||||
},
|
||||
"displayed_attributes": {
|
||||
"total": new_settings.displayed_attributes.as_ref().set().map(|displayed| displayed.len()),
|
||||
"with_wildcard": new_settings.displayed_attributes.as_ref().set().map(|displayed| displayed.iter().any(|displayed| displayed == "*")),
|
||||
},
|
||||
"sortable_attributes": {
|
||||
"total": new_settings.sortable_attributes.as_ref().set().map(|sort| sort.len()),
|
||||
"has_geo": new_settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")),
|
||||
},
|
||||
"filterable_attributes": {
|
||||
"total": new_settings.filterable_attributes.as_ref().set().map(|filter| filter.len()),
|
||||
"has_geo": new_settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")),
|
||||
},
|
||||
"distinct_attribute": {
|
||||
"set": new_settings.distinct_attribute.as_ref().set().is_some()
|
||||
},
|
||||
"proximity_precision": {
|
||||
"set": new_settings.proximity_precision.as_ref().set().is_some(),
|
||||
"value": new_settings.proximity_precision.as_ref().set().copied().unwrap_or_default()
|
||||
},
|
||||
"typo_tolerance": {
|
||||
"enabled": new_settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.enabled.as_ref().set())
|
||||
.copied(),
|
||||
"disable_on_attributes": new_settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
|
||||
"disable_on_words": new_settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
|
||||
"min_word_size_for_one_typo": new_settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.min_word_size_for_typos
|
||||
.as_ref()
|
||||
.set()
|
||||
.map(|s| s.one_typo.set()))
|
||||
.flatten(),
|
||||
"min_word_size_for_two_typos": new_settings.typo_tolerance
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.min_word_size_for_typos
|
||||
.as_ref()
|
||||
.set()
|
||||
.map(|s| s.two_typos.set()))
|
||||
.flatten(),
|
||||
},
|
||||
"faceting": {
|
||||
"max_values_per_facet": new_settings.faceting
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.max_values_per_facet.as_ref().set()),
|
||||
"sort_facet_values_by_star_count": new_settings.faceting
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| {
|
||||
s.sort_facet_values_by.as_ref().set().map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
|
||||
}),
|
||||
"sort_facet_values_by_total": new_settings.faceting
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
|
||||
},
|
||||
"pagination": {
|
||||
"max_total_hits": new_settings.pagination
|
||||
.as_ref()
|
||||
.set()
|
||||
.and_then(|s| s.max_total_hits.as_ref().set()),
|
||||
},
|
||||
"stop_words": {
|
||||
"total": new_settings.stop_words.as_ref().set().map(|stop_words| stop_words.len()),
|
||||
},
|
||||
"synonyms": {
|
||||
"total": new_settings.synonyms.as_ref().set().map(|synonyms| synonyms.len()),
|
||||
},
|
||||
"embedders": crate::routes::indexes::settings::embedder_analytics(new_settings.embedders.as_ref().set()),
|
||||
"search_cutoff_ms": new_settings.search_cutoff_ms.as_ref().set(),
|
||||
"locales": new_settings.localized_attributes.as_ref().set().map(|rules| rules.iter().flat_map(|rule| rule.locales.iter().cloned()).collect::<std::collections::BTreeSet<_>>()),
|
||||
}),
|
||||
Some(&req),
|
||||
SettingsAnalytics {
|
||||
ranking_rules: RankingRulesAnalytics::new(new_settings.ranking_rules.as_ref().set()),
|
||||
searchable_attributes: SearchableAttributesAnalytics::new(
|
||||
new_settings.searchable_attributes.as_ref().set(),
|
||||
),
|
||||
displayed_attributes: DisplayedAttributesAnalytics::new(
|
||||
new_settings.displayed_attributes.as_ref().set(),
|
||||
),
|
||||
sortable_attributes: SortableAttributesAnalytics::new(
|
||||
new_settings.sortable_attributes.as_ref().set(),
|
||||
),
|
||||
filterable_attributes: FilterableAttributesAnalytics::new(
|
||||
new_settings.filterable_attributes.as_ref().set(),
|
||||
),
|
||||
distinct_attribute: DistinctAttributeAnalytics::new(
|
||||
new_settings.distinct_attribute.as_ref().set(),
|
||||
),
|
||||
proximity_precision: ProximityPrecisionAnalytics::new(
|
||||
new_settings.proximity_precision.as_ref().set(),
|
||||
),
|
||||
typo_tolerance: TypoToleranceAnalytics::new(new_settings.typo_tolerance.as_ref().set()),
|
||||
faceting: FacetingAnalytics::new(new_settings.faceting.as_ref().set()),
|
||||
pagination: PaginationAnalytics::new(new_settings.pagination.as_ref().set()),
|
||||
stop_words: StopWordsAnalytics::new(new_settings.stop_words.as_ref().set()),
|
||||
synonyms: SynonymsAnalytics::new(new_settings.synonyms.as_ref().set()),
|
||||
embedders: EmbeddersAnalytics::new(new_settings.embedders.as_ref().set()),
|
||||
search_cutoff_ms: SearchCutoffMsAnalytics::new(
|
||||
new_settings.search_cutoff_ms.as_ref().set(),
|
||||
),
|
||||
locales: LocalesAnalytics::new(new_settings.localized_attributes.as_ref().set()),
|
||||
dictionary: DictionaryAnalytics::new(new_settings.dictionary.as_ref().set()),
|
||||
separator_tokens: SeparatorTokensAnalytics::new(
|
||||
new_settings.separator_tokens.as_ref().set(),
|
||||
),
|
||||
non_separator_tokens: NonSeparatorTokensAnalytics::new(
|
||||
new_settings.non_separator_tokens.as_ref().set(),
|
||||
),
|
||||
},
|
||||
&req,
|
||||
);
|
||||
|
||||
let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid);
|
||||
|
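Before the new analytics file, a short sketch of the flow that every sub-settings route now shares, using the `/filterable-attributes` route as an example; `body` stands for the hypothetical deserialized payload (`Option<BTreeSet<String>>`), while `analytics` and `req` are the handler arguments visible in the macro above.

// Illustrative sketch of the per-sub-setting analytics flow introduced by this PR.
analytics.publish(
    FilterableAttributesAnalytics::new(body.as_ref()).into_settings(),
    &req,
);
// `into_settings()` wraps the sub-analytics in a `SettingsAnalytics` with every other field
// left at its default, so events coming from different sub-routes are later merged by
// `Aggregate::aggregate` into a single "Settings Updated" event.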
621 meilisearch/src/routes/indexes/settings_analytics.rs (new file)
@@ -0,0 +1,621 @@
|
||||
//! All the structures used to make the analytics on the settings work.
//! The signatures of the `new` functions are not very rust-idiomatic because they must match the types received
//! through the sub-settings routes directly, without any manipulation.
//! This is why we often use an `Option<&Vec<_>>` instead of an `Option<&[_]>`.
|
||||
|
||||
use meilisearch_types::locales::{Locale, LocalizedAttributesRuleView};
|
||||
use meilisearch_types::milli::update::Setting;
|
||||
use meilisearch_types::milli::vector::settings::EmbeddingSettings;
|
||||
use meilisearch_types::settings::{
|
||||
FacetingSettings, PaginationSettings, ProximityPrecisionView, TypoSettings,
|
||||
};
|
||||
use meilisearch_types::{facet_values_sort::FacetValuesSort, settings::RankingRuleView};
|
||||
use serde::Serialize;
|
||||
use std::collections::{BTreeMap, BTreeSet, HashSet};
|
||||
|
||||
use crate::analytics::Aggregate;
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct SettingsAnalytics {
|
||||
pub ranking_rules: RankingRulesAnalytics,
|
||||
pub searchable_attributes: SearchableAttributesAnalytics,
|
||||
pub displayed_attributes: DisplayedAttributesAnalytics,
|
||||
pub sortable_attributes: SortableAttributesAnalytics,
|
||||
pub filterable_attributes: FilterableAttributesAnalytics,
|
||||
pub distinct_attribute: DistinctAttributeAnalytics,
|
||||
pub proximity_precision: ProximityPrecisionAnalytics,
|
||||
pub typo_tolerance: TypoToleranceAnalytics,
|
||||
pub faceting: FacetingAnalytics,
|
||||
pub pagination: PaginationAnalytics,
|
||||
pub stop_words: StopWordsAnalytics,
|
||||
pub synonyms: SynonymsAnalytics,
|
||||
pub embedders: EmbeddersAnalytics,
|
||||
pub search_cutoff_ms: SearchCutoffMsAnalytics,
|
||||
pub locales: LocalesAnalytics,
|
||||
pub dictionary: DictionaryAnalytics,
|
||||
pub separator_tokens: SeparatorTokensAnalytics,
|
||||
pub non_separator_tokens: NonSeparatorTokensAnalytics,
|
||||
}
|
||||
|
||||
impl Aggregate for SettingsAnalytics {
|
||||
fn event_name(&self) -> &'static str {
|
||||
"Settings Updated"
|
||||
}
|
||||
|
||||
fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
|
||||
Box::new(Self {
|
||||
ranking_rules: RankingRulesAnalytics {
|
||||
words_position: new
|
||||
.ranking_rules
|
||||
.words_position
|
||||
.or(self.ranking_rules.words_position),
|
||||
typo_position: new.ranking_rules.typo_position.or(self.ranking_rules.typo_position),
|
||||
proximity_position: new
|
||||
.ranking_rules
|
||||
.proximity_position
|
||||
.or(self.ranking_rules.proximity_position),
|
||||
attribute_position: new
|
||||
.ranking_rules
|
||||
.attribute_position
|
||||
.or(self.ranking_rules.attribute_position),
|
||||
sort_position: new.ranking_rules.sort_position.or(self.ranking_rules.sort_position),
|
||||
exactness_position: new
|
||||
.ranking_rules
|
||||
.exactness_position
|
||||
.or(self.ranking_rules.exactness_position),
|
||||
values: new.ranking_rules.values.or(self.ranking_rules.values),
|
||||
},
|
||||
searchable_attributes: SearchableAttributesAnalytics {
|
||||
total: new.searchable_attributes.total.or(self.searchable_attributes.total),
|
||||
with_wildcard: new
|
||||
.searchable_attributes
|
||||
.with_wildcard
|
||||
.or(self.searchable_attributes.with_wildcard),
|
||||
},
|
||||
displayed_attributes: DisplayedAttributesAnalytics {
|
||||
total: new.displayed_attributes.total.or(self.displayed_attributes.total),
|
||||
with_wildcard: new
|
||||
.displayed_attributes
|
||||
.with_wildcard
|
||||
.or(self.displayed_attributes.with_wildcard),
|
||||
},
|
||||
sortable_attributes: SortableAttributesAnalytics {
|
||||
total: new.sortable_attributes.total.or(self.sortable_attributes.total),
|
||||
has_geo: new.sortable_attributes.has_geo.or(self.sortable_attributes.has_geo),
|
||||
},
|
||||
filterable_attributes: FilterableAttributesAnalytics {
|
||||
total: new.filterable_attributes.total.or(self.filterable_attributes.total),
|
||||
has_geo: new.filterable_attributes.has_geo.or(self.filterable_attributes.has_geo),
|
||||
},
|
||||
distinct_attribute: DistinctAttributeAnalytics {
|
||||
set: self.distinct_attribute.set | new.distinct_attribute.set,
|
||||
},
|
||||
proximity_precision: ProximityPrecisionAnalytics {
|
||||
set: self.proximity_precision.set | new.proximity_precision.set,
|
||||
value: new.proximity_precision.value.or(self.proximity_precision.value),
|
||||
},
|
||||
typo_tolerance: TypoToleranceAnalytics {
|
||||
enabled: new.typo_tolerance.enabled.or(self.typo_tolerance.enabled),
|
||||
disable_on_attributes: new
|
||||
.typo_tolerance
|
||||
.disable_on_attributes
|
||||
.or(self.typo_tolerance.disable_on_attributes),
|
||||
disable_on_words: new
|
||||
.typo_tolerance
|
||||
.disable_on_words
|
||||
.or(self.typo_tolerance.disable_on_words),
|
||||
min_word_size_for_one_typo: new
|
||||
.typo_tolerance
|
||||
.min_word_size_for_one_typo
|
||||
.or(self.typo_tolerance.min_word_size_for_one_typo),
|
||||
min_word_size_for_two_typos: new
|
||||
.typo_tolerance
|
||||
.min_word_size_for_two_typos
|
||||
.or(self.typo_tolerance.min_word_size_for_two_typos),
|
||||
},
|
||||
faceting: FacetingAnalytics {
|
||||
max_values_per_facet: new
|
||||
.faceting
|
||||
.max_values_per_facet
|
||||
.or(self.faceting.max_values_per_facet),
|
||||
sort_facet_values_by_star_count: new
|
||||
.faceting
|
||||
.sort_facet_values_by_star_count
|
||||
.or(self.faceting.sort_facet_values_by_star_count),
|
||||
sort_facet_values_by_total: new
|
||||
.faceting
|
||||
.sort_facet_values_by_total
|
||||
.or(self.faceting.sort_facet_values_by_total),
|
||||
},
|
||||
pagination: PaginationAnalytics {
|
||||
max_total_hits: new.pagination.max_total_hits.or(self.pagination.max_total_hits),
|
||||
},
|
||||
stop_words: StopWordsAnalytics {
|
||||
total: new.stop_words.total.or(self.stop_words.total),
|
||||
},
|
||||
synonyms: SynonymsAnalytics { total: new.synonyms.total.or(self.synonyms.total) },
|
||||
embedders: EmbeddersAnalytics {
|
||||
total: new.embedders.total.or(self.embedders.total),
|
||||
sources: match (self.embedders.sources, new.embedders.sources) {
|
||||
(None, None) => None,
|
||||
(Some(sources), None) | (None, Some(sources)) => Some(sources),
|
||||
(Some(this), Some(other)) => Some(this.union(&other).cloned().collect()),
|
||||
},
|
||||
document_template_used: match (
|
||||
self.embedders.document_template_used,
|
||||
new.embedders.document_template_used,
|
||||
) {
|
||||
(None, None) => None,
|
||||
(Some(used), None) | (None, Some(used)) => Some(used),
|
||||
(Some(this), Some(other)) => Some(this | other),
|
||||
},
|
||||
document_template_max_bytes: match (
|
||||
self.embedders.document_template_max_bytes,
|
||||
new.embedders.document_template_max_bytes,
|
||||
) {
|
||||
(None, None) => None,
|
||||
(Some(bytes), None) | (None, Some(bytes)) => Some(bytes),
|
||||
(Some(this), Some(other)) => Some(this.max(other)),
|
||||
},
|
||||
binary_quantization_used: match (
|
||||
self.embedders.binary_quantization_used,
|
||||
new.embedders.binary_quantization_used,
|
||||
) {
|
||||
(None, None) => None,
|
||||
(Some(bq), None) | (None, Some(bq)) => Some(bq),
|
||||
(Some(this), Some(other)) => Some(this | other),
|
||||
},
|
||||
},
|
||||
search_cutoff_ms: SearchCutoffMsAnalytics {
|
||||
search_cutoff_ms: new
|
||||
.search_cutoff_ms
|
||||
.search_cutoff_ms
|
||||
.or(self.search_cutoff_ms.search_cutoff_ms),
|
||||
},
|
||||
locales: LocalesAnalytics { locales: new.locales.locales.or(self.locales.locales) },
|
||||
dictionary: DictionaryAnalytics {
|
||||
total: new.dictionary.total.or(self.dictionary.total),
|
||||
},
|
||||
separator_tokens: SeparatorTokensAnalytics {
|
||||
total: new.separator_tokens.total.or(self.separator_tokens.total),
|
||||
},
|
||||
non_separator_tokens: NonSeparatorTokensAnalytics {
|
||||
total: new.non_separator_tokens.total.or(self.non_separator_tokens.total),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
serde_json::to_value(*self).unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct RankingRulesAnalytics {
|
||||
pub words_position: Option<usize>,
|
||||
pub typo_position: Option<usize>,
|
||||
pub proximity_position: Option<usize>,
|
||||
pub attribute_position: Option<usize>,
|
||||
pub sort_position: Option<usize>,
|
||||
pub exactness_position: Option<usize>,
|
||||
pub values: Option<String>,
|
||||
}
|
||||
|
||||
impl RankingRulesAnalytics {
|
||||
pub fn new(rr: Option<&Vec<RankingRuleView>>) -> Self {
|
||||
RankingRulesAnalytics {
|
||||
words_position: rr.as_ref().and_then(|rr| {
|
||||
rr.iter()
|
||||
.position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Words))
|
||||
}),
|
||||
typo_position: rr.as_ref().and_then(|rr| {
|
||||
rr.iter()
|
||||
.position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Typo))
|
||||
}),
|
||||
proximity_position: rr.as_ref().and_then(|rr| {
|
||||
rr.iter().position(|s| {
|
||||
matches!(s, meilisearch_types::settings::RankingRuleView::Proximity)
|
||||
})
|
||||
}),
|
||||
attribute_position: rr.as_ref().and_then(|rr| {
|
||||
rr.iter().position(|s| {
|
||||
matches!(s, meilisearch_types::settings::RankingRuleView::Attribute)
|
||||
})
|
||||
}),
|
||||
sort_position: rr.as_ref().and_then(|rr| {
|
||||
rr.iter()
|
||||
.position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Sort))
|
||||
}),
|
||||
exactness_position: rr.as_ref().and_then(|rr| {
|
||||
rr.iter().position(|s| {
|
||||
matches!(s, meilisearch_types::settings::RankingRuleView::Exactness)
|
||||
})
|
||||
}),
|
||||
values: rr.as_ref().map(|rr| {
|
||||
rr.iter()
|
||||
.filter(|s| {
|
||||
matches!(
|
||||
s,
|
||||
meilisearch_types::settings::RankingRuleView::Asc(_)
|
||||
| meilisearch_types::settings::RankingRuleView::Desc(_)
|
||||
)
|
||||
})
|
||||
.map(|x| x.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_settings(self) -> SettingsAnalytics {
|
||||
SettingsAnalytics { ranking_rules: self, ..Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct SearchableAttributesAnalytics {
|
||||
pub total: Option<usize>,
|
||||
pub with_wildcard: Option<bool>,
|
||||
}
|
||||
|
||||
impl SearchableAttributesAnalytics {
|
||||
pub fn new(setting: Option<&Vec<String>>) -> Self {
|
||||
Self {
|
||||
total: setting.as_ref().map(|searchable| searchable.len()),
|
||||
with_wildcard: setting
|
||||
.as_ref()
|
||||
.map(|searchable| searchable.iter().any(|searchable| searchable == "*")),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_settings(self) -> SettingsAnalytics {
|
||||
SettingsAnalytics { searchable_attributes: self, ..Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct DisplayedAttributesAnalytics {
|
||||
pub total: Option<usize>,
|
||||
pub with_wildcard: Option<bool>,
|
||||
}
|
||||
|
||||
impl DisplayedAttributesAnalytics {
|
||||
pub fn new(displayed: Option<&Vec<String>>) -> Self {
|
||||
Self {
|
||||
total: displayed.as_ref().map(|displayed| displayed.len()),
|
||||
with_wildcard: displayed
|
||||
.as_ref()
|
||||
.map(|displayed| displayed.iter().any(|displayed| displayed == "*")),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_settings(self) -> SettingsAnalytics {
|
||||
SettingsAnalytics { displayed_attributes: self, ..Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct SortableAttributesAnalytics {
|
||||
pub total: Option<usize>,
|
||||
pub has_geo: Option<bool>,
|
||||
}
|
||||
|
||||
impl SortableAttributesAnalytics {
|
||||
pub fn new(setting: Option<&BTreeSet<String>>) -> Self {
|
||||
Self {
|
||||
total: setting.as_ref().map(|sort| sort.len()),
|
||||
has_geo: setting.as_ref().map(|sort| sort.contains("_geo")),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_settings(self) -> SettingsAnalytics {
|
||||
SettingsAnalytics { sortable_attributes: self, ..Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct FilterableAttributesAnalytics {
|
||||
pub total: Option<usize>,
|
||||
pub has_geo: Option<bool>,
|
||||
}
|
||||
|
||||
impl FilterableAttributesAnalytics {
|
||||
pub fn new(setting: Option<&BTreeSet<String>>) -> Self {
|
||||
Self {
|
||||
total: setting.as_ref().map(|filter| filter.len()),
|
||||
has_geo: setting.as_ref().map(|filter| filter.contains("_geo")),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_settings(self) -> SettingsAnalytics {
|
||||
SettingsAnalytics { filterable_attributes: self, ..Default::default() }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Default)]
|
||||
pub struct DistinctAttributeAnalytics {
|
||||
pub set: bool,
|
||||
}
|
||||
|
||||
impl DistinctAttributeAnalytics {
|
||||
    pub fn new(distinct: Option<&String>) -> Self {
        Self { set: distinct.is_some() }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { distinct_attribute: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct ProximityPrecisionAnalytics {
    pub set: bool,
    pub value: Option<ProximityPrecisionView>,
}

impl ProximityPrecisionAnalytics {
    pub fn new(precision: Option<&ProximityPrecisionView>) -> Self {
        Self { set: precision.is_some(), value: precision.cloned() }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { proximity_precision: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct TypoToleranceAnalytics {
    pub enabled: Option<bool>,
    pub disable_on_attributes: Option<bool>,
    pub disable_on_words: Option<bool>,
    pub min_word_size_for_one_typo: Option<u8>,
    pub min_word_size_for_two_typos: Option<u8>,
}

impl TypoToleranceAnalytics {
    pub fn new(setting: Option<&TypoSettings>) -> Self {
        Self {
            enabled: setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
            disable_on_attributes: setting
                .as_ref()
                .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
            disable_on_words: setting
                .as_ref()
                .and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
            min_word_size_for_one_typo: setting
                .as_ref()
                .and_then(|s| s.min_word_size_for_typos.as_ref().set().map(|s| s.one_typo.set()))
                .flatten(),
            min_word_size_for_two_typos: setting
                .as_ref()
                .and_then(|s| s.min_word_size_for_typos.as_ref().set().map(|s| s.two_typos.set()))
                .flatten(),
        }
    }
    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { typo_tolerance: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct FacetingAnalytics {
    pub max_values_per_facet: Option<usize>,
    pub sort_facet_values_by_star_count: Option<bool>,
    pub sort_facet_values_by_total: Option<usize>,
}

impl FacetingAnalytics {
    pub fn new(setting: Option<&FacetingSettings>) -> Self {
        Self {
            max_values_per_facet: setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
            sort_facet_values_by_star_count: setting.as_ref().and_then(|s| {
                s.sort_facet_values_by
                    .as_ref()
                    .set()
                    .map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
            }),
            sort_facet_values_by_total: setting
                .as_ref()
                .and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { faceting: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct PaginationAnalytics {
    pub max_total_hits: Option<usize>,
}

impl PaginationAnalytics {
    pub fn new(setting: Option<&PaginationSettings>) -> Self {
        Self { max_total_hits: setting.as_ref().and_then(|s| s.max_total_hits.set()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { pagination: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct StopWordsAnalytics {
    pub total: Option<usize>,
}

impl StopWordsAnalytics {
    pub fn new(stop_words: Option<&BTreeSet<String>>) -> Self {
        Self { total: stop_words.as_ref().map(|stop_words| stop_words.len()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { stop_words: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct SynonymsAnalytics {
    pub total: Option<usize>,
}

impl SynonymsAnalytics {
    pub fn new(synonyms: Option<&BTreeMap<String, Vec<String>>>) -> Self {
        Self { total: synonyms.as_ref().map(|synonyms| synonyms.len()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { synonyms: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct EmbeddersAnalytics {
    // last
    pub total: Option<usize>,
    // Merge the sources
    pub sources: Option<HashSet<String>>,
    // |=
    pub document_template_used: Option<bool>,
    // max
    pub document_template_max_bytes: Option<usize>,
    // |=
    pub binary_quantization_used: Option<bool>,
}

impl EmbeddersAnalytics {
    pub fn new(setting: Option<&BTreeMap<String, Setting<EmbeddingSettings>>>) -> Self {
        let mut sources = std::collections::HashSet::new();

        if let Some(s) = &setting {
            for source in s
                .values()
                .filter_map(|config| config.clone().set())
                .filter_map(|config| config.source.set())
            {
                use meilisearch_types::milli::vector::settings::EmbedderSource;
                match source {
                    EmbedderSource::OpenAi => sources.insert("openAi".to_string()),
                    EmbedderSource::HuggingFace => sources.insert("huggingFace".to_string()),
                    EmbedderSource::UserProvided => sources.insert("userProvided".to_string()),
                    EmbedderSource::Ollama => sources.insert("ollama".to_string()),
                    EmbedderSource::Rest => sources.insert("rest".to_string()),
                };
            }
        };

        Self {
            total: setting.as_ref().map(|s| s.len()),
            sources: Some(sources),
            document_template_used: setting.as_ref().map(|map| {
                map.values()
                    .filter_map(|config| config.clone().set())
                    .any(|config| config.document_template.set().is_some())
            }),
            document_template_max_bytes: setting.as_ref().and_then(|map| {
                map.values()
                    .filter_map(|config| config.clone().set())
                    .filter_map(|config| config.document_template_max_bytes.set())
                    .max()
            }),
            binary_quantization_used: setting.as_ref().map(|map| {
                map.values()
                    .filter_map(|config| config.clone().set())
                    .any(|config| config.binary_quantized.set().is_some())
            }),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { embedders: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
#[serde(transparent)]
pub struct SearchCutoffMsAnalytics {
    pub search_cutoff_ms: Option<u64>,
}

impl SearchCutoffMsAnalytics {
    pub fn new(setting: Option<&u64>) -> Self {
        Self { search_cutoff_ms: setting.copied() }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { search_cutoff_ms: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
#[serde(transparent)]
pub struct LocalesAnalytics {
    pub locales: Option<BTreeSet<Locale>>,
}

impl LocalesAnalytics {
    pub fn new(rules: Option<&Vec<LocalizedAttributesRuleView>>) -> Self {
        LocalesAnalytics {
            locales: rules.as_ref().map(|rules| {
                rules
                    .iter()
                    .flat_map(|rule| rule.locales.iter().cloned())
                    .collect::<std::collections::BTreeSet<_>>()
            }),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { locales: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct DictionaryAnalytics {
    pub total: Option<usize>,
}

impl DictionaryAnalytics {
    pub fn new(dictionary: Option<&BTreeSet<String>>) -> Self {
        Self { total: dictionary.as_ref().map(|dictionary| dictionary.len()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { dictionary: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct SeparatorTokensAnalytics {
    pub total: Option<usize>,
}

impl SeparatorTokensAnalytics {
    pub fn new(separator_tokens: Option<&BTreeSet<String>>) -> Self {
        Self { total: separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { separator_tokens: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct NonSeparatorTokensAnalytics {
    pub total: Option<usize>,
}

impl NonSeparatorTokensAnalytics {
    pub fn new(non_separator_tokens: Option<&BTreeSet<String>>) -> Self {
        Self {
            total: non_separator_tokens
                .as_ref()
                .map(|non_separator_tokens| non_separator_tokens.len()),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { non_separator_tokens: self, ..Default::default() }
    }
}
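Each of the structs above fills in exactly one field of `SettingsAnalytics` and leaves the rest at their defaults, which is what lets the revamp merge every sub-settings route into a single "Settings Updated" event. The following is an illustrative, self-contained sketch of that pattern, not code from this changeset (the two-field `SettingsAnalytics` below is a simplified stand-in for the real, much larger struct):

// Simplified stand-ins for illustration only.
#[derive(Default, Debug)]
struct SettingsAnalytics {
    stop_words: StopWordsAnalytics,
    synonyms: SynonymsAnalytics,
}

#[derive(Default, Debug)]
struct StopWordsAnalytics {
    total: Option<usize>,
}

#[derive(Default, Debug)]
struct SynonymsAnalytics {
    total: Option<usize>,
}

impl StopWordsAnalytics {
    fn into_settings(self) -> SettingsAnalytics {
        // Only the stop-words field is set; everything else keeps its default.
        SettingsAnalytics { stop_words: self, ..Default::default() }
    }
}

fn main() {
    let stop_words: std::collections::BTreeSet<String> =
        ["the".to_string(), "a".to_string()].into();
    // A sub-route only knows about its own setting...
    let partial = StopWordsAnalytics { total: Some(stop_words.len()) }.into_settings();
    // ...and the aggregator later merges such partial events into one.
    println!("{partial:?}");
}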
@ -13,9 +13,10 @@ use serde_json::Value;
use tracing::debug;

use super::ActionPolicy;
use crate::analytics::{Analytics, SimilarAggregator};
use crate::analytics::Analytics;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::indexes::similar_analytics::{SimilarAggregator, SimilarGET, SimilarPOST};
use crate::search::{
    add_search_rules, perform_similar, RankingScoreThresholdSimilar, RetrieveVectors, SearchKind,
    SimilarQuery, SimilarResult, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
@ -34,13 +35,13 @@ pub async fn similar_get(
    index_uid: web::Path<String>,
    params: AwebQueryParameter<SimilarQueryGet, DeserrQueryParamError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
    analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let index_uid = IndexUid::try_from(index_uid.into_inner())?;

    let query = params.0.try_into()?;

    let mut aggregate = SimilarAggregator::from_query(&query, &req);
    let mut aggregate = SimilarAggregator::<SimilarGET>::from_query(&query);

    debug!(parameters = ?query, "Similar get");

@ -49,7 +50,7 @@ pub async fn similar_get(
    if let Ok(similar) = &similar {
        aggregate.succeed(similar);
    }
    analytics.get_similar(aggregate);
    analytics.publish(aggregate, &req);

    let similar = similar?;

@ -62,21 +63,21 @@ pub async fn similar_post(
    index_uid: web::Path<String>,
    params: AwebJson<SimilarQuery, DeserrJsonError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
    analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let index_uid = IndexUid::try_from(index_uid.into_inner())?;

    let query = params.into_inner();
    debug!(parameters = ?query, "Similar post");

    let mut aggregate = SimilarAggregator::from_query(&query, &req);
    let mut aggregate = SimilarAggregator::<SimilarPOST>::from_query(&query);

    let similar = similar(index_scheduler, index_uid, query).await;

    if let Ok(similar) = &similar {
        aggregate.succeed(similar);
    }
    analytics.post_similar(aggregate);
    analytics.publish(aggregate, &req);

    let similar = similar?;

235 meilisearch/src/routes/indexes/similar_analytics.rs Normal file
@ -0,0 +1,235 @@
use std::collections::{BinaryHeap, HashMap};

use once_cell::sync::Lazy;
use regex::Regex;
use serde_json::{json, Value};

use crate::{
    aggregate_methods,
    analytics::{Aggregate, AggregateMethod},
    search::{SimilarQuery, SimilarResult},
};

aggregate_methods!(
    SimilarPOST => "Similar POST",
    SimilarGET => "Similar GET",
);

#[derive(Default)]
pub struct SimilarAggregator<Method: AggregateMethod> {
    // requests
    total_received: usize,
    total_succeeded: usize,
    time_spent: BinaryHeap<usize>,

    // filter
    filter_with_geo_radius: bool,
    filter_with_geo_bounding_box: bool,
    // every time a request has a filter, this field must be incremented by the number of terms it contains
    filter_sum_of_criteria_terms: usize,
    // every time a request has a filter, this field must be incremented by one
    filter_total_number_of_criteria: usize,
    used_syntax: HashMap<String, usize>,

    // Whether a non-default embedder was specified
    retrieve_vectors: bool,

    // pagination
    max_limit: usize,
    max_offset: usize,

    // formatting
    max_attributes_to_retrieve: usize,

    // scoring
    show_ranking_score: bool,
    show_ranking_score_details: bool,
    ranking_score_threshold: bool,

    marker: std::marker::PhantomData<Method>,
}

impl<Method: AggregateMethod> SimilarAggregator<Method> {
    #[allow(clippy::field_reassign_with_default)]
    pub fn from_query(query: &SimilarQuery) -> Self {
        let SimilarQuery {
            id: _,
            embedder: _,
            offset,
            limit,
            attributes_to_retrieve: _,
            retrieve_vectors,
            show_ranking_score,
            show_ranking_score_details,
            filter,
            ranking_score_threshold,
        } = query;

        let mut ret = Self::default();

        ret.total_received = 1;

        if let Some(ref filter) = filter {
            static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
            ret.filter_total_number_of_criteria = 1;

            let syntax = match filter {
                Value::String(_) => "string".to_string(),
                Value::Array(values) => {
                    if values.iter().map(|v| v.to_string()).any(|s| RE.is_match(&s)) {
                        "mixed".to_string()
                    } else {
                        "array".to_string()
                    }
                }
                _ => "none".to_string(),
            };
            // convert the string to a HashMap
            ret.used_syntax.insert(syntax, 1);

            let stringified_filters = filter.to_string();
            ret.filter_with_geo_radius = stringified_filters.contains("_geoRadius(");
            ret.filter_with_geo_bounding_box = stringified_filters.contains("_geoBoundingBox(");
            ret.filter_sum_of_criteria_terms = RE.split(&stringified_filters).count();
        }

        ret.max_limit = *limit;
        ret.max_offset = *offset;

        ret.show_ranking_score = *show_ranking_score;
        ret.show_ranking_score_details = *show_ranking_score_details;
        ret.ranking_score_threshold = ranking_score_threshold.is_some();

        ret.retrieve_vectors = *retrieve_vectors;

        ret
    }

    pub fn succeed(&mut self, result: &SimilarResult) {
        let SimilarResult { id: _, hits: _, processing_time_ms, hits_info: _ } = result;

        self.total_succeeded = self.total_succeeded.saturating_add(1);

        self.time_spent.push(*processing_time_ms as usize);
    }
}

impl<Method: AggregateMethod> Aggregate for SimilarAggregator<Method> {
    fn event_name(&self) -> &'static str {
        Method::event_name()
    }

    /// Aggregate one [SimilarAggregator] into another.
    fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
        let Self {
            total_received,
            total_succeeded,
            mut time_spent,
            filter_with_geo_radius,
            filter_with_geo_bounding_box,
            filter_sum_of_criteria_terms,
            filter_total_number_of_criteria,
            used_syntax,
            max_limit,
            max_offset,
            max_attributes_to_retrieve,
            show_ranking_score,
            show_ranking_score_details,
            ranking_score_threshold,
            retrieve_vectors,
            marker: _,
        } = *new;

        // request
        self.total_received = self.total_received.saturating_add(total_received);
        self.total_succeeded = self.total_succeeded.saturating_add(total_succeeded);
        self.time_spent.append(&mut time_spent);

        // filter
        self.filter_with_geo_radius |= filter_with_geo_radius;
        self.filter_with_geo_bounding_box |= filter_with_geo_bounding_box;
        self.filter_sum_of_criteria_terms =
            self.filter_sum_of_criteria_terms.saturating_add(filter_sum_of_criteria_terms);
        self.filter_total_number_of_criteria =
            self.filter_total_number_of_criteria.saturating_add(filter_total_number_of_criteria);
        for (key, value) in used_syntax.into_iter() {
            let used_syntax = self.used_syntax.entry(key).or_insert(0);
            *used_syntax = used_syntax.saturating_add(value);
        }

        self.retrieve_vectors |= retrieve_vectors;

        // pagination
        self.max_limit = self.max_limit.max(max_limit);
        self.max_offset = self.max_offset.max(max_offset);

        // formatting
        self.max_attributes_to_retrieve =
            self.max_attributes_to_retrieve.max(max_attributes_to_retrieve);

        // scoring
        self.show_ranking_score |= show_ranking_score;
        self.show_ranking_score_details |= show_ranking_score_details;
        self.ranking_score_threshold |= ranking_score_threshold;

        self
    }

    fn into_event(self: Box<Self>) -> serde_json::Value {
        let Self {
            total_received,
            total_succeeded,
            time_spent,
            filter_with_geo_radius,
            filter_with_geo_bounding_box,
            filter_sum_of_criteria_terms,
            filter_total_number_of_criteria,
            used_syntax,
            max_limit,
            max_offset,
            max_attributes_to_retrieve,
            show_ranking_score,
            show_ranking_score_details,
            ranking_score_threshold,
            retrieve_vectors,
            marker: _,
        } = *self;

        // we get all the values in a sorted manner
        let time_spent = time_spent.into_sorted_vec();
        // the index of the 99th percentage of value
        let percentile_99th = time_spent.len() * 99 / 100;
        // We are only interested by the slowest value of the 99th fastest results
        let time_spent = time_spent.get(percentile_99th);

        json!({
            "requests": {
                "99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
                "total_succeeded": total_succeeded,
                "total_failed": total_received.saturating_sub(total_succeeded), // just to be sure we never panics
                "total_received": total_received,
            },
            "filter": {
                "with_geoRadius": filter_with_geo_radius,
                "with_geoBoundingBox": filter_with_geo_bounding_box,
                "avg_criteria_number": format!("{:.2}", filter_sum_of_criteria_terms as f64 / filter_total_number_of_criteria as f64),
                "most_used_syntax": used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
            },
            "vector": {
                "retrieve_vectors": retrieve_vectors,
            },
            "pagination": {
                "max_limit": max_limit,
                "max_offset": max_offset,
            },
            "formatting": {
                "max_attributes_to_retrieve": max_attributes_to_retrieve,
            },
            "scoring": {
                "show_ranking_score": show_ranking_score,
                "show_ranking_score_details": show_ranking_score_details,
                "ranking_score_threshold": ranking_score_threshold,
            }
        })
    }
}
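The `Aggregate` implementation above shows the general merging rules used throughout this PR: counters are added with saturating arithmetic, booleans are OR-ed, and extrema keep the max. A minimal, self-contained sketch of that fold, using simplified stand-ins rather than the crate's actual trait:

// Minimal stand-in for the Aggregate trait; the real trait also carries
// the event name and an into_event() conversion.
trait Aggregate {
    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self>;
}

#[derive(Debug)]
struct SimilarStats {
    total_received: usize,
    max_limit: usize,
}

impl Aggregate for SimilarStats {
    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
        Box::new(SimilarStats {
            // counters add up, extrema keep the max, flags would be OR-ed
            total_received: self.total_received.saturating_add(new.total_received),
            max_limit: self.max_limit.max(new.max_limit),
        })
    }
}

fn main() {
    let a = Box::new(SimilarStats { total_received: 1, max_limit: 20 });
    let b = Box::new(SimilarStats { total_received: 1, max_limit: 50 });
    let merged = a.aggregate(b);
    println!("{merged:?}"); // total_received: 2, max_limit: 50
}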
@ -25,6 +25,7 @@ pub mod indexes;
mod logs;
mod metrics;
mod multi_search;
mod multi_search_analytics;
mod snapshot;
mod swap_indexes;
pub mod tasks;

@ -9,7 +9,7 @@ use meilisearch_types::keys::actions;
use serde::Serialize;
use tracing::debug;

use crate::analytics::{Analytics, MultiSearchAggregator};
use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
@ -21,6 +21,8 @@ use crate::search::{
};
use crate::search_queue::SearchQueue;

use super::multi_search_analytics::MultiSearchAggregator;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::post().to(SeqHandler(multi_search_with_post))));
}
@ -35,7 +37,7 @@ pub async fn multi_search_with_post(
    search_queue: Data<SearchQueue>,
    params: AwebJson<FederatedSearch, DeserrJsonError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
    analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
    // Since we don't want to process half of the search requests and then get a permit refused
    // we're going to get one permit for the whole duration of the multi-search request.
@ -43,7 +45,7 @@ pub async fn multi_search_with_post(

    let federated_search = params.into_inner();

    let mut multi_aggregate = MultiSearchAggregator::from_federated_search(&federated_search, &req);
    let mut multi_aggregate = MultiSearchAggregator::from_federated_search(&federated_search);

    let FederatedSearch { mut queries, federation } = federated_search;

@ -87,7 +89,7 @@ pub async fn multi_search_with_post(
                multi_aggregate.succeed();
            }

            analytics.post_multi_search(multi_aggregate);
            analytics.publish(multi_aggregate, &req);
            HttpResponse::Ok().json(search_result??)
        }
        None => {
@ -149,7 +151,7 @@ pub async fn multi_search_with_post(
            if search_results.is_ok() {
                multi_aggregate.succeed();
            }
            analytics.post_multi_search(multi_aggregate);
            analytics.publish(multi_aggregate, &req);

            let search_results = search_results.map_err(|(mut err, query_index)| {
                // Add the query index that failed as context for the error message.

170 meilisearch/src/routes/multi_search_analytics.rs Normal file
@ -0,0 +1,170 @@
use std::collections::HashSet;

use serde_json::json;

use crate::{
    analytics::Aggregate,
    search::{FederatedSearch, SearchQueryWithIndex},
};

#[derive(Default)]
pub struct MultiSearchAggregator {
    // requests
    total_received: usize,
    total_succeeded: usize,

    // sum of the number of distinct indexes in each single request, use with total_received to compute an avg
    total_distinct_index_count: usize,
    // number of queries with a single index, use with total_received to compute a proportion
    total_single_index: usize,

    // sum of the number of search queries in the requests, use with total_received to compute an average
    total_search_count: usize,

    // scoring
    show_ranking_score: bool,
    show_ranking_score_details: bool,

    // federation
    use_federation: bool,
}

impl MultiSearchAggregator {
    pub fn from_federated_search(federated_search: &FederatedSearch) -> Self {
        let use_federation = federated_search.federation.is_some();

        let distinct_indexes: HashSet<_> = federated_search
            .queries
            .iter()
            .map(|query| {
                let query = &query;
                // make sure we get a compilation error if a field gets added to / removed from SearchQueryWithIndex
                let SearchQueryWithIndex {
                    index_uid,
                    federation_options: _,
                    q: _,
                    vector: _,
                    offset: _,
                    limit: _,
                    page: _,
                    hits_per_page: _,
                    attributes_to_retrieve: _,
                    retrieve_vectors: _,
                    attributes_to_crop: _,
                    crop_length: _,
                    attributes_to_highlight: _,
                    show_ranking_score: _,
                    show_ranking_score_details: _,
                    show_matches_position: _,
                    filter: _,
                    sort: _,
                    distinct: _,
                    facets: _,
                    highlight_pre_tag: _,
                    highlight_post_tag: _,
                    crop_marker: _,
                    matching_strategy: _,
                    attributes_to_search_on: _,
                    hybrid: _,
                    ranking_score_threshold: _,
                    locales: _,
                } = query;

                index_uid.as_str()
            })
            .collect();

        let show_ranking_score =
            federated_search.queries.iter().any(|query| query.show_ranking_score);
        let show_ranking_score_details =
            federated_search.queries.iter().any(|query| query.show_ranking_score_details);

        Self {
            total_received: 1,
            total_succeeded: 0,
            total_distinct_index_count: distinct_indexes.len(),
            total_single_index: if distinct_indexes.len() == 1 { 1 } else { 0 },
            total_search_count: federated_search.queries.len(),
            show_ranking_score,
            show_ranking_score_details,
            use_federation,
        }
    }

    pub fn succeed(&mut self) {
        self.total_succeeded = self.total_succeeded.saturating_add(1);
    }
}

impl Aggregate for MultiSearchAggregator {
    fn event_name(&self) -> &'static str {
        "Documents Searched by Multi-Search POST"
    }

    /// Aggregate one [MultiSearchAggregator] into another.
    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
        // write the aggregate in a way that will cause a compilation error if a field is added.

        // get ownership of self, replacing it by a default value.
        let this = *self;

        let total_received = this.total_received.saturating_add(new.total_received);
        let total_succeeded = this.total_succeeded.saturating_add(new.total_succeeded);
        let total_distinct_index_count =
            this.total_distinct_index_count.saturating_add(new.total_distinct_index_count);
        let total_single_index = this.total_single_index.saturating_add(new.total_single_index);
        let total_search_count = this.total_search_count.saturating_add(new.total_search_count);
        let show_ranking_score = this.show_ranking_score || new.show_ranking_score;
        let show_ranking_score_details =
            this.show_ranking_score_details || new.show_ranking_score_details;
        let use_federation = this.use_federation || new.use_federation;

        Box::new(Self {
            total_received,
            total_succeeded,
            total_distinct_index_count,
            total_single_index,
            total_search_count,
            show_ranking_score,
            show_ranking_score_details,
            use_federation,
        })
    }

    fn into_event(self: Box<Self>) -> serde_json::Value {
        let Self {
            total_received,
            total_succeeded,
            total_distinct_index_count,
            total_single_index,
            total_search_count,
            show_ranking_score,
            show_ranking_score_details,
            use_federation,
        } = *self;

        json!({
            "requests": {
                "total_succeeded": total_succeeded,
                "total_failed": total_received.saturating_sub(total_succeeded), // just to be sure we never panics
                "total_received": total_received,
            },
            "indexes": {
                "total_single_index": total_single_index,
                "total_distinct_index_count": total_distinct_index_count,
                "avg_distinct_index_count": (total_distinct_index_count as f64) / (total_received as f64), // not 0 else returned early
            },
            "searches": {
                "total_search_count": total_search_count,
                "avg_search_count": (total_search_count as f64) / (total_received as f64),
            },
            "scoring": {
                "show_ranking_score": show_ranking_score,
                "show_ranking_score_details": show_ranking_score_details,
            },
            "federation": {
                "use_federation": use_federation,
            }
        })
    }
}
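The averages emitted by `into_event` above are computed against `total_received`, so they only make sense once at least one request has been aggregated (hence the "not 0 else returned early" comment). A small illustration of that arithmetic, not crate code:

// Two multi-search requests, one touching 1 index and one touching 3, give
// avg_distinct_index_count = (1 + 3) / 2 = 2.0.
fn main() {
    let total_received = 2usize;
    let total_distinct_index_count = 1 + 3;
    let avg = total_distinct_index_count as f64 / total_received as f64;
    assert_eq!(avg, 2.0);
    println!("avg_distinct_index_count = {avg}");
}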
@ -3,7 +3,6 @@ use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::tasks::KindWithContent;
use serde_json::json;
use tracing::debug;

use crate::analytics::Analytics;
@ -17,13 +16,15 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_snapshot))));
}

crate::empty_analytics!(SnapshotAnalytics, "Snapshot Created");

pub async fn create_snapshot(
    index_scheduler: GuardedData<ActionPolicy<{ actions::SNAPSHOTS_CREATE }>, Data<IndexScheduler>>,
    req: HttpRequest,
    opt: web::Data<Opt>,
    analytics: web::Data<dyn Analytics>,
    analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish("Snapshot Created".to_string(), json!({}), Some(&req));
    analytics.publish(SnapshotAnalytics::default(), &req);

    let task = KindWithContent::SnapshotCreation;
    let uid = get_task_id(&req, &opt)?;

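The `crate::empty_analytics!` macro is used here because the snapshot route has no event-specific properties. Its expansion is not part of this diff; as an assumption, it plausibly generates something along the lines of this hypothetical sketch (names and shape are illustrative only, not the real macro output):

use serde_json::{json, Value};

// Hypothetical expansion sketch; the real macro lives in meilisearch's analytics module.
#[derive(Default)]
struct SnapshotAnalytics;

impl SnapshotAnalytics {
    fn event_name(&self) -> &'static str {
        "Snapshot Created"
    }
    // The event carries no properties of its own; the common metadata
    // (user-agent, timestamp, requests.total_received) is added by the publisher.
    fn into_event(self) -> Value {
        json!({})
    }
}

fn main() {
    let a = SnapshotAnalytics::default();
    println!("{} -> {}", a.event_name(), a.into_event());
}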
@ -8,10 +8,10 @@ use meilisearch_types::error::deserr_codes::InvalidSwapIndexes;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::tasks::{IndexSwap, KindWithContent};
use serde_json::json;
use serde::Serialize;

use super::{get_task_id, is_dry_run, SummarizedTaskView};
use crate::analytics::Analytics;
use crate::analytics::{Aggregate, Analytics};
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
@ -29,21 +29,36 @@ pub struct SwapIndexesPayload {
    indexes: Vec<IndexUid>,
}

#[derive(Serialize)]
struct IndexSwappedAnalytics {
    swap_operation_number: usize,
}

impl Aggregate for IndexSwappedAnalytics {
    fn event_name(&self) -> &'static str {
        "Indexes Swapped"
    }

    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
        Box::new(Self {
            swap_operation_number: self.swap_operation_number.max(new.swap_operation_number),
        })
    }

    fn into_event(self: Box<Self>) -> serde_json::Value {
        serde_json::to_value(*self).unwrap_or_default()
    }
}

pub async fn swap_indexes(
    index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
    params: AwebJson<Vec<SwapIndexesPayload>, DeserrJsonError>,
    req: HttpRequest,
    opt: web::Data<Opt>,
    analytics: web::Data<dyn Analytics>,
    analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let params = params.into_inner();
    analytics.publish(
        "Indexes Swapped".to_string(),
        json!({
            "swap_operation_number": params.len(),
        }),
        Some(&req),
    );
    analytics.publish(IndexSwappedAnalytics { swap_operation_number: params.len() }, &req);
    let filters = index_scheduler.filters();

    let mut swaps = vec![];

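Note that `IndexSwappedAnalytics::aggregate` keeps the largest `swap_operation_number` seen in the aggregation window rather than summing the events. Illustration only, not crate code:

// Merging events with 2, 5 and 1 swap operations reports 5, not 2 + 5 + 1 = 8.
fn main() {
    let batches = [2usize, 5, 1];
    let swap_operation_number = batches.into_iter().max().unwrap_or(0);
    assert_eq!(swap_operation_number, 5);
}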
@ -12,18 +12,17 @@ use meilisearch_types::star_or::{OptionStarOr, OptionStarOrList};
use meilisearch_types::task_view::TaskView;
use meilisearch_types::tasks::{Kind, KindWithContent, Status};
use serde::Serialize;
use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
use time::{Date, Duration, OffsetDateTime, Time};
use tokio::task;

use super::{get_task_id, is_dry_run, SummarizedTaskView};
use crate::analytics::Analytics;
use crate::analytics::{Aggregate, AggregateMethod, Analytics};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::Opt;
use crate::{aggregate_methods, Opt};

const DEFAULT_LIMIT: u32 = 20;

@ -158,12 +157,69 @@ impl TaskDeletionOrCancelationQuery {
    }
}

aggregate_methods!(
    CancelTasks => "Tasks Canceled",
    DeleteTasks => "Tasks Deleted",
);

#[derive(Serialize)]
struct TaskFilterAnalytics<Method: AggregateMethod> {
    filtered_by_uid: bool,
    filtered_by_index_uid: bool,
    filtered_by_type: bool,
    filtered_by_status: bool,
    filtered_by_canceled_by: bool,
    filtered_by_before_enqueued_at: bool,
    filtered_by_after_enqueued_at: bool,
    filtered_by_before_started_at: bool,
    filtered_by_after_started_at: bool,
    filtered_by_before_finished_at: bool,
    filtered_by_after_finished_at: bool,

    #[serde(skip)]
    marker: std::marker::PhantomData<Method>,
}

impl<Method: AggregateMethod + 'static> Aggregate for TaskFilterAnalytics<Method> {
    fn event_name(&self) -> &'static str {
        Method::event_name()
    }

    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
        Box::new(Self {
            filtered_by_uid: self.filtered_by_uid | new.filtered_by_uid,
            filtered_by_index_uid: self.filtered_by_index_uid | new.filtered_by_index_uid,
            filtered_by_type: self.filtered_by_type | new.filtered_by_type,
            filtered_by_status: self.filtered_by_status | new.filtered_by_status,
            filtered_by_canceled_by: self.filtered_by_canceled_by | new.filtered_by_canceled_by,
            filtered_by_before_enqueued_at: self.filtered_by_before_enqueued_at
                | new.filtered_by_before_enqueued_at,
            filtered_by_after_enqueued_at: self.filtered_by_after_enqueued_at
                | new.filtered_by_after_enqueued_at,
            filtered_by_before_started_at: self.filtered_by_before_started_at
                | new.filtered_by_before_started_at,
            filtered_by_after_started_at: self.filtered_by_after_started_at
                | new.filtered_by_after_started_at,
            filtered_by_before_finished_at: self.filtered_by_before_finished_at
                | new.filtered_by_before_finished_at,
            filtered_by_after_finished_at: self.filtered_by_after_finished_at
                | new.filtered_by_after_finished_at,

            marker: std::marker::PhantomData,
        })
    }

    fn into_event(self: Box<Self>) -> serde_json::Value {
        serde_json::to_value(*self).unwrap_or_default()
    }
}

async fn cancel_tasks(
    index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
    params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
    req: HttpRequest,
    opt: web::Data<Opt>,
    analytics: web::Data<dyn Analytics>,
    analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let params = params.into_inner();

@ -172,21 +228,22 @@ async fn cancel_tasks(
    }

    analytics.publish(
        "Tasks Canceled".to_string(),
        json!({
            "filtered_by_uid": params.uids.is_some(),
            "filtered_by_index_uid": params.index_uids.is_some(),
            "filtered_by_type": params.types.is_some(),
            "filtered_by_status": params.statuses.is_some(),
            "filtered_by_canceled_by": params.canceled_by.is_some(),
            "filtered_by_before_enqueued_at": params.before_enqueued_at.is_some(),
            "filtered_by_after_enqueued_at": params.after_enqueued_at.is_some(),
            "filtered_by_before_started_at": params.before_started_at.is_some(),
            "filtered_by_after_started_at": params.after_started_at.is_some(),
            "filtered_by_before_finished_at": params.before_finished_at.is_some(),
            "filtered_by_after_finished_at": params.after_finished_at.is_some(),
        }),
        Some(&req),
        TaskFilterAnalytics::<CancelTasks> {
            filtered_by_uid: params.uids.is_some(),
            filtered_by_index_uid: params.index_uids.is_some(),
            filtered_by_type: params.types.is_some(),
            filtered_by_status: params.statuses.is_some(),
            filtered_by_canceled_by: params.canceled_by.is_some(),
            filtered_by_before_enqueued_at: params.before_enqueued_at.is_some(),
            filtered_by_after_enqueued_at: params.after_enqueued_at.is_some(),
            filtered_by_before_started_at: params.before_started_at.is_some(),
            filtered_by_after_started_at: params.after_started_at.is_some(),
            filtered_by_before_finished_at: params.before_finished_at.is_some(),
            filtered_by_after_finished_at: params.after_finished_at.is_some(),

            marker: std::marker::PhantomData,
        },
        &req,
    );

    let query = params.into_query();
@ -214,7 +271,7 @@ async fn delete_tasks(
    params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
    req: HttpRequest,
    opt: web::Data<Opt>,
    analytics: web::Data<dyn Analytics>,
    analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let params = params.into_inner();

@ -223,22 +280,24 @@ async fn delete_tasks(
    }

    analytics.publish(
        "Tasks Deleted".to_string(),
        json!({
            "filtered_by_uid": params.uids.is_some(),
            "filtered_by_index_uid": params.index_uids.is_some(),
            "filtered_by_type": params.types.is_some(),
            "filtered_by_status": params.statuses.is_some(),
            "filtered_by_canceled_by": params.canceled_by.is_some(),
            "filtered_by_before_enqueued_at": params.before_enqueued_at.is_some(),
            "filtered_by_after_enqueued_at": params.after_enqueued_at.is_some(),
            "filtered_by_before_started_at": params.before_started_at.is_some(),
            "filtered_by_after_started_at": params.after_started_at.is_some(),
            "filtered_by_before_finished_at": params.before_finished_at.is_some(),
            "filtered_by_after_finished_at": params.after_finished_at.is_some(),
        }),
        Some(&req),
        TaskFilterAnalytics::<DeleteTasks> {
            filtered_by_uid: params.uids.is_some(),
            filtered_by_index_uid: params.index_uids.is_some(),
            filtered_by_type: params.types.is_some(),
            filtered_by_status: params.statuses.is_some(),
            filtered_by_canceled_by: params.canceled_by.is_some(),
            filtered_by_before_enqueued_at: params.before_enqueued_at.is_some(),
            filtered_by_after_enqueued_at: params.after_enqueued_at.is_some(),
            filtered_by_before_started_at: params.before_started_at.is_some(),
            filtered_by_after_started_at: params.after_started_at.is_some(),
            filtered_by_before_finished_at: params.before_finished_at.is_some(),
            filtered_by_after_finished_at: params.after_finished_at.is_some(),

            marker: std::marker::PhantomData,
        },
        &req,
    );

    let query = params.into_query();

    let (tasks, _) = index_scheduler.get_task_ids_from_authorized_indexes(

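`TaskFilterAnalytics` is generic over a zero-sized marker type so a single struct can back both the "Tasks Canceled" and "Tasks Deleted" events. The body of `aggregate_methods!` is not shown in this diff; as an assumption, the marker-type pattern it supports looks roughly like this hypothetical sketch:

// Hypothetical sketch; the real AggregateMethod trait and aggregate_methods!
// macro live in meilisearch's analytics module.
trait AggregateMethod {
    fn event_name() -> &'static str;
}

struct CancelTasks;
struct DeleteTasks;

impl AggregateMethod for CancelTasks {
    fn event_name() -> &'static str {
        "Tasks Canceled"
    }
}
impl AggregateMethod for DeleteTasks {
    fn event_name() -> &'static str {
        "Tasks Deleted"
    }
}

// One generic analytics struct, two distinct events, at zero runtime cost.
struct TaskFilterAnalytics<M: AggregateMethod> {
    filtered_by_uid: bool,
    marker: std::marker::PhantomData<M>,
}

fn main() {
    let cancel = TaskFilterAnalytics::<CancelTasks> {
        filtered_by_uid: true,
        marker: std::marker::PhantomData,
    };
    let _ = cancel.filtered_by_uid;
    println!("{} / {}", CancelTasks::event_name(), DeleteTasks::event_name());
}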
@ -381,7 +381,6 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
        db_path: dir.as_ref().join("db"),
        dump_dir: dir.as_ref().join("dumps"),
        env: "development".to_owned(),
        #[cfg(feature = "analytics")]
        no_analytics: true,
        max_index_size: Byte::from_u64_with_unit(100, Unit::MiB).unwrap(),
        max_task_db_size: Byte::from_u64_with_unit(1, Unit::GiB).unwrap(),

@ -9,8 +9,9 @@ use actix_web::test;
use actix_web::test::TestRequest;
use actix_web::web::Data;
use index_scheduler::IndexScheduler;
use meilisearch::analytics::Analytics;
use meilisearch::search_queue::SearchQueue;
use meilisearch::{analytics, create_app, Opt, SubscriberForSecondLayer};
use meilisearch::{create_app, Opt, SubscriberForSecondLayer};
use meilisearch_auth::AuthController;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::Layer;
@ -141,7 +142,7 @@ impl Service {
            Data::new(search_queue),
            self.options.clone(),
            (route_layer_handle, stderr_layer_handle),
            analytics::MockAnalytics::new(&self.options),
            Data::new(Analytics::no_analytics()),
            true,
        ))
        .await

@ -7,8 +7,9 @@ use std::str::FromStr;
use actix_web::http::header::ContentType;
use actix_web::web::Data;
use meili_snap::snapshot;
use meilisearch::analytics::Analytics;
use meilisearch::search_queue::SearchQueue;
use meilisearch::{analytics, create_app, Opt, SubscriberForSecondLayer};
use meilisearch::{create_app, Opt, SubscriberForSecondLayer};
use tracing::level_filters::LevelFilter;
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::Layer;
@ -54,7 +55,7 @@ async fn basic_test_log_stream_route() {
        Data::new(search_queue),
        server.service.options.clone(),
        (route_layer_handle, stderr_layer_handle),
        analytics::MockAnalytics::new(&server.service.options),
        Data::new(Analytics::no_analytics()),
        true,
    ))
    .await;
