diff --git a/Cargo.lock b/Cargo.lock index 2b0657da8..8da6a4b51 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1904,6 +1904,7 @@ dependencies = [ "insta", "log", "meili-snap", + "meilisearch-auth", "meilisearch-types", "nelson", "page_size 0.5.0", diff --git a/index-scheduler/Cargo.toml b/index-scheduler/Cargo.toml index 77936c435..99dfaa493 100644 --- a/index-scheduler/Cargo.toml +++ b/index-scheduler/Cargo.toml @@ -19,6 +19,7 @@ dump = { path = "../dump" } enum-iterator = "1.1.3" file-store = { path = "../file-store" } log = "0.4.14" +meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-types = { path = "../meilisearch-types" } page_size = "0.5.0" roaring = { version = "0.10.0", features = ["serde"] } diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index d80131681..8c050a34f 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -43,7 +43,6 @@ use file_store::FileStore; use meilisearch_types::error::ResponseError; use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str}; use meilisearch_types::heed::{self, Database, Env, RoTxn}; -use meilisearch_types::index_uid_pattern::IndexUidPattern; use meilisearch_types::milli; use meilisearch_types::milli::documents::DocumentsBatchBuilder; use meilisearch_types::milli::update::IndexerConfig; @@ -630,13 +629,13 @@ impl IndexScheduler { &self, rtxn: &RoTxn, query: &Query, - authorized_indexes: &Option<Vec<IndexUidPattern>>, + filters: &meilisearch_auth::AuthFilter, ) -> Result<RoaringBitmap> { let mut tasks = self.get_task_ids(rtxn, query)?; // If the query contains a list of index uid or there is a finite list of authorized indexes, // then we must exclude all the kinds that aren't associated to one and only one index. - if query.index_uids.is_some() || authorized_indexes.is_some() { + if query.index_uids.is_some() || !filters.all_indexes_authorized() { for kind in enum_iterator::all::<Kind>().filter(|kind| !kind.related_to_one_index()) { tasks -= self.get_kind(rtxn, kind)?; } } // Any task that is internally associated with a non-authorized index // must be discarded. - if let Some(authorized_indexes) = authorized_indexes { + if !filters.all_indexes_authorized() { let all_indexes_iter = self.index_tasks.iter(rtxn)?; for result in all_indexes_iter { let (index, index_tasks) = result?; - if !authorized_indexes.iter().any(|p| p.matches_str(index)) { + if !filters.is_index_authorized(index) { tasks -= index_tasks; } }
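Reviewer note on the hunk above: the scheduler no longer receives an optional allow-list; it asks the `AuthFilter` directly and subtracts the task ids of every index the caller may not see. A minimal, self-contained sketch of that subtraction — plain `HashSet`s are assumptions standing in for the scheduler's `RoaringBitmap`s and for `AuthFilter::is_index_authorized`:

```rust
use std::collections::{HashMap, HashSet};

// Stand-in for AuthFilter::is_index_authorized: a plain allow-list,
// with "*" meaning "all indexes authorized".
fn is_index_authorized(allowed: &HashSet<&str>, index: &str) -> bool {
    allowed.contains("*") || allowed.contains(index)
}

fn main() {
    // index -> task ids, mirroring the scheduler's index_tasks database
    // (the real code stores RoaringBitmap values in LMDB).
    let index_tasks: HashMap<&str, HashSet<u32>> = HashMap::from([
        ("catto", HashSet::from([0, 2])),
        ("doggo", HashSet::from([1])),
    ]);
    let allowed = HashSet::from(["doggo"]);

    let mut tasks: HashSet<u32> = HashSet::from([0, 1, 2]);
    // Subtract the tasks of every unauthorized index, as in
    // get_task_ids_from_authorized_indexes above.
    for (index, ids) in &index_tasks {
        if !is_index_authorized(&allowed, index) {
            tasks = &tasks - ids;
        }
    }
    assert_eq!(tasks, HashSet::from([1]));
}
```

The real code runs this loop inside a read transaction over the `index_tasks` database; the shape of the filtering is the same.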
@@ -668,12 +667,11 @@ impl IndexScheduler { pub fn get_tasks_from_authorized_indexes( &self, query: Query, - authorized_indexes: Option<Vec<IndexUidPattern>>, + filters: &meilisearch_auth::AuthFilter, ) -> Result<Vec<Task>> { let rtxn = self.env.read_txn()?; - let tasks = - self.get_task_ids_from_authorized_indexes(&rtxn, &query, &authorized_indexes)?; + let tasks = self.get_task_ids_from_authorized_indexes(&rtxn, &query, filters)?; let tasks = self.get_existing_tasks( &rtxn, @@ -1120,7 +1118,9 @@ mod tests { use crossbeam::channel::RecvTimeoutError; use file_store::File; use meili_snap::snapshot; + use meilisearch_auth::AuthFilter; use meilisearch_types::document_formats::DocumentFormatError; + use meilisearch_types::index_uid_pattern::IndexUidPattern; use meilisearch_types::milli::obkv_to_json; use meilisearch_types::milli::update::IndexDocumentsMethod::{ ReplaceDocuments, UpdateDocuments, @@ -2371,38 +2371,45 @@ mod tests { let rtxn = index_scheduler.env.read_txn().unwrap(); let query = Query { limit: Some(0), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[]"); let query = Query { limit: Some(1), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[2,]"); let query = Query { limit: Some(2), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); let query = Query { from: Some(1), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[0,1,]"); let query = Query { from: Some(2), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]"); let query = Query { from: Some(1), limit: Some(1), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[1,]"); let query = Query { from: Some(1), limit: Some(2), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn,
&query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[0,1,]"); } @@ -2427,21 +2434,24 @@ mod tests { let rtxn = index_scheduler.env.read_txn().unwrap(); let query = Query { statuses: Some(vec![Status::Processing]), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[0,]"); // only the processing tasks in the first tick let query = Query { statuses: Some(vec![Status::Enqueued]), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); // only the enqueued tasks in the first tick let query = Query { statuses: Some(vec![Status::Enqueued, Status::Processing]), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]"); // both enqueued and processing tasks in the first tick let query = Query { @@ -2449,8 +2459,9 @@ mod tests { after_started_at: Some(start_time), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // both enqueued and processing tasks in the first tick, but limited to those with a started_at // that comes after the start of the test, which should excludes the enqueued tasks snapshot!(snapshot_bitmap(&tasks), @"[0,]"); @@ -2460,8 +2471,9 @@ mod tests { before_started_at: Some(start_time), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // both enqueued and processing tasks in the first tick, but limited to those with a started_at // that comes before the start of the test, which should excludes all of them snapshot!(snapshot_bitmap(&tasks), @"[]"); @@ -2472,8 +2484,9 @@ mod tests { before_started_at: Some(start_time + Duration::minutes(1)), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // both enqueued and processing tasks in the first tick, but limited to those with a started_at // that comes after the start of the test and before one minute after the start of the test, // which should exclude the enqueued tasks and include the only processing task @@ -2498,8 +2511,9 @@ mod tests { before_started_at: Some(start_time + Duration::minutes(1)), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // both succeeded and processing tasks in the first tick, but limited to those with a 
started_at // that comes after the start of the test and before one minute after the start of the test, // which should include all tasks @@ -2510,8 +2524,9 @@ mod tests { before_started_at: Some(start_time), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // both succeeded and processing tasks in the first tick, but limited to those with a started_at // that comes before the start of the test, which should exclude all tasks snapshot!(snapshot_bitmap(&tasks), @"[]"); @@ -2522,8 +2537,9 @@ mod tests { before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // both succeeded and processing tasks in the first tick, but limited to those with a started_at // that comes after the start of the second part of the test and before one minute after the // second start of the test, which should exclude all tasks @@ -2541,8 +2557,9 @@ mod tests { let rtxn = index_scheduler.env.read_txn().unwrap(); - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // we run the same query to verify that, and indeed find that the last task is matched snapshot!(snapshot_bitmap(&tasks), @"[2,]"); @@ -2552,8 +2569,9 @@ mod tests { before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // enqueued, succeeded, or processing tasks started after the second part of the test, should // again only return the last task snapshot!(snapshot_bitmap(&tasks), @"[2,]"); @@ -2563,8 +2581,9 @@ mod tests { // now the last task should have failed snapshot!(snapshot_index_scheduler(&index_scheduler), name: "end"); - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // so running the last query should return nothing snapshot!(snapshot_bitmap(&tasks), @"[]"); @@ -2574,8 +2593,9 @@ mod tests { before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // but the same query on failed tasks should return the last task snapshot!(snapshot_bitmap(&tasks), @"[2,]"); @@ -2585,8 +2605,9 @@ mod tests { before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // but the same query on failed tasks should return the 
last task snapshot!(snapshot_bitmap(&tasks), @"[2,]"); @@ -2597,8 +2618,9 @@ mod tests { before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // same query but with an invalid uid snapshot!(snapshot_bitmap(&tasks), @"[]"); @@ -2609,8 +2631,9 @@ mod tests { before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // same query but with a valid uid snapshot!(snapshot_bitmap(&tasks), @"[2,]"); } @@ -2640,8 +2663,9 @@ mod tests { let rtxn = index_scheduler.env.read_txn().unwrap(); let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // only the first task associated with catto is returned, the indexSwap tasks are excluded! snapshot!(snapshot_bitmap(&tasks), @"[0,]"); @@ -2650,7 +2674,9 @@ mod tests { .get_task_ids_from_authorized_indexes( &rtxn, &query, - &Some(vec![IndexUidPattern::new_unchecked("doggo")]), + &AuthFilter::with_allowed_indexes( + vec![IndexUidPattern::new_unchecked("doggo")].into_iter().collect(), + ), ) .unwrap(); // we have asked for only the tasks associated with catto, but are only authorized to retrieve the tasks @@ -2662,7 +2688,9 @@ mod tests { .get_task_ids_from_authorized_indexes( &rtxn, &query, - &Some(vec![IndexUidPattern::new_unchecked("doggo")]), + &AuthFilter::with_allowed_indexes( + vec![IndexUidPattern::new_unchecked("doggo")].into_iter().collect(), + ), ) .unwrap(); // we asked for all the tasks, but we are only authorized to retrieve the doggo tasks @@ -2674,10 +2702,14 @@ mod tests { .get_task_ids_from_authorized_indexes( &rtxn, &query, - &Some(vec![ - IndexUidPattern::new_unchecked("catto"), - IndexUidPattern::new_unchecked("doggo"), - ]), + &AuthFilter::with_allowed_indexes( + vec![ + IndexUidPattern::new_unchecked("catto"), + IndexUidPattern::new_unchecked("doggo"), + ] + .into_iter() + .collect(), + ), ) .unwrap(); // we asked for all the tasks, but we are only authorized to retrieve the doggo and catto tasks @@ -2685,8 +2717,9 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[0,1,]"); let query = Query::default(); - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // we asked for all the tasks with all index authorized -> all tasks returned snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,3,]"); } @@ -2717,8 +2750,9 @@ mod tests { let rtxn = index_scheduler.read_txn().unwrap(); let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() }; - let tasks = - index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default()) + .unwrap(); // 0 is not returned because it was not 
canceled, 3 is not returned because it is the uid of the // taskCancelation itself snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); @@ -2728,7 +2762,9 @@ mod tests { .get_task_ids_from_authorized_indexes( &rtxn, &query, - &Some(vec![IndexUidPattern::new_unchecked("doggo")]), + &AuthFilter::with_allowed_indexes( + vec![IndexUidPattern::new_unchecked("doggo")].into_iter().collect(), + ), ) .unwrap(); // Return only 1 because the user is not authorized to see task 2 diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 9f9b15f38..8d5457766 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -85,17 +85,13 @@ impl AuthController { uid: Uuid, search_rules: Option<SearchRules>, ) -> Result<AuthFilter> { - let mut filters = AuthFilter::default(); let key = self.get_key(uid)?; - filters.search_rules = match search_rules { - Some(search_rules) => search_rules, - None => SearchRules::Set(key.indexes.into_iter().collect()), - }; + let key_authorized_indexes = SearchRules::Set(key.indexes.into_iter().collect()); - filters.allow_index_creation = self.is_key_authorized(uid, Action::IndexesAdd, None)?; + let allow_index_creation = self.is_key_authorized(uid, Action::IndexesAdd, None)?; - Ok(filters) + Ok(AuthFilter { search_rules, key_authorized_indexes, allow_index_creation }) } pub fn list_keys(&self) -> Result<Vec<Key>> { @@ -160,13 +156,59 @@ impl AuthController { } pub struct AuthFilter { - pub search_rules: SearchRules, - pub allow_index_creation: bool, + search_rules: Option<SearchRules>, + key_authorized_indexes: SearchRules, + allow_index_creation: bool, } impl Default for AuthFilter { fn default() -> Self { - Self { search_rules: SearchRules::default(), allow_index_creation: true } + Self { + search_rules: None, + key_authorized_indexes: SearchRules::default(), + allow_index_creation: true, + } + } +} + +impl AuthFilter { + #[inline] + pub fn allow_index_creation(&self, index: &str) -> bool { + self.allow_index_creation && self.is_index_authorized(index) + } + + pub fn with_allowed_indexes(allowed_indexes: HashSet<IndexUidPattern>) -> Self { + Self { + search_rules: None, + key_authorized_indexes: SearchRules::Set(allowed_indexes), + allow_index_creation: false, + } + } + + pub fn all_indexes_authorized(&self) -> bool { + self.key_authorized_indexes.all_indexes_authorized() + && self + .search_rules + .as_ref() + .map(|search_rules| search_rules.all_indexes_authorized()) + .unwrap_or(true) + } + + pub fn is_index_authorized(&self, index: &str) -> bool { + self.key_authorized_indexes.is_index_authorized(index) + && self + .search_rules + .as_ref() + .map(|search_rules| search_rules.is_index_authorized(index)) + .unwrap_or(true) + } + + pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> { + if !self.is_index_authorized(index) { + return None; + } + let search_rules = self.search_rules.as_ref().unwrap_or(&self.key_authorized_indexes); + search_rules.get_index_search_rules(index) } }
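Note on the new `AuthFilter` above: authorization is now the conjunction of what the API key allows (`key_authorized_indexes`) and what the tenant token allows (`search_rules`), with an absent token counting as "no extra restriction". A standalone sketch of that conjunction — sets of index names are an assumption standing in for `SearchRules`:

```rust
use std::collections::HashSet;

struct Filter {
    key_authorized_indexes: HashSet<String>,
    search_rules: Option<HashSet<String>>, // tenant token restrictions, if any
}

impl Filter {
    fn is_index_authorized(&self, index: &str) -> bool {
        // The key must allow the index...
        self.key_authorized_indexes.contains(index)
            // ...and so must the tenant token, when one was provided.
            && self.search_rules.as_ref().map(|r| r.contains(index)).unwrap_or(true)
    }
}

fn main() {
    let filter = Filter {
        key_authorized_indexes: HashSet::from(["sales".into(), "products".into()]),
        search_rules: Some(HashSet::from(["sales".into()])),
    };
    assert!(filter.is_index_authorized("sales"));
    // Allowed by the key but not by the token -> denied.
    assert!(!filter.is_index_authorized("products"));
}
```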
@@ -185,7 +227,7 @@ impl Default for SearchRules { } impl SearchRules { - pub fn is_index_authorized(&self, index: &str) -> bool { + fn is_index_authorized(&self, index: &str) -> bool { match self { Self::Set(set) => { set.contains("*") @@ -200,7 +242,7 @@ impl SearchRules { } } - pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> { + fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> { match self { Self::Set(_) => { if self.is_index_authorized(index) { @@ -219,24 +261,10 @@ impl SearchRules { } } - /// Return the list of indexes such that `self.is_index_authorized(index) == true`, - /// or `None` if all indexes satisfy this condition. - pub fn authorized_indexes(&self) -> Option<Vec<IndexUidPattern>> { + fn all_indexes_authorized(&self) -> bool { match self { - SearchRules::Set(set) => { - if set.contains("*") { - None - } else { - Some(set.iter().cloned().collect()) - } - } - SearchRules::Map(map) => { - if map.contains_key("*") { - None - } else { - Some(map.keys().cloned().collect()) - } - } + SearchRules::Set(set) => set.contains("*"), + SearchRules::Map(map) => map.contains_key("*"), } } } diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index f1ce0f217..4858b2886 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -11,8 +11,8 @@ use serde::{Deserialize, Serialize}; #[serde(rename_all = "camelCase")] pub struct ResponseError { #[serde(skip)] - code: StatusCode, - message: String, + pub code: StatusCode, + pub message: String, #[serde(rename = "code")] error_code: String, #[serde(rename = "type")] diff --git a/meilisearch/src/analytics/mock_analytics.rs b/meilisearch/src/analytics/mock_analytics.rs index 092e35424..03aed0189 100644 --- a/meilisearch/src/analytics/mock_analytics.rs +++ b/meilisearch/src/analytics/mock_analytics.rs @@ -26,6 +26,18 @@ impl SearchAggregator { pub fn succeed(&mut self, _: &dyn Any) {} } +#[derive(Default)] +pub struct MultiSearchAggregator; + +#[allow(dead_code)] +impl MultiSearchAggregator { + pub fn from_queries(_: &dyn Any, _: &dyn Any) -> Self { + Self::default() + } + + pub fn succeed(&mut self) {} +} + impl MockAnalytics { #[allow(clippy::new_ret_no_self)] pub fn new(opt: &Opt) -> Arc<dyn Analytics> { @@ -43,6 +55,7 @@ impl Analytics for MockAnalytics { fn publish(&self, _event_name: String, _send: Value, _request: Option<&HttpRequest>) {} fn get_search(&self, _aggregate: super::SearchAggregator) {} fn post_search(&self, _aggregate: super::SearchAggregator) {} + fn post_multi_search(&self, _aggregate: super::MultiSearchAggregator) {} fn add_documents( &self, _documents_query: &UpdateDocumentsQuery, diff --git a/meilisearch/src/analytics/mod.rs b/meilisearch/src/analytics/mod.rs index ec15707bf..4d295db57 100644 --- a/meilisearch/src/analytics/mod.rs +++ b/meilisearch/src/analytics/mod.rs @@ -23,6 +23,8 @@ use crate::routes::tasks::TasksFilterQuery; pub type SegmentAnalytics = mock_analytics::MockAnalytics; #[cfg(any(debug_assertions, not(feature = "analytics")))] pub type SearchAggregator = mock_analytics::SearchAggregator; +#[cfg(any(debug_assertions, not(feature = "analytics")))] +pub type MultiSearchAggregator = mock_analytics::MultiSearchAggregator; // if we are in release mode and the feature analytics was enabled // we use the real analytics @@ -30,6 +32,8 @@ pub type SearchAggregator = mock_analytics::SearchAggregator; pub type SegmentAnalytics = segment_analytics::SegmentAnalytics; #[cfg(all(not(debug_assertions), feature = "analytics"))] pub type SearchAggregator = segment_analytics::SearchAggregator; +#[cfg(all(not(debug_assertions), feature = "analytics"))] +pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator; /// The Meilisearch config dir: /// `~/.config/Meilisearch` on *NIX or *BSD.
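Note: the aliases above are what let release builds with the `analytics` feature swap in the Segment-backed types while every other build gets no-op mocks, without any call site changing. A toy version of the pattern, keyed on `debug_assertions` only since a standalone snippet cannot declare Cargo features:

```rust
mod mock_analytics {
    #[derive(Default)]
    pub struct MultiSearchAggregator;
    impl MultiSearchAggregator {
        pub fn succeed(&mut self) {} // no-op in the mock
    }
}

mod segment_analytics {
    #[derive(Default)]
    pub struct MultiSearchAggregator {
        pub total_succeeded: usize,
    }
    impl MultiSearchAggregator {
        pub fn succeed(&mut self) {
            self.total_succeeded += 1; // the "real" backend keeps state
        }
    }
}

// Debug builds get the mock; release builds get the real aggregator.
#[cfg(debug_assertions)]
pub type MultiSearchAggregator = mock_analytics::MultiSearchAggregator;
#[cfg(not(debug_assertions))]
pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator;

fn main() {
    let mut agg = MultiSearchAggregator::default();
    agg.succeed(); // the same call compiles against either backing type
}
```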
@@ -74,6 +78,9 @@ pub trait Analytics: Sync + Send { /// This method should be called to aggregate a post search fn post_search(&self, aggregate: SearchAggregator); + /// This method should be called to aggregate a post array of searches + fn post_multi_search(&self, aggregate: MultiSearchAggregator); + // this method should be called to aggregate a add documents request fn add_documents( &self, diff --git a/meilisearch/src/analytics/segment_analytics.rs b/meilisearch/src/analytics/segment_analytics.rs index 818b06240..92ac4b1d5 100644 --- a/meilisearch/src/analytics/segment_analytics.rs +++ b/meilisearch/src/analytics/segment_analytics.rs @@ -9,7 +9,7 @@ use actix_web::HttpRequest; use byte_unit::Byte; use http::header::CONTENT_TYPE; use index_scheduler::IndexScheduler; -use meilisearch_auth::{AuthController, SearchRules}; +use meilisearch_auth::{AuthController, AuthFilter}; use meilisearch_types::InstanceUid; use once_cell::sync::Lazy; use regex::Regex; @@ -30,7 +30,7 @@ use crate::routes::indexes::documents::UpdateDocumentsQuery; use crate::routes::tasks::TasksFilterQuery; use crate::routes::{create_all_stats, Stats}; use crate::search::{ - SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, + SearchQuery, SearchQueryWithIndex, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, }; use crate::Opt; @@ -68,6 +68,7 @@ pub enum AnalyticsMsg { BatchMessage(Track), AggregateGetSearch(SearchAggregator), AggregatePostSearch(SearchAggregator), + AggregatePostMultiSearch(MultiSearchAggregator), AggregateAddDocuments(DocumentsAggregator), AggregateDeleteDocuments(DocumentsDeletionAggregator), AggregateUpdateDocuments(DocumentsAggregator), @@ -133,6 +134,7 @@ impl SegmentAnalytics { opt: opt.clone(), batcher, post_search_aggregator: SearchAggregator::default(), + post_multi_search_aggregator: MultiSearchAggregator::default(), get_search_aggregator: SearchAggregator::default(), add_documents_aggregator: DocumentsAggregator::default(), delete_documents_aggregator: DocumentsDeletionAggregator::default(), @@ -174,6 +176,10 @@ impl super::Analytics for SegmentAnalytics { let _ = self.sender.try_send(AnalyticsMsg::AggregatePostSearch(aggregate)); } + fn post_multi_search(&self, aggregate: MultiSearchAggregator) { + let _ = self.sender.try_send(AnalyticsMsg::AggregatePostMultiSearch(aggregate)); + } + fn add_documents( &self, documents_query: &UpdateDocumentsQuery, @@ -326,6 +332,7 @@ pub struct Segment { batcher: AutoBatcher, get_search_aggregator: SearchAggregator, post_search_aggregator: SearchAggregator, + post_multi_search_aggregator: MultiSearchAggregator, add_documents_aggregator: DocumentsAggregator, delete_documents_aggregator: DocumentsDeletionAggregator, update_documents_aggregator: DocumentsAggregator, @@ -383,6 +390,7 @@ impl Segment { Some(AnalyticsMsg::BatchMessage(msg)) => drop(self.batcher.push(msg).await), Some(AnalyticsMsg::AggregateGetSearch(agreg)) => self.get_search_aggregator.aggregate(agreg), Some(AnalyticsMsg::AggregatePostSearch(agreg)) => self.post_search_aggregator.aggregate(agreg), + Some(AnalyticsMsg::AggregatePostMultiSearch(agreg)) => self.post_multi_search_aggregator.aggregate(agreg), Some(AnalyticsMsg::AggregateAddDocuments(agreg)) => self.add_documents_aggregator.aggregate(agreg), Some(AnalyticsMsg::AggregateDeleteDocuments(agreg)) => self.delete_documents_aggregator.aggregate(agreg), Some(AnalyticsMsg::AggregateUpdateDocuments(agreg)) => 
self.update_documents_aggregator.aggregate(agreg), @@ -401,7 +409,7 @@ impl Segment { auth_controller: AuthController, ) { if let Ok(stats) = - create_all_stats(index_scheduler.into(), auth_controller, &SearchRules::default()) + create_all_stats(index_scheduler.into(), auth_controller, &AuthFilter::default()) { // Replace the version number with the prototype name if any. let version = if let Some(prototype) = crate::prototype_name() { @@ -428,6 +436,8 @@ impl Segment { .into_event(&self.user, "Documents Searched GET"); let post_search = std::mem::take(&mut self.post_search_aggregator) .into_event(&self.user, "Documents Searched POST"); + let post_multi_search = std::mem::take(&mut self.post_multi_search_aggregator) + .into_event(&self.user, "Documents Searched by Multi-Search POST"); let add_documents = std::mem::take(&mut self.add_documents_aggregator) .into_event(&self.user, "Documents Added"); let delete_documents = std::mem::take(&mut self.delete_documents_aggregator) @@ -445,6 +455,9 @@ impl Segment { if let Some(post_search) = post_search { let _ = self.batcher.push(post_search).await; } + if let Some(post_multi_search) = post_multi_search { + let _ = self.batcher.push(post_multi_search).await; + } if let Some(add_documents) = add_documents { let _ = self.batcher.push(add_documents).await; } @@ -718,6 +731,118 @@ impl SearchAggregator { } } +#[derive(Default)] +pub struct MultiSearchAggregator { + timestamp: Option<OffsetDateTime>, + + // requests + total_received: usize, + total_succeeded: usize, + + // sum of the number of distinct indexes in each single request, use with total_received to compute an avg + total_distinct_index_count: usize, + // number of queries with a single index, use with total_received to compute a proportion + total_single_index: usize, + + // sum of the number of search queries in the requests, use with total_received to compute an average + total_search_count: usize, + + // context + user_agents: HashSet<String>, +} + +impl MultiSearchAggregator { + pub fn from_queries(query: &[SearchQueryWithIndex], request: &HttpRequest) -> Self { + let timestamp = Some(OffsetDateTime::now_utc()); + + let user_agents = extract_user_agents(request).into_iter().collect(); + + let distinct_indexes: HashSet<_> = + query.iter().map(|query| query.index_uid.as_str()).collect(); + + Self { + timestamp, + total_received: 1, + total_succeeded: 0, + total_distinct_index_count: distinct_indexes.len(), + total_single_index: if distinct_indexes.len() == 1 { 1 } else { 0 }, + total_search_count: query.len(), + user_agents, + } + } + + pub fn succeed(&mut self) { + self.total_succeeded = self.total_succeeded.saturating_add(1); + }
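Note: `from_queries` above reduces a whole multi-search request to a handful of counters at ingestion time, so later aggregation never re-touches the queries. A minimal sketch of the distinct-index bookkeeping — a bare slice of index names is an assumption standing in for `&[SearchQueryWithIndex]`:

```rust
use std::collections::HashSet;

// Returns (distinct index count, whether the request touched exactly one index),
// the two per-request facts the aggregator keeps.
fn index_stats(index_uids: &[&str]) -> (usize, bool) {
    let distinct: HashSet<&str> = index_uids.iter().copied().collect();
    (distinct.len(), distinct.len() == 1)
}

fn main() {
    // Two queries on "sales", one on "products" -> 2 distinct, not single-index.
    assert_eq!(index_stats(&["sales", "sales", "products"]), (2, false));
    assert_eq!(index_stats(&["sales", "sales"]), (1, true));
}
```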
+ pub fn aggregate(&mut self, other: Self) { + // write the aggregate in a way that will cause a compilation error if a field is added. + + // get ownership of self, replacing it by a default value. + let this = std::mem::take(self); + + let timestamp = this.timestamp.or(other.timestamp); + let total_received = this.total_received.saturating_add(other.total_received); + let total_succeeded = this.total_succeeded.saturating_add(other.total_succeeded); + let total_distinct_index_count = + this.total_distinct_index_count.saturating_add(other.total_distinct_index_count); + let total_single_index = this.total_single_index.saturating_add(other.total_single_index); + let total_search_count = this.total_search_count.saturating_add(other.total_search_count); + let mut user_agents = this.user_agents; + + for user_agent in other.user_agents.into_iter() { + user_agents.insert(user_agent); + } + + // need all fields or compile error + let mut aggregated = Self { + timestamp, + total_received, + total_succeeded, + total_distinct_index_count, + total_single_index, + total_search_count, + user_agents, + // do not add _ or ..Default::default() here + }; + + // replace the default self with the aggregated value + std::mem::swap(self, &mut aggregated); + } + + pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> { + if self.total_received == 0 { + None + } else { + let properties = json!({ + "user-agent": self.user_agents, + "requests": { + "total_succeeded": self.total_succeeded, + "total_failed": self.total_received.saturating_sub(self.total_succeeded), // just to be sure we never panics + "total_received": self.total_received, + }, + "indexes": { + "total_single_index": self.total_single_index, + "total_distinct_index_count": self.total_distinct_index_count, + "avg_distinct_index_count": (self.total_distinct_index_count as f64) / (self.total_received as f64), // not 0 else returned early + }, + "searches": { + "total_search_count": self.total_search_count, + "avg_search_count": (self.total_search_count as f64) / (self.total_received as f64), + } + }); + + Some(Track { + timestamp: self.timestamp, + user: user.clone(), + event: event_name.to_string(), + properties, + ..Default::default() + }) + } + } +} + #[derive(Default)] pub struct DocumentsAggregator { timestamp: Option<OffsetDateTime>,
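Note on `aggregate` above: taking `self` with `std::mem::take` and rebuilding the struct with an exhaustive literal means a newly added field fails compilation until it is aggregated too, which is exactly the property the inline comments insist on. The same trick in miniature, with a hypothetical two-field aggregator:

```rust
#[derive(Default)]
struct Counters {
    total_received: usize,
    total_succeeded: usize,
}

impl Counters {
    fn aggregate(&mut self, other: Self) {
        // Take ownership of self, leaving a default value in its place.
        let this = std::mem::take(self);
        // Exhaustive struct literal: adding a field to Counters is a
        // compile error here until it is aggregated as well.
        *self = Counters {
            total_received: this.total_received.saturating_add(other.total_received),
            total_succeeded: this.total_succeeded.saturating_add(other.total_succeeded),
        };
    }
}

fn main() {
    let mut a = Counters { total_received: 2, total_succeeded: 1 };
    a.aggregate(Counters { total_received: 3, total_succeeded: 3 });
    assert_eq!((a.total_received, a.total_succeeded), (5, 4));
}
```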
diff --git a/meilisearch/src/extractors/authentication/mod.rs b/meilisearch/src/extractors/authentication/mod.rs index 84b598689..6373d6a29 100644 --- a/meilisearch/src/extractors/authentication/mod.rs +++ b/meilisearch/src/extractors/authentication/mod.rs @@ -136,6 +136,13 @@ pub mod policies { use crate::extractors::authentication::Policy; + enum TenantTokenOutcome { + NotATenantToken, + Invalid, + Expired, + Valid(Uuid, SearchRules), + } + fn tenant_token_validation() -> Validation { let mut validation = Validation::default(); validation.validate_exp = false; @@ -164,29 +171,42 @@ pub struct ActionPolicy<const A: u8>; impl<const A: u8> Policy for ActionPolicy<A> { + /// Attempts to grant authentication from a bearer token (that can be a tenant token or an API key), the requested Action, + /// and a list of requested indexes. + /// + /// If the bearer token is not allowed for the specified indexes and action, returns `None`. + /// Otherwise, returns an object containing the generated permissions: the search filters to add to a search, and the list of allowed indexes + /// (that may contain more indexes than requested). fn authenticate( auth: AuthController, token: &str, index: Option<&str>, ) -> Option<AuthFilter> { // authenticate if token is the master key. - // master key can only have access to keys routes. - // if master key is None only keys routes are inaccessible. + // Without a master key, all routes are accessible except the key-related routes. if auth.get_master_key().map_or_else(|| !is_keys_action(A), |mk| mk == token) { return Some(AuthFilter::default()); } - // Tenant token - if let Some(filters) = ActionPolicy::<A>::authenticate_tenant_token(&auth, token, index) - { - return Some(filters); - } else if let Some(action) = Action::from_repr(A) { - // API key - if let Ok(Some(uid)) = auth.get_optional_uid_from_encoded_key(token.as_bytes()) { - if let Ok(true) = auth.is_key_authorized(uid, action, index) { - return auth.get_key_filters(uid, None).ok(); + let (key_uuid, search_rules) = + match ActionPolicy::<A>::authenticate_tenant_token(&auth, token) { + TenantTokenOutcome::Valid(key_uuid, search_rules) => { + (key_uuid, Some(search_rules)) } - } + TenantTokenOutcome::Expired => return None, + TenantTokenOutcome::Invalid => return None, + TenantTokenOutcome::NotATenantToken => { + (auth.get_optional_uid_from_encoded_key(token.as_bytes()).ok()??, None) + } + }; + + // check that the indexes are allowed + let action = Action::from_repr(A)?; + let auth_filter = auth.get_key_filters(key_uuid, search_rules).ok()?; + if auth.is_key_authorized(key_uuid, action, index).unwrap_or(false) + && index.map(|index| auth_filter.is_index_authorized(index)).unwrap_or(true) + { + return Some(auth_filter); } None @@ -194,50 +214,43 @@ pub mod policies { } impl<const A: u8> ActionPolicy<A> { - fn authenticate_tenant_token( - auth: &AuthController, - token: &str, - index: Option<&str>, - ) -> Option<AuthFilter> { - // A tenant token only has access to the search route which always defines an index. - let index = index?; - + fn authenticate_tenant_token(auth: &AuthController, token: &str) -> TenantTokenOutcome { // Only search action can be accessed by a tenant token. if A != actions::SEARCH { - return None; + return TenantTokenOutcome::NotATenantToken; } - let uid = extract_key_id(token)?; - // check if parent key is authorized to do the action. - if auth.is_key_authorized(uid, Action::Search, Some(index)).ok()? { - // Check if tenant token is valid. - let key = auth.generate_key(uid)?; - let data = decode::<Claims>( - token, - &DecodingKey::from_secret(key.as_bytes()), - &tenant_token_validation(), - ) - .ok()?; + let uid = if let Some(uid) = extract_key_id(token) { + uid + } else { + return TenantTokenOutcome::NotATenantToken; }; - // Check index access if an index restriction is provided. - if !data.claims.search_rules.is_index_authorized(index) { - return None; + // Check if tenant token is valid. + let key = if let Some(key) = auth.generate_key(uid) { + key + } else { + return TenantTokenOutcome::Invalid; + }; + + let data = if let Ok(data) = decode::<Claims>( + token, + &DecodingKey::from_secret(key.as_bytes()), + &tenant_token_validation(), + ) { + data + } else { + return TenantTokenOutcome::Invalid; + }; + + // Check if token is expired. + if let Some(exp) = data.claims.exp { + if OffsetDateTime::now_utc().unix_timestamp() > exp { + return TenantTokenOutcome::Expired; } - - // Check if token is expired. - if let Some(exp) = data.claims.exp { - if OffsetDateTime::now_utc().unix_timestamp() > exp { - return None; - } - } - - return match auth.get_key_filters(uid, Some(data.claims.search_rules)) { - Ok(auth) if auth.search_rules.is_index_authorized(index) => Some(auth), - _ => None, - }; } - None + TenantTokenOutcome::Valid(uid, data.claims.search_rules) } }
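Note: the refactored policy first classifies the bearer token and only falls back to the API-key path when it positively is not a tenant token; invalid and expired tenant tokens no longer silently degrade into key lookups. A condensed sketch of that control flow — the classifier, the string-based "rules", and every name here are stand-ins, not the real JWT handling:

```rust
enum Outcome {
    NotATenantToken,
    Invalid,
    Expired,
    Valid(String), // search rules, simplified to a string
}

// Hypothetical classifier: anything without a '.' is not a tenant token,
// and "expired"/"bad" stand in for real JWT validation failures.
fn classify(token: &str) -> Outcome {
    if !token.contains('.') {
        return Outcome::NotATenantToken;
    }
    match token {
        "expired.jwt" => Outcome::Expired,
        "bad.jwt" => Outcome::Invalid,
        other => Outcome::Valid(other.to_string()),
    }
}

fn authenticate(token: &str) -> Option<String> {
    match classify(token) {
        // Only a non-tenant token falls back to the plain API-key path.
        Outcome::NotATenantToken => Some(format!("api-key:{token}")),
        // Invalid or expired tenant tokens are rejected outright.
        Outcome::Invalid | Outcome::Expired => None,
        Outcome::Valid(rules) => Some(rules),
    }
}

fn main() {
    assert!(authenticate("raw-api-key").is_some());
    assert!(authenticate("expired.jwt").is_none());
    assert!(authenticate("ok.jwt").is_some());
}
```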
diff --git a/meilisearch/src/routes/indexes/documents.rs b/meilisearch/src/routes/indexes/documents.rs index 0c649ea5d..3f694b5d1 100644 --- a/meilisearch/src/routes/indexes/documents.rs +++ b/meilisearch/src/routes/indexes/documents.rs @@ -192,7 +192,7 @@ pub async fn replace_documents( analytics.add_documents(&params, index_scheduler.index(&index_uid).is_err(), &req); - let allow_index_creation = index_scheduler.filters().allow_index_creation; + let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid); let task = document_addition( extract_mime_type(&req)?, index_scheduler, @@ -223,7 +223,7 @@ pub async fn update_documents( analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req); - let allow_index_creation = index_scheduler.filters().allow_index_creation; + let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid); let task = document_addition( extract_mime_type(&req)?, index_scheduler, diff --git a/meilisearch/src/routes/indexes/mod.rs b/meilisearch/src/routes/indexes/mod.rs index b58bef0e9..da24a92ad 100644 --- a/meilisearch/src/routes/indexes/mod.rs +++ b/meilisearch/src/routes/indexes/mod.rs @@ -89,11 +89,11 @@ pub async fn list_indexes( index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>, paginate: AwebQueryParameter<ListIndexes, DeserrQueryParamError>, ) -> Result<HttpResponse, ResponseError> { - let search_rules = &index_scheduler.filters().search_rules; + let filters = index_scheduler.filters(); let indexes: Vec<_> = index_scheduler.indexes()?; let indexes = indexes .into_iter() - .filter(|(name, _)| search_rules.is_index_authorized(name)) + .filter(|(name, _)| filters.is_index_authorized(name)) .map(|(name, index)| IndexView::new(name, &index)) .collect::<Result<Vec<_>, _>>()?; @@ -120,7 +120,7 @@ pub async fn create_index( ) -> Result<HttpResponse, ResponseError> { let IndexCreateRequest { primary_key, uid } = body.into_inner(); - let allow_index_creation = index_scheduler.filters().search_rules.is_index_authorized(&uid); + let allow_index_creation = index_scheduler.filters().allow_index_creation(&uid); if allow_index_creation { analytics.publish( "Index Created".to_string(), diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs index 3fb413c43..f9242f320 100644 --- a/meilisearch/src/routes/indexes/search.rs +++ b/meilisearch/src/routes/indexes/search.rs @@ -3,7 +3,6 @@ use actix_web::{web, HttpRequest, HttpResponse}; use deserr::actix_web::{AwebJson, AwebQueryParameter}; use index_scheduler::IndexScheduler; use log::debug; -use meilisearch_auth::IndexSearchRules; use meilisearch_types::deserr::query_params::Param; use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError}; use meilisearch_types::error::deserr_codes::*; @@ -17,9 +16,9 @@ use crate::extractors::authentication::policies::*; use crate::extractors::authentication::GuardedData; use crate::extractors::sequential_extractor::SeqHandler; use crate::search::{ - perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, - DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, - DEFAULT_SEARCH_OFFSET, + add_search_rules, perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH,
DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, + DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET, }; pub fn configure(cfg: &mut web::ServiceConfig) { @@ -101,26 +100,6 @@ impl From for SearchQuery { } } -/// Incorporate search rules in search query -fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) { - query.filter = match (query.filter.take(), rules.filter) { - (None, rules_filter) => rules_filter, - (filter, None) => filter, - (Some(filter), Some(rules_filter)) => { - let filter = match filter { - Value::Array(filter) => filter, - filter => vec![filter], - }; - let rules_filter = match rules_filter { - Value::Array(rules_filter) => rules_filter, - rules_filter => vec![rules_filter], - }; - - Some(Value::Array([filter, rules_filter].concat())) - } - } -} - // TODO: TAMO: split on :asc, and :desc, instead of doing some weird things /// Transform the sort query parameter into something that matches the post expected format. @@ -159,9 +138,7 @@ pub async fn search_with_url_query( let mut query: SearchQuery = params.into_inner().into(); // Tenant token search_rules. - if let Some(search_rules) = - index_scheduler.filters().search_rules.get_index_search_rules(&index_uid) - { + if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) { add_search_rules(&mut query, search_rules); } @@ -193,9 +170,7 @@ pub async fn search_with_post( debug!("search called with params: {:?}", query); // Tenant token search_rules. - if let Some(search_rules) = - index_scheduler.filters().search_rules.get_index_search_rules(&index_uid) - { + if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) { add_search_rules(&mut query, search_rules); } diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index d10aec1a2..4b6cde685 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -45,7 +45,8 @@ macro_rules! make_setting_route { let new_settings = Settings { $attr: Setting::Reset.into(), ..Default::default() }; - let allow_index_creation = index_scheduler.filters().allow_index_creation; + let allow_index_creation = + index_scheduler.filters().allow_index_creation(&index_uid); let task = KindWithContent::SettingsUpdate { index_uid: index_uid.to_string(), @@ -86,7 +87,8 @@ macro_rules! 
make_setting_route { ..Default::default() }; - let allow_index_creation = index_scheduler.filters().allow_index_creation; + let allow_index_creation = + index_scheduler.filters().allow_index_creation(&index_uid); let task = KindWithContent::SettingsUpdate { index_uid: index_uid.to_string(), @@ -560,7 +562,7 @@ pub async fn update_all( Some(&req), ); - let allow_index_creation = index_scheduler.filters().allow_index_creation; + let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid); let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner(); let task = KindWithContent::SettingsUpdate { index_uid, @@ -596,7 +598,7 @@ pub async fn delete_all( let new_settings = Settings::cleared().into_unchecked(); - let allow_index_creation = index_scheduler.filters().allow_index_creation; + let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid); let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner(); let task = KindWithContent::SettingsUpdate { index_uid, diff --git a/meilisearch/src/routes/mod.rs b/meilisearch/src/routes/mod.rs index 0f367c9f4..622e26c75 100644 --- a/meilisearch/src/routes/mod.rs +++ b/meilisearch/src/routes/mod.rs @@ -22,6 +22,7 @@ const PAGINATION_DEFAULT_LIMIT: usize = 20; mod api_key; mod dump; pub mod indexes; +mod multi_search; mod swap_indexes; pub mod tasks; @@ -33,6 +34,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) { .service(web::resource("/stats").route(web::get().to(get_stats))) .service(web::resource("/version").route(web::get().to(get_version))) .service(web::scope("/indexes").configure(indexes::configure)) + .service(web::scope("/multi-search").configure(multi_search::configure)) .service(web::scope("/swap-indexes").configure(swap_indexes::configure)); } @@ -237,10 +239,9 @@ async fn get_stats( analytics: web::Data<dyn Analytics>, ) -> Result<HttpResponse, ResponseError> { analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": false }), Some(&req)); - let search_rules = &index_scheduler.filters().search_rules; + let filters = index_scheduler.filters(); - let stats = - create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), search_rules)?; + let stats = create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), filters)?; debug!("returns: {:?}", stats); Ok(HttpResponse::Ok().json(stats)) @@ -249,19 +250,19 @@ pub fn create_all_stats( index_scheduler: Data<IndexScheduler>, auth_controller: AuthController, - search_rules: &meilisearch_auth::SearchRules, + filters: &meilisearch_auth::AuthFilter, ) -> Result<Stats, ResponseError> { let mut last_task: Option<OffsetDateTime> = None; let mut indexes = BTreeMap::new(); let mut database_size = 0; let processing_task = index_scheduler.get_tasks_from_authorized_indexes( Query { statuses: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() }, - search_rules.authorized_indexes(), + filters, )?; // accumulate the size of each indexes let processing_index = processing_task.first().and_then(|task| task.index_uid()); for (name, index) in index_scheduler.indexes()?
{ - if !search_rules.is_index_authorized(&name) { + if !filters.is_index_authorized(&name) { continue; } diff --git a/meilisearch/src/routes/multi_search.rs b/meilisearch/src/routes/multi_search.rs new file mode 100644 index 000000000..fd78df5e5 --- /dev/null +++ b/meilisearch/src/routes/multi_search.rs @@ -0,0 +1,122 @@ +use actix_http::StatusCode; +use actix_web::web::{self, Data}; +use actix_web::{HttpRequest, HttpResponse}; +use deserr::actix_web::AwebJson; +use index_scheduler::IndexScheduler; +use log::debug; +use meilisearch_types::deserr::DeserrJsonError; +use meilisearch_types::error::ResponseError; +use meilisearch_types::keys::actions; +use serde::Serialize; + +use crate::analytics::{Analytics, MultiSearchAggregator}; +use crate::extractors::authentication::policies::ActionPolicy; +use crate::extractors::authentication::{AuthenticationError, GuardedData}; +use crate::extractors::sequential_extractor::SeqHandler; +use crate::search::{ + add_search_rules, perform_search, SearchQueryWithIndex, SearchResultWithIndex, +}; + +pub fn configure(cfg: &mut web::ServiceConfig) { + cfg.service(web::resource("").route(web::post().to(SeqHandler(multi_search_with_post)))); +} + +#[derive(Serialize)] +struct SearchResults { + results: Vec<SearchResultWithIndex>, +} + +#[derive(Debug, deserr::Deserr)] +#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] +pub struct SearchQueries { + queries: Vec<SearchQueryWithIndex>, +} + +pub async fn multi_search_with_post( + index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>, + params: AwebJson<SearchQueries, DeserrJsonError>, + req: HttpRequest, + analytics: web::Data<dyn Analytics>, +) -> Result<HttpResponse, ResponseError> { + let queries = params.into_inner().queries; + + let mut multi_aggregate = MultiSearchAggregator::from_queries(&queries, &req); + + // Explicitly expect a `(ResponseError, usize)` for the error type rather than `ResponseError` only, + // so that `?` doesn't work if it doesn't use `with_index`, ensuring that it is not forgotten in case of code + // changes. + let search_results: Result<_, (ResponseError, usize)> = (|| { + async { + let mut search_results = Vec::with_capacity(queries.len()); + for (query_index, (index_uid, mut query)) in + queries.into_iter().map(SearchQueryWithIndex::into_index_query).enumerate() + { + debug!("multi-search #{query_index}: called with params: {:?}", query); + + // Check index from API key + if !index_scheduler.filters().is_index_authorized(&index_uid) { + return Err(AuthenticationError::InvalidToken).with_index(query_index); + } + // Apply search rules from tenant token + if let Some(search_rules) = + index_scheduler.filters().get_index_search_rules(&index_uid) + { + add_search_rules(&mut query, search_rules); + } + + let index = index_scheduler + .index(&index_uid) + .map_err(|err| { + let mut err = ResponseError::from(err); + // Patch the HTTP status code to 400 as it defaults to 404 for `index_not_found`, but + // here the resource not found is not part of the URL. + err.code = StatusCode::BAD_REQUEST; + err + }) + .with_index(query_index)?; + let search_result = + tokio::task::spawn_blocking(move || perform_search(&index, query)) + .await + .with_index(query_index)?; + + search_results.push(SearchResultWithIndex { + index_uid: index_uid.into_inner(), + result: search_result.with_index(query_index)?, + }); + } + Ok(search_results) + } + })() + .await; + + if search_results.is_ok() { + multi_aggregate.succeed(); + } + analytics.post_multi_search(multi_aggregate); + + let search_results = search_results.map_err(|(mut err, query_index)| { + // Add the query index that failed as context for the error message. + // We're doing it only here and not directly in the `WithIndex` trait so that the `with_index` function returns a different type + // of result and we can benefit from static typing. + err.message = format!("Inside `.queries[{query_index}]`: {}", err.message); + err + })?; + + debug!("returns: {:?}", search_results); + + Ok(HttpResponse::Ok().json(SearchResults { results: search_results })) +} + +/// Local `Result` extension trait to avoid `map_err` boilerplate. +trait WithIndex { + type T; + /// convert the error type inside of the `Result` to a `ResponseError`, and return a couple of it + the usize. + fn with_index(self, index: usize) -> Result<Self::T, (ResponseError, usize)>; +} + +impl<T, E: Into<ResponseError>> WithIndex for Result<T, E> { + type T = T; + fn with_index(self, index: usize) -> Result<T, (ResponseError, usize)> { + self.map_err(|err| (err.into(), index)) + } }
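Note on `WithIndex` above: every fallible step inside the loop is tagged with the offset of the query that failed, and the route patches the message once at the end. A self-contained sketch of the same mechanics — the `ResponseError` here is a bare stand-in for `meilisearch_types::error::ResponseError`:

```rust
// Minimal stand-in for the real ResponseError.
#[derive(Debug)]
struct ResponseError(String);

impl From<&str> for ResponseError {
    fn from(msg: &str) -> Self {
        ResponseError(msg.to_string())
    }
}

trait WithIndex {
    type T;
    // Pair the error with the offset of the failing query.
    fn with_index(self, index: usize) -> Result<Self::T, (ResponseError, usize)>;
}

impl<T, E: Into<ResponseError>> WithIndex for Result<T, E> {
    type T = T;
    fn with_index(self, index: usize) -> Result<T, (ResponseError, usize)> {
        self.map_err(|err| (err.into(), index))
    }
}

fn main() {
    let results = ["ok", "boom"].iter().enumerate().map(|(i, s)| {
        let r: Result<(), &str> = if *s == "boom" { Err("index not found") } else { Ok(()) };
        r.with_index(i)
    });
    for r in results {
        if let Err((err, i)) = r {
            // Same shape as the route's final message patching.
            println!("Inside `.queries[{i}]`: {}", err.0);
        }
    }
}
```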
diff --git a/meilisearch/src/routes/swap_indexes.rs b/meilisearch/src/routes/swap_indexes.rs index c4177e900..c4e204c09 100644 --- a/meilisearch/src/routes/swap_indexes.rs +++ b/meilisearch/src/routes/swap_indexes.rs @@ -42,7 +42,7 @@ pub async fn swap_indexes( }), Some(&req), ); - let search_rules = &index_scheduler.filters().search_rules; + let filters = index_scheduler.filters(); let mut swaps = vec![]; for SwapIndexesPayload { indexes } in params.into_iter() { @@ -53,7 +53,7 @@ pub async fn swap_indexes( return Err(MeilisearchHttpError::SwapIndexPayloadWrongLength(indexes).into()); } }; - if !search_rules.is_index_authorized(lhs) || !search_rules.is_index_authorized(rhs) { + if !filters.is_index_authorized(lhs) || !filters.is_index_authorized(rhs) { return Err(AuthenticationError::InvalidToken.into()); } swaps.push(IndexSwap { indexes: (lhs.to_string(), rhs.to_string()) }); diff --git a/meilisearch/src/routes/tasks.rs b/meilisearch/src/routes/tasks.rs index 49f3aac66..1356ff722 100644 --- a/meilisearch/src/routes/tasks.rs +++ b/meilisearch/src/routes/tasks.rs @@ -319,7 +319,7 @@ async fn cancel_tasks( let tasks = index_scheduler.get_task_ids_from_authorized_indexes( &index_scheduler.read_txn()?, &query, - &index_scheduler.filters().search_rules.authorized_indexes(), + index_scheduler.filters(), )?; let task_cancelation = KindWithContent::TaskCancelation { query: format!("?{}", req.query_string()), tasks }; @@ -364,7 +364,7 @@ async fn delete_tasks( let tasks = index_scheduler.get_task_ids_from_authorized_indexes( &index_scheduler.read_txn()?, &query, - &index_scheduler.filters().search_rules.authorized_indexes(), + index_scheduler.filters(), )?; let task_deletion = KindWithContent::TaskDeletion { query: format!("?{}", req.query_string()), tasks }; @@ -398,10 +398,7 @@ async fn get_tasks( let query = params.into_query(); let mut tasks_results: Vec<TaskView> = index_scheduler - .get_tasks_from_authorized_indexes( - query, - index_scheduler.filters().search_rules.authorized_indexes(), - )? + .get_tasks_from_authorized_indexes(query, index_scheduler.filters())? .into_iter() .map(|t| TaskView::from_task(&t)) .collect(); @@ -439,12 +436,8 @@ async fn get_task( let query = index_scheduler::Query { uids: Some(vec![task_uid]), ..Query::default() }; - if let Some(task) = index_scheduler - .get_tasks_from_authorized_indexes( - query, - index_scheduler.filters().search_rules.authorized_indexes(), - )?
- .first() + if let Some(task) = + index_scheduler.get_tasks_from_authorized_indexes(query, index_scheduler.filters())?.first() { let task_view = TaskView::from_task(task); Ok(HttpResponse::Ok().json(task_view)) diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index 6bf23cfc4..b7d5b9114 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -5,8 +5,10 @@ use std::time::Instant; use deserr::Deserr; use either::Either; +use meilisearch_auth::IndexSearchRules; use meilisearch_types::deserr::DeserrJsonError; use meilisearch_types::error::deserr_codes::*; +use meilisearch_types::index_uid::IndexUid; use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS; use meilisearch_types::{milli, Document}; use milli::tokenizer::TokenizerBuilder; @@ -74,6 +76,100 @@ impl SearchQuery { } } +/// A `SearchQuery` + an index UID. +// This struct contains the fields of `SearchQuery` inline. +// This is because neither deserr nor serde support `flatten` when using `deny_unknown_fields`. +// The `From` implementation ensures both structs remain up to date. +#[derive(Debug, Clone, PartialEq, Eq, Deserr)] +#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)] +pub struct SearchQueryWithIndex { + #[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)] + pub index_uid: IndexUid, + #[deserr(default, error = DeserrJsonError<InvalidSearchQ>)] + pub q: Option<String>, + #[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)] + pub offset: usize, + #[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrJsonError<InvalidSearchLimit>)] + pub limit: usize, + #[deserr(default, error = DeserrJsonError<InvalidSearchPage>)] + pub page: Option<usize>, + #[deserr(default, error = DeserrJsonError<InvalidSearchHitsPerPage>)] + pub hits_per_page: Option<usize>, + #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToRetrieve>)] + pub attributes_to_retrieve: Option<BTreeSet<String>>, + #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToCrop>)] + pub attributes_to_crop: Option<Vec<String>>, + #[deserr(default, error = DeserrJsonError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())] + pub crop_length: usize, + #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToHighlight>)] + pub attributes_to_highlight: Option<HashSet<String>>, + #[deserr(default, error = DeserrJsonError<InvalidSearchShowMatchesPosition>, default)] + pub show_matches_position: bool, + #[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)] + pub filter: Option<Value>, + #[deserr(default, error = DeserrJsonError<InvalidSearchSort>)] + pub sort: Option<Vec<String>>, + #[deserr(default, error = DeserrJsonError<InvalidSearchFacets>)] + pub facets: Option<Vec<String>>, + #[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())] + pub highlight_pre_tag: String, + #[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())] + pub highlight_post_tag: String, + #[deserr(default, error = DeserrJsonError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())] + pub crop_marker: String, + #[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)] + pub matching_strategy: MatchingStrategy, +} + +impl SearchQueryWithIndex { + pub fn into_index_query(self) -> (IndexUid, SearchQuery) { + let SearchQueryWithIndex { + index_uid, + q, + offset, + limit, + page, + hits_per_page, + attributes_to_retrieve, + attributes_to_crop, + crop_length, + attributes_to_highlight, + show_matches_position, + filter, + sort, + facets, + highlight_pre_tag, + highlight_post_tag, + crop_marker, + matching_strategy, + } = self; + ( + index_uid, + SearchQuery { + q, + offset, + limit, + page, + hits_per_page, + attributes_to_retrieve, + attributes_to_crop, + crop_length, + attributes_to_highlight, + show_matches_position, + filter, + sort, + facets, + highlight_pre_tag, + highlight_post_tag, + crop_marker, + matching_strategy, + // do not use ..Default::default() here, + // rather add any missing field from `SearchQuery` to `SearchQueryWithIndex` + }, + ) + } }
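Note: `into_index_query` above deliberately destructures without a `..` rest pattern, so extending `SearchQueryWithIndex` without forwarding the new field to `SearchQuery` is a compile error rather than silent data loss. The same trick in miniature, with hypothetical two-field structs:

```rust
struct WithIndex {
    index_uid: String,
    q: Option<String>,
}

struct Query {
    q: Option<String>,
}

impl WithIndex {
    fn into_index_query(self) -> (String, Query) {
        // No `..` rest pattern: a new field on WithIndex must be bound
        // here (and presumably forwarded) before this compiles again.
        let WithIndex { index_uid, q } = self;
        (index_uid, Query { q })
    }
}

fn main() {
    let (uid, query) = WithIndex { index_uid: "sales".into(), q: Some("shoe".into()) }
        .into_index_query();
    assert_eq!(uid, "sales");
    assert_eq!(query.q.as_deref(), Some("shoe"));
}
```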
#[derive(Debug, Clone, PartialEq, Eq, Deserr)] #[deserr(rename_all = camelCase)] pub enum MatchingStrategy { @@ -122,6 +218,14 @@ pub struct SearchResult { pub facet_stats: Option<BTreeMap<String, FacetStats>>, } +#[derive(Serialize, Debug, Clone, PartialEq)] +#[serde(rename_all = "camelCase")] +pub struct SearchResultWithIndex { + pub index_uid: String, + #[serde(flatten)] + pub result: SearchResult, +} + #[derive(Serialize, Debug, Clone, PartialEq, Eq)] #[serde(untagged)] pub enum HitsInfo { @@ -137,6 +241,26 @@ pub struct FacetStats { pub max: f64, } +/// Incorporate search rules in search query +pub fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) { + query.filter = match (query.filter.take(), rules.filter) { + (None, rules_filter) => rules_filter, + (filter, None) => filter, + (Some(filter), Some(rules_filter)) => { + let filter = match filter { + Value::Array(filter) => filter, + filter => vec![filter], + }; + let rules_filter = match rules_filter { + Value::Array(rules_filter) => rules_filter, + rules_filter => vec![rules_filter], + }; + + Some(Value::Array([filter, rules_filter].concat())) + } + } +} + pub fn perform_search( index: &Index, query: SearchQuery, diff --git a/meilisearch/tests/auth/authorization.rs b/meilisearch/tests/auth/authorization.rs index 69a74b022..b0bc7ab03 100644 --- a/meilisearch/tests/auth/authorization.rs +++ b/meilisearch/tests/auth/authorization.rs @@ -11,6 +11,7 @@ use crate::common::Server; pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> = Lazy::new(|| { let mut authorizations = hashmap! { + ("POST", "/multi-search") => hashset!{"search", "*"}, ("POST", "/indexes/products/search") => hashset!{"search", "*"}, ("GET", "/indexes/products/search") => hashset!{"search", "*"}, ("POST", "/indexes/products/documents") => hashset!{"documents.add", "documents.*", "*"}, diff --git a/meilisearch/tests/auth/mod.rs b/meilisearch/tests/auth/mod.rs index 422f92d6e..2d88bf87e 100644 --- a/meilisearch/tests/auth/mod.rs +++ b/meilisearch/tests/auth/mod.rs @@ -4,6 +4,8 @@ mod errors; mod payload; mod tenant_token; +mod tenant_token_multi_search; + use actix_web::http::StatusCode; use serde_json::{json, Value};
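Note: `add_search_rules` (now public and hosted in search.rs so both the single-search and multi-search routes can use it) merges a tenant token's mandatory filter with the caller's filter by concatenating both into one outer array, which Meilisearch's filter syntax treats as a conjunction. A standalone sketch of the merge with `serde_json`:

```rust
use serde_json::{json, Value};

// Merge an optional user filter with an optional rules filter; when both
// are present, wrap them in a single outer array (interpreted as AND).
fn merge_filters(user: Option<Value>, rules: Option<Value>) -> Option<Value> {
    match (user, rules) {
        (None, rules) => rules,
        (user, None) => user,
        (Some(user), Some(rules)) => {
            let as_vec = |v: Value| match v {
                Value::Array(a) => a,
                other => vec![other],
            };
            Some(Value::Array([as_vec(user), as_vec(rules)].concat()))
        }
    }
}

fn main() {
    let merged = merge_filters(
        Some(json!("color = blue")),
        Some(json!(["tenant = acme"])),
    );
    assert_eq!(merged, Some(json!(["color = blue", "tenant = acme"])));
}
```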
to Train Your Dragon: The Hidden World", + "id": "166428", + "color": ["green", "red"] + }, + { + "title": "Glass", + "id": "450465", + "color": ["blue", "red"] + } + ]) +}); + +static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| { + json!([ + { + "id": 852, + "father": "jean", + "mother": "michelle", + "doggos": [ + { + "name": "bobby", + "age": 2, + }, + { + "name": "buddy", + "age": 4, + }, + ], + "cattos": "pesti", + }, + { + "id": 654, + "father": "pierre", + "mother": "sabine", + "doggos": [ + { + "name": "gros bill", + "age": 8, + }, + ], + "cattos": ["simba", "pestiféré"], + }, + { + "id": 750, + "father": "romain", + "mother": "michelle", + "cattos": ["enigma"], + }, + { + "id": 951, + "father": "jean-baptiste", + "mother": "sophie", + "doggos": [ + { + "name": "turbo", + "age": 5, + }, + { + "name": "fast", + "age": 6, + }, + ], + "cattos": ["moumoute", "gomez"], + }, + ]) +}); + +fn invalid_response(query_index: Option<usize>) -> Value { + let message = if let Some(query_index) = query_index { + format!("Inside `.queries[{query_index}]`: The provided API key is invalid.") + } else { + "The provided API key is invalid.".to_string() + }; + json!({"message": message, + "code": "invalid_api_key", + "type": "auth", + "link": "https://docs.meilisearch.com/errors#invalid_api_key" + }) +} + +static ACCEPTED_KEYS_SINGLE: Lazy<Vec<Value>> = Lazy::new(|| { + vec![ + json!({ + "indexes": ["*"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["*"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sales"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sales"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sal*", "prod*"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + ] +}); + +static ACCEPTED_KEYS_BOTH: Lazy<Vec<Value>> = Lazy::new(|| { + vec![ + json!({ + "indexes": ["*"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["*"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sales", "products"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sales", "products"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sal*", "prod*"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + ] +}); + +static SINGLE_REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| { + vec![ + // no search action + json!({ + "indexes": ["*"], + "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(), + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sales"], + "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(), + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + // bad index + json!({ + "indexes":
["products"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["prod*", "p*"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["products"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + ] +}); + +static BOTH_REFUSED_KEYS: Lazy> = Lazy::new(|| { + vec![ + // no search action + json!({ + "indexes": ["*"], + "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::>(), + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sales", "products"], + "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::>(), + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + // bad index + json!({ + "indexes": ["sales"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sales"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["sal*", "proa*"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["products"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["prod*", "p*"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + json!({ + "indexes": ["products"], + "actions": ["search"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() + }), + ] +}); + +macro_rules! 
compute_authorized_single_search { + ($tenant_tokens:expr, $filter:expr, $expected_count:expr) => { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + let index = server.index("sales"); + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + index + .update_settings(json!({"filterableAttributes": ["color"]})) + .await; + index.wait_task(1).await; + drop(index); + + let index = server.index("products"); + let documents = NESTED_DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(2).await; + index + .update_settings(json!({"filterableAttributes": ["doggos"]})) + .await; + index.wait_task(3).await; + drop(index); + + + for key_content in ACCEPTED_KEYS_SINGLE.iter().chain(ACCEPTED_KEYS_BOTH.iter()) { + server.use_api_key("MASTER_KEY"); + let (response, code) = server.add_api_key(key_content.clone()).await; + assert_eq!(code, 201); + let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); + + for tenant_token in $tenant_tokens.iter() { + let web_token = generate_tenant_token(&uid, &key, tenant_token.clone()); + server.use_api_key(&web_token); + let (response, code) = server.multi_search(json!({"queries" : [{"indexUid": "sales", "filter": $filter}]})).await; + assert_eq!( + 200, code, + "{} using tenant_token: {:?} generated with parent_key: {:?}", + response, tenant_token, key_content + ); + assert_eq!( + $expected_count, + response["results"][0]["hits"].as_array().unwrap().len(), + "{} using tenant_token: {:?} generated with parent_key: {:?}", + response, + tenant_token, + key_content + ); + } + } + }; +} + +macro_rules! compute_authorized_multiple_search { + ($tenant_tokens:expr, $filter1:expr, $filter2:expr, $expected_count1:expr, $expected_count2:expr) => { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + let index = server.index("sales"); + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + index + .update_settings(json!({"filterableAttributes": ["color"]})) + .await; + index.wait_task(1).await; + drop(index); + + let index = server.index("products"); + let documents = NESTED_DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(2).await; + index + .update_settings(json!({"filterableAttributes": ["doggos"]})) + .await; + index.wait_task(3).await; + drop(index); + + + for key_content in ACCEPTED_KEYS_BOTH.iter() { + server.use_api_key("MASTER_KEY"); + let (response, code) = server.add_api_key(key_content.clone()).await; + assert_eq!(code, 201); + let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); + + for tenant_token in $tenant_tokens.iter() { + let web_token = generate_tenant_token(&uid, &key, tenant_token.clone()); + server.use_api_key(&web_token); + let (response, code) = server.multi_search(json!({"queries" : [ + {"indexUid": "sales", "filter": $filter1}, + {"indexUid": "products", "filter": $filter2}, + ]})).await; + assert_eq!( + code, 200, + "{} using tenant_token: {:?} generated with parent_key: {:?}", + response, tenant_token, key_content + ); + assert_eq!( + response["results"][0]["hits"].as_array().unwrap().len(), + $expected_count1, + "{} using tenant_token: {:?} generated with parent_key: {:?}", + response, + tenant_token, + key_content + ); + assert_eq!( + response["results"][1]["hits"].as_array().unwrap().len(), + $expected_count2, + "{} using 
tenant_token: {:?} generated with parent_key: {:?}", + response, + tenant_token, + key_content + ); + } + } + }; +} + +macro_rules! compute_forbidden_single_search { + ($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + let index = server.index("sales"); + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + index + .update_settings(json!({"filterableAttributes": ["color"]})) + .await; + index.wait_task(1).await; + drop(index); + + let index = server.index("products"); + let documents = NESTED_DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(2).await; + index + .update_settings(json!({"filterableAttributes": ["doggos"]})) + .await; + index.wait_task(3).await; + drop(index); + + assert_eq!($parent_keys.len(), $failed_query_indexes.len(), "keys != query_indexes"); + for (key_content, failed_query_indexes) in $parent_keys.iter().zip($failed_query_indexes.into_iter()) { + server.use_api_key("MASTER_KEY"); + let (response, code) = server.add_api_key(key_content.clone()).await; + assert_eq!(code, 201, "{:?}", response); + let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); + + assert_eq!($tenant_tokens.len(), failed_query_indexes.len(), "tenant_tokens != query_indexes"); + for (tenant_token, failed_query_index) in $tenant_tokens.iter().zip(failed_query_indexes.into_iter()) { + let web_token = generate_tenant_token(&uid, &key, tenant_token.clone()); + server.use_api_key(&web_token); + let (response, code) = server.multi_search(json!({"queries" : [{"indexUid": "sales"}]})).await; + assert_eq!( + response, + invalid_response(failed_query_index), + "{} using tenant_token: {:?} generated with parent_key: {:?}", + response, + tenant_token, + key_content + ); + assert_eq!( + code, 403, + "{} using tenant_token: {:?} generated with parent_key: {:?}", + response, tenant_token, key_content + ); + } + } + }; +} + +macro_rules! 
compute_forbidden_multiple_search { + ($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => { + let mut server = Server::new_auth().await; + server.use_admin_key("MASTER_KEY").await; + let index = server.index("sales"); + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + index + .update_settings(json!({"filterableAttributes": ["color"]})) + .await; + index.wait_task(1).await; + drop(index); + + let index = server.index("products"); + let documents = NESTED_DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(2).await; + index + .update_settings(json!({"filterableAttributes": ["doggos"]})) + .await; + index.wait_task(3).await; + drop(index); + + assert_eq!($parent_keys.len(), $failed_query_indexes.len(), "keys != query_indexes"); + for (key_content, failed_query_indexes) in $parent_keys.iter().zip($failed_query_indexes.into_iter()) { + server.use_api_key("MASTER_KEY"); + let (response, code) = server.add_api_key(key_content.clone()).await; + assert_eq!(code, 201, "{:?}", response); + let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); + + assert_eq!($tenant_tokens.len(), failed_query_indexes.len(), "tenant_token != query_indexes"); + for (tenant_token, failed_query_index) in $tenant_tokens.iter().zip(failed_query_indexes.into_iter()) { + let web_token = generate_tenant_token(&uid, &key, tenant_token.clone()); + server.use_api_key(&web_token); + let (response, code) = server.multi_search(json!({"queries" : [ + {"indexUid": "sales"}, + {"indexUid": "products"}, + ]})).await; + assert_eq!( + response, + invalid_response(failed_query_index), + "{} using tenant_token: {:?} generated with parent_key: {:?}", + response, + tenant_token, + key_content + ); + assert_eq!( + code, 403, + "{} using tenant_token: {:?} generated with parent_key: {:?}", + response, tenant_token, key_content + ); + } + } + }; +} + +#[actix_rt::test] +async fn single_search_authorized_simple_token() { + let tenant_tokens = vec![ + hashmap! { + "searchRules" => json!({"*": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["*"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["sales"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"*": {}}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"*": Value::Null}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!(["*"]), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"sales": {}}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"sales": Value::Null}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!(["sales"]), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!(["sa*"]), + "exp" => Value::Null + }, + ]; + + compute_authorized_single_search!(tenant_tokens, {}, 5); +} + +#[actix_rt::test] +async fn multi_search_authorized_simple_token() { + let tenant_tokens = vec![ + hashmap! { + "searchRules" => json!({"*": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! 
{ + "searchRules" => json!(["*"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {}, "products": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["sales", "products"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"*": {}}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"*": Value::Null}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!(["*"]), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"sales": {}, "products": {}}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"sales": Value::Null, "products": Value::Null}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!(["sales", "products"]), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!(["sa*", "pro*"]), + "exp" => Value::Null + }, + ]; + + compute_authorized_multiple_search!(tenant_tokens, {}, {}, 5, 4); +} + +#[actix_rt::test] +async fn single_search_authorized_filter_token() { + let tenant_tokens = vec![ + hashmap! { + "searchRules" => json!({"*": {"filter": "color = blue"}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {"filter": "color = blue"}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"*": {"filter": ["color = blue"]}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {"filter": ["color = blue"]}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + // filter on sales should override filters on * + hashmap! { + "searchRules" => json!({ + "*": {"filter": "color = green"}, + "sales": {"filter": "color = blue"} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sales": {"filter": "color = blue"} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {"filter": "color = green"}, + "sales": {"filter": ["color = blue"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sales": {"filter": ["color = blue"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + ]; + + compute_authorized_single_search!(tenant_tokens, {}, 3); +} + +#[actix_rt::test] +async fn multi_search_authorized_filter_token() { + let both_tenant_tokens = vec![ + hashmap! { + "searchRules" => json!({"sales": {"filter": "color = blue"}, "products": {"filter": "doggos.age <= 5"}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {"filter": ["color = blue"]}, "products": {"filter": "doggos.age <= 5"}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + // filter on sales should override filters on * + hashmap! 
{ + "searchRules" => json!({ + "*": {"filter": "color = green"}, + "sales": {"filter": "color = blue"}, + "products": {"filter": "doggos.age <= 5"} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sales": {"filter": "color = blue"}, + "products": {"filter": "doggos.age <= 5"} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {"filter": "color = green"}, + "sales": {"filter": ["color = blue"]}, + "products": {"filter": ["doggos.age <= 5"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sales": {"filter": ["color = blue"]}, + "products": {"filter": ["doggos.age <= 5"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + ]; + + compute_authorized_multiple_search!(both_tenant_tokens, {}, {}, 3, 2); +} + +#[actix_rt::test] +async fn filter_single_search_authorized_filter_token() { + let tenant_tokens = vec![ + hashmap! { + "searchRules" => json!({"*": {"filter": "color = blue"}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {"filter": "color = blue"}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"*": {"filter": ["color = blue"]}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {"filter": ["color = blue"]}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + // filter on sales should override filters on * + hashmap! { + "searchRules" => json!({ + "*": {"filter": "color = green"}, + "sales": {"filter": "color = blue"} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sales": {"filter": "color = blue"} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {"filter": "color = green"}, + "sales": {"filter": ["color = blue"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sales": {"filter": ["color = blue"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sal*": {"filter": ["color = blue"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + ]; + + compute_authorized_single_search!(tenant_tokens, "color = yellow", 1); +} + +#[actix_rt::test] +async fn filter_multi_search_authorized_filter_token() { + let tenant_tokens = vec![ + hashmap! { + "searchRules" => json!({"sales": {"filter": "color = blue"}, "products": {"filter": "doggos.age <= 5"}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {"filter": ["color = blue"]}, "products": {"filter": ["doggos.age <= 5"]}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + // filter on sales should override filters on * + hashmap! 
{ + "searchRules" => json!({ + "*": {"filter": "color = green"}, + "sales": {"filter": "color = blue"}, + "products": {"filter": "doggos.age <= 5"} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sales": {"filter": "color = blue"}, + "products": {"filter": "doggos.age <= 5"} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {"filter": "color = green"}, + "sales": {"filter": ["color = blue"]}, + "products": {"filter": ["doggos.age <= 5"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sales": {"filter": ["color = blue"]}, + "products": {"filter": ["doggos.age <= 5"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({ + "*": {}, + "sal*": {"filter": ["color = blue"]}, + "pro*": {"filter": ["doggos.age <= 5"]} + }), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + ]; + + compute_authorized_multiple_search!(tenant_tokens, "color = yellow", "doggos.age > 4", 1, 1); +} + +/// Tests that those Tenant Token are incompatible with the REFUSED_KEYS defined above. +#[actix_rt::test] +async fn error_single_search_token_forbidden_parent_key() { + let tenant_tokens = vec![ + hashmap! { + "searchRules" => json!({"*": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"*": Value::Null}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["*"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": Value::Null}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["sales"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["sali*", "s*", "sales*"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + ]; + + compute_forbidden_single_search!( + tenant_tokens, + SINGLE_REFUSED_KEYS, + vec![vec![None; 7], vec![None; 7], vec![Some(0); 7], vec![Some(0); 7], vec![Some(0); 7]] + ); +} + +/// Tests that those Tenant Token are incompatible with the REFUSED_KEYS defined above. +#[actix_rt::test] +async fn error_multi_search_token_forbidden_parent_key() { + let tenant_tokens = vec![ + hashmap! { + "searchRules" => json!({"*": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"*": Value::Null}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["*"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {}, "products": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! 
{ + "searchRules" => json!({"sales": Value::Null, "products": Value::Null}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["sales", "products"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["sali*", "s*", "sales*", "pro*", "proa*", "products*"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + ]; + + compute_forbidden_multiple_search!( + tenant_tokens, + BOTH_REFUSED_KEYS, + vec![ + vec![None; 7], + vec![None; 7], + vec![Some(1); 7], + vec![Some(1); 7], + vec![Some(1); 7], + vec![Some(0); 7], + vec![Some(0); 7], + vec![Some(0); 7] + ] + ); +} + +#[actix_rt::test] +async fn error_single_search_forbidden_token() { + let tenant_tokens = vec![ + // bad index + hashmap! { + "searchRules" => json!({"products": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["products"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"products": {}}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"products": Value::Null}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!(["products"]), + "exp" => Value::Null + }, + // expired token + hashmap! { + "searchRules" => json!({"*": {}}), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"*": Value::Null}), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["*"]), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {}}), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": Value::Null}), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["sales"]), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + ]; + + let failed_query_indexes: Vec<_> = + std::iter::repeat(Some(0)).take(5).chain(std::iter::repeat(None).take(6)).collect(); + + let failed_query_indexes = vec![failed_query_indexes; ACCEPTED_KEYS_SINGLE.len()]; + + compute_forbidden_single_search!(tenant_tokens, ACCEPTED_KEYS_SINGLE, failed_query_indexes); +} + +#[actix_rt::test] +async fn error_multi_search_forbidden_token() { + let tenant_tokens = vec![ + // bad index + hashmap! { + "searchRules" => json!({"products": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["products"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"products": {}}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"products": Value::Null}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!(["products"]), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"sales": {}}), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! 
{ + "searchRules" => json!(["sales"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {}}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!({"sales": Value::Null}), + "exp" => Value::Null + }, + hashmap! { + "searchRules" => json!(["sales"]), + "exp" => Value::Null + }, + // expired token + hashmap! { + "searchRules" => json!({"*": {}}), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"*": Value::Null}), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["*"]), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": {}, "products": {}}), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!({"sales": Value::Null, "products": {}}), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + hashmap! { + "searchRules" => json!(["sales", "products"]), + "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) + }, + ]; + + let failed_query_indexes: Vec<_> = std::iter::repeat(Some(0)) + .take(5) + .chain(std::iter::repeat(Some(1)).take(5)) + .chain(std::iter::repeat(None).take(6)) + .collect(); + + let failed_query_indexes = vec![failed_query_indexes; ACCEPTED_KEYS_BOTH.len()]; + + compute_forbidden_multiple_search!(tenant_tokens, ACCEPTED_KEYS_BOTH, failed_query_indexes); +} + +#[actix_rt::test] +async fn error_access_expired_parent_key() { + use std::{thread, time}; + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + let content = json!({ + "indexes": ["*"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(), + }); + + let (response, code) = server.add_api_key(content).await; + assert_eq!(code, 201); + assert!(response["key"].is_string()); + + let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); + + let tenant_token = hashmap! { + "searchRules" => json!(["*"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }; + let web_token = generate_tenant_token(uid, key, tenant_token); + server.use_api_key(&web_token); + + // test search request while parent_key is not expired + let (response, code) = server + .multi_search(json!({"queries" : [{"indexUid": "sales"}, {"indexUid": "products"}]})) + .await; + assert_ne!(response, invalid_response(None)); + assert_ne!(code, 403); + + // wait until the key is expired. 
+ thread::sleep(time::Duration::new(1, 0)); + + let (response, code) = server + .multi_search(json!({"queries" : [{"indexUid": "sales"}, {"indexUid": "products"}]})) + .await; + assert_eq!(response, invalid_response(None)); + assert_eq!(code, 403); +} + +#[actix_rt::test] +async fn error_access_modified_token() { + let mut server = Server::new_auth().await; + server.use_api_key("MASTER_KEY"); + + let content = json!({ + "indexes": ["*"], + "actions": ["*"], + "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), + }); + + let (response, code) = server.add_api_key(content).await; + assert_eq!(code, 201); + assert!(response["key"].is_string()); + + let key = response["key"].as_str().unwrap(); + let uid = response["uid"].as_str().unwrap(); + + let tenant_token = hashmap! { + "searchRules" => json!(["products"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }; + let web_token = generate_tenant_token(uid, key, tenant_token); + server.use_api_key(&web_token); + + // test search request while web_token is valid + let (response, code) = + server.multi_search(json!({"queries" : [{"indexUid": "products"}]})).await; + assert_ne!(response, invalid_response(Some(0))); + assert_ne!(code, 403); + + let tenant_token = hashmap! { + "searchRules" => json!(["*"]), + "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) + }; + + let alt = generate_tenant_token(uid, key, tenant_token); + let altered_token = [ + web_token.split('.').next().unwrap(), + alt.split('.').nth(1).unwrap(), + web_token.split('.').nth(2).unwrap(), + ] + .join("."); + + server.use_api_key(&altered_token); + let (response, code) = + server.multi_search(json!({"queries" : [{"indexUid": "products"}]})).await; + assert_eq!(response, invalid_response(None)); + assert_eq!(code, 403); +} diff --git a/meilisearch/tests/common/server.rs b/meilisearch/tests/common/server.rs index f2d645563..8152edbd0 100644 --- a/meilisearch/tests/common/server.rs +++ b/meilisearch/tests/common/server.rs @@ -103,6 +103,10 @@ impl Server { Index { uid: uid.as_ref().to_string(), service: &self.service, encoder } } + pub async fn multi_search(&self, queries: Value) -> (Value, StatusCode) { + self.service.post("/multi-search", queries).await + } + pub async fn list_indexes_raw(&self, parameters: &str) -> (Value, StatusCode) { self.service.get(format!("/indexes{parameters}")).await } diff --git a/meilisearch/tests/search/mod.rs b/meilisearch/tests/search/mod.rs index 91ff64d37..ed73a1817 100644 --- a/meilisearch/tests/search/mod.rs +++ b/meilisearch/tests/search/mod.rs @@ -3,6 +3,7 @@ mod errors; mod formatted; +mod multi; mod pagination; use once_cell::sync::Lazy; diff --git a/meilisearch/tests/search/multi.rs b/meilisearch/tests/search/multi.rs new file mode 100644 index 000000000..01751ff62 --- /dev/null +++ b/meilisearch/tests/search/multi.rs @@ -0,0 +1,343 @@ +use meili_snap::{json_string, snapshot}; +use serde_json::json; + +use super::{DOCUMENTS, NESTED_DOCUMENTS}; +use crate::common::Server; + +#[actix_rt::test] +async fn search_empty_list() { + let server = Server::new().await; + + let (response, code) = server.multi_search(json!({"queries": []})).await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(response), @r###" + { + "results": [] + } + "###); +} + +#[actix_rt::test] +async fn search_json_object() { + let server = Server::new().await; + + let (response, code) = server.multi_search(json!({})).await; + snapshot!(code, @"400 Bad Request"); +
snapshot!(json_string!(response), @r###" + { + "message": "Missing field `queries`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); +} + +#[actix_rt::test] +async fn search_json_array() { + let server = Server::new().await; + + let (response, code) = server.multi_search(json!([])).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type: expected an object, but found an array: `[]`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); +} + +#[actix_rt::test] +async fn simple_search_single_index() { + let server = Server::new().await; + let index = server.index("test"); + + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + + let (response, code) = server + .multi_search(json!({"queries": [ + {"indexUid" : "test", "q": "glass"}, + {"indexUid": "test", "q": "captain"}, + ]})) + .await; + snapshot!(code, @"200 OK"); + insta::assert_json_snapshot!(response["results"], { "[].processingTimeMs" => "[time]" }, @r###" + [ + { + "indexUid": "test", + "hits": [ + { + "title": "Glass", + "id": "450465" + } + ], + "query": "glass", + "processingTimeMs": "[time]", + "limit": 20, + "offset": 0, + "estimatedTotalHits": 1 + }, + { + "indexUid": "test", + "hits": [ + { + "title": "Captain Marvel", + "id": "299537" + } + ], + "query": "captain", + "processingTimeMs": "[time]", + "limit": 20, + "offset": 0, + "estimatedTotalHits": 1 + } + ] + "###); +} + +#[actix_rt::test] +async fn simple_search_missing_index_uid() { + let server = Server::new().await; + let index = server.index("test"); + + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + + let (response, code) = server + .multi_search(json!({"queries": [ + {"q": "glass"}, + ]})) + .await; + snapshot!(code, @"400 Bad Request"); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Missing field `indexUid` inside `.queries[0]`", + "code": "missing_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_index_uid" + } + "###); +} + +#[actix_rt::test] +async fn simple_search_illegal_index_uid() { + let server = Server::new().await; + let index = server.index("test"); + + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + + let (response, code) = server + .multi_search(json!({"queries": [ + {"indexUid": "hé", "q": "glass"}, + ]})) + .await; + snapshot!(code, @"400 Bad Request"); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Invalid value at `.queries[0].indexUid`: `hé` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", + "code": "invalid_index_uid", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_index_uid" + } + "###); +} + +#[actix_rt::test] +async fn simple_search_two_indexes() { + let server = Server::new().await; + let index = server.index("test"); + + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + + let index = server.index("nested"); + let documents = NESTED_DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(1).await; + + let (response, code) = server + .multi_search(json!({"queries": [ + {"indexUid" : "test", "q": "glass"}, + {"indexUid": "nested", "q": "pesti"}, + ]})) + .await; + snapshot!(code, @"200 OK"); + insta::assert_json_snapshot!(response["results"], { "[].processingTimeMs" => "[time]" }, @r###" + [ + { + "indexUid": "test", + "hits": [ + { + "title": "Glass", + "id": "450465" + } + ], + "query": "glass", + "processingTimeMs": "[time]", + "limit": 20, + "offset": 0, + "estimatedTotalHits": 1 + }, + { + "indexUid": "nested", + "hits": [ + { + "id": 852, + "father": "jean", + "mother": "michelle", + "doggos": [ + { + "name": "bobby", + "age": 2 + }, + { + "name": "buddy", + "age": 4 + } + ], + "cattos": "pesti" + }, + { + "id": 654, + "father": "pierre", + "mother": "sabine", + "doggos": [ + { + "name": "gros bill", + "age": 8 + } + ], + "cattos": [ + "simba", + "pestiféré" + ] + } + ], + "query": "pesti", + "processingTimeMs": "[time]", + "limit": 20, + "offset": 0, + "estimatedTotalHits": 2 + } + ] + "###); +} + +#[actix_rt::test] +async fn search_one_index_doesnt_exist() { + let server = Server::new().await; + let index = server.index("test"); + + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + + let (response, code) = server + .multi_search(json!({"queries": [ + {"indexUid" : "test", "q": "glass"}, + {"indexUid": "nested", "q": "pesti"}, + ]})) + .await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Inside `.queries[1]`: Index `nested` not found.", + "code": "index_not_found", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#index_not_found" + } + "###); +} + +#[actix_rt::test] +async fn search_multiple_indexes_dont_exist() { + let server = Server::new().await; + + let (response, code) = server + .multi_search(json!({"queries": [ + {"indexUid" : "test", "q": "glass"}, + {"indexUid": "nested", "q": "pesti"}, + ]})) + .await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Inside `.queries[0]`: Index `test` not found.", + "code": "index_not_found", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#index_not_found" + } + "###); +} + +#[actix_rt::test] +async fn search_one_query_error() { + let server = Server::new().await; + + let index = server.index("test"); + + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + + let index = server.index("nested"); + let documents = NESTED_DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(1).await; + + let (response, code) = server + .multi_search(json!({"queries": [ + {"indexUid" : "test", "q": "glass", "facets": ["title"]}, + {"indexUid": "nested", "q": "pesti"}, + ]})) + .await; + snapshot!(code, 
@"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Inside `.queries[0]`: Invalid facet distribution, this index does not have configured filterable attributes.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_search_facets" + } + "###); +} + +#[actix_rt::test] +async fn search_multiple_query_errors() { + let server = Server::new().await; + + let index = server.index("test"); + + let documents = DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(0).await; + + let index = server.index("nested"); + let documents = NESTED_DOCUMENTS.clone(); + index.add_documents(documents, None).await; + index.wait_task(1).await; + + let (response, code) = server + .multi_search(json!({"queries": [ + {"indexUid" : "test", "q": "glass", "facets": ["title"]}, + {"indexUid": "nested", "q": "pesti", "facets": ["doggos"]}, + ]})) + .await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Inside `.queries[0]`: Invalid facet distribution, this index does not have configured filterable attributes.", + "code": "invalid_search_facets", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_search_facets" + } + "###); +}