diff --git a/meilisearch-http/tests/search/mod.rs b/meilisearch-http/tests/search/mod.rs
index 7c7924c34..eab8f0a87 100644
--- a/meilisearch-http/tests/search/mod.rs
+++ b/meilisearch-http/tests/search/mod.rs
@@ -267,3 +267,79 @@ async fn displayed_attributes() {
     assert_eq!(code, 200, "{}", response);
     assert!(response["hits"].get("title").is_none());
 }
+
+#[actix_rt::test]
+async fn placeholder_search_is_hard_limited() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let documents: Vec<_> = (0..1200)
+        .map(|i| json!({ "id": i, "text": "I am unique!" }))
+        .collect();
+    index.add_documents(documents.into(), None).await;
+    index.wait_task(0).await;
+
+    index
+        .search(
+            json!({
+                "limit": 1500,
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                assert_eq!(response["hits"].as_array().unwrap().len(), 1000);
+            },
+        )
+        .await;
+
+    index
+        .search(
+            json!({
+                "offset": 800,
+                "limit": 400,
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
+            },
+        )
+        .await;
+}
+
+#[actix_rt::test]
+async fn search_is_hard_limited() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let documents: Vec<_> = (0..1200)
+        .map(|i| json!({ "id": i, "text": "I am unique!" }))
+        .collect();
+    index.add_documents(documents.into(), None).await;
+    index.wait_task(0).await;
+
+    index
+        .search(
+            json!({
+                "q": "unique",
+                "limit": 1500,
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                assert_eq!(response["hits"].as_array().unwrap().len(), 1000);
+            },
+        )
+        .await;
+
+    index
+        .search(
+            json!({
+                "q": "unique",
+                "offset": 800,
+                "limit": 400,
+            }),
+            |response, code| {
+                assert_eq!(code, 200, "{}", response);
+                assert_eq!(response["hits"].as_array().unwrap().len(), 200);
+            },
+        )
+        .await;
+}
diff --git a/meilisearch-lib/src/index/search.rs b/meilisearch-lib/src/index/search.rs
index 644b75468..1498b70bd 100644
--- a/meilisearch-lib/src/index/search.rs
+++ b/meilisearch-lib/src/index/search.rs
@@ -1,3 +1,4 @@
+use std::cmp::min;
 use std::collections::{BTreeMap, BTreeSet, HashSet};
 use std::str::FromStr;
 use std::time::Instant;
@@ -34,6 +35,10 @@ pub const fn default_crop_length() -> usize {
     DEFAULT_CROP_LENGTH
 }
 
+/// The maximum number of results that the engine
+/// will be able to return in one search call.
+pub const HARD_RESULT_LIMIT: usize = 1000;
+
 #[derive(Deserialize, Debug, Clone, PartialEq)]
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct SearchQuery {
@@ -97,8 +102,13 @@ impl Index {
             search.query(query);
         }
 
-        search.limit(query.limit);
-        search.offset(query.offset.unwrap_or_default());
+        // Make sure that a user can't get more documents than the hard limit;
+        // the offset is capped in the same way.
+        let offset = min(query.offset.unwrap_or(0), HARD_RESULT_LIMIT);
+        let limit = min(query.limit, HARD_RESULT_LIMIT.saturating_sub(offset));
+
+        search.offset(offset);
+        search.limit(limit);
 
         if let Some(ref filter) = query.filter {
            if let Some(facets) = parse_filter(filter)? {
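
For reference, a self-contained sketch of the clamping arithmetic this patch introduces, using the values exercised by the new tests (HARD_RESULT_LIMIT = 1000, and the offsets/limits from the test payloads). The clamp_pagination helper is hypothetical and only illustrates the rule; it is not part of the patch.

// Standalone illustration of the hard-limit clamping rule.
use std::cmp::min;

const HARD_RESULT_LIMIT: usize = 1000;

// Clamp a requested (offset, limit) pair so that offset + limit can never
// address results beyond HARD_RESULT_LIMIT.
fn clamp_pagination(offset: Option<usize>, limit: usize) -> (usize, usize) {
    let offset = min(offset.unwrap_or(0), HARD_RESULT_LIMIT);
    let limit = min(limit, HARD_RESULT_LIMIT.saturating_sub(offset));
    (offset, limit)
}

fn main() {
    // limit: 1500 -> capped to 1000, matching the first assertion in each test.
    assert_eq!(clamp_pagination(None, 1500), (0, 1000));
    // offset: 800, limit: 400 -> only 200 results can still be returned.
    assert_eq!(clamp_pagination(Some(800), 400), (800, 200));
    // An offset at or past the hard limit leaves no room for results at all.
    assert_eq!(clamp_pagination(Some(2000), 10), (1000, 0));
}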