mirror of
https://github.com/meilisearch/MeiliSearch
synced 2025-07-03 20:07:09 +02:00
Merge #2601
2601: Ease search result pagination r=Kerollmops a=ManyTheFish

# Summary

This PR is a prototype enhancing search result pagination (#2577)

# Todo

- [x] Update the API to return the number of pages and allow users to directly choose a page instead of computing an offset
- [x] Change the computation of `total_pages` in order to have an exact count
- [x] Compute the query tree exhaustively
- [x] Compute distinct exhaustively

# Small Documentation

## Default search query

**request**:

```sh
curl \
  -X POST 'http://localhost:7700/indexes/movies/search' \
  -H 'Content-Type: application/json' \
  --data-binary '{ "q": "botman" }'
```

**result**:

```json
{
  "hits": [...],
  "query": "botman",
  "processingTimeMs": 5,
  "hitsPerPage": 20,
  "page": 1,
  "totalPages": 4,
  "totalHits": 66
}
```

## Search query with the offset parameter

**request**:

```sh
curl \
  -X POST 'http://localhost:7700/indexes/movies/search' \
  -H 'Content-Type: application/json' \
  --data-binary '{ "q": "botman", "offset": 0 }'
```

**result**:

```json
{
  "hits": [...],
  "query": "botman",
  "processingTimeMs": 3,
  "limit": 20,
  "offset": 0,
  "estimatedTotalHits": 66
}
```

## Search query selecting a page with the page parameter

**request**:

```sh
curl \
  -X POST 'http://localhost:7700/indexes/movies/search' \
  -H 'Content-Type: application/json' \
  --data-binary '{ "q": "botman", "page": 2 }'
```

**result**:

```json
{
  "hits": [...],
  "query": "botman",
  "processingTimeMs": 5,
  "hitsPerPage": 20,
  "page": 2,
  "totalPages": 4,
  "totalHits": 66
}
```

# Related

fixes #2577

## In charge of the feature

Core: `@ManyTheFish`
Docs: `@guimachiavelli`
Integration: `@bidoubiwa`

Co-authored-by: ManyTheFish <many@meilisearch.com>
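For reference, the page-based parameters boil down to an offset/limit window plus a ceiling division for the exact page count. Below is a minimal sketch of that arithmetic, consistent with the tests added in this PR; the function names are illustrative, not the actual Meilisearch internals:

```rust
/// Illustrative only: how `page`/`hitsPerPage` can map onto an
/// offset/limit window. `page` is 1-based; page 0 is accepted but
/// selects no hits (see `page_zero_should_not_return_any_result`).
fn page_window(page: usize, hits_per_page: usize) -> (usize, usize) {
    match page {
        0 => (0, 0),
        p => ((p - 1) * hits_per_page, hits_per_page),
    }
}

/// Exact page count from the exhaustive hit count: ceiling division,
/// with `hitsPerPage: 0` yielding zero pages, as the tests expect.
fn total_pages(total_hits: usize, hits_per_page: usize) -> usize {
    if hits_per_page == 0 {
        0
    } else {
        (total_hits + hits_per_page - 1) / hits_per_page
    }
}
```

With the sample response above, `total_pages(66, 20)` gives 4, matching `"totalPages": 4`.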
Commit 25ec51e783
13 changed files with 247 additions and 52 deletions
meilisearch-http/tests/search/mod.rs:

```diff
@@ -3,6 +3,7 @@
 mod errors;
 mod formatted;
+mod pagination;

 use crate::common::Server;
 use once_cell::sync::Lazy;

```
meilisearch-http/tests/search/pagination.rs (new file, 112 lines):
```rust
use crate::common::Server;
use crate::search::DOCUMENTS;
use serde_json::json;

#[actix_rt::test]
async fn default_search_should_return_estimated_total_hit() {
    let server = Server::new().await;
    let index = server.index("basic");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    index
        .search(json!({}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert!(response.get("estimatedTotalHits").is_some());
            assert!(response.get("limit").is_some());
            assert!(response.get("offset").is_some());

            // these fields shouldn't be present
            assert!(response.get("totalHits").is_none());
            assert!(response.get("page").is_none());
            assert!(response.get("totalPages").is_none());
        })
        .await;
}

#[actix_rt::test]
async fn simple_search() {
    let server = Server::new().await;
    let index = server.index("basic");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    index
        .search(json!({"page": 1}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(response["hits"].as_array().unwrap().len(), 5);
            assert!(response.get("totalHits").is_some());
            assert_eq!(response["page"], 1);
            assert_eq!(response["totalPages"], 1);

            // these fields shouldn't be present
            assert!(response.get("estimatedTotalHits").is_none());
            assert!(response.get("limit").is_none());
            assert!(response.get("offset").is_none());
        })
        .await;
}

#[actix_rt::test]
async fn page_zero_should_not_return_any_result() {
    let server = Server::new().await;
    let index = server.index("basic");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    index
        .search(json!({"page": 0}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(response["hits"].as_array().unwrap().len(), 0);
            assert!(response.get("totalHits").is_some());
            assert_eq!(response["page"], 0);
            assert_eq!(response["totalPages"], 1);
        })
        .await;
}

#[actix_rt::test]
async fn hits_per_page_1() {
    let server = Server::new().await;
    let index = server.index("basic");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    index
        .search(json!({"hitsPerPage": 1}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(response["hits"].as_array().unwrap().len(), 1);
            assert_eq!(response["totalHits"], 5);
            assert_eq!(response["page"], 1);
            assert_eq!(response["totalPages"], 5);
        })
        .await;
}

#[actix_rt::test]
async fn hits_per_page_0_should_not_return_any_result() {
    let server = Server::new().await;
    let index = server.index("basic");

    let documents = DOCUMENTS.clone();
    index.add_documents(documents, None).await;
    index.wait_task(0).await;

    index
        .search(json!({"hitsPerPage": 0}), |response, code| {
            assert_eq!(code, 200, "{}", response);
            assert_eq!(response["hits"].as_array().unwrap().len(), 0);
            assert_eq!(response["totalHits"], 5);
            assert_eq!(response["page"], 1);
            assert_eq!(response["totalPages"], 0);
        })
        .await;
}
```
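The tests above assert that the two sets of pagination fields are mutually exclusive in a response. One way to model that is an untagged enum; this is a hedged sketch, where the variant and field names are assumptions inferred from the JSON responses above, not the actual `meilisearch-http` types:

```rust
use serde::Serialize;

/// Sketch: exactly one pagination flavor is serialized per response,
/// so `estimatedTotalHits` never coexists with `totalHits`, `page`,
/// or `totalPages`, matching the `is_none()` assertions in the tests.
#[derive(Serialize)]
#[serde(untagged)]
enum PaginationView {
    /// Default and `offset`/`limit` queries.
    #[serde(rename_all = "camelCase")]
    Estimated {
        limit: usize,
        offset: usize,
        estimated_total_hits: usize,
    },
    /// Queries that set `page` or `hitsPerPage`.
    #[serde(rename_all = "camelCase")]
    Exact {
        hits_per_page: usize,
        page: usize,
        total_pages: usize,
        total_hits: usize,
    },
}
```

An untagged enum serializes only the chosen variant's fields, which keeps the two response shapes from leaking into each other.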