Mirror of https://github.com/meilisearch/MeiliSearch (synced 2025-07-03 11:57:07 +02:00)
Reapply #2601
This commit is contained in:
Commit: 0dd8e00929
Parent: a99ddf85f7
9 changed files with 58 additions and 2569 deletions
```diff
@@ -15,7 +15,7 @@ use crate::extractors::sequential_extractor::SeqHandler;
 use crate::search::{
     perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
     DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
-    DEFAULT_SEARCH_OFFSET
+    DEFAULT_SEARCH_OFFSET,
 };
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
```
```diff
@@ -72,9 +72,7 @@ impl From<SearchQueryGet> for SearchQuery {
             limit: other.limit,
             page: other.page,
             hits_per_page: other.hits_per_page,
-            attributes_to_retrieve: other
-                .attributes_to_retrieve
-                .map(|o| o.into_iter().collect()),
+            attributes_to_retrieve: other.attributes_to_retrieve.map(|o| o.into_iter().collect()),
             attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
             crop_length: other.crop_length,
             attributes_to_highlight: other.attributes_to_highlight.map(|o| o.into_iter().collect()),
```
```diff
@@ -19,6 +19,7 @@ use crate::error::MeilisearchHttpError;
 
 type MatchesPosition = BTreeMap<String, Vec<MatchBounds>>;
 
+pub const DEFAULT_SEARCH_OFFSET: fn() -> usize = || 0;
 pub const DEFAULT_SEARCH_LIMIT: fn() -> usize = || 20;
 pub const DEFAULT_CROP_LENGTH: fn() -> usize = || 10;
 pub const DEFAULT_CROP_MARKER: fn() -> String = || "…".to_string();
```
```diff
@@ -29,9 +30,12 @@ pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();
 #[serde(rename_all = "camelCase", deny_unknown_fields)]
 pub struct SearchQuery {
     pub q: Option<String>,
-    pub offset: Option<usize>,
+    #[serde(default = "DEFAULT_SEARCH_OFFSET")]
+    pub offset: usize,
     #[serde(default = "DEFAULT_SEARCH_LIMIT")]
     pub limit: usize,
+    pub page: Option<usize>,
+    pub hits_per_page: Option<usize>,
     pub attributes_to_retrieve: Option<BTreeSet<String>>,
     pub attributes_to_crop: Option<Vec<String>>,
     #[serde(default = "DEFAULT_CROP_LENGTH")]
```
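For context, the `fn() -> usize` constants are what `#[serde(default = "DEFAULT_SEARCH_OFFSET")]` points at: serde calls the named path whenever the field is missing from the request. A minimal standalone sketch of the same pattern (the struct and constant names here are illustrative, not Meilisearch's actual types, and it assumes `serde` with the derive feature plus `serde_json`):

```rust
use serde::Deserialize;

// Same trick as DEFAULT_SEARCH_OFFSET / DEFAULT_SEARCH_LIMIT: a const of
// function-pointer type whose path serde calls when the field is absent.
const DEFAULT_OFFSET: fn() -> usize = || 0;
const DEFAULT_LIMIT: fn() -> usize = || 20;

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct Query {
    q: Option<String>,
    #[serde(default = "DEFAULT_OFFSET")]
    offset: usize,
    #[serde(default = "DEFAULT_LIMIT")]
    limit: usize,
    // page/hitsPerPage stay optional so their absence can select offset/limit mode.
    page: Option<usize>,
    hits_per_page: Option<usize>,
}

fn main() -> Result<(), serde_json::Error> {
    let q: Query = serde_json::from_str(r#"{ "q": "hello", "page": 2 }"#)?;
    assert_eq!((q.offset, q.limit), (0, 20)); // defaults filled in
    assert_eq!(q.page, Some(2));
    println!("{q:?}");
    Ok(())
}
```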
```diff
@@ -53,6 +57,12 @@ pub struct SearchQuery {
     pub matching_strategy: MatchingStrategy,
 }
 
+impl SearchQuery {
+    pub fn is_finite_pagination(&self) -> bool {
+        self.page.or(self.hits_per_page).is_some()
+    }
+}
+
 #[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
 pub enum MatchingStrategy {
```
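The new helper makes `page`/`hitsPerPage` the switch between the two pagination modes: sending either one selects the finite, page-based mode. A tiny sketch of the same predicate on a stand-in struct (not the real `SearchQuery`):

```rust
// Standalone sketch of the predicate added above: either field being present
// selects the finite (page-based) pagination mode.
struct Pagination {
    page: Option<usize>,
    hits_per_page: Option<usize>,
}

impl Pagination {
    fn is_finite_pagination(&self) -> bool {
        // `Option::or` keeps the first `Some`, so this reads "page or hitsPerPage was given".
        self.page.or(self.hits_per_page).is_some()
    }
}

fn main() {
    assert!(!Pagination { page: None, hits_per_page: None }.is_finite_pagination());
    assert!(Pagination { page: Some(1), hits_per_page: None }.is_finite_pagination());
    assert!(Pagination { page: None, hits_per_page: Some(50) }.is_finite_pagination());
    println!("ok");
}
```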
```diff
@@ -91,15 +101,23 @@ pub struct SearchHit {
 #[serde(rename_all = "camelCase")]
 pub struct SearchResult {
     pub hits: Vec<SearchHit>,
-    pub estimated_total_hits: u64,
     pub query: String,
-    pub limit: usize,
-    pub offset: usize,
     pub processing_time_ms: u128,
+    #[serde(flatten)]
+    pub hits_info: HitsInfo,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub facet_distribution: Option<BTreeMap<String, BTreeMap<String, u64>>>,
 }
 
+#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
+#[serde(untagged)]
+pub enum HitsInfo {
+    #[serde(rename_all = "camelCase")]
+    Pagination { hits_per_page: usize, page: usize, total_pages: usize, total_hits: usize },
+    #[serde(rename_all = "camelCase")]
+    OffsetLimit { limit: usize, offset: usize, estimated_total_hits: usize },
+}
+
 pub fn perform_search(
     index: &Index,
     query: SearchQuery,
```
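`#[serde(flatten)]` on `hits_info` combined with `#[serde(untagged)]` on `HitsInfo` serializes the chosen variant's fields directly at the top level of the response, so clients see either `hitsPerPage`/`page`/`totalPages`/`totalHits` or `limit`/`offset`/`estimatedTotalHits`, never a wrapper key. A trimmed, self-contained sketch of that behaviour (not the full `SearchResult`; assumes `serde` with derive plus `serde_json`):

```rust
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct SearchResult {
    hits: Vec<serde_json::Value>,
    query: String,
    processing_time_ms: u128,
    // The variant's fields are merged into this map instead of nesting.
    #[serde(flatten)]
    hits_info: HitsInfo,
}

#[derive(Serialize)]
#[serde(untagged)]
enum HitsInfo {
    #[serde(rename_all = "camelCase")]
    Pagination { hits_per_page: usize, page: usize, total_pages: usize, total_hits: usize },
    #[serde(rename_all = "camelCase")]
    OffsetLimit { limit: usize, offset: usize, estimated_total_hits: usize },
}

fn main() {
    let paged = SearchResult {
        hits: vec![],
        query: "hello".into(),
        processing_time_ms: 3,
        hits_info: HitsInfo::Pagination { hits_per_page: 20, page: 2, total_pages: 5, total_hits: 100 },
    };
    // -> {"hits":[],"query":"hello","processingTimeMs":3,
    //     "hitsPerPage":20,"page":2,"totalPages":5,"totalHits":100}
    println!("{}", serde_json::to_string(&paged).unwrap());

    let offset_limit = SearchResult {
        hits: vec![],
        query: "hello".into(),
        processing_time_ms: 3,
        hits_info: HitsInfo::OffsetLimit { limit: 20, offset: 0, estimated_total_hits: 42 },
    };
    // -> {"hits":[],"query":"hello","processingTimeMs":3,
    //     "limit":20,"offset":0,"estimatedTotalHits":42}
    println!("{}", serde_json::to_string(&offset_limit).unwrap());
}
```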
```diff
@@ -113,6 +131,7 @@ pub fn perform_search(
         search.query(query);
     }
 
+    let is_finite_pagination = query.is_finite_pagination();
     search.terms_matching_strategy(query.matching_strategy.into());
 
     let max_total_hits = index
```
```diff
@@ -120,10 +139,23 @@
         .map_err(milli::Error::from)?
         .unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS);
 
+    search.exhaustive_number_hits(is_finite_pagination);
+
+    // compute the offset on the limit depending on the pagination mode.
+    let (offset, limit) = if is_finite_pagination {
+        let limit = query.hits_per_page.unwrap_or_else(DEFAULT_SEARCH_LIMIT);
+        let page = query.page.unwrap_or(1);
+
+        // page 0 gives a limit of 0 forcing Meilisearch to return no document.
+        page.checked_sub(1).map_or((0, 0), |p| (limit * p, limit))
+    } else {
+        (query.offset, query.limit)
+    };
+
     // Make sure that a user can't get more documents than the hard limit,
     // we align that on the offset too.
-    let offset = min(query.offset.unwrap_or(0), max_total_hits);
-    let limit = min(query.limit, max_total_hits.saturating_sub(offset));
+    let offset = min(offset, max_total_hits);
+    let limit = min(limit, max_total_hits.saturating_sub(offset));
 
     search.offset(offset);
     search.limit(limit);
```
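The finite-pagination branch converts `page`/`hitsPerPage` into the internal offset/limit pair, and both modes are then clamped to `maxTotalHits`. A standalone sketch of that arithmetic (the free function and the constant are illustrative, not part of the codebase):

```rust
use std::cmp::min;

const DEFAULT_LIMIT: usize = 20;

/// Mirror of the computation above: translate the requested pagination into
/// an (offset, limit) pair, then clamp both against `max_total_hits`.
fn offset_and_limit(
    page: Option<usize>,
    hits_per_page: Option<usize>,
    raw_offset: usize,
    raw_limit: usize,
    max_total_hits: usize,
) -> (usize, usize) {
    let is_finite_pagination = page.or(hits_per_page).is_some();

    let (offset, limit) = if is_finite_pagination {
        let limit = hits_per_page.unwrap_or(DEFAULT_LIMIT);
        let page = page.unwrap_or(1);
        // page 0 gives a limit of 0, so no document is returned.
        page.checked_sub(1).map_or((0, 0), |p| (limit * p, limit))
    } else {
        (raw_offset, raw_limit)
    };

    // A user can never reach past the hard limit; the offset is aligned on it too.
    let offset = min(offset, max_total_hits);
    let limit = min(limit, max_total_hits.saturating_sub(offset));
    (offset, limit)
}

fn main() {
    // page 3 with 20 hits per page -> skip 40, take 20
    assert_eq!(offset_and_limit(Some(3), Some(20), 0, 20, 1000), (40, 20));
    // page 0 short-circuits to an empty window
    assert_eq!(offset_and_limit(Some(0), Some(20), 0, 20, 1000), (0, 0));
    // offset/limit mode is clamped by max_total_hits
    assert_eq!(offset_and_limit(None, None, 990, 50, 1000), (990, 10));
    println!("ok");
}
```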
```diff
@@ -239,7 +271,23 @@ pub fn perform_search(
         documents.push(hit);
     }
 
-    let estimated_total_hits = candidates.len();
+    let number_of_hits = min(candidates.len() as usize, max_total_hits);
+    let hits_info = if is_finite_pagination {
+        let hits_per_page = query.hits_per_page.unwrap_or_else(DEFAULT_SEARCH_LIMIT);
+        // If hit_per_page is 0, then pages can't be computed and so we respond 0.
+        let total_pages = (number_of_hits + hits_per_page.saturating_sub(1))
+            .checked_div(hits_per_page)
+            .unwrap_or(0);
+
+        HitsInfo::Pagination {
+            hits_per_page,
+            page: query.page.unwrap_or(1),
+            total_pages,
+            total_hits: number_of_hits,
+        }
+    } else {
+        HitsInfo::OffsetLimit { limit: query.limit, offset, estimated_total_hits: number_of_hits }
+    };
 
     let facet_distribution = match query.facets {
         Some(ref fields) => {
```
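`totalPages` is a ceiling division of the reachable hits by `hitsPerPage`, with `checked_div` mapping `hitsPerPage == 0` to zero pages instead of panicking. The same arithmetic in isolation (the function name is illustrative):

```rust
/// Ceiling division as used above: (hits + per_page - 1) / per_page,
/// except that per_page == 0 yields 0 pages instead of dividing by zero.
fn total_pages(number_of_hits: usize, hits_per_page: usize) -> usize {
    (number_of_hits + hits_per_page.saturating_sub(1))
        .checked_div(hits_per_page)
        .unwrap_or(0)
}

fn main() {
    assert_eq!(total_pages(0, 20), 0);   // no hits -> no pages
    assert_eq!(total_pages(100, 20), 5); // exact multiple
    assert_eq!(total_pages(101, 20), 6); // partial last page
    assert_eq!(total_pages(42, 0), 0);   // hitsPerPage = 0 -> 0 pages
    println!("ok");
}
```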
```diff
@@ -263,10 +311,8 @@ pub fn perform_search(
 
     let result = SearchResult {
         hits: documents,
-        estimated_total_hits,
+        hits_info,
        query: query.q.clone().unwrap_or_default(),
-        limit: query.limit,
-        offset: query.offset.unwrap_or_default(),
         processing_time_ms: before_search.elapsed().as_millis(),
         facet_distribution,
     };
```