mirror of https://github.com/meilisearch/MeiliSearch, synced 2025-07-03 11:57:07 +02:00
Implements the experimental contains filter operator
parent 1582c7e788
commit 2af9481804
34 changed files with 484 additions and 122 deletions
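The commit adds a CONTAINS operator to the filter grammar, gated behind a runtime-togglable experimental feature. As a hedged illustration of the intended semantics (a stand-in, not the engine's parser or evaluator), a filter such as genre CONTAINS 'rock' matches documents whose attribute value contains the given string as a substring:

// Illustrative only: the substring semantics `CONTAINS` is meant to express.
fn contains_matches(attribute_value: &str, needle: &str) -> bool {
    attribute_value.contains(needle)
}

fn main() {
    assert!(contains_matches("progressive rock", "rock"));
    assert!(contains_matches("rockabilly", "rock"));
    assert!(!contains_matches("jazz", "rock"));
}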
@@ -49,6 +49,8 @@ pub struct RuntimeTogglableFeatures {
     pub logs_route: Option<bool>,
     #[deserr(default)]
     pub edit_documents_by_function: Option<bool>,
+    #[deserr(default)]
+    pub contains_filter: Option<bool>,
 }

 async fn patch_features(
@@ -72,6 +74,7 @@ async fn patch_features(
             .0
             .edit_documents_by_function
             .unwrap_or(old_features.edit_documents_by_function),
+        contains_filter: new_features.0.contains_filter.unwrap_or(old_features.contains_filter),
     };

     // explicitly destructure for analytics rather than using the `Serialize` implementation, because
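Each field is an Option<bool> with #[deserr(default)], so a PATCH body can set a flag to true, set it to false, or omit it to leave it untouched; the handler then merges the patch over the old features with unwrap_or, exactly as the added contains_filter line does. A runnable sketch of that merge, with stand-in types rather than Meilisearch's own:

// Stand-in types: `None` in the patch means "keep the previous value",
// mirroring the `unwrap_or(old_features.…)` calls in the hunk above.
#[derive(Clone, Copy, Debug, PartialEq)]
struct RuntimeFeatures {
    edit_documents_by_function: bool,
    contains_filter: bool,
}

#[derive(Default)]
struct RuntimeTogglableFeatures {
    edit_documents_by_function: Option<bool>,
    contains_filter: Option<bool>,
}

fn merge(old: RuntimeFeatures, patch: &RuntimeTogglableFeatures) -> RuntimeFeatures {
    RuntimeFeatures {
        edit_documents_by_function: patch
            .edit_documents_by_function
            .unwrap_or(old.edit_documents_by_function),
        contains_filter: patch.contains_filter.unwrap_or(old.contains_filter),
    }
}

fn main() {
    let old = RuntimeFeatures { edit_documents_by_function: true, contains_filter: false };
    // A `{"containsFilter": true}`-style patch: only one flag present.
    let patch = RuntimeTogglableFeatures { contains_filter: Some(true), ..Default::default() };
    let new = merge(old, &patch);
    assert_eq!(new, RuntimeFeatures { edit_documents_by_function: true, contains_filter: true });
}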
@@ -82,6 +85,7 @@ async fn patch_features(
         metrics,
         logs_route,
         edit_documents_by_function,
+        contains_filter,
     } = new_features;

     analytics.publish(
@@ -91,6 +95,7 @@ async fn patch_features(
             "metrics": metrics,
             "logs_route": logs_route,
             "edit_documents_by_function": edit_documents_by_function,
+            "contains_filter": contains_filter,
         }),
         Some(&req),
     );
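The truncated comment a few hunks up ("explicitly destructure for analytics rather than using the `Serialize` implementation, because…") points at the trick these two hunks rely on: because the destructure names every field and avoids `..`, adding a feature flag without also reporting it to analytics is a compile error. A small sketch of the same pattern, with simplified types:

// Sketch: an exhaustive destructure forces this site to be updated whenever
// a field is added to the struct; `..` is deliberately not used.
struct Features {
    metrics: bool,
    logs_route: bool,
    edit_documents_by_function: bool,
    contains_filter: bool,
}

fn publish(features: &Features) {
    let Features { metrics, logs_route, edit_documents_by_function, contains_filter } = features;
    println!(
        "{{\"metrics\":{metrics},\"logs_route\":{logs_route},\
         \"edit_documents_by_function\":{edit_documents_by_function},\
         \"contains_filter\":{contains_filter}}}"
    );
}

fn main() {
    publish(&Features {
        metrics: false,
        logs_route: false,
        edit_documents_by_function: false,
        contains_filter: true,
    });
}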
@@ -7,7 +7,7 @@ use bstr::ByteSlice as _;
 use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use deserr::Deserr;
 use futures::StreamExt;
-use index_scheduler::{IndexScheduler, TaskId};
+use index_scheduler::{IndexScheduler, RoFeatures, TaskId};
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType};
@@ -260,8 +260,15 @@ fn documents_by_query(
     let retrieve_vectors = RetrieveVectors::new(retrieve_vectors, features)?;

     let index = index_scheduler.index(&index_uid)?;
-    let (total, documents) =
-        retrieve_documents(&index, offset, limit, filter, fields, retrieve_vectors)?;
+    let (total, documents) = retrieve_documents(
+        &index,
+        offset,
+        limit,
+        filter,
+        fields,
+        retrieve_vectors,
+        index_scheduler.features(),
+    )?;

     let ret = PaginationView::new(offset, limit, total as usize, documents);
@@ -565,11 +572,9 @@ pub async fn delete_documents_by_filter(
     analytics.delete_documents(DocumentDeletionKind::PerFilter, &req);

     // we ensure the filter is well formed before enqueuing it
-    || -> Result<_, ResponseError> {
-        Ok(crate::search::parse_filter(&filter)?.ok_or(MeilisearchHttpError::EmptyFilter)?)
-    }()
-    // and whatever was the error, the error code should always be an InvalidDocumentFilter
-    .map_err(|err| ResponseError::from_msg(err.message, Code::InvalidDocumentFilter))?;
+    crate::search::parse_filter(&filter, Code::InvalidDocumentFilter, index_scheduler.features())?
+        .ok_or(MeilisearchHttpError::EmptyFilter)?;

     let task = KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr: filter };

     let uid = get_task_id(&req, &opt)?;
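Previously each route wrapped parse_filter in an immediately-invoked closure just to remap any failure to InvalidDocumentFilter; the diff moves the target error code, plus the feature set, into parse_filter itself, so every caller gets the same mapping and the same experimental-feature gate. A rough, runnable stand-in for the reworked shape, under assumed simplified types:

// Minimal stand-in for the reworked `parse_filter` shape (not Meilisearch's
// real types): the error code and runtime features become arguments.
#[derive(Clone, Copy, Debug)]
enum Code {
    InvalidDocumentFilter,
}

#[derive(Clone, Copy, Default)]
struct RoFeatures {
    contains_filter: bool,
}

#[derive(Debug)]
struct Filter(String);

fn parse_filter(expr: &str, code: Code, features: RoFeatures) -> Result<Option<Filter>, String> {
    if expr.trim().is_empty() {
        return Ok(None); // an empty expression parses to "no filter"
    }
    if expr.contains("CONTAINS") && !features.contains_filter {
        return Err(format!("{code:?}: `CONTAINS` requires the experimental contains filter feature"));
    }
    Ok(Some(Filter(expr.to_string())))
}

fn main() {
    let off = RoFeatures::default();
    // The gate rejects CONTAINS while the flag is off...
    assert!(parse_filter("genre CONTAINS rock", Code::InvalidDocumentFilter, off).is_err());
    // ...and lets it through once it is enabled.
    let on = RoFeatures { contains_filter: true };
    assert!(parse_filter("genre CONTAINS rock", Code::InvalidDocumentFilter, on).is_ok());
}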
@@ -626,11 +631,12 @@ pub async fn edit_documents_by_function(

     if let Some(ref filter) = filter {
         // we ensure the filter is well formed before enqueuing it
-        || -> Result<_, ResponseError> {
-            Ok(crate::search::parse_filter(filter)?.ok_or(MeilisearchHttpError::EmptyFilter)?)
-        }()
-        // and whatever was the error, the error code should always be an InvalidDocumentFilter
-        .map_err(|err| ResponseError::from_msg(err.message, Code::InvalidDocumentFilter))?;
+        crate::search::parse_filter(
+            filter,
+            Code::InvalidDocumentFilter,
+            index_scheduler.features(),
+        )?
+        .ok_or(MeilisearchHttpError::EmptyFilter)?;
     }
     let task = KindWithContent::DocumentEdition {
         index_uid,
@@ -736,12 +742,12 @@ fn retrieve_documents<S: AsRef<str>>(
     filter: Option<Value>,
     attributes_to_retrieve: Option<Vec<S>>,
     retrieve_vectors: RetrieveVectors,
+    features: RoFeatures,
 ) -> Result<(u64, Vec<Document>), ResponseError> {
     let rtxn = index.read_txn()?;
     let filter = &filter;
     let filter = if let Some(filter) = filter {
-        parse_filter(filter)
-            .map_err(|err| ResponseError::from_msg(err.to_string(), Code::InvalidDocumentFilter))?
+        parse_filter(filter, Code::InvalidDocumentFilter, features)?
     } else {
         None
     };
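retrieve_documents now receives a RoFeatures value, the read-only feature snapshot imported from index_scheduler in the hunk at the top of this file, so the filter parser can reject CONTAINS while the flag is off. Assuming it follows the same check_* convention used for other experimental flags (an assumption; this hunk does not show it), the gate might look roughly like:

// Assumed shape, modeled on the check_* pattern for other experimental flags.
#[derive(Clone, Copy)]
pub struct RoFeatures {
    contains_filter: bool,
}

#[derive(Debug)]
pub struct FeatureNotEnabledError {
    pub feature: &'static str,
}

impl RoFeatures {
    pub fn check_contains_filter(&self) -> Result<(), FeatureNotEnabledError> {
        if self.contains_filter {
            Ok(())
        } else {
            Err(FeatureNotEnabledError { feature: "contains filter" })
        }
    }
}

fn main() {
    let features = RoFeatures { contains_filter: false };
    assert!(features.check_contains_filter().is_err());
}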
@@ -79,7 +79,14 @@ pub async fn search(
     let search_kind = search_kind(&search_query, &index_scheduler, &index, features)?;
     let _permit = search_queue.try_get_search_permit().await?;
     let search_result = tokio::task::spawn_blocking(move || {
-        perform_facet_search(&index, search_query, facet_query, facet_name, search_kind)
+        perform_facet_search(
+            &index,
+            search_query,
+            facet_query,
+            facet_name,
+            search_kind,
+            index_scheduler.features(),
+        )
     })
     .await?;
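In each single-index search route the extra argument is evaluated inside the spawn_blocking move closure: the closure owns whatever it captures, and since the feature snapshot is a cheap Copy value it travels into the blocking task for free. A minimal sketch of the pattern, assuming the tokio crate with the rt and macros features enabled:

// Sketch of the capture pattern: the `move` closure owns its captures, so a
// `Copy` feature snapshot can cross into the blocking task cheaply.
#[derive(Clone, Copy, Debug)]
struct RoFeatures {
    contains_filter: bool,
}

#[tokio::main]
async fn main() {
    let features = RoFeatures { contains_filter: true };
    let result = tokio::task::spawn_blocking(move || {
        // stand-in for perform_search(&index, query, search_kind, retrieve_vectors, features)
        format!("search ran with contains_filter = {}", features.contains_filter)
    })
    .await
    .expect("blocking task panicked");
    println!("{result}");
}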
@@ -231,7 +231,7 @@ pub async fn search_with_url_query(
     let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)?;
     let _permit = search_queue.try_get_search_permit().await?;
     let search_result = tokio::task::spawn_blocking(move || {
-        perform_search(&index, query, search_kind, retrieve_vector)
+        perform_search(&index, query, search_kind, retrieve_vector, index_scheduler.features())
     })
     .await?;
     if let Ok(ref search_result) = search_result {
@@ -274,7 +274,7 @@ pub async fn search_with_post(

     let _permit = search_queue.try_get_search_permit().await?;
     let search_result = tokio::task::spawn_blocking(move || {
-        perform_search(&index, query, search_kind, retrieve_vectors)
+        perform_search(&index, query, search_kind, retrieve_vectors, index_scheduler.features())
     })
     .await?;
     if let Ok(ref search_result) = search_result {
@@ -106,7 +106,14 @@ async fn similar(
         SearchKind::embedder(&index_scheduler, &index, query.embedder.as_deref(), None)?;

     tokio::task::spawn_blocking(move || {
-        perform_similar(&index, query, embedder_name, embedder, retrieve_vectors)
+        perform_similar(
+            &index,
+            query,
+            embedder_name,
+            embedder,
+            retrieve_vectors,
+            index_scheduler.features(),
+        )
     })
     .await?
 }
@@ -112,6 +112,7 @@ pub async fn multi_search_with_post(
                 ));
             }

+            let features = index_scheduler.features();
             let index = index_scheduler
                 .index(&index_uid)
                 .map_err(|err| {
@@ -130,7 +131,7 @@ pub async fn multi_search_with_post(
                 .with_index(query_index)?;

             let search_result = tokio::task::spawn_blocking(move || {
-                perform_search(&index, query, search_kind, retrieve_vector)
+                perform_search(&index, query, search_kind, retrieve_vector, features)
             })
             .await
             .with_index(query_index)?;
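Unlike the single-index routes, multi-search binds features before the per-query closure: each closure moves its own index in, and capturing a Copy snapshot taken up front keeps the scheduler handle itself out of the closure. A reduced sketch of why the hoisted binding works, with stand-in types:

// Take the `Copy` snapshot once, then let each per-query closure capture the
// snapshot rather than the scheduler itself.
#[derive(Clone, Copy)]
struct RoFeatures {
    contains_filter: bool,
}

struct Scheduler {
    features: RoFeatures,
}

impl Scheduler {
    fn features(&self) -> RoFeatures {
        self.features
    }
}

fn main() {
    let scheduler = Scheduler { features: RoFeatures { contains_filter: false } };
    let features = scheduler.features(); // hoisted once, outside the closures
    let per_query: Vec<_> = (0..3)
        .map(|i| move || (i, features.contains_filter)) // each closure captures the Copy
        .collect();
    for task in per_query {
        let (i, enabled) = task();
        println!("query {i}: contains_filter = {enabled}");
    }
}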