https://github.com/meilisearch/MeiliSearch

Integrate deserr on the most important routes

commit 50ce0409bc (parent 839b05c43d)
21 changed files with 1582 additions and 657 deletions
@@ -7,6 +7,7 @@ name = "meilisearch"
version = "1.0.0"

[dependencies]
deserr = { path = "../../deserr", features = ["serde-json"] }
actix-cors = "0.6.3"
actix-http = { version = "3.2.2", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] }
actix-web = { version = "4.2.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }

@@ -71,6 +72,8 @@ toml = "0.5.9"
uuid = { version = "1.1.2", features = ["serde", "v4"] }
walkdir = "2.3.2"
yaup = "0.2.0"
serde_urlencoded = "0.7.1"
actix-utils = "3.0.1"

[dev-dependencies]
actix-rt = "2.7.0"

@@ -99,17 +102,7 @@ zip = { version = "0.6.2", optional = true }
default = ["analytics", "meilisearch-types/default", "mini-dashboard"]
metrics = ["prometheus"]
analytics = ["segment"]
mini-dashboard = [
    "actix-web-static-files",
    "static-files",
    "anyhow",
    "cargo_toml",
    "hex",
    "reqwest",
    "sha-1",
    "tempfile",
    "zip",
]
mini-dashboard = ["actix-web-static-files", "static-files", "anyhow", "cargo_toml", "hex", "reqwest", "sha-1", "tempfile", "zip"]
chinese = ["meilisearch-types/chinese"]
hebrew = ["meilisearch-types/hebrew"]
japanese = ["meilisearch-types/japanese"]
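
The only addition here is the `deserr` dependency (a local path dependency with its `serde-json` feature). As a rough, hedged sketch of what it is used for in the rest of this commit: a request type derives `DeserializeFromValue`, and `deserr::deserialize` turns an already-parsed `serde_json::Value` into that type through a chosen `DeserializeError` implementation. The `Greeting` type below is purely illustrative, and using deserr's own `serde_json::JsonError` as the error type is an assumption; the routes in this commit wrap it in their own per-route error types instead.

```rust
use deserr::DeserializeFromValue;
use serde_json::json;

// Illustrative payload only; the real request types in this commit derive the
// same trait (e.g. `IndexCreateRequest`, `SearchQuery`).
#[derive(Debug, DeserializeFromValue)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
struct Greeting {
    name: String,
    repeat_count: usize,
}

fn demo() {
    let value = json!({ "name": "kero", "repeatCount": 2 });
    // `deserr::serde_json::JsonError` is the error type the per-route errors
    // defined later in this diff delegate their messages to.
    let greeting =
        deserr::deserialize::<Greeting, _, deserr::serde_json::JsonError>(value).unwrap();
    println!("{greeting:?}");
}
```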
meilisearch/src/extractors/deserr.rs (new file, 78 lines)
@@ -0,0 +1,78 @@
use std::fmt::Debug;
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};

use actix_web::dev::Payload;
use actix_web::web::Json;
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use futures::ready;
use meilisearch_types::error::{ErrorCode, ResponseError};

/// Extractor for typed data from Json request payloads
/// deserialised by deserr.
///
/// # Extractor
/// To extract typed data from a request body, the inner type `T` must implement the
/// [`deserr::DeserializeFromError<E>`] trait. The inner type `E` must implement the
/// [`ErrorCode`](meilisearch_error::ErrorCode) trait.
#[derive(Debug)]
pub struct ValidatedJson<T, E>(pub T, PhantomData<*const E>);

impl<T, E> ValidatedJson<T, E> {
    pub fn new(data: T) -> Self {
        ValidatedJson(data, PhantomData)
    }
    pub fn into_inner(self) -> T {
        self.0
    }
}

impl<T, E> FromRequest for ValidatedJson<T, E>
where
    E: DeserializeError + ErrorCode + 'static,
    T: DeserializeFromValue<E>,
{
    type Error = actix_web::Error;
    type Future = ValidatedJsonExtractFut<T, E>;

    #[inline]
    fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
        ValidatedJsonExtractFut {
            fut: Json::<serde_json::Value>::from_request(req, payload),
            _phantom: PhantomData,
        }
    }
}

pub struct ValidatedJsonExtractFut<T, E> {
    fut: <Json<serde_json::Value> as FromRequest>::Future,
    _phantom: PhantomData<*const (T, E)>,
}

impl<T, E> Future for ValidatedJsonExtractFut<T, E>
where
    T: DeserializeFromValue<E>,
    E: DeserializeError + ErrorCode + 'static,
{
    type Output = Result<ValidatedJson<T, E>, actix_web::Error>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let ValidatedJsonExtractFut { fut, .. } = self.get_mut();
        let fut = Pin::new(fut);

        let res = ready!(fut.poll(cx));

        let res = match res {
            Err(err) => Err(err),
            Ok(data) => match deserr::deserialize::<_, _, E>(data.into_inner()) {
                Ok(data) => Ok(ValidatedJson::new(data)),
                Err(e) => Err(ResponseError::from(e).into()),
            },
        };

        Poll::Ready(res)
    }
}
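
A minimal sketch (not part of the commit) of how a route consumes this extractor. The routes further down import it as `crate::extractors::json::ValidatedJson`, i.e. from the identical `json.rs` module below, which is the one actually registered in `extractors/mod.rs`; the payload and error types reused here (`IndexCreateRequest`, `CreateIndexesDeserrError`) are the ones this commit defines in `routes/indexes/mod.rs`.

```rust
use actix_web::HttpResponse;
use meilisearch_types::error::ResponseError;

use crate::extractors::json::ValidatedJson;
use crate::routes::indexes::{CreateIndexesDeserrError, IndexCreateRequest};

// Condensed from the `create_index` route below: deserr runs during argument
// extraction, so the handler body only ever sees a payload that deserialized
// successfully.
pub async fn create_index_sketch(
    body: ValidatedJson<IndexCreateRequest, CreateIndexesDeserrError>,
) -> Result<HttpResponse, ResponseError> {
    // A malformed body was already turned into a `ResponseError` carrying the
    // code chosen by `CreateIndexesDeserrError` and never reached this point.
    let _request: IndexCreateRequest = body.into_inner();
    Ok(HttpResponse::Accepted().finish())
}
```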
meilisearch/src/extractors/json.rs (new file, 78 lines)
@@ -0,0 +1,78 @@
use std::fmt::Debug;
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};

use actix_web::dev::Payload;
use actix_web::web::Json;
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use futures::ready;
use meilisearch_types::error::{ErrorCode, ResponseError};

/// Extractor for typed data from Json request payloads
/// deserialised by deserr.
///
/// # Extractor
/// To extract typed data from a request body, the inner type `T` must implement the
/// [`deserr::DeserializeFromError<E>`] trait. The inner type `E` must implement the
/// [`ErrorCode`](meilisearch_error::ErrorCode) trait.
#[derive(Debug)]
pub struct ValidatedJson<T, E>(pub T, PhantomData<*const E>);

impl<T, E> ValidatedJson<T, E> {
    pub fn new(data: T) -> Self {
        ValidatedJson(data, PhantomData)
    }
    pub fn into_inner(self) -> T {
        self.0
    }
}

impl<T, E> FromRequest for ValidatedJson<T, E>
where
    E: DeserializeError + ErrorCode + 'static,
    T: DeserializeFromValue<E>,
{
    type Error = actix_web::Error;
    type Future = ValidatedJsonExtractFut<T, E>;

    #[inline]
    fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
        ValidatedJsonExtractFut {
            fut: Json::<serde_json::Value>::from_request(req, payload),
            _phantom: PhantomData,
        }
    }
}

pub struct ValidatedJsonExtractFut<T, E> {
    fut: <Json<serde_json::Value> as FromRequest>::Future,
    _phantom: PhantomData<*const (T, E)>,
}

impl<T, E> Future for ValidatedJsonExtractFut<T, E>
where
    T: DeserializeFromValue<E>,
    E: DeserializeError + ErrorCode + 'static,
{
    type Output = Result<ValidatedJson<T, E>, actix_web::Error>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let ValidatedJsonExtractFut { fut, .. } = self.get_mut();
        let fut = Pin::new(fut);

        let res = ready!(fut.poll(cx));

        let res = match res {
            Err(err) => Err(err),
            Ok(data) => match deserr::deserialize::<_, _, E>(data.into_inner()) {
                Ok(data) => Ok(ValidatedJson::new(data)),
                Err(e) => Err(ResponseError::from(e).into()),
            },
        };

        Poll::Ready(res)
    }
}
@@ -1,4 +1,7 @@
pub mod payload;
#[macro_use]
pub mod authentication;
// pub mod deserr;
pub mod json;
pub mod query_parameters;
pub mod sequential_extractor;
meilisearch/src/extractors/query_parameters.rs (new file, 134 lines)
@@ -0,0 +1,134 @@
//! For query parameter extractor documentation, see [`Query`].

use std::marker::PhantomData;
use std::{fmt, ops};

use actix_http::Payload;
use actix_utils::future::{err, ok, Ready};
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};

/// Extract typed information from the request's query.
///
/// To extract typed data from the URL query string, the inner type `T` must implement the
/// [`DeserializeOwned`] trait.
///
/// Use [`QueryConfig`] to configure extraction process.
///
/// # Panics
/// A query string consists of unordered `key=value` pairs, therefore it cannot be decoded into any
/// type which depends upon data ordering (eg. tuples). Trying to do so will result in a panic.
///
/// # Examples
/// ```
/// use actix_web::{get, web};
/// use serde::Deserialize;
///
/// #[derive(Debug, Deserialize)]
/// pub enum ResponseType {
///     Token,
///     Code
/// }
///
/// #[derive(Debug, Deserialize)]
/// pub struct AuthRequest {
///     id: u64,
///     response_type: ResponseType,
/// }
///
/// // Deserialize `AuthRequest` struct from query string.
/// // This handler gets called only if the request's query parameters contain both fields.
/// // A valid request path for this handler would be `/?id=64&response_type=Code"`.
/// #[get("/")]
/// async fn index(info: web::Query<AuthRequest>) -> String {
///     format!("Authorization request for id={} and type={:?}!", info.id, info.response_type)
/// }
///
/// // To access the entire underlying query struct, use `.into_inner()`.
/// #[get("/debug1")]
/// async fn debug1(info: web::Query<AuthRequest>) -> String {
///     dbg!("Authorization object = {:?}", info.into_inner());
///     "OK".to_string()
/// }
///
/// // Or use destructuring, which is equivalent to `.into_inner()`.
/// #[get("/debug2")]
/// async fn debug2(web::Query(info): web::Query<AuthRequest>) -> String {
///     dbg!("Authorization object = {:?}", info);
///     "OK".to_string()
/// }
/// ```
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct QueryParameter<T, E>(pub T, PhantomData<*const E>);

impl<T, E> QueryParameter<T, E> {
    /// Unwrap into inner `T` value.
    pub fn into_inner(self) -> T {
        self.0
    }
}

impl<T, E> QueryParameter<T, E>
where
    T: DeserializeFromValue<E>,
    E: DeserializeError + ErrorCode + 'static,
{
    /// Deserialize a `T` from the URL encoded query parameter string.
    ///
    /// ```
    /// # use std::collections::HashMap;
    /// # use actix_web::web::Query;
    /// let numbers = Query::<HashMap<String, u32>>::from_query("one=1&two=2").unwrap();
    /// assert_eq!(numbers.get("one"), Some(&1));
    /// assert_eq!(numbers.get("two"), Some(&2));
    /// assert!(numbers.get("three").is_none());
    /// ```
    pub fn from_query(query_str: &str) -> Result<Self, actix_web::Error> {
        let value = serde_urlencoded::from_str::<serde_json::Value>(query_str)
            .map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;

        match deserr::deserialize::<_, _, E>(value) {
            Ok(data) => Ok(QueryParameter(data, PhantomData)),
            Err(e) => Err(ResponseError::from(e).into()),
        }
    }
}

impl<T, E> ops::Deref for QueryParameter<T, E> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T, E> ops::DerefMut for QueryParameter<T, E> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}

impl<T: fmt::Display, E> fmt::Display for QueryParameter<T, E> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

/// See [here](#Examples) for example of usage as an extractor.
impl<T, E> FromRequest for QueryParameter<T, E>
where
    T: DeserializeFromValue<E>,
    E: DeserializeError + ErrorCode + 'static,
{
    type Error = actix_web::Error;
    type Future = Ready<Result<Self, actix_web::Error>>;

    #[inline]
    fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
        QueryParameter::from_query(&req.query_string()).map(ok).unwrap_or_else(err)
        // serde_urlencoded::from_str::<serde_json::Value>(req.query_string())
        //     .map(|val| Ok(QueryParameter(val, PhantomData)))
        //     .unwrap_or_else(|e| err(ResponseError::from_msg(e.to_string(), Code::BadRequest)))
    }
}
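
Unlike JSON bodies, query-string values always arrive as strings, which is why the parameter structs below (`Pagination`, `SearchQueryGet`) pair this extractor with deserr's `from(&String) = FromStr::from_str -> ...` attribute for numeric fields. A minimal sketch (not part of the commit) of a GET handler built on `QueryParameter`, reusing types defined later in this diff:

```rust
use actix_web::HttpResponse;
use meilisearch_types::error::ResponseError;

use crate::extractors::query_parameters::QueryParameter;
use crate::routes::indexes::ListIndexesDeserrError;
use crate::routes::Pagination;

// Sketch of a GET handler: `?offset=0&limit=20` is deserialized by deserr, so a
// bad value such as `?limit=doggo` is rejected with the code chosen by
// `ListIndexesDeserrError` before the handler body runs.
pub async fn list_something_sketch(
    paginate: QueryParameter<Pagination, ListIndexesDeserrError>,
) -> Result<HttpResponse, ResponseError> {
    // `QueryParameter` derefs to the inner type.
    Ok(HttpResponse::Ok().json(serde_json::json!({
        "offset": paginate.offset,
        "limit": paginate.limit,
    })))
}
```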
@@ -1,8 +1,14 @@
use std::convert::Infallible;
use std::num::ParseIntError;

use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{
    DeserializeError, DeserializeFromValue, ErrorKind, IntoValue, MergeWithError, ValuePointerRef,
};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::{self, FieldDistribution, Index};
use meilisearch_types::tasks::KindWithContent;

@@ -14,6 +20,8 @@ use super::{Pagination, SummarizedTaskView};
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;

pub mod documents;

@@ -66,7 +74,7 @@ impl IndexView {

pub async fn list_indexes(
    index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
    paginate: web::Query<Pagination>,
    paginate: QueryParameter<Pagination, ListIndexesDeserrError>,
) -> Result<HttpResponse, ResponseError> {
    let search_rules = &index_scheduler.filters().search_rules;
    let indexes: Vec<_> = index_scheduler.indexes()?;

@@ -82,8 +90,68 @@ pub async fn list_indexes(
    Ok(HttpResponse::Ok().json(ret))
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(Debug)]
pub struct ListIndexesDeserrError {
    error: String,
    code: Code,
}

impl std::fmt::Display for ListIndexesDeserrError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.error)
    }
}

impl std::error::Error for ListIndexesDeserrError {}
impl ErrorCode for ListIndexesDeserrError {
    fn error_code(&self) -> Code {
        self.code
    }
}

impl MergeWithError<ListIndexesDeserrError> for ListIndexesDeserrError {
    fn merge(
        _self_: Option<Self>,
        other: ListIndexesDeserrError,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Err(other)
    }
}

impl deserr::DeserializeError for ListIndexesDeserrError {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let code = match location.last_field() {
            Some("offset") => Code::InvalidIndexLimit,
            Some("limit") => Code::InvalidIndexOffset,
            _ => Code::BadRequest,
        };
        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;

        Err(ListIndexesDeserrError { error, code })
    }
}

impl MergeWithError<ParseIntError> for ListIndexesDeserrError {
    fn merge(
        _self_: Option<Self>,
        other: ParseIntError,
        merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        ListIndexesDeserrError::error::<Infallible>(
            None,
            ErrorKind::Unexpected { msg: other.to_string() },
            merge_location,
        )
    }
}

#[derive(DeserializeFromValue, Debug)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct IndexCreateRequest {
    uid: String,
    primary_key: Option<String>,

@@ -91,7 +159,7 @@ pub struct IndexCreateRequest {

pub async fn create_index(
    index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
    body: web::Json<IndexCreateRequest>,
    body: ValidatedJson<IndexCreateRequest, CreateIndexesDeserrError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {

@@ -116,11 +184,58 @@ pub async fn create_index(
    }
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[allow(dead_code)]
#[derive(Debug)]
pub struct CreateIndexesDeserrError {
    error: String,
    code: Code,
}

impl std::fmt::Display for CreateIndexesDeserrError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.error)
    }
}

impl std::error::Error for CreateIndexesDeserrError {}
impl ErrorCode for CreateIndexesDeserrError {
    fn error_code(&self) -> Code {
        self.code
    }
}

impl MergeWithError<CreateIndexesDeserrError> for CreateIndexesDeserrError {
    fn merge(
        _self_: Option<Self>,
        other: CreateIndexesDeserrError,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Err(other)
    }
}

impl deserr::DeserializeError for CreateIndexesDeserrError {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let code = match location.last_field() {
            Some("uid") => Code::InvalidIndexUid,
            Some("primaryKey") => Code::InvalidIndexPrimaryKey,
            None if matches!(error, ErrorKind::MissingField { field } if field == "uid") => {
                Code::MissingIndexUid
            }
            _ => Code::BadRequest,
        };
        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;

        Err(CreateIndexesDeserrError { error, code })
    }
}

#[derive(DeserializeFromValue, Debug)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct UpdateIndexRequest {
    uid: Option<String>,
    primary_key: Option<String>,
}

@@ -139,7 +254,7 @@ pub async fn get_index(
pub async fn update_index(
    index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
    path: web::Path<String>,
    body: web::Json<UpdateIndexRequest>,
    body: ValidatedJson<UpdateIndexRequest, UpdateIndexesDeserrError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {

@@ -147,7 +262,7 @@ pub async fn update_index(
    let body = body.into_inner();
    analytics.publish(
        "Index Updated".to_string(),
        json!({ "primary_key": body.primary_key}),
        json!({ "primary_key": body.primary_key }),
        Some(&req),
    );

@@ -163,6 +278,51 @@ pub async fn update_index(
    Ok(HttpResponse::Accepted().json(task))
}

#[derive(Debug)]
pub struct UpdateIndexesDeserrError {
    error: String,
    code: Code,
}

impl std::fmt::Display for UpdateIndexesDeserrError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.error)
    }
}

impl std::error::Error for UpdateIndexesDeserrError {}
impl ErrorCode for UpdateIndexesDeserrError {
    fn error_code(&self) -> Code {
        self.code
    }
}

impl MergeWithError<UpdateIndexesDeserrError> for UpdateIndexesDeserrError {
    fn merge(
        _self_: Option<Self>,
        other: UpdateIndexesDeserrError,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Err(other)
    }
}

impl deserr::DeserializeError for UpdateIndexesDeserrError {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let code = match location.last_field() {
            Some("primaryKey") => Code::InvalidIndexPrimaryKey,
            _ => Code::BadRequest,
        };
        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;

        Err(UpdateIndexesDeserrError { error, code })
    }
}

pub async fn delete_index(
    index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
    index_uid: web::Path<String>,
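
Each route now carries a dedicated `DeserializeError` type whose `error` hook inspects where deserialization failed (`location.last_field()`) and picks a route-specific `Code`. A hedged sketch of what that buys, calling `deserr::deserialize` the same way the `ValidatedJson` extractor does (this is not a test from the commit):

```rust
use meilisearch_types::error::ResponseError;
use serde_json::json;

use crate::routes::indexes::{CreateIndexesDeserrError, IndexCreateRequest};

fn demo() {
    // An invalid `primaryKey` should surface `Code::InvalidIndexPrimaryKey`
    // rather than a generic bad_request, because the failing location's last
    // field is "primaryKey".
    let payload = json!({ "uid": "movies", "primaryKey": ["not", "a", "string"] });
    match deserr::deserialize::<IndexCreateRequest, _, CreateIndexesDeserrError>(payload) {
        Ok(_) => println!("accepted"),
        Err(e) => {
            // This is the conversion the HTTP layer performs before replying.
            let response = ResponseError::from(e);
            println!("{response:?}");
        }
    }
}
```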
@@ -1,21 +1,25 @@
use std::str::FromStr;

use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_auth::IndexSearchRules;
use meilisearch_types::error::ResponseError;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::Value;

use crate::analytics::{Analytics, SearchAggregator};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::from_string_to_option;
use crate::search::{
    perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
    DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
    DEFAULT_SEARCH_OFFSET,
    perform_search, MatchingStrategy, SearchDeserError, SearchQuery, DEFAULT_CROP_LENGTH,
    DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
    DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
};

pub fn configure(cfg: &mut web::ServiceConfig) {

@@ -26,33 +30,35 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
    );
}

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(Debug, deserr::DeserializeFromValue)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQueryGet {
    q: Option<String>,
    #[serde(default = "DEFAULT_SEARCH_OFFSET")]
    #[deserr(default = DEFAULT_SEARCH_OFFSET(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
    offset: usize,
    #[serde(default = "DEFAULT_SEARCH_LIMIT")]
    #[deserr(default = DEFAULT_SEARCH_LIMIT(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
    limit: usize,
    #[deserr(from(&String) = from_string_to_option -> std::num::ParseIntError)]
    page: Option<usize>,
    #[deserr(from(&String) = from_string_to_option -> std::num::ParseIntError)]
    hits_per_page: Option<usize>,
    attributes_to_retrieve: Option<CS<String>>,
    attributes_to_crop: Option<CS<String>>,
    #[serde(default = "DEFAULT_CROP_LENGTH")]
    #[deserr(default = DEFAULT_CROP_LENGTH(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
    crop_length: usize,
    attributes_to_highlight: Option<CS<String>>,
    filter: Option<String>,
    sort: Option<String>,
    #[serde(default = "Default::default")]
    #[deserr(default, from(&String) = FromStr::from_str -> std::str::ParseBoolError)]
    show_matches_position: bool,
    facets: Option<CS<String>>,
    #[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
    #[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG())]
    highlight_pre_tag: String,
    #[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
    #[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG())]
    highlight_post_tag: String,
    #[serde(default = "DEFAULT_CROP_MARKER")]
    #[deserr(default = DEFAULT_CROP_MARKER())]
    crop_marker: String,
    #[serde(default)]
    #[deserr(default)]
    matching_strategy: MatchingStrategy,
}

@@ -136,7 +142,7 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
pub async fn search_with_url_query(
    index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
    index_uid: web::Path<String>,
    params: web::Query<SearchQueryGet>,
    params: QueryParameter<SearchQueryGet, SearchDeserError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {

@@ -168,7 +174,7 @@ pub async fn search_with_url_query(
pub async fn search_with_post(
    index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
    index_uid: web::Path<String>,
    params: web::Json<SearchQuery>,
    params: ValidatedJson<SearchQuery, SearchDeserError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -1,8 +1,11 @@
use std::fmt;

use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{IntoValue, ValuePointerRef};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::settings::{settings, Settings, Unchecked};
use meilisearch_types::tasks::KindWithContent;

@@ -11,6 +14,7 @@ use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::routes::SummarizedTaskView;

#[macro_export]

@@ -39,7 +43,7 @@ macro_rules! make_setting_route {
            >,
            index_uid: web::Path<String>,
        ) -> Result<HttpResponse, ResponseError> {
            let new_settings = Settings { $attr: Setting::Reset, ..Default::default() };
            let new_settings = Settings { $attr: Setting::Reset.into(), ..Default::default() };

            let allow_index_creation = index_scheduler.filters().allow_index_creation;
            let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();

@@ -74,8 +78,8 @@ macro_rules! make_setting_route {

            let new_settings = Settings {
                $attr: match body {
                    Some(inner_body) => Setting::Set(inner_body),
                    None => Setting::Reset,
                    Some(inner_body) => Setting::Set(inner_body).into(),
                    None => Setting::Reset.into(),
                },
                ..Default::default()
            };

@@ -208,7 +212,7 @@ make_setting_route!(
        "TypoTolerance Updated".to_string(),
        json!({
            "typo_tolerance": {
                "enabled": setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
                "enabled": setting.as_ref().map(|s| !matches!(s.enabled.into(), Setting::Set(false))),
                "disable_on_attributes": setting
                    .as_ref()
                    .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),

@@ -424,10 +428,66 @@ generate_configure!(
    faceting
);

#[derive(Debug)]
pub struct SettingsDeserrError {
    error: String,
    code: Code,
}

impl std::fmt::Display for SettingsDeserrError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.error)
    }
}

impl std::error::Error for SettingsDeserrError {}
impl ErrorCode for SettingsDeserrError {
    fn error_code(&self) -> Code {
        self.code
    }
}

impl deserr::MergeWithError<SettingsDeserrError> for SettingsDeserrError {
    fn merge(
        _self_: Option<Self>,
        other: SettingsDeserrError,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Err(other)
    }
}

impl deserr::DeserializeError for SettingsDeserrError {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: deserr::ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;

        let code = match location.first_field() {
            Some("displayedAttributes") => Code::InvalidSettingsDisplayedAttributes,
            Some("searchableAttributes") => Code::InvalidSettingsSearchableAttributes,
            Some("filterableAttributes") => Code::InvalidSettingsFilterableAttributes,
            Some("sortableAttributes") => Code::InvalidSettingsSortableAttributes,
            Some("rankingRules") => Code::InvalidSettingsRankingRules,
            Some("stopWords") => Code::InvalidSettingsStopWords,
            Some("synonyms") => Code::InvalidSettingsSynonyms,
            Some("distinctAttribute") => Code::InvalidSettingsDistinctAttribute,
            Some("typoTolerance") => Code::InvalidSettingsTypoTolerance,
            Some("faceting") => Code::InvalidSettingsFaceting,
            Some("pagination") => Code::InvalidSettingsPagination,
            _ => Code::BadRequest,
        };

        Err(SettingsDeserrError { error, code })
    }
}

pub async fn update_all(
    index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
    index_uid: web::Path<String>,
    body: web::Json<Settings<Unchecked>>,
    body: ValidatedJson<Settings<Unchecked>, SettingsDeserrError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
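
Note that `SettingsDeserrError` keys its code off `location.first_field()` rather than `last_field()`: settings payloads are nested, so an error buried inside, say, `typoTolerance.enabled` should still map to the typo-tolerance settings code. A hedged sketch of the expected behaviour (not a test from the commit; it assumes `Settings<Unchecked>` derives `DeserializeFromValue`, as its use with `ValidatedJson` above implies, and that the module is reachable as `crate::routes::indexes::settings`):

```rust
use meilisearch_types::error::ResponseError;
use meilisearch_types::settings::{Settings, Unchecked};
use serde_json::json;

use crate::routes::indexes::settings::SettingsDeserrError;

fn demo() {
    // The type error sits at `.typoTolerance.enabled`, but only the first path
    // segment is matched, so the reported code is the typo-tolerance one.
    let payload = json!({ "typoTolerance": { "enabled": "not-a-boolean" } });
    if let Err(e) = deserr::deserialize::<Settings<Unchecked>, _, SettingsDeserrError>(payload) {
        println!("{:?}", ResponseError::from(e));
    }
}
```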
@@ -1,7 +1,9 @@
use std::collections::BTreeMap;
use std::str::FromStr;

use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::DeserializeFromValue;
use index_scheduler::{IndexScheduler, Query};
use log::debug;
use meilisearch_types::error::ResponseError;

@@ -49,6 +51,13 @@
        .collect()
}

pub fn from_string_to_option<T, E>(input: &str) -> Result<Option<T>, E>
where
    T: FromStr<Err = E>,
{
    Ok(Some(input.parse()?))
}

const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;

#[derive(Debug, Serialize)]

@@ -75,12 +84,15 @@ impl From<Task> for SummarizedTaskView {
    }
}

#[derive(Debug, Clone, Copy, Deserialize)]
#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Pagination {
    #[serde(default)]
    #[deserr(default, from(&String) = FromStr::from_str -> std::num::ParseIntError)]
    pub offset: usize,
    #[serde(default = "PAGINATION_DEFAULT_LIMIT")]
    #[deserr(default = PAGINATION_DEFAULT_LIMIT(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
    pub limit: usize,
}
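
`from_string_to_option` exists because optional numeric query parameters (`page`, `hitsPerPage`) arrive as strings; the `from(&String) = from_string_to_option -> ParseIntError` attributes in `SearchQueryGet` above route them through it. A quick illustration of its behaviour (illustrative only, not from the commit):

```rust
use std::num::ParseIntError;

use crate::routes::from_string_to_option;

fn demo() -> Result<(), ParseIntError> {
    // A present parameter is parsed into `Some(value)`...
    let page: Option<usize> = from_string_to_option("3")?;
    assert_eq!(page, Some(3));

    // ...while a non-numeric value surfaces the `ParseIntError`, which the
    // route-specific `MergeWithError<ParseIntError>` impls turn into a 400.
    let bad: Result<Option<usize>, ParseIntError> = from_string_to_option("doggo");
    assert!(bad.is_err());
    Ok(())
}
```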
@@ -1,9 +1,11 @@
use std::fmt;

use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{DeserializeFromValue, IntoValue, ValuePointerRef};
use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
use meilisearch_types::tasks::{IndexSwap, KindWithContent};
use serde::Deserialize;
use serde_json::json;

use super::SummarizedTaskView;

@@ -11,23 +13,26 @@ use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::json::ValidatedJson;
use crate::extractors::sequential_extractor::SeqHandler;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::resource("").route(web::post().to(SeqHandler(swap_indexes))));
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]

#[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct SwapIndexesPayload {
    indexes: Vec<String>,
}

pub async fn swap_indexes(
    index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
    params: web::Json<Vec<SwapIndexesPayload>>,
    params: ValidatedJson<Vec<SwapIndexesPayload>, SwapIndexesDeserrError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    let params = params.into_inner();
    analytics.publish(
        "Indexes Swapped".to_string(),
        json!({

@@ -38,7 +43,7 @@ pub async fn swap_indexes(
    let search_rules = &index_scheduler.filters().search_rules;

    let mut swaps = vec![];
    for SwapIndexesPayload { indexes } in params.into_inner().into_iter() {
    for SwapIndexesPayload { indexes } in params.into_iter() {
        let (lhs, rhs) = match indexes.as_slice() {
            [lhs, rhs] => (lhs, rhs),
            _ => {

@@ -57,3 +62,49 @@ pub async fn swap_indexes(
    let task: SummarizedTaskView = task.into();
    Ok(HttpResponse::Accepted().json(task))
}

#[derive(Debug)]
pub struct SwapIndexesDeserrError {
    error: String,
    code: Code,
}

impl std::fmt::Display for SwapIndexesDeserrError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.error)
    }
}

impl std::error::Error for SwapIndexesDeserrError {}
impl ErrorCode for SwapIndexesDeserrError {
    fn error_code(&self) -> Code {
        self.code
    }
}

impl deserr::MergeWithError<SwapIndexesDeserrError> for SwapIndexesDeserrError {
    fn merge(
        _self_: Option<Self>,
        other: SwapIndexesDeserrError,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Err(other)
    }
}

impl deserr::DeserializeError for SwapIndexesDeserrError {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: deserr::ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;

        let code = match location.last_field() {
            Some("indexes") => Code::InvalidSwapIndexes,
            _ => Code::BadRequest,
        };

        Err(SwapIndexesDeserrError { error, code })
    }
}
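
For reference, the body this route now validates is a JSON array of swap objects (`Vec<SwapIndexesPayload>`). The sketch below (not from the commit) shows an accepted shape and one that deserr rejects with `Code::InvalidSwapIndexes` before the handler's own two-indexes check runs:

```rust
use serde_json::json;

fn demo() {
    // Accepted: each entry carries a list of index uids under `indexes`.
    let valid = json!([
        { "indexes": ["movies", "movies_new"] },
        { "indexes": ["books", "books_new"] }
    ]);

    // Rejected during extraction: `indexes` has the wrong type, the failing
    // location's last field is "indexes", so the error code is
    // `Code::InvalidSwapIndexes`.
    let invalid = json!([ { "indexes": "movies" } ]);

    println!("{valid}\n{invalid}");
}
```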
@@ -2,6 +2,7 @@ use std::str::FromStr;

use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::error::DateField;
use index_scheduler::{IndexScheduler, Query, TaskId};
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;

@@ -168,6 +169,7 @@ pub struct TaskCommonQueryRaw {
    pub statuses: Option<CS<StarOr<String>>>,
    pub index_uids: Option<CS<StarOr<String>>>,
}

impl TaskCommonQueryRaw {
    fn validate(self) -> Result<TaskCommonQuery, ResponseError> {
        let Self { uids, canceled_by, types, statuses, index_uids } = self;

@@ -290,37 +292,37 @@ impl TaskDateQueryRaw {

        for (field_name, string_value, before_or_after, dest) in [
            (
                "afterEnqueuedAt",
                DateField::AfterEnqueuedAt,
                after_enqueued_at,
                DeserializeDateOption::After,
                &mut query.after_enqueued_at,
            ),
            (
                "beforeEnqueuedAt",
                DateField::BeforeEnqueuedAt,
                before_enqueued_at,
                DeserializeDateOption::Before,
                &mut query.before_enqueued_at,
            ),
            (
                "afterStartedAt",
                DateField::AfterStartedAt,
                after_started_at,
                DeserializeDateOption::After,
                &mut query.after_started_at,
            ),
            (
                "beforeStartedAt",
                DateField::BeforeStartedAt,
                before_started_at,
                DeserializeDateOption::Before,
                &mut query.before_started_at,
            ),
            (
                "afterFinishedAt",
                DateField::AfterFinishedAt,
                after_finished_at,
                DeserializeDateOption::After,
                &mut query.after_finished_at,
            ),
            (
                "beforeFinishedAt",
                DateField::BeforeFinishedAt,
                before_finished_at,
                DeserializeDateOption::Before,
                &mut query.before_finished_at,

@@ -690,6 +692,7 @@ async fn get_task(
}

pub(crate) mod date_deserializer {
    use index_scheduler::error::DateField;
    use meilisearch_types::error::ResponseError;
    use time::format_description::well_known::Rfc3339;
    use time::macros::format_description;

@@ -701,7 +704,7 @@ pub(crate) mod date_deserializer {
    }

    pub fn deserialize_date(
        field_name: &str,
        field_name: DateField,
        value: &str,
        option: DeserializeDateOption,
    ) -> std::result::Result<OffsetDateTime, ResponseError> {

@@ -727,7 +730,7 @@ pub(crate) mod date_deserializer {
            }
        } else {
            Err(index_scheduler::Error::InvalidTaskDate {
                field: field_name.to_string(),
                field: field_name,
                date: value.to_string(),
            }
            .into())
@@ -1,9 +1,16 @@
use std::cmp::min;
use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::str::FromStr;
use std::convert::Infallible;
use std::fmt;
use std::num::ParseIntError;
use std::str::{FromStr, ParseBoolError};
use std::time::Instant;

use deserr::{
    DeserializeError, DeserializeFromValue, ErrorKind, IntoValue, MergeWithError, ValuePointerRef,
};
use either::Either;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode};
use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
use meilisearch_types::{milli, Document};
use milli::tokenizer::TokenizerBuilder;

@@ -26,34 +33,33 @@ pub const DEFAULT_CROP_MARKER: fn() -> String = || "…".to_string();
pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();

#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(Debug, Clone, Default, PartialEq, DeserializeFromValue)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQuery {
    pub q: Option<String>,
    #[serde(default = "DEFAULT_SEARCH_OFFSET")]
    #[deserr(default = DEFAULT_SEARCH_OFFSET())]
    pub offset: usize,
    #[serde(default = "DEFAULT_SEARCH_LIMIT")]
    #[deserr(default = DEFAULT_SEARCH_LIMIT())]
    pub limit: usize,
    pub page: Option<usize>,
    pub hits_per_page: Option<usize>,
    pub attributes_to_retrieve: Option<BTreeSet<String>>,
    pub attributes_to_crop: Option<Vec<String>>,
    #[serde(default = "DEFAULT_CROP_LENGTH")]
    #[deserr(default = DEFAULT_CROP_LENGTH())]
    pub crop_length: usize,
    pub attributes_to_highlight: Option<HashSet<String>>,
    // Default to false
    #[serde(default = "Default::default")]
    #[deserr(default)]
    pub show_matches_position: bool,
    pub filter: Option<Value>,
    pub sort: Option<Vec<String>>,
    pub facets: Option<Vec<String>>,
    #[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
    #[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG())]
    pub highlight_pre_tag: String,
    #[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
    #[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG())]
    pub highlight_post_tag: String,
    #[serde(default = "DEFAULT_CROP_MARKER")]
    #[deserr(default = DEFAULT_CROP_MARKER())]
    pub crop_marker: String,
    #[serde(default)]
    #[deserr(default)]
    pub matching_strategy: MatchingStrategy,
}

@@ -63,7 +69,8 @@ impl SearchQuery {
    }
}

#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[derive(Deserialize, Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(rename_all = camelCase)]
#[serde(rename_all = "camelCase")]
pub enum MatchingStrategy {
    /// Remove query words from last to first

@@ -87,6 +94,96 @@ impl From<MatchingStrategy> for TermsMatchingStrategy {
    }
}

#[derive(Debug)]
pub struct SearchDeserError {
    error: String,
    code: Code,
}

impl std::fmt::Display for SearchDeserError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.error)
    }
}

impl std::error::Error for SearchDeserError {}
impl ErrorCode for SearchDeserError {
    fn error_code(&self) -> Code {
        self.code
    }
}

impl MergeWithError<SearchDeserError> for SearchDeserError {
    fn merge(
        _self_: Option<Self>,
        other: SearchDeserError,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Err(other)
    }
}

impl DeserializeError for SearchDeserError {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;

        let code = match location.last_field() {
            Some("q") => Code::InvalidSearchQ,
            Some("offset") => Code::InvalidSearchOffset,
            Some("limit") => Code::InvalidSearchLimit,
            Some("page") => Code::InvalidSearchPage,
            Some("hitsPerPage") => Code::InvalidSearchHitsPerPage,
            Some("attributesToRetrieve") => Code::InvalidSearchAttributesToRetrieve,
            Some("attributesToCrop") => Code::InvalidSearchAttributesToCrop,
            Some("cropLength") => Code::InvalidSearchCropLength,
            Some("attributesToHighlight") => Code::InvalidSearchAttributesToHighlight,
            Some("showMatchesPosition") => Code::InvalidSearchShowMatchesPosition,
            Some("filter") => Code::InvalidSearchFilter,
            Some("sort") => Code::InvalidSearchSort,
            Some("facets") => Code::InvalidSearchFacets,
            Some("highlightPreTag") => Code::InvalidSearchHighlightPreTag,
            Some("highlightPostTag") => Code::InvalidSearchHighlightPostTag,
            Some("cropMarker") => Code::InvalidSearchCropMarker,
            Some("matchingStrategy") => Code::InvalidSearchMatchingStrategy,
            _ => Code::BadRequest,
        };

        Err(SearchDeserError { error, code })
    }
}

impl MergeWithError<ParseBoolError> for SearchDeserError {
    fn merge(
        _self_: Option<Self>,
        other: ParseBoolError,
        merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        SearchDeserError::error::<Infallible>(
            None,
            ErrorKind::Unexpected { msg: other.to_string() },
            merge_location,
        )
    }
}

impl MergeWithError<ParseIntError> for SearchDeserError {
    fn merge(
        _self_: Option<Self>,
        other: ParseIntError,
        merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        SearchDeserError::error::<Infallible>(
            None,
            ErrorKind::Unexpected { msg: other.to_string() },
            merge_location,
        )
    }
}

#[derive(Debug, Clone, Serialize, PartialEq, Eq)]
pub struct SearchHit {
    #[serde(flatten)]
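
The `MergeWithError<ParseIntError>` and `MergeWithError<ParseBoolError>` impls exist for the GET search route, where numeric and boolean fields are parsed out of raw query-string values (`FromStr::from_str` / `from_string_to_option` in `SearchQueryGet` above). When such a parse fails, the error is folded into `SearchDeserError::error` as an `ErrorKind::Unexpected`, so the failing field's location still selects the specific search error code. A small hedged sketch of that conversion path (not from the commit):

```rust
use std::str::FromStr;

fn demo() {
    // This is the parse the deserr attribute
    // `from(&String) = FromStr::from_str -> std::str::ParseBoolError`
    // performs for `showMatchesPosition` on the GET route...
    match bool::from_str("maybe") {
        Ok(flag) => println!("showMatchesPosition = {flag}"),
        // ...and on failure the ParseBoolError lands in
        // `MergeWithError<ParseBoolError> for SearchDeserError`, which records
        // it at `.showMatchesPosition` and thus reports
        // `Code::InvalidSearchShowMatchesPosition`.
        Err(parse_err) => println!("query-string parse failed: {parse_err}"),
    }
}
```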
@@ -37,25 +37,105 @@ async fn search_unexisting_parameter() {
}

#[actix_rt::test]
async fn search_invalid_highlight_and_crop_tags() {
async fn search_invalid_crop_marker() {
    let server = Server::new().await;
    let index = server.index("test");

    let fields = &["cropMarker", "highlightPreTag", "highlightPostTag"];
    // object
    let response = index.search_post(json!({"cropMarker": { "marker": "<crop>" }})).await;
    meili_snap::snapshot!(format!("{:#?}", response), @r###"
    (
        Object {
            "message": String("invalid type: Map `{\"marker\":\"<crop>\"}`, expected a String at `.cropMarker`."),
            "code": String("invalid_search_crop_marker"),
            "type": String("invalid_request"),
            "link": String("https://docs.meilisearch.com/errors#invalid_search_crop_marker"),
        },
        400,
    )
    "###);

    for field in fields {
        // object
        let (response, code) =
            index.search_post(json!({field.to_string(): {"marker": "<crop>"}})).await;
        assert_eq!(code, 400, "field {} passing object: {}", &field, response);
        assert_eq!(response["code"], "bad_request");
    // array
    let response = index.search_post(json!({"cropMarker": ["marker", "<crop>"]})).await;
    meili_snap::snapshot!(format!("{:#?}", response), @r###"
    (
        Object {
            "message": String("invalid type: Sequence `[\"marker\",\"<crop>\"]`, expected a String at `.cropMarker`."),
            "code": String("invalid_search_crop_marker"),
            "type": String("invalid_request"),
            "link": String("https://docs.meilisearch.com/errors#invalid_search_crop_marker"),
        },
        400,
    )
    "###);
}

        // array
        let (response, code) =
            index.search_post(json!({field.to_string(): ["marker", "<crop>"]})).await;
        assert_eq!(code, 400, "field {} passing array: {}", &field, response);
        assert_eq!(response["code"], "bad_request");
    }
#[actix_rt::test]
async fn search_invalid_highlight_pre_tag() {
    let server = Server::new().await;
    let index = server.index("test");

    // object
    let response = index.search_post(json!({"highlightPreTag": { "marker": "<em>" }})).await;
    meili_snap::snapshot!(format!("{:#?}", response), @r###"
    (
        Object {
            "message": String("invalid type: Map `{\"marker\":\"<em>\"}`, expected a String at `.highlightPreTag`."),
            "code": String("invalid_search_highlight_pre_tag"),
            "type": String("invalid_request"),
            "link": String("https://docs.meilisearch.com/errors#invalid_search_highlight_pre_tag"),
        },
        400,
    )
    "###);

    // array
    let response = index.search_post(json!({"highlightPreTag": ["marker", "<em>"]})).await;
    meili_snap::snapshot!(format!("{:#?}", response), @r###"
    (
        Object {
            "message": String("invalid type: Sequence `[\"marker\",\"<em>\"]`, expected a String at `.highlightPreTag`."),
            "code": String("invalid_search_highlight_pre_tag"),
            "type": String("invalid_request"),
            "link": String("https://docs.meilisearch.com/errors#invalid_search_highlight_pre_tag"),
        },
        400,
    )
    "###);
}

#[actix_rt::test]
async fn search_invalid_highlight_post_tag() {
    let server = Server::new().await;
    let index = server.index("test");

    // object
    let response = index.search_post(json!({"highlightPostTag": { "marker": "</em>" }})).await;
    meili_snap::snapshot!(format!("{:#?}", response), @r###"
    (
        Object {
            "message": String("invalid type: Map `{\"marker\":\"</em>\"}`, expected a String at `.highlightPostTag`."),
            "code": String("invalid_search_highlight_post_tag"),
            "type": String("invalid_request"),
            "link": String("https://docs.meilisearch.com/errors#invalid_search_highlight_post_tag"),
        },
        400,
    )
    "###);

    // array
    let response = index.search_post(json!({"highlightPostTag": ["marker", "</em>"]})).await;
    meili_snap::snapshot!(format!("{:#?}", response), @r###"
    (
        Object {
            "message": String("invalid type: Sequence `[\"marker\",\"</em>\"]`, expected a String at `.highlightPostTag`."),
            "code": String("invalid_search_highlight_post_tag"),
            "type": String("invalid_request"),
            "link": String("https://docs.meilisearch.com/errors#invalid_search_highlight_post_tag"),
        },
        400,
    )
    "###);
}

#[actix_rt::test]
@@ -193,9 +193,9 @@ async fn get_task_filter_error() {
    insta::assert_json_snapshot!(response, @r###"
    {
      "message": "Task uid `pied` is invalid. It should only contain numeric characters.",
      "code": "invalid_task_uids_filter",
      "code": "invalid_task_uids",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-uids-filter"
      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
    }
    "###);

@@ -215,9 +215,9 @@ async fn get_task_filter_error() {
    insta::assert_json_snapshot!(response, @r###"
    {
      "message": "Task `beforeStartedAt` `pied` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
      "code": "invalid_task_date_filter",
      "code": "invalid_task_before_started_at",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-date-filter"
      "link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
    }
    "###);
}

@@ -253,9 +253,9 @@ async fn delete_task_filter_error() {
    insta::assert_json_snapshot!(response, @r###"
    {
      "message": "Task uid `pied` is invalid. It should only contain numeric characters.",
      "code": "invalid_task_uids_filter",
      "code": "invalid_task_uids",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-uids-filter"
      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
    }
    "###);
}

@@ -291,9 +291,9 @@ async fn cancel_task_filter_error() {
    insta::assert_json_snapshot!(response, @r###"
    {
      "message": "Task uid `pied` is invalid. It should only contain numeric characters.",
      "code": "invalid_task_uids_filter",
      "code": "invalid_task_uids",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid-task-uids-filter"
      "link": "https://docs.meilisearch.com/errors#invalid-task-uids"
    }
    "###);
}