Integrate deserr on the most important routes

Tamo 2022-12-14 13:00:43 +01:00
parent 839b05c43d
commit 50ce0409bc
21 changed files with 1582 additions and 657 deletions

Cargo.lock (generated, 888 changed lines)

File diff suppressed because it is too large.

View File

@ -249,17 +249,17 @@ pub(crate) mod test {
pub fn create_test_settings() -> Settings<Checked> {
let settings = Settings {
displayed_attributes: Setting::Set(vec![S("race"), S("name")]),
searchable_attributes: Setting::Set(vec![S("name"), S("race")]),
filterable_attributes: Setting::Set(btreeset! { S("race"), S("age") }),
sortable_attributes: Setting::Set(btreeset! { S("age") }),
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
synonyms: Setting::NotSet,
distinct_attribute: Setting::NotSet,
typo_tolerance: Setting::NotSet,
faceting: Setting::NotSet,
pagination: Setting::NotSet,
displayed_attributes: Setting::Set(vec![S("race"), S("name")]).into(),
searchable_attributes: Setting::Set(vec![S("name"), S("race")]).into(),
filterable_attributes: Setting::Set(btreeset! { S("race"), S("age") }).into(),
sortable_attributes: Setting::Set(btreeset! { S("age") }).into(),
ranking_rules: Setting::NotSet.into(),
stop_words: Setting::NotSet.into(),
synonyms: Setting::NotSet.into(),
distinct_attribute: Setting::NotSet.into(),
typo_tolerance: Setting::NotSet.into(),
faceting: Setting::NotSet.into(),
pagination: Setting::NotSet.into(),
_kind: std::marker::PhantomData,
};
settings.check()

View File

@ -26,7 +26,7 @@ pub type Kind = crate::KindDump;
pub type Details = meilisearch_types::tasks::Details;
// everything related to the settings
pub type Setting<T> = meilisearch_types::milli::update::Setting<T>;
pub type Setting<T> = meilisearch_types::settings::Setting<T>;
pub type TypoTolerance = meilisearch_types::settings::TypoSettings;
pub type MinWordSizeForTypos = meilisearch_types::settings::MinWordSizeTyposSetting;
pub type FacetingSettings = meilisearch_types::settings::FacetingSettings;

View File

@ -1,3 +1,5 @@
use std::fmt::Display;
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::tasks::{Kind, Status};
use meilisearch_types::{heed, milli};
@ -5,6 +7,42 @@ use thiserror::Error;
use crate::TaskId;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum DateField {
BeforeEnqueuedAt,
AfterEnqueuedAt,
BeforeStartedAt,
AfterStartedAt,
BeforeFinishedAt,
AfterFinishedAt,
}
impl Display for DateField {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DateField::BeforeEnqueuedAt => write!(f, "beforeEnqueuedAt"),
DateField::AfterEnqueuedAt => write!(f, "afterEnqueuedAt"),
DateField::BeforeStartedAt => write!(f, "beforeStartedAt"),
DateField::AfterStartedAt => write!(f, "afterStartedAt"),
DateField::BeforeFinishedAt => write!(f, "beforeFinishedAt"),
DateField::AfterFinishedAt => write!(f, "afterFinishedAt"),
}
}
}
impl From<DateField> for Code {
fn from(date: DateField) -> Self {
match date {
DateField::BeforeEnqueuedAt => Code::InvalidTaskBeforeEnqueuedAt,
DateField::AfterEnqueuedAt => Code::InvalidTaskAfterEnqueuedAt,
DateField::BeforeStartedAt => Code::InvalidTaskBeforeStartedAt,
DateField::AfterStartedAt => Code::InvalidTaskAfterStartedAt,
DateField::BeforeFinishedAt => Code::InvalidTaskBeforeFinishedAt,
DateField::AfterFinishedAt => Code::InvalidTaskAfterFinishedAt,
}
}
}
#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum Error {
@ -31,7 +69,7 @@ pub enum Error {
#[error(
"Task `{field}` `{date}` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format."
)]
InvalidTaskDate { field: String, date: String },
InvalidTaskDate { field: DateField, date: String },
#[error("Task uid `{task_uid}` is invalid. It should only contain numeric characters.")]
InvalidTaskUids { task_uid: String },
#[error(
@ -102,11 +140,11 @@ impl ErrorCode for Error {
Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
Error::SwapDuplicateIndexesFound(_) => Code::DuplicateIndexFound,
Error::SwapDuplicateIndexFound(_) => Code::DuplicateIndexFound,
Error::InvalidTaskDate { .. } => Code::InvalidTaskDateFilter,
Error::InvalidTaskUids { .. } => Code::InvalidTaskUidsFilter,
Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatusesFilter,
Error::InvalidTaskTypes { .. } => Code::InvalidTaskTypesFilter,
Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledByFilter,
Error::InvalidTaskDate { field, .. } => (*field).into(),
Error::InvalidTaskUids { .. } => Code::InvalidTaskUids,
Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatuses,
Error::InvalidTaskTypes { .. } => Code::InvalidTaskTypes,
Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledBy,
Error::InvalidIndexUid { .. } => Code::InvalidIndexUid,
Error::TaskNotFound(_) => Code::TaskNotFound,
Error::TaskDeletionWithEmptyQuery => Code::TaskDeletionWithEmptyQuery,
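
The date filters now carry a typed `DateField` instead of a `String`, so a single value drives both the user-facing message and the specific error code. A minimal sketch, not part of this diff, assuming the types above are in scope:

let err = Error::InvalidTaskDate {
    field: DateField::BeforeStartedAt,
    date: "pied".to_string(),
};
// The message comes from the `thiserror` attribute, the code from `From<DateField> for Code`.
assert_eq!(
    err.to_string(),
    "Task `beforeStartedAt` `pied` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format."
);
assert!(matches!(err.error_code(), Code::InvalidTaskBeforeStartedAt));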

View File

@ -5,6 +5,7 @@ authors = ["marin <postma.marin@protonmail.com>"]
edition = "2021"
[dependencies]
deserr = { path = "../../deserr", features = ["serde-json"] }
actix-web = { version = "4.2.1", default-features = false }
anyhow = "1.0.65"
convert_case = "0.6.0"

View File

@ -119,9 +119,13 @@ pub enum Code {
// index related error
CreateIndex,
IndexAlreadyExists,
InvalidIndexPrimaryKey,
IndexNotFound,
InvalidIndexUid,
MissingIndexUid,
InvalidMinWordLengthForTypo,
InvalidIndexLimit,
InvalidIndexOffset,
DuplicateIndexFound,
@ -138,6 +142,55 @@ pub enum Code {
Filter,
Sort,
// Invalid swap-indexes
InvalidSwapIndexes,
// Invalid settings update request
InvalidSettingsDisplayedAttributes,
InvalidSettingsSearchableAttributes,
InvalidSettingsFilterableAttributes,
InvalidSettingsSortableAttributes,
InvalidSettingsRankingRules,
InvalidSettingsStopWords,
InvalidSettingsSynonyms,
InvalidSettingsDistinctAttribute,
InvalidSettingsTypoTolerance,
InvalidSettingsFaceting,
InvalidSettingsPagination,
// Invalid search request
InvalidSearchQ,
InvalidSearchOffset,
InvalidSearchLimit,
InvalidSearchPage,
InvalidSearchHitsPerPage,
InvalidSearchAttributesToRetrieve,
InvalidSearchAttributesToCrop,
InvalidSearchCropLength,
InvalidSearchAttributesToHighlight,
InvalidSearchShowMatchesPosition,
InvalidSearchFilter,
InvalidSearchSort,
InvalidSearchFacets,
InvalidSearchHighlightPreTag,
InvalidSearchHighlightPostTag,
InvalidSearchCropMarker,
InvalidSearchMatchingStrategy,
// Related to the tasks
InvalidTaskUids,
InvalidTaskTypes,
InvalidTaskStatuses,
InvalidTaskCanceledBy,
InvalidTaskLimit,
InvalidTaskFrom,
InvalidTaskBeforeEnqueuedAt,
InvalidTaskAfterEnqueuedAt,
InvalidTaskBeforeStartedAt,
InvalidTaskAfterStartedAt,
InvalidTaskBeforeFinishedAt,
InvalidTaskAfterFinishedAt,
BadParameter,
BadRequest,
DatabaseSizeLimitReached,
@ -150,11 +203,6 @@ pub enum Code {
MissingAuthorizationHeader,
MissingMasterKey,
DumpNotFound,
InvalidTaskDateFilter,
InvalidTaskStatusesFilter,
InvalidTaskTypesFilter,
InvalidTaskCanceledByFilter,
InvalidTaskUidsFilter,
TaskNotFound,
TaskDeletionWithEmptyQuery,
TaskCancelationWithEmptyQuery,
@ -209,6 +257,12 @@ impl Code {
// thrown when requesting an unexisting index
IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
MissingIndexUid => ErrCode::invalid("missing_index_uid", StatusCode::BAD_REQUEST),
InvalidIndexPrimaryKey => {
ErrCode::invalid("invalid_index_primary_key", StatusCode::BAD_REQUEST)
}
InvalidIndexLimit => ErrCode::invalid("invalid_index_limit", StatusCode::BAD_REQUEST),
InvalidIndexOffset => ErrCode::invalid("invalid_index_offset", StatusCode::BAD_REQUEST),
// invalid state error
InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
@ -259,21 +313,6 @@ impl Code {
MissingMasterKey => {
ErrCode::authentication("missing_master_key", StatusCode::UNAUTHORIZED)
}
InvalidTaskDateFilter => {
ErrCode::invalid("invalid_task_date_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskUidsFilter => {
ErrCode::invalid("invalid_task_uids_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskStatusesFilter => {
ErrCode::invalid("invalid_task_statuses_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskTypesFilter => {
ErrCode::invalid("invalid_task_types_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskCanceledByFilter => {
ErrCode::invalid("invalid_task_canceled_by_filter", StatusCode::BAD_REQUEST)
}
TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
TaskDeletionWithEmptyQuery => {
ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)
@ -336,6 +375,116 @@ impl Code {
DuplicateIndexFound => {
ErrCode::invalid("duplicate_index_found", StatusCode::BAD_REQUEST)
}
InvalidSwapIndexes => ErrCode::invalid("invalid_swap_indexes", StatusCode::BAD_REQUEST),
InvalidSettingsDisplayedAttributes => {
ErrCode::invalid("invalid_settings_displayed_attributes", StatusCode::BAD_REQUEST)
}
InvalidSettingsSearchableAttributes => {
ErrCode::invalid("invalid_settings_searchable_attributes", StatusCode::BAD_REQUEST)
}
InvalidSettingsFilterableAttributes => {
ErrCode::invalid("invalid_settings_filterable_attributes", StatusCode::BAD_REQUEST)
}
InvalidSettingsSortableAttributes => {
ErrCode::invalid("invalid_settings_sortable_attributes", StatusCode::BAD_REQUEST)
}
InvalidSettingsRankingRules => {
ErrCode::invalid("invalid_settings_ranking_rules", StatusCode::BAD_REQUEST)
}
InvalidSettingsStopWords => {
ErrCode::invalid("invalid_settings_stop_words", StatusCode::BAD_REQUEST)
}
InvalidSettingsSynonyms => {
ErrCode::invalid("invalid_settings_synonyms", StatusCode::BAD_REQUEST)
}
InvalidSettingsDistinctAttribute => {
ErrCode::invalid("invalid_settings_distinct_attribute", StatusCode::BAD_REQUEST)
}
InvalidSettingsTypoTolerance => {
ErrCode::invalid("invalid_settings_typo_tolerance", StatusCode::BAD_REQUEST)
}
InvalidSettingsFaceting => {
ErrCode::invalid("invalid_settings_faceting", StatusCode::BAD_REQUEST)
}
InvalidSettingsPagination => {
ErrCode::invalid("invalid_settings_pagination", StatusCode::BAD_REQUEST)
}
InvalidSearchQ => ErrCode::invalid("invalid_search_q", StatusCode::BAD_REQUEST),
InvalidSearchOffset => {
ErrCode::invalid("invalid_search_offset", StatusCode::BAD_REQUEST)
}
InvalidSearchLimit => ErrCode::invalid("invalid_search_limit", StatusCode::BAD_REQUEST),
InvalidSearchPage => ErrCode::invalid("invalid_search_page", StatusCode::BAD_REQUEST),
InvalidSearchHitsPerPage => {
ErrCode::invalid("invalid_search_hits_per_page", StatusCode::BAD_REQUEST)
}
InvalidSearchAttributesToRetrieve => {
ErrCode::invalid("invalid_search_attributes_to_retrieve", StatusCode::BAD_REQUEST)
}
InvalidSearchAttributesToCrop => {
ErrCode::invalid("invalid_search_attributes_to_crop", StatusCode::BAD_REQUEST)
}
InvalidSearchCropLength => {
ErrCode::invalid("invalid_search_crop_length", StatusCode::BAD_REQUEST)
}
InvalidSearchAttributesToHighlight => {
ErrCode::invalid("invalid_search_attributes_to_highlight", StatusCode::BAD_REQUEST)
}
InvalidSearchShowMatchesPosition => {
ErrCode::invalid("invalid_search_show_matches_position", StatusCode::BAD_REQUEST)
}
InvalidSearchFilter => {
ErrCode::invalid("invalid_search_filter", StatusCode::BAD_REQUEST)
}
InvalidSearchSort => ErrCode::invalid("invalid_search_sort", StatusCode::BAD_REQUEST),
InvalidSearchFacets => {
ErrCode::invalid("invalid_search_facets", StatusCode::BAD_REQUEST)
}
InvalidSearchHighlightPreTag => {
ErrCode::invalid("invalid_search_highlight_pre_tag", StatusCode::BAD_REQUEST)
}
InvalidSearchHighlightPostTag => {
ErrCode::invalid("invalid_search_highlight_post_tag", StatusCode::BAD_REQUEST)
}
InvalidSearchCropMarker => {
ErrCode::invalid("invalid_search_crop_marker", StatusCode::BAD_REQUEST)
}
InvalidSearchMatchingStrategy => {
ErrCode::invalid("invalid_search_matching_strategy", StatusCode::BAD_REQUEST)
}
// Related to the tasks
InvalidTaskUids => ErrCode::invalid("invalid_task_uids", StatusCode::BAD_REQUEST),
InvalidTaskTypes => ErrCode::invalid("invalid_task_types", StatusCode::BAD_REQUEST),
InvalidTaskStatuses => {
ErrCode::invalid("invalid_task_statuses", StatusCode::BAD_REQUEST)
}
InvalidTaskCanceledBy => {
ErrCode::invalid("invalid_task_canceled_by", StatusCode::BAD_REQUEST)
}
InvalidTaskLimit => ErrCode::invalid("invalid_task_limit", StatusCode::BAD_REQUEST),
InvalidTaskFrom => ErrCode::invalid("invalid_task_from", StatusCode::BAD_REQUEST),
InvalidTaskBeforeEnqueuedAt => {
ErrCode::invalid("invalid_task_before_enqueued_at", StatusCode::BAD_REQUEST)
}
InvalidTaskAfterEnqueuedAt => {
ErrCode::invalid("invalid_task_after_enqueued_at", StatusCode::BAD_REQUEST)
}
InvalidTaskBeforeStartedAt => {
ErrCode::invalid("invalid_task_before_started_at", StatusCode::BAD_REQUEST)
}
InvalidTaskAfterStartedAt => {
ErrCode::invalid("invalid_task_after_started_at", StatusCode::BAD_REQUEST)
}
InvalidTaskBeforeFinishedAt => {
ErrCode::invalid("invalid_task_before_finished_at", StatusCode::BAD_REQUEST)
}
InvalidTaskAfterFinishedAt => {
ErrCode::invalid("invalid_task_after_finished_at", StatusCode::BAD_REQUEST)
}
}
}
@ -476,6 +625,13 @@ impl ErrorCode for io::Error {
}
}
pub fn unwrap_any<T>(any: Result<T, T>) -> T {
match any {
Ok(any) => any,
Err(any) => any,
}
}
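
`unwrap_any` exists because deserr's error hooks return `Result<Self, Self>`, where both variants carry an error value; the route-level error types later in this diff use it to pull the rendered message out of `deserr::serde_json::JsonError`. A trivial illustration:

assert_eq!(unwrap_any(Ok::<u32, u32>(1)), 1);
assert_eq!(unwrap_any(Err::<u32, u32>(2)), 2);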
#[cfg(feature = "test-traits")]
mod strategy {
use proptest::strategy::Strategy;

View File

@ -2,10 +2,10 @@ use std::collections::{BTreeMap, BTreeSet};
use std::marker::PhantomData;
use std::num::NonZeroUsize;
use deserr::{DeserializeError, DeserializeFromValue};
use fst::IntoStreamer;
use milli::update::Setting;
use milli::{Index, DEFAULT_VALUES_PER_FACET};
use serde::{Deserialize, Serialize, Serializer};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
/// The maximum number of results that the engine
/// will be able to return in one search call.
@ -27,16 +27,135 @@ where
.serialize(s)
}
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
pub enum Setting<T> {
Set(T),
Reset,
NotSet,
}
impl<T> Default for Setting<T> {
fn default() -> Self {
Self::NotSet
}
}
impl<T> From<Setting<T>> for milli::update::Setting<T> {
fn from(value: Setting<T>) -> Self {
match value {
Setting::Set(x) => milli::update::Setting::Set(x),
Setting::Reset => milli::update::Setting::Reset,
Setting::NotSet => milli::update::Setting::NotSet,
}
}
}
impl<T> From<milli::update::Setting<T>> for Setting<T> {
fn from(value: milli::update::Setting<T>) -> Self {
match value {
milli::update::Setting::Set(x) => Setting::Set(x),
milli::update::Setting::Reset => Setting::Reset,
milli::update::Setting::NotSet => Setting::NotSet,
}
}
}
impl<T> Setting<T> {
pub fn set(self) -> Option<T> {
match self {
Self::Set(value) => Some(value),
_ => None,
}
}
pub const fn as_ref(&self) -> Setting<&T> {
match *self {
Self::Set(ref value) => Setting::Set(value),
Self::Reset => Setting::Reset,
Self::NotSet => Setting::NotSet,
}
}
pub const fn is_not_set(&self) -> bool {
matches!(self, Self::NotSet)
}
/// If `Self` is `Reset`, then map self to `Set` with the provided `val`.
pub fn or_reset(self, val: T) -> Self {
match self {
Self::Reset => Self::Set(val),
otherwise => otherwise,
}
}
}
impl<T: Serialize> Serialize for Setting<T> {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Set(value) => Some(value),
// `NotSet` is usually not serialized, thanks to the `skip_serializing_if` field attribute
Self::NotSet | Self::Reset => None,
}
.serialize(serializer)
}
}
impl<'de, T: Deserialize<'de>> Deserialize<'de> for Setting<T> {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
Deserialize::deserialize(deserializer).map(|x| match x {
Some(x) => Self::Set(x),
None => Self::Reset, // Reset is forced by sending null value
})
}
}
impl<T, E> DeserializeFromValue<E> for Setting<T>
where
T: DeserializeFromValue<E>,
E: DeserializeError,
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
) -> Result<Self, E> {
match value {
deserr::Value::Null => Ok(Setting::Reset),
_ => T::deserialize_from_value(value, location).map(Setting::Set),
}
}
fn default() -> Option<Self> {
Some(Self::NotSet)
}
}
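
For request payloads this gives the three-way mapping the settings routes rely on: an absent field stays `NotSet`, an explicit `null` becomes `Reset`, and any other value is `Set`. A minimal sketch; the `Example` struct and the use of `deserr::serde_json::JsonError` are illustrative assumptions, not part of this diff:

#[derive(Debug, DeserializeFromValue)]
struct Example {
    name: Setting<String>,
}

// {}                  -> Example { name: Setting::NotSet }
// { "name": null }    -> Example { name: Setting::Reset }
// { "name": "kefir" } -> Example { name: Setting::Set("kefir".to_string()) }
let reset = deserr::deserialize::<Example, _, deserr::serde_json::JsonError>(
    serde_json::json!({ "name": null }),
)
.unwrap();
assert!(matches!(reset.name, Setting::Reset));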
#[derive(Clone, Default, Debug, Serialize, PartialEq, Eq)]
pub struct Checked;
#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct Unchecked;
impl<E> DeserializeFromValue<E> for Unchecked
where
E: DeserializeError,
{
fn deserialize_from_value<V: deserr::IntoValue>(
_value: deserr::Value<V>,
_location: deserr::ValuePointerRef,
) -> Result<Self, E> {
unreachable!()
}
}
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct MinWordSizeTyposSetting {
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
@ -47,9 +166,10 @@ pub struct MinWordSizeTyposSetting {
}
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct TypoSettings {
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
@ -66,9 +186,10 @@ pub struct TypoSettings {
}
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct FacetingSettings {
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
@ -76,9 +197,10 @@ pub struct FacetingSettings {
}
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct PaginationSettings {
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
@ -88,10 +210,11 @@ pub struct PaginationSettings {
/// Holds all the settings for an index. `T` can either be `Checked` if it represents settings
/// whose validity is guaranteed, or `Unchecked` if they still need to be validated. In the latter
/// case, a call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[serde(bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>"))]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
pub struct Settings<T> {
#[serde(

View File

@ -7,6 +7,7 @@ name = "meilisearch"
version = "1.0.0"
[dependencies]
deserr = { path = "../../deserr", features = ["serde-json"] }
actix-cors = "0.6.3"
actix-http = { version = "3.2.2", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] }
actix-web = { version = "4.2.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
@ -71,6 +72,8 @@ toml = "0.5.9"
uuid = { version = "1.1.2", features = ["serde", "v4"] }
walkdir = "2.3.2"
yaup = "0.2.0"
serde_urlencoded = "0.7.1"
actix-utils = "3.0.1"
[dev-dependencies]
actix-rt = "2.7.0"
@ -99,17 +102,7 @@ zip = { version = "0.6.2", optional = true }
default = ["analytics", "meilisearch-types/default", "mini-dashboard"]
metrics = ["prometheus"]
analytics = ["segment"]
mini-dashboard = [
"actix-web-static-files",
"static-files",
"anyhow",
"cargo_toml",
"hex",
"reqwest",
"sha-1",
"tempfile",
"zip",
]
mini-dashboard = ["actix-web-static-files", "static-files", "anyhow", "cargo_toml", "hex", "reqwest", "sha-1", "tempfile", "zip"]
chinese = ["meilisearch-types/chinese"]
hebrew = ["meilisearch-types/hebrew"]
japanese = ["meilisearch-types/japanese"]

View File

@ -0,0 +1,78 @@
use std::fmt::Debug;
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};
use actix_web::dev::Payload;
use actix_web::web::Json;
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use futures::ready;
use meilisearch_types::error::{ErrorCode, ResponseError};
/// Extractor for typed data from Json request payloads
/// deserialised by deserr.
///
/// # Extractor
/// To extract typed data from a request body, the inner type `T` must implement the
/// [`deserr::DeserializeFromValue<E>`] trait. The inner type `E` must implement the
/// [`ErrorCode`](meilisearch_types::error::ErrorCode) trait.
#[derive(Debug)]
pub struct ValidatedJson<T, E>(pub T, PhantomData<*const E>);
impl<T, E> ValidatedJson<T, E> {
pub fn new(data: T) -> Self {
ValidatedJson(data, PhantomData)
}
pub fn into_inner(self) -> T {
self.0
}
}
impl<T, E> FromRequest for ValidatedJson<T, E>
where
E: DeserializeError + ErrorCode + 'static,
T: DeserializeFromValue<E>,
{
type Error = actix_web::Error;
type Future = ValidatedJsonExtractFut<T, E>;
#[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
ValidatedJsonExtractFut {
fut: Json::<serde_json::Value>::from_request(req, payload),
_phantom: PhantomData,
}
}
}
pub struct ValidatedJsonExtractFut<T, E> {
fut: <Json<serde_json::Value> as FromRequest>::Future,
_phantom: PhantomData<*const (T, E)>,
}
impl<T, E> Future for ValidatedJsonExtractFut<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + 'static,
{
type Output = Result<ValidatedJson<T, E>, actix_web::Error>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let ValidatedJsonExtractFut { fut, .. } = self.get_mut();
let fut = Pin::new(fut);
let res = ready!(fut.poll(cx));
let res = match res {
Err(err) => Err(err),
Ok(data) => match deserr::deserialize::<_, _, E>(data.into_inner()) {
Ok(data) => Ok(ValidatedJson::new(data)),
Err(e) => Err(ResponseError::from(e).into()),
},
};
Poll::Ready(res)
}
}
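
Handlers opt in by swapping `web::Json<T>` for this extractor; any deserialization failure is turned into a `ResponseError` whose code is chosen by `E`. For instance, the index-creation route further down in this diff becomes:

pub async fn create_index(
    index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
    body: ValidatedJson<IndexCreateRequest, CreateIndexesDeserrError>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> { /* body unchanged, see the indexes route below */ }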

View File

@ -0,0 +1,78 @@
use std::fmt::Debug;
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};
use actix_web::dev::Payload;
use actix_web::web::Json;
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use futures::ready;
use meilisearch_types::error::{ErrorCode, ResponseError};
/// Extractor for typed data from Json request payloads
/// deserialised by deserr.
///
/// # Extractor
/// To extract typed data from a request body, the inner type `T` must implement the
/// [`deserr::DeserializeFromValue<E>`] trait. The inner type `E` must implement the
/// [`ErrorCode`](meilisearch_types::error::ErrorCode) trait.
#[derive(Debug)]
pub struct ValidatedJson<T, E>(pub T, PhantomData<*const E>);
impl<T, E> ValidatedJson<T, E> {
pub fn new(data: T) -> Self {
ValidatedJson(data, PhantomData)
}
pub fn into_inner(self) -> T {
self.0
}
}
impl<T, E> FromRequest for ValidatedJson<T, E>
where
E: DeserializeError + ErrorCode + 'static,
T: DeserializeFromValue<E>,
{
type Error = actix_web::Error;
type Future = ValidatedJsonExtractFut<T, E>;
#[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
ValidatedJsonExtractFut {
fut: Json::<serde_json::Value>::from_request(req, payload),
_phantom: PhantomData,
}
}
}
pub struct ValidatedJsonExtractFut<T, E> {
fut: <Json<serde_json::Value> as FromRequest>::Future,
_phantom: PhantomData<*const (T, E)>,
}
impl<T, E> Future for ValidatedJsonExtractFut<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + 'static,
{
type Output = Result<ValidatedJson<T, E>, actix_web::Error>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let ValidatedJsonExtractFut { fut, .. } = self.get_mut();
let fut = Pin::new(fut);
let res = ready!(fut.poll(cx));
let res = match res {
Err(err) => Err(err),
Ok(data) => match deserr::deserialize::<_, _, E>(data.into_inner()) {
Ok(data) => Ok(ValidatedJson::new(data)),
Err(e) => Err(ResponseError::from(e).into()),
},
};
Poll::Ready(res)
}
}

View File

@ -1,4 +1,7 @@
pub mod payload;
#[macro_use]
pub mod authentication;
// pub mod deserr;
pub mod json;
pub mod query_parameters;
pub mod sequential_extractor;

View File

@ -0,0 +1,134 @@
//! For query parameter extractor documentation, see [`Query`].
use std::marker::PhantomData;
use std::{fmt, ops};
use actix_http::Payload;
use actix_utils::future::{err, ok, Ready};
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
/// Extract typed information from the request's query.
///
/// To extract typed data from the URL query string, the inner type `T` must implement the
/// [`deserr::DeserializeFromValue<E>`] trait. The inner type `E` must implement the
/// [`ErrorCode`](meilisearch_types::error::ErrorCode) trait.
///
/// # Panics
/// A query string consists of unordered `key=value` pairs, therefore it cannot be decoded into any
/// type which depends upon data ordering (eg. tuples). Trying to do so will result in a panic.
///
/// # Examples
/// ```
/// use actix_web::{get, web};
/// use serde::Deserialize;
///
/// #[derive(Debug, Deserialize)]
/// pub enum ResponseType {
/// Token,
/// Code
/// }
///
/// #[derive(Debug, Deserialize)]
/// pub struct AuthRequest {
/// id: u64,
/// response_type: ResponseType,
/// }
///
/// // Deserialize `AuthRequest` struct from query string.
/// // This handler gets called only if the request's query parameters contain both fields.
/// // A valid request path for this handler would be `/?id=64&response_type=Code"`.
/// #[get("/")]
/// async fn index(info: web::Query<AuthRequest>) -> String {
/// format!("Authorization request for id={} and type={:?}!", info.id, info.response_type)
/// }
///
/// // To access the entire underlying query struct, use `.into_inner()`.
/// #[get("/debug1")]
/// async fn debug1(info: web::Query<AuthRequest>) -> String {
/// dbg!("Authorization object = {:?}", info.into_inner());
/// "OK".to_string()
/// }
///
/// // Or use destructuring, which is equivalent to `.into_inner()`.
/// #[get("/debug2")]
/// async fn debug2(web::Query(info): web::Query<AuthRequest>) -> String {
/// dbg!("Authorization object = {:?}", info);
/// "OK".to_string()
/// }
/// ```
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct QueryParameter<T, E>(pub T, PhantomData<*const E>);
impl<T, E> QueryParameter<T, E> {
/// Unwrap into inner `T` value.
pub fn into_inner(self) -> T {
self.0
}
}
impl<T, E> QueryParameter<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + 'static,
{
/// Deserialize a `T` from the URL encoded query parameter string.
///
/// ```
/// # use std::collections::HashMap;
/// # use actix_web::web::Query;
/// let numbers = Query::<HashMap<String, u32>>::from_query("one=1&two=2").unwrap();
/// assert_eq!(numbers.get("one"), Some(&1));
/// assert_eq!(numbers.get("two"), Some(&2));
/// assert!(numbers.get("three").is_none());
/// ```
pub fn from_query(query_str: &str) -> Result<Self, actix_web::Error> {
let value = serde_urlencoded::from_str::<serde_json::Value>(query_str)
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
match deserr::deserialize::<_, _, E>(value) {
Ok(data) => Ok(QueryParameter(data, PhantomData)),
Err(e) => Err(ResponseError::from(e).into()),
}
}
}
impl<T, E> ops::Deref for QueryParameter<T, E> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
impl<T, E> ops::DerefMut for QueryParameter<T, E> {
fn deref_mut(&mut self) -> &mut T {
&mut self.0
}
}
impl<T: fmt::Display, E> fmt::Display for QueryParameter<T, E> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
/// See [here](#Examples) for example of usage as an extractor.
impl<T, E> FromRequest for QueryParameter<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + 'static,
{
type Error = actix_web::Error;
type Future = Ready<Result<Self, actix_web::Error>>;
#[inline]
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
QueryParameter::from_query(&req.query_string()).map(ok).unwrap_or_else(err)
// serde_urlencoded::from_str::<serde_json::Value>(req.query_string())
// .map(|val| Ok(QueryParameter(val, PhantomData)))
// .unwrap_or_else(|e| err(ResponseError::from_msg(e.to_string(), Code::BadRequest)))
}
}
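
As with `ValidatedJson`, routes adopt this by replacing `web::Query<T>`. A rough sketch using the `Pagination` and `ListIndexesDeserrError` types defined later in this diff:

// A well-formed query string parses; defaults kick in for missing fields.
let ok = QueryParameter::<Pagination, ListIndexesDeserrError>::from_query("offset=0&limit=20");
assert!(ok.is_ok());

// A non-numeric `limit` fails `usize::from_str` and surfaces as `invalid_index_limit`.
let bad = QueryParameter::<Pagination, ListIndexesDeserrError>::from_query("limit=a_lot");
assert!(bad.is_err());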

View File

@ -1,8 +1,14 @@
use std::convert::Infallible;
use std::num::ParseIntError;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{
DeserializeError, DeserializeFromValue, ErrorKind, IntoValue, MergeWithError, ValuePointerRef,
};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::{self, FieldDistribution, Index};
use meilisearch_types::tasks::KindWithContent;
@ -14,6 +20,8 @@ use super::{Pagination, SummarizedTaskView};
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
pub mod documents;
@ -66,7 +74,7 @@ impl IndexView {
pub async fn list_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
paginate: web::Query<Pagination>,
paginate: QueryParameter<Pagination, ListIndexesDeserrError>,
) -> Result<HttpResponse, ResponseError> {
let search_rules = &index_scheduler.filters().search_rules;
let indexes: Vec<_> = index_scheduler.indexes()?;
@ -82,8 +90,68 @@ pub async fn list_indexes(
Ok(HttpResponse::Ok().json(ret))
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(Debug)]
pub struct ListIndexesDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for ListIndexesDeserrError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for ListIndexesDeserrError {}
impl ErrorCode for ListIndexesDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<ListIndexesDeserrError> for ListIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: ListIndexesDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for ListIndexesDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let code = match location.last_field() {
Some("offset") => Code::InvalidIndexLimit,
Some("limit") => Code::InvalidIndexOffset,
_ => Code::BadRequest,
};
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
Err(ListIndexesDeserrError { error, code })
}
}
impl MergeWithError<ParseIntError> for ListIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: ParseIntError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
ListIndexesDeserrError::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
#[derive(DeserializeFromValue, Debug)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct IndexCreateRequest {
uid: String,
primary_key: Option<String>,
@ -91,7 +159,7 @@ pub struct IndexCreateRequest {
pub async fn create_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
body: web::Json<IndexCreateRequest>,
body: ValidatedJson<IndexCreateRequest, CreateIndexesDeserrError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@ -116,11 +184,58 @@ pub async fn create_index(
}
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[allow(dead_code)]
#[derive(Debug)]
pub struct CreateIndexesDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for CreateIndexesDeserrError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for CreateIndexesDeserrError {}
impl ErrorCode for CreateIndexesDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<CreateIndexesDeserrError> for CreateIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: CreateIndexesDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for CreateIndexesDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let code = match location.last_field() {
Some("uid") => Code::InvalidIndexUid,
Some("primaryKey") => Code::InvalidIndexPrimaryKey,
None if matches!(error, ErrorKind::MissingField { field } if field == "uid") => {
Code::MissingIndexUid
}
_ => Code::BadRequest,
};
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
Err(CreateIndexesDeserrError { error, code })
}
}
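
A minimal sketch of what this buys, calling `deserr::deserialize` directly as the `ValidatedJson` extractor does: a body without `uid` is rejected with the dedicated `missing_index_uid` code instead of a generic `bad_request`.

let err = deserr::deserialize::<IndexCreateRequest, _, CreateIndexesDeserrError>(
    serde_json::json!({ "primaryKey": "id" }),
)
.unwrap_err();
assert!(matches!(err.error_code(), Code::MissingIndexUid));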
#[derive(DeserializeFromValue, Debug)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct UpdateIndexRequest {
uid: Option<String>,
primary_key: Option<String>,
}
@ -139,7 +254,7 @@ pub async fn get_index(
pub async fn update_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
path: web::Path<String>,
body: web::Json<UpdateIndexRequest>,
body: ValidatedJson<UpdateIndexRequest, UpdateIndexesDeserrError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@ -147,7 +262,7 @@ pub async fn update_index(
let body = body.into_inner();
analytics.publish(
"Index Updated".to_string(),
json!({ "primary_key": body.primary_key}),
json!({ "primary_key": body.primary_key }),
Some(&req),
);
@ -163,6 +278,51 @@ pub async fn update_index(
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug)]
pub struct UpdateIndexesDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for UpdateIndexesDeserrError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for UpdateIndexesDeserrError {}
impl ErrorCode for UpdateIndexesDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<UpdateIndexesDeserrError> for UpdateIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: UpdateIndexesDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for UpdateIndexesDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let code = match location.last_field() {
Some("primaryKey") => Code::InvalidIndexPrimaryKey,
_ => Code::BadRequest,
};
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
Err(UpdateIndexesDeserrError { error, code })
}
}
pub async fn delete_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,

View File

@ -1,21 +1,25 @@
use std::str::FromStr;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_auth::IndexSearchRules;
use meilisearch_types::error::ResponseError;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::Value;
use crate::analytics::{Analytics, SearchAggregator};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::from_string_to_option;
use crate::search::{
perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
DEFAULT_SEARCH_OFFSET,
perform_search, MatchingStrategy, SearchDeserError, SearchQuery, DEFAULT_CROP_LENGTH,
DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
};
pub fn configure(cfg: &mut web::ServiceConfig) {
@ -26,33 +30,35 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(Debug, deserr::DeserializeFromValue)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQueryGet {
q: Option<String>,
#[serde(default = "DEFAULT_SEARCH_OFFSET")]
#[deserr(default = DEFAULT_SEARCH_OFFSET(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
offset: usize,
#[serde(default = "DEFAULT_SEARCH_LIMIT")]
#[deserr(default = DEFAULT_SEARCH_LIMIT(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
limit: usize,
#[deserr(from(&String) = from_string_to_option -> std::num::ParseIntError)]
page: Option<usize>,
#[deserr(from(&String) = from_string_to_option -> std::num::ParseIntError)]
hits_per_page: Option<usize>,
attributes_to_retrieve: Option<CS<String>>,
attributes_to_crop: Option<CS<String>>,
#[serde(default = "DEFAULT_CROP_LENGTH")]
#[deserr(default = DEFAULT_CROP_LENGTH(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
crop_length: usize,
attributes_to_highlight: Option<CS<String>>,
filter: Option<String>,
sort: Option<String>,
#[serde(default = "Default::default")]
#[deserr(default, from(&String) = FromStr::from_str -> std::str::ParseBoolError)]
show_matches_position: bool,
facets: Option<CS<String>>,
#[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
#[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG())]
highlight_pre_tag: String,
#[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
#[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG())]
highlight_post_tag: String,
#[serde(default = "DEFAULT_CROP_MARKER")]
#[deserr(default = DEFAULT_CROP_MARKER())]
crop_marker: String,
#[serde(default)]
#[deserr(default)]
matching_strategy: MatchingStrategy,
}
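
The `from(&String) = FromStr::from_str -> …` attributes are what let numeric and boolean fields arrive as query-string text. A rough sketch of the GET path, using only types from this diff:

// `?q=kefir` alone is fine: offset, limit and cropLength fall back to their defaults.
let ok = QueryParameter::<SearchQueryGet, SearchDeserError>::from_query("q=kefir");
assert!(ok.is_ok());

// A non-numeric `limit` fails `usize::from_str` and is merged into a `SearchDeserError`
// located at `limit`, i.e. the `invalid_search_limit` code.
let bad = QueryParameter::<SearchQueryGet, SearchDeserError>::from_query("q=kefir&limit=xx");
assert!(bad.is_err());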
@ -136,7 +142,7 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
pub async fn search_with_url_query(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: web::Query<SearchQueryGet>,
params: QueryParameter<SearchQueryGet, SearchDeserError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@ -168,7 +174,7 @@ pub async fn search_with_url_query(
pub async fn search_with_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: web::Json<SearchQuery>,
params: ValidatedJson<SearchQuery, SearchDeserError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {

View File

@ -1,8 +1,11 @@
use std::fmt;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{IntoValue, ValuePointerRef};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::settings::{settings, Settings, Unchecked};
use meilisearch_types::tasks::KindWithContent;
@ -11,6 +14,7 @@ use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::routes::SummarizedTaskView;
#[macro_export]
@ -39,7 +43,7 @@ macro_rules! make_setting_route {
>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let new_settings = Settings { $attr: Setting::Reset, ..Default::default() };
let new_settings = Settings { $attr: Setting::Reset.into(), ..Default::default() };
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
@ -74,8 +78,8 @@ macro_rules! make_setting_route {
let new_settings = Settings {
$attr: match body {
Some(inner_body) => Setting::Set(inner_body),
None => Setting::Reset,
Some(inner_body) => Setting::Set(inner_body).into(),
None => Setting::Reset.into(),
},
..Default::default()
};
@ -208,7 +212,7 @@ make_setting_route!(
"TypoTolerance Updated".to_string(),
json!({
"typo_tolerance": {
"enabled": setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
"enabled": setting.as_ref().map(|s| !matches!(s.enabled.into(), Setting::Set(false))),
"disable_on_attributes": setting
.as_ref()
.and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
@ -424,10 +428,66 @@ generate_configure!(
faceting
);
#[derive(Debug)]
pub struct SettingsDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for SettingsDeserrError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for SettingsDeserrError {}
impl ErrorCode for SettingsDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl deserr::MergeWithError<SettingsDeserrError> for SettingsDeserrError {
fn merge(
_self_: Option<Self>,
other: SettingsDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for SettingsDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.first_field() {
Some("displayedAttributes") => Code::InvalidSettingsDisplayedAttributes,
Some("searchableAttributes") => Code::InvalidSettingsSearchableAttributes,
Some("filterableAttributes") => Code::InvalidSettingsFilterableAttributes,
Some("sortableAttributes") => Code::InvalidSettingsSortableAttributes,
Some("rankingRules") => Code::InvalidSettingsRankingRules,
Some("stopWords") => Code::InvalidSettingsStopWords,
Some("synonyms") => Code::InvalidSettingsSynonyms,
Some("distinctAttribute") => Code::InvalidSettingsDistinctAttribute,
Some("typoTolerance") => Code::InvalidSettingsTypoTolerance,
Some("faceting") => Code::InvalidSettingsFaceting,
Some("pagination") => Code::InvalidSettingsPagination,
_ => Code::BadRequest,
};
Err(SettingsDeserrError { error, code })
}
}
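
`first_field` (rather than `last_field`) is used here so that an error nested anywhere inside a setting object, e.g. under `typoTolerance`, is still attributed to the top-level setting. A rough sketch of what `update_all` now gets for free through `ValidatedJson`:

let err = deserr::deserialize::<Settings<Unchecked>, _, SettingsDeserrError>(
    serde_json::json!({ "rankingRules": "not-an-array" }),
)
.unwrap_err();
assert!(matches!(err.error_code(), Code::InvalidSettingsRankingRules));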
pub async fn update_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
body: web::Json<Settings<Unchecked>>,
body: ValidatedJson<Settings<Unchecked>, SettingsDeserrError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {

View File

@ -1,7 +1,9 @@
use std::collections::BTreeMap;
use std::str::FromStr;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::DeserializeFromValue;
use index_scheduler::{IndexScheduler, Query};
use log::debug;
use meilisearch_types::error::ResponseError;
@ -49,6 +51,13 @@ where
.collect()
}
pub fn from_string_to_option<T, E>(input: &str) -> Result<Option<T>, E>
where
T: FromStr<Err = E>,
{
Ok(Some(input.parse()?))
}
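
`from_string_to_option` is the small adapter used by the optional numeric query parameters (`page` and `hitsPerPage` in the search route above): it parses the string and wraps the result in `Some`. A quick illustration:

let page: Result<Option<usize>, std::num::ParseIntError> = from_string_to_option("3");
assert_eq!(page, Ok(Some(3)));
assert!(from_string_to_option::<usize, _>("three").is_err());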
const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;
#[derive(Debug, Serialize)]
@ -75,12 +84,15 @@ impl From<Task> for SummarizedTaskView {
}
}
#[derive(Debug, Clone, Copy, Deserialize)]
#[derive(DeserializeFromValue, Deserialize, Debug, Clone, Copy)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Pagination {
#[serde(default)]
#[deserr(default, from(&String) = FromStr::from_str -> std::num::ParseIntError)]
pub offset: usize,
#[serde(default = "PAGINATION_DEFAULT_LIMIT")]
#[deserr(default = PAGINATION_DEFAULT_LIMIT(), from(&String) = FromStr::from_str -> std::num::ParseIntError)]
pub limit: usize,
}

View File

@ -1,9 +1,11 @@
use std::fmt;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{DeserializeFromValue, IntoValue, ValuePointerRef};
use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode, ResponseError};
use meilisearch_types::tasks::{IndexSwap, KindWithContent};
use serde::Deserialize;
use serde_json::json;
use super::SummarizedTaskView;
@ -11,23 +13,26 @@ use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::json::ValidatedJson;
use crate::extractors::sequential_extractor::SeqHandler;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(swap_indexes))));
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct SwapIndexesPayload {
indexes: Vec<String>,
}
pub async fn swap_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
params: web::Json<Vec<SwapIndexesPayload>>,
params: ValidatedJson<Vec<SwapIndexesPayload>, SwapIndexesDeserrError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let params = params.into_inner();
analytics.publish(
"Indexes Swapped".to_string(),
json!({
@ -38,7 +43,7 @@ pub async fn swap_indexes(
let search_rules = &index_scheduler.filters().search_rules;
let mut swaps = vec![];
for SwapIndexesPayload { indexes } in params.into_inner().into_iter() {
for SwapIndexesPayload { indexes } in params.into_iter() {
let (lhs, rhs) = match indexes.as_slice() {
[lhs, rhs] => (lhs, rhs),
_ => {
@ -57,3 +62,49 @@ pub async fn swap_indexes(
let task: SummarizedTaskView = task.into();
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug)]
pub struct SwapIndexesDeserrError {
error: String,
code: Code,
}
impl std::fmt::Display for SwapIndexesDeserrError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for SwapIndexesDeserrError {}
impl ErrorCode for SwapIndexesDeserrError {
fn error_code(&self) -> Code {
self.code
}
}
impl deserr::MergeWithError<SwapIndexesDeserrError> for SwapIndexesDeserrError {
fn merge(
_self_: Option<Self>,
other: SwapIndexesDeserrError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl deserr::DeserializeError for SwapIndexesDeserrError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.last_field() {
Some("indexes") => Code::InvalidSwapIndexes,
_ => Code::BadRequest,
};
Err(SwapIndexesDeserrError { error, code })
}
}
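
End to end, the swap payload is validated before the handler body runs. A rough sketch of the accepted shape, mirroring what `ValidatedJson` does for this route:

let swaps = deserr::deserialize::<Vec<SwapIndexesPayload>, _, SwapIndexesDeserrError>(
    serde_json::json!([
        { "indexes": ["movies", "movies_new"] },
        { "indexes": ["books", "books_new"] }
    ]),
)
.unwrap();
assert_eq!(swaps.len(), 2);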

View File

@ -2,6 +2,7 @@ use std::str::FromStr;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::error::DateField;
use index_scheduler::{IndexScheduler, Query, TaskId};
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
@ -168,6 +169,7 @@ pub struct TaskCommonQueryRaw {
pub statuses: Option<CS<StarOr<String>>>,
pub index_uids: Option<CS<StarOr<String>>>,
}
impl TaskCommonQueryRaw {
fn validate(self) -> Result<TaskCommonQuery, ResponseError> {
let Self { uids, canceled_by, types, statuses, index_uids } = self;
@ -290,37 +292,37 @@ impl TaskDateQueryRaw {
for (field_name, string_value, before_or_after, dest) in [
(
"afterEnqueuedAt",
DateField::AfterEnqueuedAt,
after_enqueued_at,
DeserializeDateOption::After,
&mut query.after_enqueued_at,
),
(
"beforeEnqueuedAt",
DateField::BeforeEnqueuedAt,
before_enqueued_at,
DeserializeDateOption::Before,
&mut query.before_enqueued_at,
),
(
"afterStartedAt",
DateField::AfterStartedAt,
after_started_at,
DeserializeDateOption::After,
&mut query.after_started_at,
),
(
"beforeStartedAt",
DateField::BeforeStartedAt,
before_started_at,
DeserializeDateOption::Before,
&mut query.before_started_at,
),
(
"afterFinishedAt",
DateField::AfterFinishedAt,
after_finished_at,
DeserializeDateOption::After,
&mut query.after_finished_at,
),
(
"beforeFinishedAt",
DateField::BeforeFinishedAt,
before_finished_at,
DeserializeDateOption::Before,
&mut query.before_finished_at,
@ -690,6 +692,7 @@ async fn get_task(
}
pub(crate) mod date_deserializer {
use index_scheduler::error::DateField;
use meilisearch_types::error::ResponseError;
use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
@ -701,7 +704,7 @@ pub(crate) mod date_deserializer {
}
pub fn deserialize_date(
field_name: &str,
field_name: DateField,
value: &str,
option: DeserializeDateOption,
) -> std::result::Result<OffsetDateTime, ResponseError> {
@ -727,7 +730,7 @@ pub(crate) mod date_deserializer {
}
} else {
Err(index_scheduler::Error::InvalidTaskDate {
field: field_name.to_string(),
field: field_name,
date: value.to_string(),
}
.into())

View File

@ -1,9 +1,16 @@
use std::cmp::min;
use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::str::FromStr;
use std::convert::Infallible;
use std::fmt;
use std::num::ParseIntError;
use std::str::{FromStr, ParseBoolError};
use std::time::Instant;
use deserr::{
DeserializeError, DeserializeFromValue, ErrorKind, IntoValue, MergeWithError, ValuePointerRef,
};
use either::Either;
use meilisearch_types::error::{unwrap_any, Code, ErrorCode};
use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
use meilisearch_types::{milli, Document};
use milli::tokenizer::TokenizerBuilder;
@ -26,34 +33,33 @@ pub const DEFAULT_CROP_MARKER: fn() -> String = || "…".to_string();
pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(Debug, Clone, Default, PartialEq, DeserializeFromValue)]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQuery {
pub q: Option<String>,
#[serde(default = "DEFAULT_SEARCH_OFFSET")]
#[deserr(default = DEFAULT_SEARCH_OFFSET())]
pub offset: usize,
#[serde(default = "DEFAULT_SEARCH_LIMIT")]
#[deserr(default = DEFAULT_SEARCH_LIMIT())]
pub limit: usize,
pub page: Option<usize>,
pub hits_per_page: Option<usize>,
pub attributes_to_retrieve: Option<BTreeSet<String>>,
pub attributes_to_crop: Option<Vec<String>>,
#[serde(default = "DEFAULT_CROP_LENGTH")]
#[deserr(default = DEFAULT_CROP_LENGTH())]
pub crop_length: usize,
pub attributes_to_highlight: Option<HashSet<String>>,
// Default to false
#[serde(default = "Default::default")]
#[deserr(default)]
pub show_matches_position: bool,
pub filter: Option<Value>,
pub sort: Option<Vec<String>>,
pub facets: Option<Vec<String>>,
#[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
#[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG())]
pub highlight_pre_tag: String,
#[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
#[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG())]
pub highlight_post_tag: String,
#[serde(default = "DEFAULT_CROP_MARKER")]
#[deserr(default = DEFAULT_CROP_MARKER())]
pub crop_marker: String,
#[serde(default)]
#[deserr(default)]
pub matching_strategy: MatchingStrategy,
}
@ -63,7 +69,8 @@ impl SearchQuery {
}
}
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[derive(Deserialize, Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(rename_all = camelCase)]
#[serde(rename_all = "camelCase")]
pub enum MatchingStrategy {
/// Remove query words from last to first
@ -87,6 +94,96 @@ impl From<MatchingStrategy> for TermsMatchingStrategy {
}
}
#[derive(Debug)]
pub struct SearchDeserError {
error: String,
code: Code,
}
impl std::fmt::Display for SearchDeserError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl std::error::Error for SearchDeserError {}
impl ErrorCode for SearchDeserError {
fn error_code(&self) -> Code {
self.code
}
}
impl MergeWithError<SearchDeserError> for SearchDeserError {
fn merge(
_self_: Option<Self>,
other: SearchDeserError,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(other)
}
}
impl DeserializeError for SearchDeserError {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let error = unwrap_any(deserr::serde_json::JsonError::error(None, error, location)).0;
let code = match location.last_field() {
Some("q") => Code::InvalidSearchQ,
Some("offset") => Code::InvalidSearchOffset,
Some("limit") => Code::InvalidSearchLimit,
Some("page") => Code::InvalidSearchPage,
Some("hitsPerPage") => Code::InvalidSearchHitsPerPage,
Some("attributesToRetrieve") => Code::InvalidSearchAttributesToRetrieve,
Some("attributesToCrop") => Code::InvalidSearchAttributesToCrop,
Some("cropLength") => Code::InvalidSearchCropLength,
Some("attributesToHighlight") => Code::InvalidSearchAttributesToHighlight,
Some("showMatchesPosition") => Code::InvalidSearchShowMatchesPosition,
Some("filter") => Code::InvalidSearchFilter,
Some("sort") => Code::InvalidSearchSort,
Some("facets") => Code::InvalidSearchFacets,
Some("highlightPreTag") => Code::InvalidSearchHighlightPreTag,
Some("highlightPostTag") => Code::InvalidSearchHighlightPostTag,
Some("cropMarker") => Code::InvalidSearchCropMarker,
Some("matchingStrategy") => Code::InvalidSearchMatchingStrategy,
_ => Code::BadRequest,
};
Err(SearchDeserError { error, code })
}
}
impl MergeWithError<ParseBoolError> for SearchDeserError {
fn merge(
_self_: Option<Self>,
other: ParseBoolError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
SearchDeserError::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
impl MergeWithError<ParseIntError> for SearchDeserError {
fn merge(
_self_: Option<Self>,
other: ParseIntError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
SearchDeserError::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
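
On the POST route the same error type is reached through `ValidatedJson<SearchQuery, SearchDeserError>`. A minimal sketch of the mapping:

let err = deserr::deserialize::<SearchQuery, _, SearchDeserError>(
    serde_json::json!({ "q": "kefir", "offset": "0" }),
)
.unwrap_err();
// In the JSON body (unlike the query string) `offset` must be a number.
assert!(matches!(err.error_code(), Code::InvalidSearchOffset));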
#[derive(Debug, Clone, Serialize, PartialEq, Eq)]
pub struct SearchHit {
#[serde(flatten)]

View File

@ -37,25 +37,105 @@ async fn search_unexisting_parameter() {
}
#[actix_rt::test]
async fn search_invalid_highlight_and_crop_tags() {
async fn search_invalid_crop_marker() {
let server = Server::new().await;
let index = server.index("test");
let fields = &["cropMarker", "highlightPreTag", "highlightPostTag"];
// object
let response = index.search_post(json!({"cropMarker": { "marker": "<crop>" }})).await;
meili_snap::snapshot!(format!("{:#?}", response), @r###"
(
Object {
"message": String("invalid type: Map `{\"marker\":\"<crop>\"}`, expected a String at `.cropMarker`."),
"code": String("invalid_search_crop_marker"),
"type": String("invalid_request"),
"link": String("https://docs.meilisearch.com/errors#invalid_search_crop_marker"),
},
400,
)
"###);
for field in fields {
// object
let (response, code) =
index.search_post(json!({field.to_string(): {"marker": "<crop>"}})).await;
assert_eq!(code, 400, "field {} passing object: {}", &field, response);
assert_eq!(response["code"], "bad_request");
// array
let response = index.search_post(json!({"cropMarker": ["marker", "<crop>"]})).await;
meili_snap::snapshot!(format!("{:#?}", response), @r###"
(
Object {
"message": String("invalid type: Sequence `[\"marker\",\"<crop>\"]`, expected a String at `.cropMarker`."),
"code": String("invalid_search_crop_marker"),
"type": String("invalid_request"),
"link": String("https://docs.meilisearch.com/errors#invalid_search_crop_marker"),
},
400,
)
"###);
}
// array
let (response, code) =
index.search_post(json!({field.to_string(): ["marker", "<crop>"]})).await;
assert_eq!(code, 400, "field {} passing array: {}", &field, response);
assert_eq!(response["code"], "bad_request");
}
#[actix_rt::test]
async fn search_invalid_highlight_pre_tag() {
let server = Server::new().await;
let index = server.index("test");
// object
let response = index.search_post(json!({"highlightPreTag": { "marker": "<em>" }})).await;
meili_snap::snapshot!(format!("{:#?}", response), @r###"
(
Object {
"message": String("invalid type: Map `{\"marker\":\"<em>\"}`, expected a String at `.highlightPreTag`."),
"code": String("invalid_search_highlight_pre_tag"),
"type": String("invalid_request"),
"link": String("https://docs.meilisearch.com/errors#invalid_search_highlight_pre_tag"),
},
400,
)
"###);
// array
let response = index.search_post(json!({"highlightPreTag": ["marker", "<em>"]})).await;
meili_snap::snapshot!(format!("{:#?}", response), @r###"
(
Object {
"message": String("invalid type: Sequence `[\"marker\",\"<em>\"]`, expected a String at `.highlightPreTag`."),
"code": String("invalid_search_highlight_pre_tag"),
"type": String("invalid_request"),
"link": String("https://docs.meilisearch.com/errors#invalid_search_highlight_pre_tag"),
},
400,
)
"###);
}
#[actix_rt::test]
async fn search_invalid_highlight_post_tag() {
let server = Server::new().await;
let index = server.index("test");
// object
let response = index.search_post(json!({"highlightPostTag": { "marker": "</em>" }})).await;
meili_snap::snapshot!(format!("{:#?}", response), @r###"
(
Object {
"message": String("invalid type: Map `{\"marker\":\"</em>\"}`, expected a String at `.highlightPostTag`."),
"code": String("invalid_search_highlight_post_tag"),
"type": String("invalid_request"),
"link": String("https://docs.meilisearch.com/errors#invalid_search_highlight_post_tag"),
},
400,
)
"###);
// array
let response = index.search_post(json!({"highlightPostTag": ["marker", "</em>"]})).await;
meili_snap::snapshot!(format!("{:#?}", response), @r###"
(
Object {
"message": String("invalid type: Sequence `[\"marker\",\"</em>\"]`, expected a String at `.highlightPostTag`."),
"code": String("invalid_search_highlight_post_tag"),
"type": String("invalid_request"),
"link": String("https://docs.meilisearch.com/errors#invalid_search_highlight_post_tag"),
},
400,
)
"###);
}
#[actix_rt::test]

View File

@ -193,9 +193,9 @@ async fn get_task_filter_error() {
insta::assert_json_snapshot!(response, @r###"
{
"message": "Task uid `pied` is invalid. It should only contain numeric characters.",
"code": "invalid_task_uids_filter",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids-filter"
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
}
"###);
@ -215,9 +215,9 @@ async fn get_task_filter_error() {
insta::assert_json_snapshot!(response, @r###"
{
"message": "Task `beforeStartedAt` `pied` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_date_filter",
"code": "invalid_task_before_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-date-filter"
"link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
}
"###);
}
@ -253,9 +253,9 @@ async fn delete_task_filter_error() {
insta::assert_json_snapshot!(response, @r###"
{
"message": "Task uid `pied` is invalid. It should only contain numeric characters.",
"code": "invalid_task_uids_filter",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids-filter"
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
}
"###);
}
@ -291,9 +291,9 @@ async fn cancel_task_filter_error() {
insta::assert_json_snapshot!(response, @r###"
{
"message": "Task uid `pied` is invalid. It should only contain numeric characters.",
"code": "invalid_task_uids_filter",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids-filter"
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
}
"###);
}