Merge branch 'main' of github.com:meilisearch/meilisearch into chore/update-get-index-test

Timon Jurschitsch 2025-01-16 11:17:17 +01:00
commit a4ed36f0cc
408 changed files with 17390 additions and 12655 deletions

@@ -14,42 +14,42 @@ default-run = "meilisearch"
[dependencies]
actix-cors = "0.7.0"
actix-http = { version = "3.8.0", default-features = false, features = [
actix-http = { version = "3.9.0", default-features = false, features = [
"compress-brotli",
"compress-gzip",
"rustls-0_23",
] }
actix-utils = "3.0.1"
actix-web = { version = "4.8.0", default-features = false, features = [
actix-web = { version = "4.9.0", default-features = false, features = [
"macros",
"compress-brotli",
"compress-gzip",
"cookies",
"rustls-0_23",
] }
anyhow = { version = "1.0.86", features = ["backtrace"] }
async-trait = "0.1.81"
bstr = "1.9.1"
byte-unit = { version = "5.1.4", default-features = false, features = [
anyhow = { version = "1.0.95", features = ["backtrace"] }
async-trait = "0.1.85"
bstr = "1.11.3"
byte-unit = { version = "5.1.6", default-features = false, features = [
"std",
"byte",
"serde",
] }
bytes = "1.6.0"
clap = { version = "4.5.9", features = ["derive", "env"] }
crossbeam-channel = "0.5.13"
deserr = { version = "0.6.2", features = ["actix-web"] }
bytes = "1.9.0"
clap = { version = "4.5.24", features = ["derive", "env"] }
crossbeam-channel = "0.5.14"
deserr = { version = "0.6.3", features = ["actix-web"] }
dump = { path = "../dump" }
either = "1.13.0"
file-store = { path = "../file-store" }
flate2 = "1.0.30"
flate2 = "1.0.35"
fst = "0.4.7"
futures = "0.3.30"
futures-util = "0.3.30"
futures = "0.3.31"
futures-util = "0.3.31"
index-scheduler = { path = "../index-scheduler" }
indexmap = { version = "2.2.6", features = ["serde"] }
is-terminal = "0.4.12"
itertools = "0.13.0"
indexmap = { version = "2.7.0", features = ["serde"] }
is-terminal = "0.4.13"
itertools = "0.14.0"
jsonwebtoken = "9.3.0"
lazy_static = "1.5.0"
meilisearch-auth = { path = "../meilisearch-auth" }
@@ -58,81 +58,85 @@ mimalloc = { version = "0.1.43", default-features = false }
mime = "0.3.17"
num_cpus = "1.16.0"
obkv = "0.3.0"
once_cell = "1.19.0"
ordered-float = "4.2.1"
once_cell = "1.20.2"
ordered-float = "4.6.0"
parking_lot = "0.12.3"
permissive-json-pointer = { path = "../permissive-json-pointer" }
pin-project-lite = "0.2.14"
pin-project-lite = "0.2.16"
platform-dirs = "0.3.0"
prometheus = { version = "0.13.4", features = ["process"] }
rand = "0.8.5"
rayon = "1.10.0"
regex = "1.10.5"
reqwest = { version = "0.12.5", features = [
regex = "1.11.1"
reqwest = { version = "0.12.12", features = [
"rustls-tls",
"json",
], default-features = false }
rustls = { version = "0.23.11", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.7.0", features = ["alloc"] }
rustls-pemfile = "2.1.2"
segment = { version = "0.2.4" }
serde = { version = "1.0.204", features = ["derive"] }
serde_json = { version = "1.0.120", features = ["preserve_order"] }
rustls = { version = "0.23.20", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.10.1", features = ["alloc"] }
rustls-pemfile = "2.2.0"
segment = { version = "0.2.5" }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
sha2 = "0.10.8"
siphasher = "1.0.1"
slice-group-by = "0.3.1"
static-files = { version = "0.2.4", optional = true }
sysinfo = "0.30.13"
tar = "0.4.41"
tempfile = "3.10.1"
thiserror = "1.0.61"
time = { version = "0.3.36", features = [
sysinfo = "0.33.1"
tar = "0.4.43"
tempfile = "3.15.0"
thiserror = "2.0.9"
time = { version = "0.3.37", features = [
"serde-well-known",
"formatting",
"parsing",
"macros",
] }
tokio = { version = "1.38.0", features = ["full"] }
toml = "0.8.14"
uuid = { version = "1.10.0", features = ["serde", "v4"] }
tokio = { version = "1.42.0", features = ["full"] }
toml = "0.8.19"
uuid = { version = "1.11.0", features = ["serde", "v4"] }
serde_urlencoded = "0.7.1"
termcolor = "1.4.1"
url = { version = "2.5.2", features = ["serde"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["json"] }
url = { version = "2.5.4", features = ["serde"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["json"] }
tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
tracing-actix-web = "0.7.11"
tracing-actix-web = "0.7.15"
build-info = { version = "1.7.0", path = "../build-info" }
roaring = "0.10.7"
roaring = "0.10.10"
mopa-maintained = "0.2.3"
utoipa = { version = "5.3.1", features = ["actix_extras", "macros", "non_strict_integers", "preserve_order", "uuid", "time", "openapi_extensions"] }
utoipa-scalar = { version = "0.2.1", optional = true, features = ["actix-web"] }
[dev-dependencies]
actix-rt = "2.10.0"
brotli = "6.0.0"
insta = "1.39.0"
# fixed version due to format breakages in v1.40
insta = "=1.39.0"
manifest-dir-macros = "0.1.18"
maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
temp-env = "0.3.6"
urlencoding = "2.1.3"
wiremock = "0.6.0"
wiremock = "0.6.2"
yaup = "0.3.1"
[build-dependencies]
anyhow = { version = "1.0.86", optional = true }
cargo_toml = { version = "0.20.3", optional = true }
anyhow = { version = "1.0.95", optional = true }
cargo_toml = { version = "0.21.0", optional = true }
hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.12.5", features = [
reqwest = { version = "0.12.12", features = [
"blocking",
"rustls-tls",
], default-features = false, optional = true }
sha-1 = { version = "0.10.1", optional = true }
static-files = { version = "0.2.4", optional = true }
tempfile = { version = "3.10.1", optional = true }
zip = { version = "2.1.3", optional = true }
tempfile = { version = "3.15.0", optional = true }
zip = { version = "2.2.2", optional = true }
[features]
default = ["meilisearch-types/all-tokenizations", "mini-dashboard"]
swagger = ["utoipa-scalar"]
mini-dashboard = [
"static-files",
"anyhow",

@@ -194,6 +194,7 @@ struct Infos {
experimental_enable_logs_route: bool,
experimental_reduce_indexing_memory_usage: bool,
experimental_max_number_of_batched_tasks: usize,
experimental_limit_batched_tasks_total_size: u64,
gpu_enabled: bool,
db_path: bool,
import_dump: bool,
@@ -239,6 +240,7 @@ impl Infos {
experimental_enable_logs_route,
experimental_reduce_indexing_memory_usage,
experimental_max_number_of_batched_tasks,
experimental_limit_batched_tasks_total_size,
http_addr,
master_key: _,
env,
@@ -314,6 +316,7 @@ impl Infos {
http_addr: http_addr != default_http_addr(),
http_payload_size_limit,
experimental_max_number_of_batched_tasks,
experimental_limit_batched_tasks_total_size,
task_queue_webhook: task_webhook_url.is_some(),
task_webhook_authorization_header: task_webhook_authorization_header.is_some(),
log_level: log_level.to_string(),
@@ -426,13 +429,9 @@ impl Segment {
&AuthFilter::default(),
) {
// Replace the version number with the prototype name if any.
let version = if let Some(prototype) = build_info::DescribeResult::from_build()
let version = build_info::DescribeResult::from_build()
.and_then(|describe| describe.as_prototype())
{
prototype
} else {
env!("CARGO_PKG_VERSION")
};
.unwrap_or(env!("CARGO_PKG_VERSION"));
let _ = self
.batcher

@@ -19,15 +19,15 @@ pub enum MeilisearchHttpError {
#[error("The Content-Type `{0}` does not support the use of a csv delimiter. The csv delimiter can only be used with the Content-Type `text/csv`.")]
CsvDelimiterWithWrongContentType(String),
#[error(
"The Content-Type `{0}` is invalid. Accepted values for the Content-Type header are: {}",
.1.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
"The Content-Type `{}` is invalid. Accepted values for the Content-Type header are: {}",
.0, .1.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
)]
InvalidContentType(String, Vec<String>),
#[error("Document `{0}` not found.")]
DocumentNotFound(String),
#[error("Sending an empty filter is forbidden.")]
EmptyFilter,
#[error("Invalid syntax for the filter parameter: `expected {}, found: {1}`.", .0.join(", "))]
#[error("Invalid syntax for the filter parameter: `expected {}, found: {}`.", .0.join(", "), .1)]
InvalidExpression(&'static [&'static str], Value),
#[error("Using `federationOptions` is not allowed in a non-federated search.\n - Hint: remove `federationOptions` from query #{0} or add `federation` to the request.")]
FederationOptionsInNonFederatedRequest(usize),
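The two error-message rewrites above line up with the thiserror 1 → 2 bump in this same diff's Cargo.toml: once explicit trailing format expressions are in play, the `{0}` shorthand no longer reliably refers to the enum field, so each interpolated field is now passed explicitly. A minimal sketch of both attribute forms, on a hypothetical error type:

use thiserror::Error;

#[derive(Debug, Error)]
enum SketchError {
    // shorthand form: `{0}` expands to the first tuple field
    #[error("unknown value `{0}`")]
    Unknown(String),
    // explicit form: every placeholder is fed a trailing expression,
    // with tuple fields referenced as `.0`, `.1`
    #[error("expected one of {}, found `{}`", .0.join(", "), .1)]
    Unexpected(Vec<String>, String),
}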

@@ -188,13 +188,13 @@ impl tracing_actix_web::RootSpanBuilder for AwebTracingLogger {
if let Some(error) = response.response().error() {
// use the status code already constructed for the outgoing HTTP response
span.record("error", &tracing::field::display(error.as_response_error()));
span.record("error", tracing::field::display(error.as_response_error()));
}
}
Err(error) => {
let code: i32 = error.error_response().status().as_u16().into();
span.record("status_code", code);
span.record("error", &tracing::field::display(error.as_response_error()));
span.record("error", tracing::field::display(error.as_response_error()));
}
};
}
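The dropped `&` on both `record` calls is possible because `Span::record` accepts any `impl Value`, and the `DisplayValue` returned by `tracing::field::display` already implements it, making the borrow needless. A standalone sketch of the call shape, with a hypothetical span and message:

fn record_error_sketch() {
    // declare the field as Empty so it can be recorded later
    let span = tracing::info_span!("http_request", error = tracing::field::Empty);
    // `display(...)` wraps any `Display` type as a `Value`; pass it by value
    span.record("error", tracing::field::display("connection reset"));
}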
@@ -307,11 +307,12 @@ fn open_or_create_database_unchecked(
task_db_size: opt.max_task_db_size.as_u64() as usize,
index_base_map_size: opt.max_index_size.as_u64() as usize,
enable_mdb_writemap: opt.experimental_reduce_indexing_memory_usage,
indexer_config: (&opt.indexer_options).try_into()?,
indexer_config: Arc::new((&opt.indexer_options).try_into()?),
autobatching_enabled: true,
cleanup_enabled: !opt.experimental_replication_parameters,
max_number_of_tasks: 1_000_000,
max_number_of_batched_tasks: opt.experimental_max_number_of_batched_tasks,
batched_tasks_size_limit: opt.experimental_limit_batched_tasks_total_size,
index_growth_amount: byte_unit::Byte::from_str("10GiB").unwrap().as_u64() as usize,
index_count: DEFAULT_INDEX_COUNT,
instance_features,
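Wrapping `indexer_config` in an `Arc` means one immutable configuration can be handed out as cheap reference-counted clones rather than owned copies. A minimal sketch of the sharing pattern this enables, using hypothetical types rather than the real scheduler structs:

use std::sync::Arc;

struct IndexerConfig {
    max_indexing_memory: usize,
}

fn spawn_indexing_workers(config: Arc<IndexerConfig>) {
    for _ in 0..4 {
        // each worker receives a handle to the same config, not a deep copy
        let config = Arc::clone(&config);
        std::thread::spawn(move || {
            let _per_worker_budget = config.max_indexing_memory / 4;
        });
    }
}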

@@ -60,6 +60,8 @@ const MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE: &str =
"MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE";
const MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS: &str =
"MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS";
const MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE: &str =
"MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_SIZE";
const DEFAULT_CONFIG_FILE_PATH: &str = "./config.toml";
const DEFAULT_DB_PATH: &str = "./data.ms";
@@ -200,20 +202,23 @@ pub struct Opt {
#[clap(long, env = MEILI_TASK_WEBHOOK_URL)]
pub task_webhook_url: Option<Url>,
/// The Authorization header to send on the webhook URL whenever a task finishes so a third party can be notified.
/// The Authorization header to send on the webhook URL whenever
/// a task finishes so a third party can be notified.
#[clap(long, env = MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER)]
pub task_webhook_authorization_header: Option<String>,
/// Deactivates Meilisearch's built-in telemetry when provided.
///
/// Meilisearch automatically collects data from all instances that do not opt out using this flag.
/// All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted
/// Meilisearch automatically collects data from all instances that
/// do not opt out using this flag. All gathered data is used solely
/// for the purpose of improving Meilisearch, and can be deleted
/// at any time.
#[serde(default)] // we can't send true
#[clap(long, env = MEILI_NO_ANALYTICS)]
pub no_analytics: bool,
/// Sets the maximum size of the index. Value must be given in bytes or explicitly stating a base unit (for instance: 107374182400, '107.7Gb', or '107374 Mb').
/// Sets the maximum size of the index. Value must be given in bytes or explicitly
/// stating a base unit (for instance: 107374182400, '107.7Gb', or '107374 Mb').
#[clap(skip = default_max_index_size())]
#[serde(skip, default = "default_max_index_size")]
pub max_index_size: Byte,
@@ -333,43 +338,53 @@ pub struct Opt {
/// Defines how much detail should be present in Meilisearch's logs.
///
/// Meilisearch currently supports six log levels, listed in order of increasing verbosity: OFF, ERROR, WARN, INFO, DEBUG, TRACE.
/// Meilisearch currently supports six log levels, listed in order of
/// increasing verbosity: OFF, ERROR, WARN, INFO, DEBUG, TRACE.
#[clap(long, env = MEILI_LOG_LEVEL, default_value_t)]
#[serde(default)]
pub log_level: LogLevel,
/// Experimental contains filter feature. For more information, see: <https://github.com/orgs/meilisearch/discussions/763>
/// Experimental contains filter feature. For more information,
/// see: <https://github.com/orgs/meilisearch/discussions/763>
///
/// Enables the experimental contains filter operator.
#[clap(long, env = MEILI_EXPERIMENTAL_CONTAINS_FILTER)]
#[serde(default)]
pub experimental_contains_filter: bool,
/// Experimental metrics feature. For more information, see: <https://github.com/meilisearch/meilisearch/discussions/3518>
/// Experimental metrics feature. For more information,
/// see: <https://github.com/meilisearch/meilisearch/discussions/3518>
///
/// Enables the Prometheus metrics on the `GET /metrics` endpoint.
#[clap(long, env = MEILI_EXPERIMENTAL_ENABLE_METRICS)]
#[serde(default)]
pub experimental_enable_metrics: bool,
/// Experimental search queue size. For more information, see: <https://github.com/orgs/meilisearch/discussions/729>
/// Experimental search queue size. For more information,
/// see: <https://github.com/orgs/meilisearch/discussions/729>
///
/// Lets you customize the size of the search queue. Meilisearch processes
/// your search requests as fast as possible but once the queue is full
/// it starts returning HTTP 503, Service Unavailable.
///
/// Lets you customize the size of the search queue. Meilisearch processes your search requests as fast as possible but once the
/// queue is full it starts returning HTTP 503, Service Unavailable.
/// The default value is 1000.
#[clap(long, env = MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE, default_value_t = default_experimental_search_queue_size())]
#[serde(default = "default_experimental_search_queue_size")]
pub experimental_search_queue_size: usize,
/// Experimental drop search after. For more information, see: <https://github.com/orgs/meilisearch/discussions/783>
/// Experimental drop search after. For more information,
/// see: <https://github.com/orgs/meilisearch/discussions/783>
///
/// Lets you customize after how many seconds Meilisearch should consider
/// a search request irrelevant and drop it.
///
/// Let you customize after how many seconds Meilisearch should consider a search request irrelevant and drop it.
/// The default value is 60.
#[clap(long, env = MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER, default_value_t = default_drop_search_after())]
#[serde(default = "default_drop_search_after")]
pub experimental_drop_search_after: NonZeroUsize,
/// Experimental number of searches per core. For more information, see: <https://github.com/orgs/meilisearch/discussions/784>
/// Experimental number of searches per core. For more information,
/// see: <https://github.com/orgs/meilisearch/discussions/784>
///
/// Lets you customize how many search requests can run on each core concurrently.
/// The default value is 4.
@@ -377,16 +392,19 @@ pub struct Opt {
#[serde(default = "default_nb_searches_per_core")]
pub experimental_nb_searches_per_core: NonZeroUsize,
/// Experimental logs mode feature. For more information, see: <https://github.com/orgs/meilisearch/discussions/723>
/// Experimental logs mode feature. For more information,
/// see: <https://github.com/orgs/meilisearch/discussions/723>
///
/// Change the mode of the logs on the console.
#[clap(long, env = MEILI_EXPERIMENTAL_LOGS_MODE, default_value_t)]
#[serde(default)]
pub experimental_logs_mode: LogMode,
/// Experimental logs route feature. For more information, see: <https://github.com/orgs/meilisearch/discussions/721>
/// Experimental logs route feature. For more information,
/// see: <https://github.com/orgs/meilisearch/discussions/721>
///
/// Enables the log routes on the `POST /logs/stream`, `POST /logs/stderr` endpoints, and the `DELETE /logs/stream` to stop receiving logs.
/// Enables the log routes on the `POST /logs/stream`, `POST /logs/stderr` endpoints,
/// and the `DELETE /logs/stream` to stop receiving logs.
#[clap(long, env = MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE)]
#[serde(default)]
pub experimental_enable_logs_route: bool,
@@ -396,21 +414,30 @@ pub struct Opt {
///
/// - /!\ Disables the automatic clean-up of old processed tasks; you're in charge of that now
/// - Lets you specify a custom task ID upon registering a task
/// - Lets you execute dry-register a task (get an answer from the route but nothing is actually registered in meilisearch and it won't be processed)
/// - Lets you dry-register a task (get an answer from the route but nothing is actually
/// registered in Meilisearch and it won't be processed)
#[clap(long, env = MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS)]
#[serde(default)]
pub experimental_replication_parameters: bool,
/// Experimental RAM reduction during indexing, do not use in production, see: <https://github.com/meilisearch/product/discussions/652>
/// Experimental RAM reduction during indexing, do not use in production,
/// see: <https://github.com/meilisearch/product/discussions/652>
#[clap(long, env = MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE)]
#[serde(default)]
pub experimental_reduce_indexing_memory_usage: bool,
/// Experimentally reduces the maximum number of tasks that will be processed at once, see: <https://github.com/orgs/meilisearch/discussions/713>
/// Experimentally reduces the maximum number of tasks that will be processed at once,
/// see: <https://github.com/orgs/meilisearch/discussions/713>
#[clap(long, env = MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS, default_value_t = default_limit_batched_tasks())]
#[serde(default = "default_limit_batched_tasks")]
pub experimental_max_number_of_batched_tasks: usize,
/// Experimentally reduces the maximum total size, in bytes, of tasks that will be processed at once,
/// see: <https://github.com/orgs/meilisearch/discussions/801>
#[clap(long, env = MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE, default_value_t = default_limit_batched_tasks_total_size())]
#[serde(default = "default_limit_batched_tasks_total_size")]
pub experimental_limit_batched_tasks_total_size: u64,
#[serde(flatten)]
#[clap(flatten)]
pub indexer_options: IndexerOpts,
@@ -482,7 +509,6 @@ impl Opt {
max_index_size: _,
max_task_db_size: _,
http_payload_size_limit,
experimental_max_number_of_batched_tasks,
ssl_cert_path,
ssl_key_path,
ssl_auth_path,
@@ -512,6 +538,8 @@ impl Opt {
experimental_enable_logs_route,
experimental_replication_parameters,
experimental_reduce_indexing_memory_usage,
experimental_max_number_of_batched_tasks,
experimental_limit_batched_tasks_total_size,
} = self;
export_to_env_if_not_present(MEILI_DB_PATH, db_path);
export_to_env_if_not_present(MEILI_HTTP_ADDR, http_addr);
@@ -534,10 +562,6 @@ impl Opt {
MEILI_HTTP_PAYLOAD_SIZE_LIMIT,
http_payload_size_limit.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS,
experimental_max_number_of_batched_tasks.to_string(),
);
if let Some(ssl_cert_path) = ssl_cert_path {
export_to_env_if_not_present(MEILI_SSL_CERT_PATH, ssl_cert_path);
}
@@ -596,6 +620,14 @@ impl Opt {
MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE,
experimental_reduce_indexing_memory_usage.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS,
experimental_max_number_of_batched_tasks.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE,
experimental_limit_batched_tasks_total_size.to_string(),
);
indexer_options.export_to_env();
}
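The export calls above (one relocated, one new) both go through `export_to_env_if_not_present`, whose assumed behavior is that a value is written only when the variable is unset, so anything the user exported explicitly takes precedence over config-file values. A sketch under that assumption:

use std::ffi::OsStr;

fn export_to_env_if_not_present<V: AsRef<OsStr>>(key: &str, value: V) {
    // leave user-provided environment untouched; only fill in the gaps
    if std::env::var_os(key).is_none() {
        std::env::set_var(key, value);
    }
}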
@@ -760,8 +792,8 @@ impl MaxMemory {
/// Returns the total amount of bytes available or `None` if this system isn't supported.
fn total_memory_bytes() -> Option<u64> {
if sysinfo::IS_SUPPORTED_SYSTEM {
let memory_kind = RefreshKind::new().with_memory(MemoryRefreshKind::new().with_ram());
let mut system = System::new_with_specifics(memory_kind);
let mem_kind = RefreshKind::nothing().with_memory(MemoryRefreshKind::nothing().with_ram());
let mut system = System::new_with_specifics(mem_kind);
system.refresh_memory();
Some(system.total_memory())
} else {
@@ -899,6 +931,10 @@ fn default_limit_batched_tasks() -> usize {
usize::MAX
}
fn default_limit_batched_tasks_total_size() -> u64 {
u64::MAX
}
fn default_snapshot_dir() -> PathBuf {
PathBuf::from(DEFAULT_SNAPSHOT_DIR)
}

@@ -13,14 +13,28 @@ use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::keys::{CreateApiKey, Key, PatchApiKey};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use utoipa::{IntoParams, OpenApi, ToSchema};
use uuid::Uuid;
use super::PAGINATION_DEFAULT_LIMIT;
use super::{PaginationView, PAGINATION_DEFAULT_LIMIT, PAGINATION_DEFAULT_LIMIT_FN};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::Pagination;
#[derive(OpenApi)]
#[openapi(
paths(create_api_key, list_api_keys, get_api_key, patch_api_key, delete_api_key),
tags((
name = "Keys",
description = "Manage API `keys` for a Meilisearch instance. Each key has a given set of permissions.
You must have the master key or the default admin key to access the keys route. More information about keys and their rights is available in the external documentation.
Accessing any route under `/keys` without having set a master key will result in an error.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/keys"),
)),
)]
pub struct ApiKeyApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
@@ -35,6 +49,51 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
/// Create an API Key
///
/// Create an API Key.
#[utoipa::path(
post,
path = "",
tag = "Keys",
security(("Bearer" = ["keys.create", "keys.*", "*"])),
request_body = CreateApiKey,
responses(
(status = 202, description = "Key has been created", body = KeyView, content_type = "application/json", example = json!(
{
"uid": "01b4bc42-eb33-4041-b481-254d00cce834",
"key": "d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4",
"name": "Indexing Products API key",
"description": null,
"actions": [
"documents.add"
],
"indexes": [
"products"
],
"expiresAt": "2021-11-13T00:00:00Z",
"createdAt": "2021-11-12T10:00:00Z",
"updatedAt": "2021-11-12T10:00:00Z"
}
)),
(status = 401, description = "The route has been hit on an unprotected instance", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
"code": "missing_master_key",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_master_key"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn create_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, Data<AuthController>>,
body: AwebJson<CreateApiKey, DeserrJsonError>,
@@ -51,12 +110,15 @@ pub async fn create_api_key(
Ok(HttpResponse::Created().json(res))
}
#[derive(Deserr, Debug, Clone, Copy)]
#[derive(Deserr, Debug, Clone, Copy, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
pub struct ListApiKeys {
#[deserr(default, error = DeserrQueryParamError<InvalidApiKeyOffset>)]
#[param(value_type = usize, default = 0)]
pub offset: Param<usize>,
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidApiKeyLimit>)]
#[param(value_type = usize, default = PAGINATION_DEFAULT_LIMIT_FN)]
pub limit: Param<usize>,
}
@@ -66,6 +128,58 @@ impl ListApiKeys {
}
}
/// Get API Keys
///
/// List all API Keys
#[utoipa::path(
get,
path = "",
tag = "Keys",
security(("Bearer" = ["keys.get", "keys.*", "*"])),
params(ListApiKeys),
responses(
(status = 202, description = "List of keys", body = PaginationView<KeyView>, content_type = "application/json", example = json!(
{
"results": [
{
"uid": "01b4bc42-eb33-4041-b481-254d00cce834",
"key": "d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4",
"name": "An API Key",
"description": null,
"actions": [
"documents.add"
],
"indexes": [
"movies"
],
"expiresAt": "2022-11-12T10:00:00Z",
"createdAt": "2021-11-12T10:00:00Z",
"updatedAt": "2021-11-12T10:00:00Z"
}
],
"limit": 20,
"offset": 0,
"total": 1
}
)),
(status = 401, description = "The route has been hit on an unprotected instance", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
"code": "missing_master_key",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_master_key"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn list_api_keys(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, Data<AuthController>>,
list_api_keys: AwebQueryParameter<ListApiKeys, DeserrQueryParamError>,
@@ -84,6 +198,51 @@ pub async fn list_api_keys(
Ok(HttpResponse::Ok().json(page_view))
}
/// Get an API Key
///
/// Get an API key from its `uid` or its `key` field.
#[utoipa::path(
get,
path = "/{uidOrKey}",
tag = "Keys",
security(("Bearer" = ["keys.get", "keys.*", "*"])),
params(("uidOrKey" = String, Path, format = Password, example = "7b198a7f-52a0-4188-8762-9ad93cd608b2", description = "The `uid` or `key` field of an existing API key", nullable = false)),
responses(
(status = 200, description = "The key is returned", body = KeyView, content_type = "application/json", example = json!(
{
"uid": "01b4bc42-eb33-4041-b481-254d00cce834",
"key": "d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4",
"name": "An API Key",
"description": null,
"actions": [
"documents.add"
],
"indexes": [
"movies"
],
"expiresAt": "2022-11-12T10:00:00Z",
"createdAt": "2021-11-12T10:00:00Z",
"updatedAt": "2021-11-12T10:00:00Z"
}
)),
(status = 401, description = "The route has been hit on an unprotected instance", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
"code": "missing_master_key",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_master_key"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn get_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, Data<AuthController>>,
path: web::Path<AuthParam>,
@@ -103,6 +262,53 @@ pub async fn get_api_key(
Ok(HttpResponse::Ok().json(res))
}
/// Update a Key
///
/// Update the name and description of an API key.
/// Updates to keys are partial. This means you should provide only the fields you intend to update, as any fields not present in the payload will remain unchanged.
#[utoipa::path(
patch,
path = "/{uidOrKey}",
tag = "Keys",
security(("Bearer" = ["keys.update", "keys.*", "*"])),
params(("uidOrKey" = String, Path, format = Password, example = "7b198a7f-52a0-4188-8762-9ad93cd608b2", description = "The `uid` or `key` field of an existing API key", nullable = false)),
request_body = PatchApiKey,
responses(
(status = 200, description = "The key have been updated", body = KeyView, content_type = "application/json", example = json!(
{
"uid": "01b4bc42-eb33-4041-b481-254d00cce834",
"key": "d0552b41536279a0ad88bd595327b96f01176a60c2243e906c52ac02375f9bc4",
"name": "An API Key",
"description": null,
"actions": [
"documents.add"
],
"indexes": [
"movies"
],
"expiresAt": "2022-11-12T10:00:00Z",
"createdAt": "2021-11-12T10:00:00Z",
"updatedAt": "2021-11-12T10:00:00Z"
}
)),
(status = 401, description = "The route has been hit on an unprotected instance", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
"code": "missing_master_key",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_master_key"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn patch_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, Data<AuthController>>,
body: AwebJson<PatchApiKey, DeserrJsonError>,
@@ -123,6 +329,35 @@ pub async fn patch_api_key(
Ok(HttpResponse::Ok().json(res))
}
/// Delete a key
///
/// Delete the specified API key.
#[utoipa::path(
delete,
path = "/{uidOrKey}",
tag = "Keys",
security(("Bearer" = ["keys.delete", "keys.*", "*"])),
params(("uidOrKey" = String, Path, format = Password, example = "7b198a7f-52a0-4188-8762-9ad93cd608b2", description = "The `uid` or `key` field of an existing API key", nullable = false)),
responses(
(status = NO_CONTENT, description = "The key has been removed"),
(status = 401, description = "The route has been hit on an unprotected instance", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Meilisearch is running without a master key. To access this API endpoint, you must have set a master key at launch.",
"code": "missing_master_key",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_master_key"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn delete_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_DELETE }>, Data<AuthController>>,
path: web::Path<AuthParam>,
@@ -144,19 +379,30 @@ pub struct AuthParam {
key: String,
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
struct KeyView {
pub(super) struct KeyView {
/// The name of the API Key if any
name: Option<String>,
/// The description of the API Key if any
description: Option<String>,
/// The actual API Key you can send to Meilisearch
key: String,
/// The `Uuid` specified while creating the key or autogenerated by Meilisearch.
uid: Uuid,
/// The actions accessible with this key.
actions: Vec<Action>,
/// The indexes accessible with this key.
indexes: Vec<String>,
/// The expiration date of the key. Once this timestamp is exceeded the key is not deleted but cannot be used anymore.
#[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
expires_at: Option<OffsetDateTime>,
/// The date of creation of this API Key.
#[schema(read_only)]
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
created_at: OffsetDateTime,
/// The date of the last update made on this key.
#[schema(read_only)]
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
updated_at: OffsetDateTime,
}
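With `KeyView` deriving `ToSchema` and each handler annotated through `#[utoipa::path]`, the `ApiKeyApi` unit declared at the top of this file can produce a self-contained spec. A minimal sketch of extracting the generated document, assuming only the derives shown above:

use utoipa::OpenApi;

fn api_keys_openapi_json() -> String {
    // `openapi()` is generated by `#[derive(OpenApi)]` on `ApiKeyApi`
    ApiKeyApi::openapi().to_pretty_json().expect("spec serializes to JSON")
}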

@@ -8,17 +8,76 @@ use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::actions;
use serde::Serialize;
use utoipa::{OpenApi, ToSchema};
use super::tasks::TasksFilterQuery;
use super::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
#[derive(OpenApi)]
#[openapi(
paths(get_batch, get_batches),
tags((
name = "Batches",
description = "The /batches route gives information about the progress of batches of asynchronous operations.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/batches"),
)),
)]
pub struct BatchesApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::get().to(SeqHandler(get_batches))))
.service(web::resource("/{batch_id}").route(web::get().to(SeqHandler(get_batch))));
}
/// Get one batch
///
/// Get a single batch.
#[utoipa::path(
get,
path = "/{batchUid}",
tag = "Batches",
security(("Bearer" = ["tasks.get", "tasks.*", "*"])),
params(
("batchUid" = String, Path, example = "8685", description = "The unique batch id", nullable = false),
),
responses(
(status = OK, description = "Return the batch", body = BatchView, content_type = "application/json", example = json!(
{
"uid": 1,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"progress": null,
"stats": {
"totalNbTasks": 1,
"status": {
"succeeded": 1
},
"types": {
"documentAdditionOrUpdate": 1
},
"indexUids": {
"INDEX_NAME": 1
}
},
"duration": "PT0.364788S",
"startedAt": "2024-12-10T15:48:49.672141Z",
"finishedAt": "2024-12-10T15:48:50.036929Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn get_batch(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
batch_uid: web::Path<String>,
@@ -36,17 +95,17 @@ async fn get_batch(
let query = index_scheduler::Query { batch_uids: Some(vec![batch_uid]), ..Query::default() };
let filters = index_scheduler.filters();
let (batches, _) = index_scheduler.get_batches_from_authorized_indexes(query, filters)?;
let (batches, _) = index_scheduler.get_batches_from_authorized_indexes(&query, filters)?;
if let Some(batch) = batches.first() {
let task_view = BatchView::from_batch(batch);
Ok(HttpResponse::Ok().json(task_view))
let batch_view = BatchView::from_batch(batch);
Ok(HttpResponse::Ok().json(batch_view))
} else {
Err(index_scheduler::Error::BatchNotFound(batch_uid).into())
}
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, ToSchema)]
pub struct AllBatches {
results: Vec<BatchView>,
total: u64,
@@ -55,6 +114,63 @@ pub struct AllBatches {
next: Option<u32>,
}
/// Get batches
///
/// List all batches, regardless of index. The batch objects are contained in the results array.
/// Batches are always returned in descending order of uid. This means that by default, the most recently created batch objects appear first.
/// Batch results are paginated and can be filtered with query parameters.
#[utoipa::path(
get,
path = "",
tag = "Batches",
security(("Bearer" = ["tasks.get", "tasks.*", "*"])),
params(TasksFilterQuery),
responses(
(status = OK, description = "Return the batches", body = AllBatches, content_type = "application/json", example = json!(
{
"results": [
{
"uid": 2,
"details": {
"stopWords": [
"of",
"the"
]
},
"progress": null,
"stats": {
"totalNbTasks": 1,
"status": {
"succeeded": 1
},
"types": {
"settingsUpdate": 1
},
"indexUids": {
"INDEX_NAME": 1
}
},
"duration": "PT0.110083S",
"startedAt": "2024-12-10T15:49:04.995321Z",
"finishedAt": "2024-12-10T15:49:05.105404Z"
}
],
"total": 3,
"limit": 1,
"from": 2,
"next": 1
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn get_batches(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
params: AwebQueryParameter<TasksFilterQuery, DeserrQueryParamError>,
@@ -66,7 +182,7 @@ async fn get_batches(
let query = params.into_query();
let filters = index_scheduler.filters();
let (tasks, total) = index_scheduler.get_batches_from_authorized_indexes(query, filters)?;
let (tasks, total) = index_scheduler.get_batches_from_authorized_indexes(&query, filters)?;
let mut results: Vec<_> = tasks.iter().map(BatchView::from_batch).collect();
// If we were able to fetch the number +1 tasks we asked
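The truncated comment refers to the fetch-one-extra idiom: the query asks for `limit + 1` items, and if that extra item arrives it both proves a next page exists and supplies the cursor. A minimal sketch with hypothetical uids:

fn split_page(mut results: Vec<u32>, limit: usize) -> (Vec<u32>, Option<u32>) {
    // if the +1 probe item is present, pop it and reuse its uid as `next`
    let next = if results.len() == limit + 1 { results.pop() } else { None };
    (results, next)
}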

@@ -5,6 +5,7 @@ use meilisearch_auth::AuthController;
use meilisearch_types::error::ResponseError;
use meilisearch_types::tasks::KindWithContent;
use tracing::debug;
use utoipa::OpenApi;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
@@ -13,12 +14,60 @@ use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{get_task_id, is_dry_run, SummarizedTaskView};
use crate::Opt;
#[derive(OpenApi)]
#[openapi(
paths(create_dump),
tags((
name = "Dumps",
description = "The `dumps` route allows the creation of database dumps.
Dumps are `.dump` files that can be used to launch Meilisearch. Dumps are compatible between Meilisearch versions.
Creating a dump is also referred to as exporting it, whereas launching Meilisearch with a dump is referred to as importing it.
During a [dump export](https://www.meilisearch.com/docs/reference/api/dump#create-a-dump), all indexes of the current instance are
exported together with their documents and settings, and saved as a single `.dump` file. During a dump import,
all indexes contained in the indicated `.dump` file are imported along with their associated documents and settings.
Any existing index with the same uid as an index in the dump file will be overwritten.
Dump imports are [performed at launch](https://www.meilisearch.com/docs/learn/advanced/dumps#importing-a-dump) using an option.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/dump"),
)),
)]
pub struct DumpApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));
}
crate::empty_analytics!(DumpAnalytics, "Dump Created");
/// Create a dump
///
/// Triggers a dump creation process. Once the process is complete, a dump is created in the
/// [dump directory](https://www.meilisearch.com/docs/learn/self_hosted/configure_meilisearch_at_launch#dump-directory).
/// If the dump directory does not exist yet, it will be created.
#[utoipa::path(
post,
path = "",
tag = "Dumps",
security(("Bearer" = ["dumps.create", "dumps.*", "*"])),
responses(
(status = 202, description = "Dump is being created", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 0,
"indexUid": null,
"status": "enqueued",
"type": "DumpCreation",
"enqueuedAt": "2021-01-01T09:39:00.000000Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn create_dump(
index_scheduler: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, Data<IndexScheduler>>,
auth_controller: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, Data<AuthController>>,

@@ -8,12 +8,26 @@ use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::actions;
use serde::Serialize;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};
use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
#[derive(OpenApi)]
#[openapi(
paths(get_features, patch_features),
tags((
name = "Experimental features",
description = "The `/experimental-features` route allows you to activate or deactivate some of Meilisearch's experimental features.
This route is **synchronous**. This means that no task object will be returned, and any activated or deactivated features will be made available or unavailable immediately.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/experimental_features"),
)),
)]
pub struct ExperimentalFeaturesApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
@@ -22,6 +36,32 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
/// Get all experimental features
///
/// Get a list of all experimental features that can be activated via the /experimental-features route and whether or not they are currently activated.
#[utoipa::path(
get,
path = "",
tag = "Experimental features",
security(("Bearer" = ["experimental_features.get", "experimental_features.*", "*"])),
responses(
(status = OK, description = "Experimental features are returned", body = RuntimeTogglableFeatures, content_type = "application/json", example = json!(RuntimeTogglableFeatures {
vector_store: Some(true),
metrics: Some(true),
logs_route: Some(false),
edit_documents_by_function: Some(false),
contains_filter: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn get_features(
index_scheduler: GuardedData<
ActionPolicy<{ actions::EXPERIMENTAL_FEATURES_GET }>,
@@ -35,8 +75,10 @@ async fn get_features(
HttpResponse::Ok().json(features)
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, ToSchema, Serialize)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct RuntimeTogglableFeatures {
#[deserr(default)]
pub vector_store: Option<bool>,
@@ -79,6 +121,32 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
}
}
/// Configure experimental features
///
/// Activate or deactivate experimental features.
#[utoipa::path(
patch,
path = "",
tag = "Experimental features",
security(("Bearer" = ["experimental_features.update", "experimental_features.*", "*"])),
responses(
(status = OK, description = "Experimental features are returned", body = RuntimeTogglableFeatures, content_type = "application/json", example = json!(RuntimeTogglableFeatures {
vector_store: Some(true),
metrics: Some(true),
logs_route: Some(false),
edit_documents_by_function: Some(false),
contains_filter: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn patch_features(
index_scheduler: GuardedData<
ActionPolicy<{ actions::EXPERIMENTAL_FEATURES_UPDATE }>,

@@ -31,6 +31,7 @@ use tempfile::tempfile;
use tokio::fs::File;
use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
use tracing::debug;
use utoipa::{IntoParams, OpenApi, ToSchema};
use crate::analytics::{Aggregate, AggregateMethod, Analytics};
use crate::error::MeilisearchHttpError;
@@ -71,6 +72,19 @@ pub struct DocumentParam {
document_id: String,
}
#[derive(OpenApi)]
#[openapi(
paths(get_document, get_documents, delete_document, replace_documents, update_documents, clear_all_documents, delete_documents_batch, delete_documents_by_filter, edit_documents_by_function, documents_by_query_post),
tags(
(
name = "Documents",
description = "Documents are objects composed of fields that can store any type of data. Each field contains an attribute and its associated value. Documents are stored inside [indexes](https://www.meilisearch.com/docs/learn/getting_started/indexes).",
external_docs(url = "https://www.meilisearch.com/docs/learn/getting_started/documents"),
),
),
)]
pub struct DocumentsApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
@@ -93,12 +107,18 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, IntoParams, ToSchema)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
#[schema(rename_all = "camelCase")]
pub struct GetDocument {
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
#[param(value_type = Option<Vec<String>>)]
#[schema(value_type = Option<Vec<String>>)]
fields: OptionStarOrList<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentRetrieveVectors>)]
#[param(value_type = Option<bool>)]
#[schema(value_type = Option<bool>)]
retrieve_vectors: Param<bool>,
}
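`OptionStarOrList<String>` models the assumed query-string convention in which `*` selects every field and anything else is read as a comma-separated list. A hypothetical parser for that convention, not the real deserr implementation:

enum StarOrList {
    Star,
    List(Vec<String>),
}

fn parse_fields(raw: &str) -> StarOrList {
    if raw == "*" {
        StarOrList::Star
    } else {
        StarOrList::List(raw.split(',').map(str::to_string).collect())
    }
}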
@@ -174,6 +194,55 @@ impl<Method: AggregateMethod> Aggregate for DocumentsFetchAggregator<Method> {
}
}
/// Get one document
///
/// Get one document from its primary key.
#[utoipa::path(
get,
path = "{indexUid}/documents/{documentId}",
tag = "Documents",
security(("Bearer" = ["documents.get", "documents.*", "*"])),
params(
("indexUid" = String, Path, example = "movies", description = "Index Unique Identifier", nullable = false),
("documentId" = String, Path, example = "85087", description = "The document identifier", nullable = false),
GetDocument,
),
responses(
(status = 200, description = "The document is returned", body = serde_json::Value, content_type = "application/json", example = json!(
{
"id": 25684,
"title": "American Ninja 5",
"poster": "https://image.tmdb.org/t/p/w1280/iuAQVI4mvjI83wnirpD8GVNRVuY.jpg",
"overview": "When a scientists daughter is kidnapped, American Ninja, attempts to find her, but this time he teams up with a youngster he has trained in the ways of the ninja.",
"release_date": 725846400
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 404, description = "Document not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Document `a` not found.",
"code": "document_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#document_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn get_document(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
document_param: web::Path<DocumentParam>,
@@ -237,6 +306,38 @@ impl Aggregate for DocumentsDeletionAggregator {
}
}
/// Delete a document
///
/// Delete a single document by id.
#[utoipa::path(
delete,
path = "{indexUid}/documents/{documentId}",
tag = "Documents",
security(("Bearer" = ["documents.delete", "documents.*", "*"])),
params(
("indexUid" = String, Path, example = "movies", description = "Index Unique Identifier", nullable = false),
("documentId" = String, Path, example = "853", description = "Document Identifier", nullable = false),
),
responses(
(status = 200, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "documentAdditionOrUpdate",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn delete_document(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
path: web::Path<DocumentParam>,
@@ -271,36 +372,98 @@ pub async fn delete_document(
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
pub struct BrowseQueryGet {
#[param(default, value_type = Option<usize>)]
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentOffset>)]
offset: Param<usize>,
#[param(default, value_type = Option<usize>)]
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidDocumentLimit>)]
limit: Param<usize>,
#[param(default, value_type = Option<Vec<String>>)]
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
fields: OptionStarOrList<String>,
#[param(default, value_type = Option<bool>)]
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentRetrieveVectors>)]
retrieve_vectors: Param<bool>,
#[param(default, value_type = Option<String>, example = "popularity > 1000")]
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentFilter>)]
filter: Option<String>,
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct BrowseQuery {
#[schema(default, example = 150)]
#[deserr(default, error = DeserrJsonError<InvalidDocumentOffset>)]
offset: usize,
#[schema(default = 20, example = 1)]
#[deserr(default = PAGINATION_DEFAULT_LIMIT, error = DeserrJsonError<InvalidDocumentLimit>)]
limit: usize,
#[schema(example = json!(["title, description"]))]
#[deserr(default, error = DeserrJsonError<InvalidDocumentFields>)]
fields: Option<Vec<String>>,
#[schema(default, example = true)]
#[deserr(default, error = DeserrJsonError<InvalidDocumentRetrieveVectors>)]
retrieve_vectors: bool,
#[schema(default, value_type = Option<Value>, example = "popularity > 1000")]
#[deserr(default, error = DeserrJsonError<InvalidDocumentFilter>)]
filter: Option<Value>,
}
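For reference, a `POST /indexes/{indexUid}/documents/fetch` payload matching the `BrowseQuery` fields above could look like the following sketch (illustrative values only):

fn example_fetch_body() -> serde_json::Value {
    serde_json::json!({
        "offset": 0,
        "limit": 2,
        "fields": ["title", "genres"],
        "retrieveVectors": false,
        "filter": "rating > 4"
    })
}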
/// Get documents with POST
///
/// Get a set of documents.
#[utoipa::path(
post,
path = "{indexUid}/documents/fetch",
tag = "Documents",
security(("Bearer" = ["documents.delete", "documents.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
request_body = BrowseQuery,
responses(
(status = 200, description = "Task successfully enqueued", body = PaginationView<serde_json::Value>, content_type = "application/json", example = json!(
{
"results":[
{
"title":"The Travels of Ibn Battuta",
"genres":[
"Travel",
"Adventure"
],
"language":"English",
"rating":4.5
},
{
"title":"Pride and Prejudice",
"genres":[
"Classics",
"Fiction",
"Romance",
"Literature"
],
"language":"English",
"rating":4
}
],
"offset":0,
"limit":2,
"total":5
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn documents_by_query_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@@ -326,6 +489,60 @@ pub async fn documents_by_query_post(
documents_by_query(&index_scheduler, index_uid, body)
}
/// Get documents
///
/// Get documents by batches.
#[utoipa::path(
get,
path = "{indexUid}/documents",
tag = "Documents",
security(("Bearer" = ["documents.get", "documents.*", "*"])),
params(
("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false),
BrowseQueryGet
),
responses(
(status = 200, description = "The documents are returned", body = PaginationView<serde_json::Value>, content_type = "application/json", example = json!(
{
"results": [
{
"id": 25684,
"title": "American Ninja 5",
"poster": "https://image.tmdb.org/t/p/w1280/iuAQVI4mvjI83wnirpD8GVNRVuY.jpg",
"overview": "When a scientists daughter is kidnapped, American Ninja, attempts to find her, but this time he teams up with a youngster he has trained in the ways of the ninja.",
"release_date": 725846400
},
{
"id": 45881,
"title": "The Bridge of San Luis Rey",
"poster": "https://image.tmdb.org/t/p/w500/4X7quIcdkc24Cveg5XdpfRqxtYA.jpg",
"overview": "The Bridge of San Luis Rey is American author Thornton Wilder's second novel, first published in 1927 to worldwide acclaim. It tells the story of several interrelated people who die in the collapse of an Inca rope-fiber suspension bridge in Peru, and the events that lead up to their being on the bridge.[ A friar who has witnessed the tragic accident then goes about inquiring into the lives of the victims, seeking some sort of cosmic answer to the question of why each had to die. The novel won the Pulitzer Prize in 1928.",
"release_date": 1072915200
}
],
"limit": 20,
"offset": 0,
"total": 2
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn get_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@@ -396,11 +613,17 @@ fn documents_by_query(
Ok(HttpResponse::Ok().json(ret))
}
#[derive(Deserialize, Debug, Deserr)]
#[derive(Deserialize, Debug, Deserr, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(parameter_in = Query, rename_all = "camelCase")]
pub struct UpdateDocumentsQuery {
/// The primary key of the documents. primaryKey is optional. If you want to set the primary key of your index through this route,
/// it only has to be done the first time you add documents to the index; afterwards it will be ignored if given.
#[param(example = "id")]
#[deserr(default, error = DeserrQueryParamError<InvalidIndexPrimaryKey>)]
pub primary_key: Option<String>,
/// Customize the csv delimiter when importing CSV documents.
#[param(value_type = char, default = ",", example = ";")]
#[deserr(default, try_from(char) = from_char_csv_delimiter -> DeserrQueryParamError<InvalidDocumentCsvDelimiter>, error = DeserrQueryParamError<InvalidDocumentCsvDelimiter>)]
pub csv_delimiter: Option<u8>,
}
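The `csvDelimiter` parameter arrives as a single character and is stored as a byte. The sketch below assumes the conversion simply rejects non-ASCII input; the real `from_char_csv_delimiter` helper may validate more:

fn csv_delimiter_from_char(c: char) -> Result<u8, String> {
    if c.is_ascii() {
        Ok(c as u8)
    } else {
        Err(format!("`{c}` is not a valid CSV delimiter: only ASCII characters are accepted"))
    }
}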
@@ -451,6 +674,52 @@ impl<Method: AggregateMethod> Aggregate for DocumentsAggregator<Method> {
}
}
/// Add or replace documents
///
/// Add a list of documents or replace them if they already exist.
///
/// If you send an already existing document (same id), the whole existing document will be overwritten by the new document. Fields previously in the document but not present in the new document are removed.
///
/// For a partial update of the document, see the Add or update documents route.
/// > info
/// > If the provided index does not exist, it will be created.
/// > info
/// > Use the reserved `_geo` object to add geo coordinates to a document. `_geo` is an object made of `lat` and `lng` fields.
/// >
/// > When the vectorStore feature is enabled, you can use the reserved `_vectors` field in your documents.
/// > It can accept an array of floats, multiple arrays of floats in an outer array, or an object.
/// > This object accepts keys corresponding to the different embedders defined in your index settings.
#[utoipa::path(
post,
path = "{indexUid}/documents",
tag = "Documents",
security(("Bearer" = ["documents.add", "documents.*", "*"])),
params(
("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false),
// Here we can use the post version of the browse query since it contains the exact same parameters
UpdateDocumentsQuery,
),
request_body = serde_json::Value,
responses(
(status = 200, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "documentAdditionOrUpdate",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn replace_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -508,6 +777,50 @@ pub async fn replace_documents(
Ok(HttpResponse::Accepted().json(task))
}
/// Add or update documents
///
/// Add a list of documents or update them if they already exist.
/// If you send an already existing document (same id), the old document will only be partially updated according to the fields of the new document. Any fields not present in the new document are kept and remain unchanged.
/// To completely overwrite a document, see the Add or replace documents route.
/// > info
/// > If the provided index does not exist, it will be created.
/// > info
/// > Use the reserved `_geo` object to add geo coordinates to a document. `_geo` is an object made of `lat` and `lng` fields.
/// >
/// > When the vectorStore feature is enabled, you can use the reserved `_vectors` field in your documents.
/// > It can accept an array of floats, multiple arrays of floats in an outer array, or an object.
/// > This object accepts keys corresponding to the different embedders defined in your index settings.
#[utoipa::path(
put,
path = "{indexUid}/documents",
tag = "Documents",
security(("Bearer" = ["documents.add", "documents.*", "*"])),
params(
("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false),
// Here we can use the post version of the browse query since it contains the exact same parameters
UpdateDocumentsQuery,
),
request_body = serde_json::Value,
responses(
(status = 202, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "documentAdditionOrUpdate",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn update_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -608,7 +921,7 @@ async fn document_addition(
}
};
let (uuid, mut update_file) = index_scheduler.create_update_file(dry_run)?;
let (uuid, mut update_file) = index_scheduler.queue.create_update_file(dry_run)?;
let documents_count = match format {
PayloadType::Ndjson => {
let (path, file) = update_file.into_parts();
@ -670,7 +983,7 @@ async fn document_addition(
Err(e) => {
// Here the file MAY have been persisted or not.
// We don't know thus we ignore the file not found error.
match index_scheduler.delete_update_file(uuid) {
match index_scheduler.queue.delete_update_file(uuid) {
Ok(()) => (),
Err(index_scheduler::Error::FileStore(file_store::Error::IoError(e)))
if e.kind() == ErrorKind::NotFound => {}
@ -701,7 +1014,7 @@ async fn document_addition(
{
Ok(task) => task,
Err(e) => {
index_scheduler.delete_update_file(uuid)?;
index_scheduler.queue.delete_update_file(uuid)?;
return Err(e.into());
}
};
@ -742,6 +1055,38 @@ async fn copy_body_to_file(
Ok(read_file)
}
/// Delete documents by batch
///
/// Delete a set of documents based on an array of document ids.
#[utoipa::path(
post,
path = "{indexUid}/delete-batch",
tag = "Documents",
security(("Bearer" = ["documents.delete", "documents.*", "*"])),
params(
("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false),
),
request_body = Vec<Value>,
responses(
(status = 202, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn delete_documents_batch(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -781,13 +1126,44 @@ pub async fn delete_documents_batch(
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct DocumentDeletionByFilter {
#[deserr(error = DeserrJsonError<InvalidDocumentFilter>, missing_field_error = DeserrJsonError::missing_document_filter)]
filter: Value,
}
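// Illustrative request body for the delete-by-filter route documented below;
// the filter expression itself is a hypothetical example, any valid
// Meilisearch filter is accepted.
fn example_delete_by_filter_body() -> serde_json::Value {
    serde_json::json!({ "filter": "genres = horror AND rating < 2" })
}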
/// Delete documents by filter
///
/// Delete a set of documents based on a filter.
#[utoipa::path(
post,
path = "{indexUid}/documents/delete",
tag = "Documents",
security(("Bearer" = ["documents.delete", "documents.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
request_body = DocumentDeletionByFilter,
responses(
(status = ACCEPTED, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn delete_documents_by_filter(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -828,13 +1204,16 @@ pub async fn delete_documents_by_filter(
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct DocumentEditionByFunction {
/// A string containing a filter expression.
#[deserr(default, error = DeserrJsonError<InvalidDocumentFilter>)]
pub filter: Option<Value>,
/// An object with data Meilisearch should make available for the editing function.
#[deserr(default, error = DeserrJsonError<InvalidDocumentEditionContext>)]
pub context: Option<Value>,
/// A string containing a RHAI function.
#[deserr(error = DeserrJsonError<InvalidDocumentEditionFunctionFilter>, missing_field_error = DeserrJsonError::missing_document_edition_function)]
pub function: String,
}
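// Illustrative request body for `POST /indexes/{indexUid}/documents/edit`.
// The filter, the context key, and the RHAI snippet are hypothetical; the
// function receives each matched document as `doc` and the payload's `context`.
fn example_edit_by_function_body() -> serde_json::Value {
    serde_json::json!({
        "filter": "genres = comedy",
        "context": { "suffix": " (remastered)" },
        "function": "doc.title = doc.title + context.suffix;"
    })
}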
@ -867,6 +1246,38 @@ impl Aggregate for EditDocumentsByFunctionAggregator {
}
}
/// Edit documents by function.
///
/// Use a [RHAI function](https://rhai.rs/book/engine/hello-world.html) to edit one or more documents directly in Meilisearch.
#[utoipa::path(
post,
path = "{indexUid}/documents/edit",
tag = "Documents",
security(("Bearer" = ["documents.*", "*"])),
params(
("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false),
),
request_body = DocumentEditionByFunction,
responses(
(status = 202, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn edit_documents_by_function(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ALL }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -936,6 +1347,35 @@ pub async fn edit_documents_by_function(
Ok(HttpResponse::Accepted().json(task))
}
/// Delete all documents
///
/// Delete all documents in the specified index.
#[utoipa::path(
delete,
path = "{indexUid}/documents",
tag = "Documents",
security(("Bearer" = ["documents.delete", "documents.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
responses(
(status = 202, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn clear_all_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,

View file

@ -11,6 +11,7 @@ use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::locales::Locale;
use serde_json::Value;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};
use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::policies::*;
@ -18,20 +19,33 @@ use crate::extractors::authentication::GuardedData;
use crate::routes::indexes::search::search_kind;
use crate::search::{
add_search_rules, perform_facet_search, FacetSearchResult, HybridQuery, MatchingStrategy,
RankingScoreThreshold, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
RankingScoreThreshold, SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
DEFAULT_SEARCH_OFFSET,
};
use crate::search_queue::SearchQueue;
#[derive(OpenApi)]
#[openapi(
paths(search),
tags(
(
name = "Facet Search",
description = "The `/facet-search` route allows you to search for facet values. Facet search supports prefix search and typo tolerance. The returned hits are sorted lexicographically in ascending order. You can configure how facets are sorted using the sortFacetValuesBy property of the faceting index settings.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/facet_search"),
),
),
)]
pub struct FacetSearchApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(search)));
}
/// # Important
///
/// Intentionally don't use `deny_unknown_fields` to ignore search parameters sent by user
#[derive(Debug, Clone, Default, PartialEq, deserr::Deserr)]
// # Important
//
// Intentionally don't use `deny_unknown_fields` to ignore search parameters sent by user
#[derive(Debug, Clone, Default, PartialEq, deserr::Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase)]
pub struct FacetSearchQuery {
#[deserr(default, error = DeserrJsonError<InvalidFacetSearchQuery>)]
@ -158,6 +172,60 @@ impl Aggregate for FacetSearchAggregator {
}
}
/// Perform a facet search
///
/// Search for a facet value within a given facet.
#[utoipa::path(
post,
path = "{indexUid}/facet-search",
tag = "Facet Search",
security(("Bearer" = ["search", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
request_body = FacetSearchQuery,
responses(
(status = 200, description = "The documents are returned", body = SearchResult, content_type = "application/json", example = json!(
{
"hits": [
{
"id": 2770,
"title": "American Pie 2",
"poster": "https://image.tmdb.org/t/p/w1280/q4LNgUnRfltxzp3gf1MAGiK5LhV.jpg",
"overview": "The whole gang are back and as close as ever. They decide to get even closer by spending the summer together at a beach house. They decide to hold the biggest…",
"release_date": 997405200
},
{
"id": 190859,
"title": "American Sniper",
"poster": "https://image.tmdb.org/t/p/w1280/svPHnYE7N5NAGO49dBmRhq0vDQ3.jpg",
"overview": "U.S. Navy SEAL Chris Kyle takes his sole mission—protect his comrades—to heart and becomes one of the most lethal snipers in American history. His pinpoint accuracy not only saves countless lives but also makes him a prime…",
"release_date": 1418256000
}
],
"offset": 0,
"limit": 2,
"estimatedTotalHits": 976,
"processingTimeMs": 35,
"query": "american "
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn search(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
search_queue: Data<SearchQueue>,

View file

@ -16,8 +16,11 @@ use meilisearch_types::tasks::KindWithContent;
use serde::Serialize;
use time::OffsetDateTime;
use tracing::debug;
use utoipa::{IntoParams, OpenApi, ToSchema};
use super::{get_task_id, Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use super::{
get_task_id, Pagination, PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT,
};
use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
@ -36,6 +39,25 @@ mod settings_analytics;
pub mod similar;
mod similar_analytics;
#[derive(OpenApi)]
#[openapi(
nest(
(path = "/", api = documents::DocumentsApi),
(path = "/", api = facet_search::FacetSearchApi),
(path = "/", api = similar::SimilarApi),
(path = "/", api = settings::SettingsApi),
),
paths(list_indexes, create_index, get_index, update_index, delete_index, get_index_stats),
tags(
(
name = "Indexes",
description = "An index is an entity that gathers a set of [documents](https://www.meilisearch.com/docs/learn/getting_started/documents) with its own [settings](https://www.meilisearch.com/docs/reference/api/settings). Learn more about indexes.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/indexes"),
),
),
)]
pub struct IndexesApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
@ -59,14 +81,18 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
#[derive(Debug, Serialize, Clone)]
#[derive(Debug, Serialize, Clone, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct IndexView {
/// Unique identifier for the index
pub uid: String,
/// The `RFC 3339` formatted date and time when the index was created
#[serde(with = "time::serde::rfc3339")]
pub created_at: OffsetDateTime,
/// The `RFC 3339` formatted date and time when the index was last updated
#[serde(with = "time::serde::rfc3339")]
pub updated_at: OffsetDateTime,
/// Custom primaryKey for documents
pub primary_key: Option<String>,
}
@ -84,20 +110,61 @@ impl IndexView {
}
}
#[derive(Deserr, Debug, Clone, Copy)]
#[derive(Deserr, Debug, Clone, Copy, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
pub struct ListIndexes {
/// The number of indexes to skip before starting to retrieve anything
#[param(value_type = Option<usize>, default, example = 100)]
#[deserr(default, error = DeserrQueryParamError<InvalidIndexOffset>)]
pub offset: Param<usize>,
/// The number of indexes to retrieve
#[param(value_type = Option<usize>, default = 20, example = 1)]
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidIndexLimit>)]
pub limit: Param<usize>,
}
impl ListIndexes {
fn as_pagination(self) -> Pagination {
Pagination { offset: self.offset.0, limit: self.limit.0 }
}
}
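// Usage sketch: `as_pagination` simply unwraps the deserr `Param` wrappers
// (building `Param` directly mirrors the `Param(PAGINATION_DEFAULT_LIMIT)`
// default above). With no query parameters provided, deserr falls back to the
// declared defaults (offset 0, limit PAGINATION_DEFAULT_LIMIT).
fn example_list_indexes_pagination() {
    let query = ListIndexes { offset: Param(10), limit: Param(20) };
    let pagination = query.as_pagination();
    assert_eq!((pagination.offset, pagination.limit), (10, 20));
}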
/// List indexes
///
/// List all indexes.
#[utoipa::path(
get,
path = "",
tag = "Indexes",
security(("Bearer" = ["indexes.get", "indexes.*", "*"])),
params(ListIndexes),
responses(
(status = 200, description = "Indexes are returned", body = PaginationView<IndexView>, content_type = "application/json", example = json!(
{
"results": [
{
"uid": "movies",
"primaryKey": "movie_id",
"createdAt": "2019-11-20T09:40:33.711324Z",
"updatedAt": "2019-11-20T09:40:33.711324Z"
}
],
"limit": 1,
"offset": 0,
"total": 1
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn list_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
paginate: AwebQueryParameter<ListIndexes, DeserrQueryParamError>,
@ -121,11 +188,16 @@ pub async fn list_indexes(
Ok(HttpResponse::Ok().json(ret))
}
#[derive(Deserr, Debug)]
#[derive(Deserr, Debug, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct IndexCreateRequest {
/// The name of the index
#[schema(example = "movies")]
#[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)]
uid: IndexUid,
/// The primary key of the index
#[schema(example = "id")]
#[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
primary_key: Option<String>,
}
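// Illustrative body for `POST /indexes`; `primaryKey` is optional and may be
// omitted to let Meilisearch infer it from the first documents added.
fn example_create_index_body() -> serde_json::Value {
    serde_json::json!({ "uid": "movies", "primaryKey": "id" })
}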
@ -149,6 +221,35 @@ impl Aggregate for IndexCreatedAggregate {
}
}
/// Create index
///
/// Create an index.
#[utoipa::path(
post,
path = "",
tag = "Indexes",
security(("Bearer" = ["indexes.create", "indexes.*", "*"])),
request_body = IndexCreateRequest,
responses(
(status = 202, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": "movies",
"status": "enqueued",
"type": "indexCreation",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn create_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
body: AwebJson<IndexCreateRequest, DeserrJsonError>,
@ -198,13 +299,42 @@ fn deny_immutable_fields_index(
}
}
#[derive(Deserr, Debug)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)]
pub struct UpdateIndexRequest {
#[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
primary_key: Option<String>,
}
/// Get index
///
/// Get information about an index.
#[utoipa::path(
get,
path = "/{indexUid}",
tag = "Indexes",
security(("Bearer" = ["indexes.get", "indexes.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
responses(
(status = 200, description = "The index is returned", body = IndexView, content_type = "application/json", example = json!(
{
"uid": "movies",
"primaryKey": "movie_id",
"createdAt": "2019-11-20T09:40:33.711324Z",
"updatedAt": "2019-11-20T09:40:33.711324Z"
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn get_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -237,6 +367,47 @@ impl Aggregate for IndexUpdatedAggregate {
serde_json::to_value(*self).unwrap_or_default()
}
}
#[derive(Deserr, Debug, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)]
#[schema(rename_all = "camelCase")]
pub struct UpdateIndexRequest {
/// The new primary key of the index
#[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
primary_key: Option<String>,
}
/// Update index
///
/// Update the `primaryKey` of an index.
/// Return an error if the index doesn't exist yet or if it already contains documents.
#[utoipa::path(
patch,
path = "/{indexUid}",
tag = "Indexes",
security(("Bearer" = ["indexes.update", "indexes.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
request_body = UpdateIndexRequest,
responses(
(status = ACCEPTED, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 0,
"indexUid": "movies",
"status": "enqueued",
"type": "indexUpdate",
"enqueuedAt": "2021-01-01T09:39:00.000000Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn update_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -269,6 +440,35 @@ pub async fn update_index(
Ok(HttpResponse::Accepted().json(task))
}
/// Delete index
///
/// Delete an index.
#[utoipa::path(
delete,
path = "/{indexUid}",
tag = "Indexes",
security(("Bearer" = ["indexes.delete", "indexes.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
responses(
(status = ACCEPTED, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 0,
"indexUid": "movies",
"status": "enqueued",
"type": "indexDeletion",
"enqueuedAt": "2021-01-01T09:39:00.000000Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn delete_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -289,14 +489,15 @@ pub async fn delete_index(
}
/// Stats of an `Index`, as known to the `stats` route.
#[derive(Serialize, Debug)]
#[derive(Serialize, Debug, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct IndexStats {
/// Number of documents in the index
pub number_of_documents: u64,
/// Whether the index is currently performing indexation, according to the scheduler.
/// Whether or not the index is currently ingesting documents
pub is_indexing: bool,
/// Association of every field name with the number of times it occurs in the documents.
#[schema(value_type = HashMap<String, u64>)]
pub field_distribution: FieldDistribution,
}
@ -310,6 +511,44 @@ impl From<index_scheduler::IndexStats> for IndexStats {
}
}
/// Get stats of index
///
/// Get the stats of an index.
#[utoipa::path(
get,
path = "/{indexUid}/stats",
tag = "Stats",
security(("Bearer" = ["stats.get", "stats.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
responses(
(status = OK, description = "The stats of the index", body = IndexStats, content_type = "application/json", example = json!(
{
"numberOfDocuments": 10,
"isIndexing": true,
"fieldDistribution": {
"genre": 10,
"author": 9
}
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn get_index_stats(
index_scheduler: GuardedData<ActionPolicy<{ actions::STATS_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,

View file

@ -12,6 +12,7 @@ use meilisearch_types::milli;
use meilisearch_types::serde_cs::vec::CS;
use serde_json::Value;
use tracing::debug;
use utoipa::{IntoParams, OpenApi};
use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
@ -22,12 +23,28 @@ use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
use crate::routes::indexes::search_analytics::{SearchAggregator, SearchGET, SearchPOST};
use crate::search::{
add_search_rules, perform_search, HybridQuery, MatchingStrategy, RankingScoreThreshold,
RetrieveVectors, SearchKind, SearchQuery, SemanticRatio, DEFAULT_CROP_LENGTH,
RetrieveVectors, SearchKind, SearchQuery, SearchResult, SemanticRatio, DEFAULT_CROP_LENGTH,
DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET, DEFAULT_SEMANTIC_RATIO,
};
use crate::search_queue::SearchQueue;
#[derive(OpenApi)]
#[openapi(
paths(search_with_url_query, search_with_post),
tags(
(
name = "Search",
description = "Meilisearch exposes two routes to perform searches:
- A POST route: this is the preferred route when using API authentication, as it allows [preflight request](https://developer.mozilla.org/en-US/docs/Glossary/Preflight_request) caching and better performance.
- A GET route: the usage of this route is discouraged unless you have a good reason to do otherwise (for example, specific caching abilities)",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/search"),
),
),
)]
pub struct SearchApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
@ -36,30 +53,41 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
#[derive(Debug, deserr::Deserr)]
#[derive(Debug, deserr::Deserr, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
pub struct SearchQueryGet {
#[deserr(default, error = DeserrQueryParamError<InvalidSearchQ>)]
q: Option<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchVector>)]
#[param(value_type = Vec<f32>, explode = false)]
vector: Option<CS<f32>>,
#[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError<InvalidSearchOffset>)]
#[param(value_type = usize, default = DEFAULT_SEARCH_OFFSET)]
offset: Param<usize>,
#[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError<InvalidSearchLimit>)]
#[param(value_type = usize, default = DEFAULT_SEARCH_LIMIT)]
limit: Param<usize>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchPage>)]
#[param(value_type = Option<usize>)]
page: Option<Param<usize>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchHitsPerPage>)]
#[param(value_type = Option<usize>)]
hits_per_page: Option<Param<usize>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToRetrieve>)]
#[param(value_type = Vec<String>, explode = false)]
attributes_to_retrieve: Option<CS<String>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchRetrieveVectors>)]
#[param(value_type = bool, default)]
retrieve_vectors: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToCrop>)]
#[param(value_type = Vec<String>, explode = false)]
attributes_to_crop: Option<CS<String>>,
#[deserr(default = Param(DEFAULT_CROP_LENGTH()), error = DeserrQueryParamError<InvalidSearchCropLength>)]
#[param(value_type = usize, default = DEFAULT_CROP_LENGTH)]
crop_length: Param<usize>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToHighlight>)]
#[param(value_type = Vec<String>, explode = false)]
attributes_to_highlight: Option<CS<String>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchFilter>)]
filter: Option<String>,
@ -68,30 +96,41 @@ pub struct SearchQueryGet {
#[deserr(default, error = DeserrQueryParamError<InvalidSearchDistinct>)]
distinct: Option<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchShowMatchesPosition>)]
#[param(value_type = bool)]
show_matches_position: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchShowRankingScore>)]
#[param(value_type = bool)]
show_ranking_score: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchShowRankingScoreDetails>)]
#[param(value_type = bool)]
show_ranking_score_details: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchFacets>)]
#[param(value_type = Vec<String>, explode = false)]
facets: Option<CS<String>>,
#[deserr( default = DEFAULT_HIGHLIGHT_PRE_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPreTag>)]
#[deserr(default = DEFAULT_HIGHLIGHT_PRE_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPreTag>)]
#[param(default = DEFAULT_HIGHLIGHT_PRE_TAG)]
highlight_pre_tag: String,
#[deserr( default = DEFAULT_HIGHLIGHT_POST_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPostTag>)]
#[deserr(default = DEFAULT_HIGHLIGHT_POST_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPostTag>)]
#[param(default = DEFAULT_HIGHLIGHT_POST_TAG)]
highlight_post_tag: String,
#[deserr(default = DEFAULT_CROP_MARKER(), error = DeserrQueryParamError<InvalidSearchCropMarker>)]
#[param(default = DEFAULT_CROP_MARKER)]
crop_marker: String,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchMatchingStrategy>)]
matching_strategy: MatchingStrategy,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToSearchOn>)]
#[param(value_type = Vec<String>, explode = false)]
pub attributes_to_search_on: Option<CS<String>>,
#[deserr(default, error = DeserrQueryParamError<InvalidEmbedder>)]
pub hybrid_embedder: Option<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchSemanticRatio>)]
#[param(value_type = f32)]
pub hybrid_semantic_ratio: Option<SemanticRatioGet>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchRankingScoreThreshold>)]
#[param(value_type = f32)]
pub ranking_score_threshold: Option<RankingScoreThresholdGet>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchLocales>)]
#[param(value_type = Vec<Locale>, explode = false)]
pub locales: Option<CS<Locale>>,
}
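// Mapping sketch: `CS<T>` parses comma-separated values, which is why the
// `explode = false` hints above are needed for the OpenAPI spec. For example,
// a hypothetical request
//
//   GET /indexes/movies/search?q=american&limit=2&attributesToRetrieve=title,overview
//
// deserializes into q = Some("american"), limit = Param(2), and
// attributes_to_retrieve = Some(CS(["title", "overview"])).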
@ -220,6 +259,62 @@ pub fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
sort_parameters
}
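// Usage sketch (behavior inferred from the function's purpose; the body is
// not part of this hunk): commas separating sort expressions are split on,
// while commas inside `_geoPoint(...)` arguments are preserved, e.g.
// fix_sort_query_parameters("title:asc,_geoPoint(48.85,2.35):asc")
// is expected to yield ["title:asc", "_geoPoint(48.85,2.35):asc"].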
/// Search an index with GET
///
/// Search for documents matching a specific query in the given index.
#[utoipa::path(
get,
path = "/{indexUid}/search",
tags = ["Indexes", "Search"],
security(("Bearer" = ["search", "*"])),
params(
("indexUid" = String, Path, example = "movies", description = "Index Unique Identifier", nullable = false),
SearchQueryGet
),
responses(
(status = 200, description = "The documents are returned", body = SearchResult, content_type = "application/json", example = json!(
{
"hits": [
{
"id": 2770,
"title": "American Pie 2",
"poster": "https://image.tmdb.org/t/p/w1280/q4LNgUnRfltxzp3gf1MAGiK5LhV.jpg",
"overview": "The whole gang are back and as close as ever. They decide to get even closer by spending the summer together at a beach house. They decide to hold the biggest…",
"release_date": 997405200
},
{
"id": 190859,
"title": "American Sniper",
"poster": "https://image.tmdb.org/t/p/w1280/svPHnYE7N5NAGO49dBmRhq0vDQ3.jpg",
"overview": "U.S. Navy SEAL Chris Kyle takes his sole mission—protect his comrades—to heart and becomes one of the most lethal snipers in American history. His pinpoint accuracy not only saves countless lives but also makes him a prime…",
"release_date": 1418256000
}
],
"offset": 0,
"limit": 2,
"estimatedTotalHits": 976,
"processingTimeMs": 35,
"query": "american "
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn search_with_url_query(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
search_queue: web::Data<SearchQueue>,
@ -271,6 +366,62 @@ pub async fn search_with_url_query(
Ok(HttpResponse::Ok().json(search_result))
}
/// Search with POST
///
/// Search for documents matching a specific query in the given index.
#[utoipa::path(
post,
path = "/{indexUid}/search",
tags = ["Indexes", "Search"],
security(("Bearer" = ["search", "*"])),
params(
("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false),
),
request_body = SearchQuery,
responses(
(status = 200, description = "The documents are returned", body = SearchResult, content_type = "application/json", example = json!(
{
"hits": [
{
"id": 2770,
"title": "American Pie 2",
"poster": "https://image.tmdb.org/t/p/w1280/q4LNgUnRfltxzp3gf1MAGiK5LhV.jpg",
"overview": "The whole gang are back and as close as ever. They decide to get even closer by spending the summer together at a beach house. They decide to hold the biggest…",
"release_date": 997405200
},
{
"id": 190859,
"title": "American Sniper",
"poster": "https://image.tmdb.org/t/p/w1280/svPHnYE7N5NAGO49dBmRhq0vDQ3.jpg",
"overview": "U.S. Navy SEAL Chris Kyle takes his sole mission—protect his comrades—to heart and becomes one of the most lethal snipers in American history. His pinpoint accuracy not only saves countless lives but also makes him a prime…",
"release_date": 1418256000
}
],
"offset": 0,
"limit": 2,
"estimatedTotalHits": 976,
"processingTimeMs": 35,
"query": "american "
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn search_with_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
search_queue: web::Data<SearchQueue>,

View file

@ -6,9 +6,12 @@ use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::update::Setting;
use meilisearch_types::settings::{settings, SecretPolicy, Settings, Unchecked};
use meilisearch_types::settings::{
settings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
};
use meilisearch_types::tasks::KindWithContent;
use tracing::debug;
use utoipa::OpenApi;
use super::settings_analytics::*;
use crate::analytics::Analytics;
@ -29,6 +32,19 @@ macro_rules! make_setting_routes {
make_setting_route!($route, $update_verb, $type, $err_ty, $attr, $camelcase_attr, $analytics);
)*
#[derive(OpenApi)]
#[openapi(
paths(update_all, get_all, delete_all, $( $attr::get, $attr::update, $attr::delete,)*),
tags(
(
name = "Settings",
description = "Use the /settings route to customize search settings for a given index. You can either modify all index settings at once using the update settings endpoint, or use a child route to configure a single setting.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/settings"),
),
),
)]
pub struct SettingsApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
use crate::extractors::sequential_extractor::SeqHandler;
cfg.service(
@ -62,7 +78,39 @@ macro_rules! make_setting_route {
use $crate::extractors::sequential_extractor::SeqHandler;
use $crate::Opt;
use $crate::routes::{is_dry_run, get_task_id, SummarizedTaskView};
#[allow(unused_imports)]
use super::*;
#[utoipa::path(
delete,
path = concat!("{indexUid}/settings", $route),
tag = "Settings",
security(("Bearer" = ["settings.update", "settings.*", "*"])),
operation_id = concat!("delete", $camelcase_attr),
summary = concat!("Reset ", $camelcase_attr),
description = concat!("Reset an index's ", $camelcase_attr, " to its default value"),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
request_body = $type,
responses(
(status = 202, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": "movies",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn delete(
index_scheduler: GuardedData<
ActionPolicy<{ actions::SETTINGS_UPDATE }>,
@ -96,6 +144,37 @@ macro_rules! make_setting_route {
Ok(HttpResponse::Accepted().json(task))
}
#[utoipa::path(
$update_verb,
path = concat!("{indexUid}/settings", $route),
tag = "Settings",
security(("Bearer" = ["settings.update", "settings.*", "*"])),
operation_id = concat!(stringify!($update_verb), $camelcase_attr),
summary = concat!("Update ", $camelcase_attr),
description = concat!("Update an index's user defined ", $camelcase_attr),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
request_body = $type,
responses(
(status = 202, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": "movies",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn update(
index_scheduler: GuardedData<
ActionPolicy<{ actions::SETTINGS_UPDATE }>,
@ -151,6 +230,30 @@ macro_rules! make_setting_route {
Ok(HttpResponse::Accepted().json(task))
}
#[utoipa::path(
get,
path = concat!("{indexUid}/settings", $route),
tag = "Settings",
summary = concat!("Get ", $camelcase_attr),
description = concat!("Get an user defined ", $camelcase_attr),
security(("Bearer" = ["settings.get", "settings.*", "*"])),
operation_id = concat!("get", $camelcase_attr),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
responses(
(status = 200, description = concat!($camelcase_attr, " is returned"), body = $type, content_type = "application/json", example = json!(
<$type>::default()
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn get(
index_scheduler: GuardedData<
ActionPolicy<{ actions::SETTINGS_GET }>,
@ -359,7 +462,7 @@ make_setting_routes!(
{
route: "/embedders",
update_verb: patch,
value_type: std::collections::BTreeMap<String, Setting<meilisearch_types::milli::vector::settings::EmbeddingSettings>>,
value_type: std::collections::BTreeMap<String, SettingEmbeddingSettings>,
err_type: meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsEmbedders,
>,
@ -402,6 +505,39 @@ make_setting_routes!(
},
);
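// Illustrative body for `PATCH /indexes/{indexUid}/settings/embedders` after
// the switch to `SettingEmbeddingSettings`; the embedder name and the field
// values are hypothetical.
fn example_embedders_body() -> serde_json::Value {
    serde_json::json!({
        "default": {
            "source": "huggingFace",
            "model": "BAAI/bge-base-en-v1.5",
            "documentTemplate": "A movie titled '{{doc.title}}'"
        }
    })
}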
#[utoipa::path(
patch,
path = "{indexUid}/settings",
tag = "Settings",
security(("Bearer" = ["settings.update", "settings.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
request_body = Settings<Unchecked>,
responses(
(status = 202, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": "movies",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
/// Update settings
///
/// Update the settings of an index.
/// Passing null to an index setting will reset it to its default value.
/// Updates in the settings route are partial. This means that any parameters not provided in the body will be left unchanged.
/// If the provided index does not exist, it will be created.
pub async fn update_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -479,6 +615,29 @@ pub async fn update_all(
Ok(HttpResponse::Accepted().json(task))
}
#[utoipa::path(
get,
path = "{indexUid}/settings",
tag = "Settings",
security(("Bearer" = ["settings.update", "settings.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
responses(
(status = 200, description = "Settings are returned", body = Settings<Unchecked>, content_type = "application/json", example = json!(
Settings::<Unchecked>::default()
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
/// All settings
///
/// Retrieve all of an index's settings at once.
pub async fn get_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -492,6 +651,35 @@ pub async fn get_all(
Ok(HttpResponse::Ok().json(new_settings))
}
#[utoipa::path(
delete,
path = "{indexUid}/settings",
tag = "Settings",
security(("Bearer" = ["settings.update", "settings.*", "*"])),
params(("indexUid", example = "movies", description = "Index Unique Identifier", nullable = false)),
responses(
(status = 202, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": "movies",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
/// Reset settings
///
/// Reset all the settings of an index to their default value.
pub async fn delete_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,

View file

@ -8,10 +8,9 @@ use std::collections::{BTreeMap, BTreeSet, HashSet};
use meilisearch_types::facet_values_sort::FacetValuesSort;
use meilisearch_types::locales::{Locale, LocalizedAttributesRuleView};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::vector::settings::EmbeddingSettings;
use meilisearch_types::settings::{
FacetingSettings, PaginationSettings, PrefixSearchSettings, ProximityPrecisionView,
RankingRuleView, TypoSettings,
RankingRuleView, SettingEmbeddingSettings, TypoSettings,
};
use serde::Serialize;
@ -497,13 +496,13 @@ pub struct EmbeddersAnalytics {
}
impl EmbeddersAnalytics {
pub fn new(setting: Option<&BTreeMap<String, Setting<EmbeddingSettings>>>) -> Self {
pub fn new(setting: Option<&BTreeMap<String, SettingEmbeddingSettings>>) -> Self {
let mut sources = std::collections::HashSet::new();
if let Some(s) = &setting {
for source in s
.values()
.filter_map(|config| config.clone().set())
.filter_map(|config| config.inner.clone().set())
.filter_map(|config| config.source.set())
{
use meilisearch_types::milli::vector::settings::EmbedderSource;
@ -522,18 +521,18 @@ impl EmbeddersAnalytics {
sources: Some(sources),
document_template_used: setting.as_ref().map(|map| {
map.values()
.filter_map(|config| config.clone().set())
.filter_map(|config| config.inner.clone().set())
.any(|config| config.document_template.set().is_some())
}),
document_template_max_bytes: setting.as_ref().and_then(|map| {
map.values()
.filter_map(|config| config.clone().set())
.filter_map(|config| config.inner.clone().set())
.filter_map(|config| config.document_template_max_bytes.set())
.max()
}),
binary_quantization_used: setting.as_ref().map(|map| {
map.values()
.filter_map(|config| config.clone().set())
.filter_map(|config| config.inner.clone().set())
.any(|config| config.binary_quantized.set().is_some())
}),
}
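// The repeated `config.inner.clone().set()` calls above suggest that
// `SettingEmbeddingSettings` is a thin wrapper around the previous
// `Setting<EmbeddingSettings>`. A plausible shape (assumption; the actual
// definition lives in `meilisearch-types` and is not part of this hunk):
//
//     pub struct SettingEmbeddingSettings {
//         pub inner: Setting<EmbeddingSettings>,
//     }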

View file

@ -11,6 +11,7 @@ use meilisearch_types::keys::actions;
use meilisearch_types::serde_cs::vec::CS;
use serde_json::Value;
use tracing::debug;
use utoipa::{IntoParams, OpenApi};
use super::ActionPolicy;
use crate::analytics::Analytics;
@ -22,6 +23,21 @@ use crate::search::{
SimilarQuery, SimilarResult, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
};
#[derive(OpenApi)]
#[openapi(
paths(similar_get, similar_post),
tags(
(
name = "Similar documents",
description = "The /similar route uses AI-powered search to return a number of documents similar to a target document.
Meilisearch exposes two routes for retrieving similar documents: POST and GET. In the majority of cases, POST will offer better performance and ease of use.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/similar"),
),
),
)]
pub struct SimilarApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
@ -30,6 +46,62 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
/// Get similar documents with GET
///
/// Retrieve documents similar to a specific search result.
#[utoipa::path(
get,
path = "{indexUid}/similar",
tag = "Similar documents",
security(("Bearer" = ["search", "*"])),
params(
("indexUid" = String, Path, example = "movies", description = "Index Unique Identifier", nullable = false),
SimilarQueryGet
),
responses(
(status = 200, description = "The documents are returned", body = SimilarResult, content_type = "application/json", example = json!(
{
"hits": [
{
"id": 2770,
"title": "American Pie 2",
"poster": "https://image.tmdb.org/t/p/w1280/q4LNgUnRfltxzp3gf1MAGiK5LhV.jpg",
"overview": "The whole gang are back and as close as ever. They decide to get even closer by spending the summer together at a beach house. They decide to hold the biggest…",
"release_date": 997405200
},
{
"id": 190859,
"title": "American Sniper",
"poster": "https://image.tmdb.org/t/p/w1280/svPHnYE7N5NAGO49dBmRhq0vDQ3.jpg",
"overview": "U.S. Navy SEAL Chris Kyle takes his sole mission—protect his comrades—to heart and becomes one of the most lethal snipers in American history. His pinpoint accuracy not only saves countless lives but also makes him a prime…",
"release_date": 1418256000
}
],
"offset": 0,
"limit": 2,
"estimatedTotalHits": 976,
"processingTimeMs": 35,
"query": "american "
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn similar_get(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -58,6 +130,60 @@ pub async fn similar_get(
Ok(HttpResponse::Ok().json(similar))
}
/// Get similar documents with POST
///
/// Retrieve documents similar to a specific search result.
#[utoipa::path(
post,
path = "{indexUid}/similar",
tag = "Similar documents",
security(("Bearer" = ["search", "*"])),
params(("indexUid" = String, Path, example = "movies", description = "Index Unique Identifier", nullable = false)),
request_body = SimilarQuery,
responses(
(status = 200, description = "The documents are returned", body = SimilarResult, content_type = "application/json", example = json!(
{
"hits": [
{
"id": 2770,
"title": "American Pie 2",
"poster": "https://image.tmdb.org/t/p/w1280/q4LNgUnRfltxzp3gf1MAGiK5LhV.jpg",
"overview": "The whole gang are back and as close as ever. They decide to get even closer by spending the summer together at a beach house. They decide to hold the biggest…",
"release_date": 997405200
},
{
"id": 190859,
"title": "American Sniper",
"poster": "https://image.tmdb.org/t/p/w1280/svPHnYE7N5NAGO49dBmRhq0vDQ3.jpg",
"overview": "U.S. Navy SEAL Chris Kyle takes his sole mission—protect his comrades—to heart and becomes one of the most lethal snipers in American history. His pinpoint accuracy not only saves countless lives but also makes him a prime…",
"release_date": 1418256000
}
],
"offset": 0,
"limit": 2,
"estimatedTotalHits": 976,
"processingTimeMs": 35,
"query": "american "
}
)),
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Index `movies` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn similar_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@ -125,26 +251,35 @@ async fn similar(
.await?
}
#[derive(Debug, deserr::Deserr)]
#[derive(Debug, deserr::Deserr, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(parameter_in = Query)]
pub struct SimilarQueryGet {
#[deserr(error = DeserrQueryParamError<InvalidSimilarId>)]
#[param(value_type = String)]
id: Param<String>,
#[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError<InvalidSimilarOffset>)]
#[param(value_type = usize, default = DEFAULT_SEARCH_OFFSET)]
offset: Param<usize>,
#[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError<InvalidSimilarLimit>)]
#[param(value_type = usize, default = DEFAULT_SEARCH_LIMIT)]
limit: Param<usize>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarAttributesToRetrieve>)]
#[param(value_type = Vec<String>)]
attributes_to_retrieve: Option<CS<String>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarRetrieveVectors>)]
#[param(value_type = bool, default)]
retrieve_vectors: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarFilter>)]
filter: Option<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarShowRankingScore>)]
#[param(value_type = bool, default)]
show_ranking_score: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarShowRankingScoreDetails>)]
#[param(value_type = bool, default)]
show_ranking_score_details: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSimilarRankingScoreThreshold>)]
#[param(value_type = Option<f32>)]
pub ranking_score_threshold: Option<RankingScoreThresholdGet>,
#[deserr(error = DeserrQueryParamError<InvalidEmbedder>)]
pub embedder: String,

View file

@ -14,9 +14,11 @@ use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{Code, ResponseError};
use serde::Serialize;
use tokio::sync::mpsc;
use tracing_subscriber::filter::Targets;
use tracing_subscriber::Layer;
use utoipa::{OpenApi, ToSchema};
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::*;
@ -24,6 +26,18 @@ use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::{LogRouteHandle, LogStderrHandle};
#[derive(OpenApi)]
#[openapi(
paths(get_logs, cancel_logs, update_stderr_target),
tags((
name = "Logs",
description = "Everything about retrieving or customizing logs.
Currently [experimental](https://www.meilisearch.com/docs/learn/experimental/overview).",
external_docs(url = "https://www.meilisearch.com/docs/learn/experimental/log_customization"),
)),
)]
pub struct LogsApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("stream")
@ -33,12 +47,16 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::resource("stderr").route(web::post().to(SeqHandler(update_stderr_target))));
}
#[derive(Debug, Default, Clone, Copy, Deserr, PartialEq, Eq)]
#[derive(Debug, Default, Clone, Copy, Deserr, Serialize, PartialEq, Eq, ToSchema)]
#[deserr(rename_all = camelCase)]
#[schema(rename_all = "camelCase")]
pub enum LogMode {
/// Output the logs in a human-readable form.
#[default]
Human,
/// Output the logs in JSON.
Json,
/// Output the logs in the Firefox Profiler format. They can then be loaded and visualized at https://profiler.firefox.com/
Profile,
}
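// Illustrative: `rename_all = camelCase` means the `mode` field of a payload
// accepts the values "human" (the default), "json", and "profile".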
@ -83,16 +101,26 @@ impl MergeWithError<MyParseError> for DeserrJsonError<BadRequest> {
}
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields, validate = validate_get_logs -> DeserrJsonError<InvalidSettingsTypoTolerance>)]
#[schema(rename_all = "camelCase")]
pub struct GetLogs {
/// Lets you specify which parts of the code you want to inspect, formatted as `code_part=log_level,code_part=log_level`.
/// - If the `code_part` is missing, the `log_level` is applied to everything.
/// - If the `log_level` is missing, the `code_part` defaults to the `info` log level.
#[deserr(default = "info".parse().unwrap(), try_from(&String) = MyTargets::from_str -> DeserrJsonError<BadRequest>)]
#[schema(value_type = String, default = "info", example = json!("milli=trace,index_scheduler,actix_web=off"))]
target: MyTargets,
/// Lets you customize the format of the logs.
#[deserr(default, error = DeserrJsonError<BadRequest>)]
#[schema(default = LogMode::default)]
mode: LogMode,
/// A boolean indicating whether to profile memory as well. This is only useful with the `profile` mode.
/// Be cautious, though; it slows down the engine a lot.
#[deserr(default = false, error = DeserrJsonError<BadRequest>)]
#[schema(default = false)]
profile_memory: bool,
}
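// A minimal example payload for `POST /logs/stream`, assuming the target syntax
// documented above; all three fields are optional thanks to the deserr defaults:
//
//   { "target": "milli=trace,index_scheduler=info", "mode": "profile", "profileMemory": true }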
@ -248,6 +276,46 @@ fn entry_stream(
)
}
/// Retrieve logs
///
/// Stream logs over HTTP. The format of the logs depends on the configuration specified in the payload.
/// The logs are sent as a multi-part stream that never stops, so make sure your client handles that correctly.
/// To make the server stop sending you logs, you can call the `DELETE /logs/stream` route.
///
/// There can only be one listener at a time, and an error will be returned if you call this route while it's being used by another client.
#[utoipa::path(
post,
path = "/stream",
tag = "Logs",
security(("Bearer" = ["metrics.get", "metrics.*", "*"])),
request_body = GetLogs,
responses(
(status = OK, description = "Logs are being returned", body = String, content_type = "application/json", example = json!(
r#"
2024-10-08T13:35:02.643750Z WARN HTTP request{method=GET host="localhost:7700" route=/metrics query_parameters= user_agent=HTTPie/3.2.3 status_code=400 error=Getting metrics requires enabling the `metrics` experimental feature. See https://github.com/meilisearch/product/discussions/625}: tracing_actix_web::middleware: Error encountered while processing the incoming HTTP request: ResponseError { code: 400, message: "Getting metrics requires enabling the `metrics` experimental feature. See https://github.com/meilisearch/product/discussions/625", error_code: "feature_not_enabled", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#feature_not_enabled" }
2024-10-08T13:35:02.644191Z INFO HTTP request{method=GET host="localhost:7700" route=/metrics query_parameters= user_agent=HTTPie/3.2.3 status_code=400 error=Getting metrics requires enabling the `metrics` experimental feature. See https://github.com/meilisearch/product/discussions/625}: meilisearch: close time.busy=1.66ms time.idle=658µs
2024-10-08T13:35:18.564152Z INFO HTTP request{method=PATCH host="localhost:7700" route=/experimental-features query_parameters= user_agent=curl/8.6.0 status_code=200}: meilisearch: close time.busy=1.17ms time.idle=127µs
2024-10-08T13:35:23.094987Z INFO HTTP request{method=GET host="localhost:7700" route=/metrics query_parameters= user_agent=HTTPie/3.2.3 status_code=200}: meilisearch: close time.busy=2.12ms time.idle=595µs
"#
)),
(status = 400, description = "The route is already being used", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The `/logs/stream` route is currently in use by someone else.",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn get_logs(
index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
logs: Data<LogRouteHandle>,
@ -280,6 +348,26 @@ pub async fn get_logs(
}
}
/// Stop retrieving logs
///
/// Call this route to make the engine stop sending logs through the `POST /logs/stream` route.
#[utoipa::path(
delete,
path = "/stream",
tag = "Logs",
security(("Bearer" = ["metrics.get", "metrics.*", "*"])),
responses(
(status = NO_CONTENT, description = "Logs are being returned"),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn cancel_logs(
index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
logs: Data<LogRouteHandle>,
@ -293,13 +381,38 @@ pub async fn cancel_logs(
Ok(HttpResponse::NoContent().finish())
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct UpdateStderrLogs {
/// Lets you specify which parts of the code you want to inspect, formatted as `code_part=log_level,code_part=log_level`.
/// - If the `code_part` is missing, the `log_level` is applied to everything.
/// - If the `log_level` is missing, the `code_part` defaults to the `info` log level.
#[deserr(default = "info".parse().unwrap(), try_from(&String) = MyTargets::from_str -> DeserrJsonError<BadRequest>)]
#[schema(value_type = String, default = "info", example = json!("milli=trace,index_scheduler,actix_web=off"))]
target: MyTargets,
}
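// Example payload for `POST /logs/stderr` (the target value is illustrative):
//
//   { "target": "index_scheduler=debug" }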
/// Update target of the console logs
///
/// This route lets you specify, at runtime, the level of the console logs written to stderr.
#[utoipa::path(
post,
path = "/stderr",
tag = "Logs",
request_body = UpdateStderrLogs,
security(("Bearer" = ["metrics.get", "metrics.*", "*"])),
responses(
(status = NO_CONTENT, description = "The console logs have been updated"),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn update_stderr_target(
index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
logs: Data<LogStderrHandle>,

View file

@ -1,7 +1,3 @@
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::routes::create_all_stats;
use crate::search_queue::SearchQueue;
use actix_web::http::header;
use actix_web::web::{self, Data};
use actix_web::HttpResponse;
@ -12,11 +8,107 @@ use meilisearch_types::keys::actions;
use meilisearch_types::tasks::Status;
use prometheus::{Encoder, TextEncoder};
use time::OffsetDateTime;
use utoipa::OpenApi;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::routes::create_all_stats;
use crate::search_queue::SearchQueue;
#[derive(OpenApi)]
#[openapi(paths(get_metrics))]
pub struct MetricApi;
pub fn configure(config: &mut web::ServiceConfig) {
config.service(web::resource("").route(web::get().to(get_metrics)));
}
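// Sketch of a typical session; the exact experimental-features payload is an
// assumption, not taken from this diff:
//
//   PATCH /experimental-features   {"metrics": true}
//   GET   /metrics                 (with an API key granting `metrics.get`)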
/// Get prometheus metrics
///
/// Retrieve metrics on the engine. See https://www.meilisearch.com/docs/learn/experimental/metrics
/// Currently, [the feature is experimental](https://www.meilisearch.com/docs/learn/experimental/overview),
/// which means it must be enabled.
#[utoipa::path(
get,
path = "",
tag = "Stats",
security(("Bearer" = ["metrics.get", "metrics.*", "*"])),
responses(
(status = 200, description = "The metrics of the instance", body = String, content_type = "text/plain", example = json!(
r#"
# HELP meilisearch_db_size_bytes Meilisearch DB Size In Bytes
# TYPE meilisearch_db_size_bytes gauge
meilisearch_db_size_bytes 1130496
# HELP meilisearch_http_requests_total Meilisearch HTTP requests total
# TYPE meilisearch_http_requests_total counter
meilisearch_http_requests_total{method="GET",path="/metrics",status="400"} 1
meilisearch_http_requests_total{method="PATCH",path="/experimental-features",status="200"} 1
# HELP meilisearch_http_response_time_seconds Meilisearch HTTP response times
# TYPE meilisearch_http_response_time_seconds histogram
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="0.005"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="0.01"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="0.025"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="0.05"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="0.075"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="0.1"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="0.25"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="0.5"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="0.75"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="1"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="2.5"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="5"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="7.5"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="10"} 0
meilisearch_http_response_time_seconds_bucket{method="GET",path="/metrics",le="+Inf"} 0
meilisearch_http_response_time_seconds_sum{method="GET",path="/metrics"} 0
meilisearch_http_response_time_seconds_count{method="GET",path="/metrics"} 0
# HELP meilisearch_index_count Meilisearch Index Count
# TYPE meilisearch_index_count gauge
meilisearch_index_count 1
# HELP meilisearch_index_docs_count Meilisearch Index Docs Count
# TYPE meilisearch_index_docs_count gauge
meilisearch_index_docs_count{index="mieli"} 2
# HELP meilisearch_is_indexing Meilisearch Is Indexing
# TYPE meilisearch_is_indexing gauge
meilisearch_is_indexing 0
# HELP meilisearch_last_update Meilisearch Last Update
# TYPE meilisearch_last_update gauge
meilisearch_last_update 1726675964
# HELP meilisearch_nb_tasks Meilisearch Number of tasks
# TYPE meilisearch_nb_tasks gauge
meilisearch_nb_tasks{kind="indexes",value="mieli"} 39
meilisearch_nb_tasks{kind="statuses",value="canceled"} 0
meilisearch_nb_tasks{kind="statuses",value="enqueued"} 0
meilisearch_nb_tasks{kind="statuses",value="failed"} 4
meilisearch_nb_tasks{kind="statuses",value="processing"} 0
meilisearch_nb_tasks{kind="statuses",value="succeeded"} 35
meilisearch_nb_tasks{kind="types",value="documentAdditionOrUpdate"} 9
meilisearch_nb_tasks{kind="types",value="documentDeletion"} 0
meilisearch_nb_tasks{kind="types",value="documentEdition"} 0
meilisearch_nb_tasks{kind="types",value="dumpCreation"} 0
meilisearch_nb_tasks{kind="types",value="indexCreation"} 0
meilisearch_nb_tasks{kind="types",value="indexDeletion"} 8
meilisearch_nb_tasks{kind="types",value="indexSwap"} 0
meilisearch_nb_tasks{kind="types",value="indexUpdate"} 0
meilisearch_nb_tasks{kind="types",value="settingsUpdate"} 22
meilisearch_nb_tasks{kind="types",value="snapshotCreation"} 0
meilisearch_nb_tasks{kind="types",value="taskCancelation"} 0
meilisearch_nb_tasks{kind="types",value="taskDeletion"} 0
# HELP meilisearch_used_db_size_bytes Meilisearch Used DB Size In Bytes
# TYPE meilisearch_used_db_size_bytes gauge
meilisearch_used_db_size_bytes 409600
"#
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn get_metrics(
index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
auth_controller: Data<AuthController>,
@ -64,7 +156,7 @@ pub async fn get_metrics(
let task_queue_latency_seconds = index_scheduler
.get_tasks_from_authorized_indexes(
Query {
&Query {
limit: Some(1),
reverse: Some(true),
statuses: Some(vec![Status::Enqueued, Status::Processing]),

View file

@ -4,19 +4,46 @@ use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::settings::{Settings, Unchecked};
use meilisearch_types::batch_view::BatchView;
use meilisearch_types::batches::BatchStats;
use meilisearch_types::error::{Code, ErrorType, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::CreateApiKey;
use meilisearch_types::settings::{
Checked, FacetingSettings, MinWordSizeTyposSetting, PaginationSettings, Settings, TypoSettings,
Unchecked,
};
use meilisearch_types::task_view::{DetailsView, TaskView};
use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};
use self::api_key::KeyView;
use self::indexes::documents::BrowseQuery;
use self::indexes::{IndexCreateRequest, IndexStats, UpdateIndexRequest};
use self::logs::{GetLogs, LogMode, UpdateStderrLogs};
use self::open_api_utils::OpenApiAuth;
use self::tasks::AllTasks;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::milli::progress::{ProgressStepView, ProgressView};
use crate::routes::batches::AllBatches;
use crate::routes::features::RuntimeTogglableFeatures;
use crate::routes::indexes::documents::{DocumentDeletionByFilter, DocumentEditionByFunction};
use crate::routes::indexes::IndexView;
use crate::routes::multi_search::SearchResults;
use crate::routes::swap_indexes::SwapIndexesPayload;
use crate::search::{
FederatedSearch, FederatedSearchResult, Federation, FederationOptions, MergeFacets,
SearchQueryWithIndex, SearchResultWithIndex, SimilarQuery, SimilarResult,
};
use crate::search_queue::SearchQueue;
use crate::Opt;
const PAGINATION_DEFAULT_LIMIT: usize = 20;
const PAGINATION_DEFAULT_LIMIT_FN: fn() -> usize = || 20;
mod api_key;
pub mod batches;
@ -27,10 +54,41 @@ mod logs;
mod metrics;
mod multi_search;
mod multi_search_analytics;
mod open_api_utils;
mod snapshot;
mod swap_indexes;
pub mod tasks;
#[derive(OpenApi)]
#[openapi(
nest(
(path = "/tasks", api = tasks::TaskApi),
(path = "/batches", api = batches::BatchesApi),
(path = "/indexes", api = indexes::IndexesApi),
// We must stop the search path here because the rest must be configured by each route individually
(path = "/indexes", api = indexes::search::SearchApi),
(path = "/snapshots", api = snapshot::SnapshotApi),
(path = "/dumps", api = dump::DumpApi),
(path = "/keys", api = api_key::ApiKeyApi),
(path = "/metrics", api = metrics::MetricApi),
(path = "/logs", api = logs::LogsApi),
(path = "/multi-search", api = multi_search::MultiSearchApi),
(path = "/swap-indexes", api = swap_indexes::SwapIndexesApi),
(path = "/experimental-features", api = features::ExperimentalFeaturesApi),
),
paths(get_health, get_version, get_stats),
tags(
(name = "Stats", description = "Stats gives extended information and metrics about indexes and the Meilisearch database."),
),
modifiers(&OpenApiAuth),
servers((
url = "/",
description = "Local server",
)),
components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind))
)]
pub struct MeilisearchApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::scope("/tasks").configure(tasks::configure))
.service(web::scope("/batches").configure(batches::configure))
@ -46,6 +104,13 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/swap-indexes").configure(swap_indexes::configure))
.service(web::scope("/metrics").configure(metrics::configure))
.service(web::scope("/experimental-features").configure(features::configure));
#[cfg(feature = "swagger")]
{
use utoipa_scalar::{Scalar, Servable as ScalarServable};
let openapi = MeilisearchApi::openapi();
cfg.service(Scalar::with_url("/scalar", openapi.clone()));
}
}
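// With the optional `swagger` feature enabled, the generated OpenAPI document is
// browsable through Scalar at `/scalar`, e.g. http://localhost:7700/scalar
// (assuming the default listen address).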
pub fn get_task_id(req: &HttpRequest, opt: &Opt) -> Result<Option<TaskId>, ResponseError> {
@ -98,14 +163,20 @@ pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
.map_or(false, |s| s.to_lowercase() == "true"))
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
/// The task unique identifier.
#[schema(value_type = u32)]
task_uid: TaskId,
/// The index affected by this task. May be `null` if the task is not linked to any index.
index_uid: Option<String>,
/// The status of the task.
status: Status,
/// The type of the task.
#[serde(rename = "type")]
kind: Kind,
/// The date on which the task was enqueued.
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
enqueued_at: OffsetDateTime,
}
@ -127,7 +198,9 @@ pub struct Pagination {
pub limit: usize,
}
#[derive(Debug, Clone, Serialize)]
#[derive(Debug, Clone, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct PaginationView<T> {
pub results: Vec<T>,
pub offset: usize,
@ -283,17 +356,56 @@ pub async fn running() -> HttpResponse {
HttpResponse::Ok().json(serde_json::json!({ "status": "Meilisearch is running" }))
}
#[derive(Serialize, Debug)]
#[derive(Serialize, Debug, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Stats {
/// The size of the database, in bytes.
pub database_size: u64,
#[serde(skip)]
pub used_database_size: u64,
/// The date of the last update, in the RFC 3339 format. Can be `null` if no update has ever been processed.
#[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
pub last_update: Option<OffsetDateTime>,
/// The stats of every individual index your API key lets you access.
#[schema(value_type = HashMap<String, indexes::IndexStats>)]
pub indexes: BTreeMap<String, indexes::IndexStats>,
}
/// Get stats of all indexes.
///
/// Get stats of all indexes.
#[utoipa::path(
get,
path = "/stats",
tag = "Stats",
security(("Bearer" = ["stats.get", "stats.*", "*"])),
responses(
(status = 200, description = "The stats of the instance", body = Stats, content_type = "application/json", example = json!(
{
"databaseSize": 567,
"lastUpdate": "2019-11-20T09:40:33.711324Z",
"indexes": {
"movies": {
"numberOfDocuments": 10,
"isIndexing": true,
"fieldDistribution": {
"genre": 10,
"author": 9
}
}
}
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn get_stats(
index_scheduler: GuardedData<ActionPolicy<{ actions::STATS_GET }>, Data<IndexScheduler>>,
auth_controller: GuardedData<ActionPolicy<{ actions::STATS_GET }>, Data<AuthController>>,
@ -343,14 +455,43 @@ pub fn create_all_stats(
Ok(stats)
}
#[derive(Serialize)]
#[derive(Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
struct VersionResponse {
/// The commit used to compile this build of Meilisearch.
commit_sha: String,
/// The date of this build.
commit_date: String,
/// The version of Meilisearch.
pkg_version: String,
}
/// Get version
///
/// Current version of Meilisearch.
#[utoipa::path(
get,
path = "/version",
tag = "Version",
security(("Bearer" = ["version", "*"])),
responses(
(status = 200, description = "Instance is healthy", body = VersionResponse, content_type = "application/json", example = json!(
{
"commitSha": "b46889b5f0f2f8b91438a08a358ba8f05fc09fc1",
"commitDate": "2021-07-08",
"pkgVersion": "0.23.0"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn get_version(
_index_scheduler: GuardedData<ActionPolicy<{ actions::VERSION }>, Data<IndexScheduler>>,
) -> HttpResponse {
@ -370,6 +511,35 @@ async fn get_version(
})
}
#[derive(Default, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
struct HealthResponse {
/// The status of the instance.
status: HealthStatus,
}
#[derive(Default, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
enum HealthStatus {
#[default]
Available,
}
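// Note: `rename_all = "camelCase"` turns the `Available` variant into "available",
// so `HealthResponse::default()` serializes to {"status":"available"}, matching
// the JSON previously built by hand with `serde_json::json!`.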
/// Get Health
///
/// The health check endpoint enables you to periodically test the health of your Meilisearch instance.
#[utoipa::path(
get,
path = "/health",
tag = "Health",
responses(
(status = 200, description = "Instance is healthy", body = HealthResponse, content_type = "application/json", example = json!(
{
"status": "available"
}
)),
)
)]
pub async fn get_health(
index_scheduler: Data<IndexScheduler>,
auth_controller: Data<AuthController>,
@ -379,5 +549,5 @@ pub async fn get_health(
index_scheduler.health().unwrap();
auth_controller.health().unwrap();
Ok(HttpResponse::Ok().json(serde_json::json!({ "status": "available" })))
Ok(HttpResponse::Ok().json(HealthResponse::default()))
}

View file

@ -8,6 +8,7 @@ use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::actions;
use serde::Serialize;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};
use super::multi_search_analytics::MultiSearchAggregator;
use crate::analytics::Analytics;
@ -17,20 +18,127 @@ use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::indexes::search::search_kind;
use crate::search::{
add_search_rules, perform_federated_search, perform_search, FederatedSearch, RetrieveVectors,
SearchQueryWithIndex, SearchResultWithIndex,
add_search_rules, perform_federated_search, perform_search, FederatedSearch,
FederatedSearchResult, RetrieveVectors, SearchQueryWithIndex, SearchResultWithIndex,
};
use crate::search_queue::SearchQueue;
#[derive(OpenApi)]
#[openapi(
paths(multi_search_with_post),
tags((
name = "Multi-search",
description = "The `/multi-search` route allows you to perform multiple search queries on one or more indexes by bundling them into a single HTTP request. Multi-search is also known as federated search.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/multi_search"),
)),
)]
pub struct MultiSearchApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(multi_search_with_post))));
}
#[derive(Serialize)]
struct SearchResults {
#[derive(Serialize, ToSchema)]
pub struct SearchResults {
results: Vec<SearchResultWithIndex>,
}
/// Perform a multi-search
///
/// Bundle multiple search queries in a single API request. Use this endpoint to search through multiple indexes at once.
#[utoipa::path(
post,
path = "",
tag = "Multi-search",
security(("Bearer" = ["search", "*"])),
responses(
(status = OK, description = "Non federated multi-search", body = SearchResults, content_type = "application/json", example = json!(
{
"results":[
{
"indexUid":"movies",
"hits":[
{
"id":13682,
"title":"Pooh's Heffalump Movie",
},
],
"query":"pooh",
"processingTimeMs":26,
"limit":1,
"offset":0,
"estimatedTotalHits":22
},
{
"indexUid":"movies",
"hits":[
{
"id":12,
"title":"Finding Nemo",
},
],
"query":"nemo",
"processingTimeMs":5,
"limit":1,
"offset":0,
"estimatedTotalHits":11
},
{
"indexUid":"movie_ratings",
"hits":[
{
"id":"Us",
"director": "Jordan Peele",
}
],
"query":"Us",
"processingTimeMs":0,
"limit":1,
"offset":0,
"estimatedTotalHits":1
}
]
}
)),
(status = OK, description = "Federated multi-search", body = FederatedSearchResult, content_type = "application/json", example = json!(
{
"hits": [
{
"id": 42,
"title": "Batman returns",
"overview": "The overview of batman returns",
"_federation": {
"indexUid": "movies",
"queriesPosition": 0
}
},
{
"comicsId": "batman-killing-joke",
"description": "This comic is really awesome",
"title": "Batman: the killing joke",
"_federation": {
"indexUid": "comics",
"queriesPosition": 1
}
},
],
"processingTimeMs": 0,
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2,
"semanticHitCount": 0
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn multi_search_with_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
search_queue: Data<SearchQueue>,

View file

@ -0,0 +1,24 @@
use serde::Serialize;
use utoipa::openapi::security::{HttpAuthScheme, HttpBuilder, SecurityScheme};
#[derive(Debug, Serialize)]
pub struct OpenApiAuth;
impl utoipa::Modify for OpenApiAuth {
fn modify(&self, openapi: &mut utoipa::openapi::OpenApi) {
if let Some(schema) = openapi.components.as_mut() {
schema.add_security_scheme(
"Bearer",
SecurityScheme::Http(
HttpBuilder::new()
.scheme(HttpAuthScheme::Bearer)
.bearer_format("Uuidv4, string or JWT")
.description(Some(
"An API key is a token that you provide when making API calls. Include the token in a header parameter called `Authorization`.
Example: `Authorization: Bearer 8fece4405662dd830e4cb265e7e047aab2e79672a760a12712d2a263c9003509`"))
.build(),
),
);
}
}
}
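// Wiring sketch: this modifier is registered through `modifiers(&OpenApiAuth)` on
// the top-level `#[openapi(...)]` derive, so every documented operation advertises
// the `Authorization: Bearer <api key>` header described above.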

View file

@ -4,6 +4,7 @@ use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::tasks::KindWithContent;
use tracing::debug;
use utoipa::OpenApi;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
@ -12,12 +13,55 @@ use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{get_task_id, is_dry_run, SummarizedTaskView};
use crate::Opt;
#[derive(OpenApi)]
#[openapi(
paths(create_snapshot),
tags((
name = "Snapshots",
description = "The snapshots route allows the creation of database snapshots. Snapshots are .snapshot files that can be used to launch Meilisearch.
Creating a snapshot is also referred to as exporting it, whereas launching Meilisearch with a snapshot is referred to as importing it.
During a snapshot export, all indexes of the current instance are exported, together with their documents and settings, and saved as a single .snapshot file.
During a snapshot import, all indexes contained in the indicated .snapshot file are imported along with their associated documents and settings.
Snapshot imports are performed at launch using an option.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/snapshots"),
)),
)]
pub struct SnapshotApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(create_snapshot))));
}
crate::empty_analytics!(SnapshotAnalytics, "Snapshot Created");
/// Create a snapshot
///
/// Triggers a snapshot creation process. Once the process is complete, a snapshot is created in the snapshot directory. If the snapshot directory does not exist yet, it will be created.
#[utoipa::path(
post,
path = "",
tag = "Snapshots",
security(("Bearer" = ["snapshots.create", "snapshots.*", "*"])),
responses(
(status = 202, description = "Snapshot is being created", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 0,
"indexUid": null,
"status": "enqueued",
"type": "snapshotCreation",
"enqueuedAt": "2021-01-01T09:39:00.000000Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn create_snapshot(
index_scheduler: GuardedData<ActionPolicy<{ actions::SNAPSHOTS_CREATE }>, Data<IndexScheduler>>,
req: HttpRequest,

View file

@ -9,6 +9,7 @@ use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::tasks::{IndexSwap, KindWithContent};
use serde::Serialize;
use utoipa::{OpenApi, ToSchema};
use super::{get_task_id, is_dry_run, SummarizedTaskView};
use crate::analytics::{Aggregate, Analytics};
@ -18,13 +19,18 @@ use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::Opt;
#[derive(OpenApi)]
#[openapi(paths(swap_indexes))]
pub struct SwapIndexesApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(swap_indexes))));
}
#[derive(Deserr, Debug, Clone, PartialEq, Eq)]
#[derive(Deserr, Debug, Clone, PartialEq, Eq, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct SwapIndexesPayload {
/// Array of the two indexUids to be swapped
#[deserr(error = DeserrJsonError<InvalidSwapIndexes>, missing_field_error = DeserrJsonError::missing_swap_indexes)]
indexes: Vec<IndexUid>,
}
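// Illustrative request body for `POST /swap-indexes` (the index names are
// assumptions); each entry swaps exactly one pair:
//
//   [ { "indexes": ["indexA", "indexB"] }, { "indexes": ["indexC", "indexD"] } ]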
@ -50,6 +56,37 @@ impl Aggregate for IndexSwappedAnalytics {
}
}
/// Swap indexes
///
/// Swap the documents, settings, and task history of two or more indexes. You can only swap indexes in pairs. However, a single request can swap as many index pairs as you wish.
/// Swapping indexes is an atomic transaction: either all indexes are successfully swapped, or none are.
/// Swapping indexA and indexB will also replace every mention of indexA with indexB (and vice versa) in the task history. Enqueued tasks are left unmodified.
#[utoipa::path(
post,
path = "",
tag = "Indexes",
security(("Bearer" = ["search", "*"])),
request_body = Vec<SwapIndexesPayload>,
responses(
(status = OK, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 3,
"indexUid": null,
"status": "enqueued",
"type": "indexSwap",
"enqueuedAt": "2021-08-12T10:00:00.000000Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
pub async fn swap_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
params: AwebJson<Vec<SwapIndexesPayload>, DeserrJsonError>,

View file

@ -17,6 +17,7 @@ use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
use time::{Date, Duration, OffsetDateTime, Time};
use tokio::task;
use utoipa::{IntoParams, OpenApi, ToSchema};
use super::{get_task_id, is_dry_run, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use crate::analytics::{Aggregate, AggregateMethod, Analytics};
@ -25,6 +26,17 @@ use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::{aggregate_methods, Opt};
#[derive(OpenApi)]
#[openapi(
paths(get_tasks, delete_tasks, cancel_tasks, get_task),
tags((
name = "Tasks",
description = "The tasks route gives information about the progress of the [asynchronous operations](https://docs.meilisearch.com/learn/advanced/asynchronous_operations.html).",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/tasks"),
)),
)]
pub struct TaskApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
@ -35,41 +47,72 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
pub struct TasksFilterQuery {
/// Maximum number of results to return.
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT as u32), error = DeserrQueryParamError<InvalidTaskLimit>)]
#[param(required = false, value_type = u32, example = 12, default = json!(PAGINATION_DEFAULT_LIMIT))]
pub limit: Param<u32>,
/// Fetch the next set of results from the given uid.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskFrom>)]
#[param(required = false, value_type = Option<u32>, example = 12421)]
pub from: Option<Param<TaskId>>,
/// The order in which to retrieve the tasks.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskReverse>)]
#[param(required = false, value_type = Option<bool>, example = true)]
pub reverse: Option<Param<bool>>,
/// Lets you filter tasks by their batch uid. By default, when the `batchUids` query parameter is not set, tasks from all batches are returned. It's possible to specify several batch uids by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidBatchUids>)]
#[param(required = false, value_type = Option<u32>, example = 12421)]
pub batch_uids: OptionStarOrList<BatchId>,
/// Lets you filter tasks by their uid. By default, when the `uids` query parameter is not set, all task uids are returned. It's possible to specify several uids by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([231, 423, 598, "*"]))]
pub uids: OptionStarOrList<u32>,
/// Lets you filter tasks using the uid of the task that canceled them. It's possible to specify several task uids by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskCanceledBy>)]
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([374, "*"]))]
pub canceled_by: OptionStarOrList<u32>,
/// Lets you filter tasks by their related type. By default, when the `types` query parameter is not set, all task types are returned. It's possible to specify several types by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskTypes>)]
#[param(required = false, value_type = Option<Vec<String>>, example = json!([Kind::DocumentAdditionOrUpdate, "*"]))]
pub types: OptionStarOrList<Kind>,
/// Lets you filter tasks by their status. By default, when the `statuses` query parameter is not set, all task statuses are returned. It's possible to specify several statuses by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskStatuses>)]
#[param(required = false, value_type = Option<Vec<Status>>, example = json!([Status::Succeeded, Status::Failed, Status::Canceled, Status::Enqueued, Status::Processing, "*"]))]
pub statuses: OptionStarOrList<Status>,
/// Lets you filter tasks by their related index. By default, when the `indexUids` query parameter is not set, the tasks of all the indexes are returned. It is possible to specify several indexes by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
#[param(required = false, value_type = Option<Vec<String>>, example = json!(["movies", "theater", "*"]))]
pub index_uids: OptionStarOrList<IndexUid>,
/// Lets you filter tasks based on their enqueuedAt time. Matches tasks enqueued after the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their enqueuedAt time. Matches tasks enqueued before the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their startedAt time. Matches tasks started after the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub after_started_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their startedAt time. Matches tasks started before the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub before_started_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their finishedAt time. Matches tasks finished after the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub after_finished_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their finishedAt time. Matches tasks finished before the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub before_finished_at: OptionStarOr<OffsetDateTime>,
}
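// Illustrative query combining several of the filters above (parameter names are
// camelCase per `rename_all`; the values are assumptions):
//
//   GET /tasks?limit=20&statuses=succeeded,failed&indexUids=movies&afterEnqueuedAt=2024-08-08T16:37:09.971Z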
@ -117,33 +160,58 @@ impl TaskDeletionOrCancelationQuery {
}
}
#[derive(Debug, Deserr)]
#[derive(Debug, Deserr, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
pub struct TaskDeletionOrCancelationQuery {
/// Lets you filter tasks by their uid. By default, when the `uids` query parameter is not set, all task uids are returned. It's possible to specify several uids by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
pub uids: OptionStarOrList<TaskId>,
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([231, 423, 598, "*"]))]
pub uids: OptionStarOrList<u32>,
/// Lets you filter tasks by their `batchUid`.
#[deserr(default, error = DeserrQueryParamError<InvalidBatchUids>)]
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([231, 423, 598, "*"]))]
pub batch_uids: OptionStarOrList<BatchId>,
/// Lets you filter tasks using the uid of the task that canceled them. It's possible to specify several task uids by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskCanceledBy>)]
pub canceled_by: OptionStarOrList<TaskId>,
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([374, "*"]))]
pub canceled_by: OptionStarOrList<u32>,
/// Lets you filter tasks by their related type. By default, when the `types` query parameter is not set, all task types are returned. It's possible to specify several types by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskTypes>)]
#[param(required = false, value_type = Option<Vec<Kind>>, example = json!([Kind::DocumentDeletion, "*"]))]
pub types: OptionStarOrList<Kind>,
/// Lets you filter tasks by their status. By default, when the `statuses` query parameter is not set, all task statuses are returned. It's possible to specify several statuses by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskStatuses>)]
#[param(required = false, value_type = Option<Vec<Status>>, example = json!([Status::Succeeded, Status::Failed, Status::Canceled, "*"]))]
pub statuses: OptionStarOrList<Status>,
/// Lets you filter tasks by their related index. By default, when the `indexUids` query parameter is not set, the tasks of all the indexes are returned. It is possible to specify several indexes by separating them with the `,` character.
#[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
#[param(required = false, value_type = Option<Vec<String>>, example = json!(["movies", "theater", "*"]))]
pub index_uids: OptionStarOrList<IndexUid>,
/// Lets you filter tasks based on their enqueuedAt time. Matches tasks enqueued after the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their enqueuedAt time. Matches tasks enqueued before the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their startedAt time. Matches tasks started after the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub after_started_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their startedAt time. Matches tasks started before the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub before_started_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their finishedAt time. Matches tasks finished after the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub after_finished_at: OptionStarOr<OffsetDateTime>,
/// Lets you filter tasks based on their finishedAt time. Matches tasks finished before the given date. Supports RFC 3339 date format.
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
pub before_finished_at: OptionStarOr<OffsetDateTime>,
}
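// Both `POST /tasks/cancel` and `DELETE /tasks` reuse this struct, and at least
// one filter must be provided (see the 400 examples below). An illustrative call:
//
//   DELETE /tasks?statuses=succeeded&beforeEnqueuedAt=2024-08-08T16:37:09.971Z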
@ -226,6 +294,51 @@ impl<Method: AggregateMethod + 'static> Aggregate for TaskFilterAnalytics<Method
}
}
/// Cancel tasks
///
/// Cancel enqueued and/or processing [tasks](https://www.meilisearch.com/docs/learn/async/asynchronous_operations)
#[utoipa::path(
post,
path = "/cancel",
tag = "Tasks",
security(("Bearer" = ["tasks.cancel", "tasks.*", "*"])),
params(TaskDeletionOrCancelationQuery),
responses(
(status = 200, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "taskCancelation",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 400, description = "A filter is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
"code": "missing_task_filters",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing_task_filters"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
(status = 404, description = "The task uid does not exists", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Task :taskUid not found.",
"code": "task_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors/#task_not_found"
}
))
)
)]
async fn cancel_tasks(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
@ -260,11 +373,8 @@ async fn cancel_tasks(
let query = params.into_query();
let (tasks, _) = index_scheduler.get_task_ids_from_authorized_indexes(
&index_scheduler.read_txn()?,
&query,
index_scheduler.filters(),
)?;
let (tasks, _) =
index_scheduler.get_task_ids_from_authorized_indexes(&query, index_scheduler.filters())?;
let task_cancelation =
KindWithContent::TaskCancelation { query: format!("?{}", req.query_string()), tasks };
@ -278,6 +388,51 @@ async fn cancel_tasks(
Ok(HttpResponse::Ok().json(task))
}
/// Delete tasks
///
/// Delete [tasks](https://docs.meilisearch.com/learn/advanced/asynchronous_operations.html) matching a filter
#[utoipa::path(
delete,
path = "",
tag = "Tasks",
security(("Bearer" = ["tasks.delete", "tasks.*", "*"])),
params(TaskDeletionOrCancelationQuery),
responses(
(status = 200, description = "Task successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 147,
"indexUid": null,
"status": "enqueued",
"type": "taskDeletion",
"enqueuedAt": "2024-08-08T17:05:55.791772Z"
}
)),
(status = 400, description = "A filter is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
"code": "missing_task_filters",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing_task_filters"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
(status = 404, description = "The task uid does not exists", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Task :taskUid not found.",
"code": "task_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors/#task_not_found"
}
))
)
)]
async fn delete_tasks(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_DELETE }>, Data<IndexScheduler>>,
params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
@ -312,11 +467,8 @@ async fn delete_tasks(
let query = params.into_query();
let (tasks, _) = index_scheduler.get_task_ids_from_authorized_indexes(
&index_scheduler.read_txn()?,
&query,
index_scheduler.filters(),
)?;
let (tasks, _) =
index_scheduler.get_task_ids_from_authorized_indexes(&query, index_scheduler.filters())?;
let task_deletion =
KindWithContent::TaskDeletion { query: format!("?{}", req.query_string()), tasks };
@ -329,15 +481,69 @@ async fn delete_tasks(
Ok(HttpResponse::Ok().json(task))
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, ToSchema)]
pub struct AllTasks {
/// The list of tasks that matched the filter.
results: Vec<TaskView>,
/// Total number of results browsable with the `from` and `limit` parameters for the given resource.
total: u64,
/// Limit given for the query. If limit is not provided as a query parameter, this parameter displays the default limit value.
limit: u32,
/// The first task uid returned.
from: Option<u32>,
/// Represents the value to send in `from` to fetch the next slice of the results. The first item for the next slice starts at this exact number. When the returned value is null, it means that all the data have been browsed in the given order.
next: Option<u32>,
}
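// Paging sketch: fetch `GET /tasks`, then pass the returned `next` value as `from`
// on the following request; `from` is inclusive, so each slice starts exactly at
// that uid, and a null `next` means everything has been browsed.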
/// Get all tasks
///
/// Get all [tasks](https://docs.meilisearch.com/learn/advanced/asynchronous_operations.html)
#[utoipa::path(
get,
path = "",
tag = "Tasks",
security(("Bearer" = ["tasks.get", "tasks.*", "*"])),
params(TasksFilterQuery),
responses(
(status = 200, description = "Get all tasks", body = AllTasks, content_type = "application/json", example = json!(
{
"results": [
{
"uid": 144,
"indexUid": "mieli",
"status": "succeeded",
"type": "settingsUpdate",
"canceledBy": null,
"details": {
"settings": {
"filterableAttributes": [
"play_count"
]
}
},
"error": null,
"duration": "PT0.009330S",
"enqueuedAt": "2024-08-08T09:01:13.348471Z",
"startedAt": "2024-08-08T09:01:13.349442Z",
"finishedAt": "2024-08-08T09:01:13.358772Z"
}
],
"total": 1,
"limit": 1,
"from": 144,
"next": null
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn get_tasks(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
params: AwebQueryParameter<TasksFilterQuery, DeserrQueryParamError>,
@ -349,7 +555,7 @@ async fn get_tasks(
let query = params.into_query();
let filters = index_scheduler.filters();
let (tasks, total) = index_scheduler.get_tasks_from_authorized_indexes(query, filters)?;
let (tasks, total) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
let mut results: Vec<_> = tasks.iter().map(TaskView::from_task).collect();
// If we were able to fetch the number +1 tasks we asked
@ -362,6 +568,52 @@ async fn get_tasks(
Ok(HttpResponse::Ok().json(tasks))
}
/// Get a task
///
/// Get a [task](https://www.meilisearch.com/docs/learn/async/asynchronous_operations)
#[utoipa::path(
get,
path = "/{taskUid}",
tag = "Tasks",
security(("Bearer" = ["tasks.get", "tasks.*", "*"])),
params(("taskUid", format = UInt32, example = 0, description = "The task identifier", nullable = false)),
responses(
(status = 200, description = "Task successfully retrieved", body = TaskView, content_type = "application/json", example = json!(
{
"uid": 1,
"indexUid": "movies",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 79000,
"indexedDocuments": 79000
},
"error": null,
"duration": "PT1S",
"enqueuedAt": "2021-01-01T09:39:00.000000Z",
"startedAt": "2021-01-01T09:39:01.000000Z",
"finishedAt": "2021-01-01T09:39:02.000000Z"
}
)),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
(status = 404, description = "The task uid does not exists", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "Task :taskUid not found.",
"code": "task_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors/#task_not_found"
}
))
)
)]
async fn get_task(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
task_uid: web::Path<String>,
@ -377,7 +629,7 @@ async fn get_task(
let query = index_scheduler::Query { uids: Some(vec![task_uid]), ..Query::default() };
let filters = index_scheduler.filters();
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(query, filters)?;
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
if let Some(task) = tasks.first() {
let task_view = TaskView::from_task(task);
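
As a usage illustration for the two endpoints documented above (not code from this diff), here is a minimal client-side sketch using the workspace's own `reqwest`; the base URL and API key are hypothetical:

use reqwest::Client;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = Client::new();
    // GET /tasks/{taskUid}; the bearer key needs `tasks.get` (or broader).
    let task: serde_json::Value = client
        .get("http://localhost:7700/tasks/1")
        .bearer_auth("MASTER_KEY")
        .send()
        .await?
        .json()
        .await?;
    // `status` moves from enqueued/processing to succeeded, failed, or canceled.
    println!("task 1 status: {}", task["status"]);
    Ok(())
}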

View file

@ -22,6 +22,7 @@ use meilisearch_types::milli::score_details::{ScoreDetails, ScoreValue};
use meilisearch_types::milli::{self, DocumentId, OrderBy, TimeBudget};
use roaring::RoaringBitmap;
use serde::Serialize;
use utoipa::ToSchema;
use super::ranking_rules::{self, RankingRules};
use super::{
@ -33,10 +34,11 @@ use crate::routes::indexes::search::search_kind;
pub const DEFAULT_FEDERATED_WEIGHT: f64 = 1.0;
#[derive(Debug, Default, Clone, Copy, PartialEq, deserr::Deserr)]
#[derive(Debug, Default, Clone, Copy, PartialEq, deserr::Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct FederationOptions {
#[deserr(default, error = DeserrJsonError<InvalidMultiSearchWeight>)]
#[schema(value_type = f64)]
pub weight: Weight,
}
@ -70,8 +72,9 @@ impl std::ops::Deref for Weight {
}
}
#[derive(Debug, deserr::Deserr)]
#[derive(Debug, deserr::Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct Federation {
#[deserr(default = super::DEFAULT_SEARCH_LIMIT(), error = DeserrJsonError<InvalidSearchLimit>)]
pub limit: usize,
@ -83,22 +86,26 @@ pub struct Federation {
pub merge_facets: Option<MergeFacets>,
}
#[derive(Copy, Clone, Debug, deserr::Deserr, Default)]
#[derive(Copy, Clone, Debug, deserr::Deserr, Default, ToSchema)]
#[deserr(error = DeserrJsonError<InvalidMultiSearchMergeFacets>, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct MergeFacets {
#[deserr(default, error = DeserrJsonError<InvalidMultiSearchMaxValuesPerFacet>)]
pub max_values_per_facet: Option<usize>,
}
#[derive(Debug, deserr::Deserr)]
#[derive(Debug, deserr::Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct FederatedSearch {
pub queries: Vec<SearchQueryWithIndex>,
#[deserr(default)]
pub federation: Option<Federation>,
}
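
For orientation, this is the request body shape the three schemas above describe, written with the same `json!` helper the surrounding tests use. Index names and values are illustrative, and the route (POST /multi-search in current Meilisearch) is assumed rather than shown in this hunk:

let body = json!({
    "federation": { "limit": 20, "mergeFacets": { "maxValuesPerFacet": 10 } },
    "queries": [
        { "indexUid": "movies", "q": "batman" },
        { "indexUid": "comics", "q": "batman" }
    ]
});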
#[derive(Serialize, Clone)]
#[derive(Serialize, Clone, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct FederatedSearchResult {
pub hits: Vec<SearchHit>,
pub processing_time_ms: u128,
@ -109,6 +116,7 @@ pub struct FederatedSearchResult {
pub semantic_hit_count: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
#[schema(value_type = Option<BTreeMap<String, BTreeMap<String, u64>>>)]
pub facet_distribution: Option<BTreeMap<String, IndexMap<String, u64>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub facet_stats: Option<BTreeMap<String, FacetStats>>,
@ -355,7 +363,7 @@ struct SearchResultByIndex {
facets: Option<ComputedFacets>,
}
#[derive(Debug, Clone, Default, Serialize)]
#[derive(Debug, Clone, Default, Serialize, ToSchema)]
pub struct FederatedFacets(pub BTreeMap<String, ComputedFacets>);
impl FederatedFacets {

View file

@ -34,11 +34,15 @@ use serde::Serialize;
use serde_json::{json, Value};
#[cfg(test)]
mod mod_test;
use utoipa::ToSchema;
use crate::error::MeilisearchHttpError;
mod federated;
pub use federated::{perform_federated_search, FederatedSearch, Federation, FederationOptions};
pub use federated::{
perform_federated_search, FederatedSearch, FederatedSearchResult, Federation,
FederationOptions, MergeFacets,
};
mod ranking_rules;
@ -52,7 +56,7 @@ pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();
pub const DEFAULT_SEMANTIC_RATIO: fn() -> SemanticRatio = || SemanticRatio(0.5);
#[derive(Clone, Default, PartialEq, Deserr)]
#[derive(Clone, Default, PartialEq, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQuery {
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
@ -62,8 +66,10 @@ pub struct SearchQuery {
#[deserr(default, error = DeserrJsonError<InvalidHybridQuery>)]
pub hybrid: Option<HybridQuery>,
#[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
#[schema(default = DEFAULT_SEARCH_OFFSET)]
pub offset: usize,
#[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrJsonError<InvalidSearchLimit>)]
#[schema(default = DEFAULT_SEARCH_LIMIT)]
pub limit: usize,
#[deserr(default, error = DeserrJsonError<InvalidSearchPage>)]
pub page: Option<usize>,
@ -75,15 +81,16 @@ pub struct SearchQuery {
pub retrieve_vectors: bool,
#[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToCrop>)]
pub attributes_to_crop: Option<Vec<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
#[deserr(error = DeserrJsonError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
#[schema(default = DEFAULT_CROP_LENGTH)]
pub crop_length: usize,
#[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToHighlight>)]
pub attributes_to_highlight: Option<HashSet<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchShowMatchesPosition>, default)]
#[deserr(default, error = DeserrJsonError<InvalidSearchShowMatchesPosition>)]
pub show_matches_position: bool,
#[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScore>, default)]
#[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScore>)]
pub show_ranking_score: bool,
#[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScoreDetails>, default)]
#[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScoreDetails>)]
pub show_ranking_score_details: bool,
#[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
pub filter: Option<Value>,
@ -93,26 +100,28 @@ pub struct SearchQuery {
pub distinct: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSearchFacets>)]
pub facets: Option<Vec<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
#[deserr(error = DeserrJsonError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
#[schema(default = DEFAULT_HIGHLIGHT_PRE_TAG)]
pub highlight_pre_tag: String,
#[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
#[deserr(error = DeserrJsonError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
#[schema(default = DEFAULT_HIGHLIGHT_POST_TAG)]
pub highlight_post_tag: String,
#[deserr(default, error = DeserrJsonError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
#[deserr(error = DeserrJsonError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
#[schema(default = DEFAULT_CROP_MARKER)]
pub crop_marker: String,
#[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
#[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>)]
pub matching_strategy: MatchingStrategy,
#[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToSearchOn>, default)]
#[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToSearchOn>)]
pub attributes_to_search_on: Option<Vec<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchRankingScoreThreshold>, default)]
#[deserr(default, error = DeserrJsonError<InvalidSearchRankingScoreThreshold>)]
pub ranking_score_threshold: Option<RankingScoreThreshold>,
#[deserr(default, error = DeserrJsonError<InvalidSearchLocales>, default)]
#[deserr(default, error = DeserrJsonError<InvalidSearchLocales>)]
pub locales: Option<Vec<Locale>>,
}
#[derive(Debug, Clone, Copy, PartialEq, Deserr)]
#[derive(Debug, Clone, Copy, PartialEq, Deserr, ToSchema)]
#[deserr(try_from(f64) = TryFrom::try_from -> InvalidSearchRankingScoreThreshold)]
pub struct RankingScoreThreshold(f64);
impl std::convert::TryFrom<f64> for RankingScoreThreshold {
type Error = InvalidSearchRankingScoreThreshold;
@ -266,10 +275,11 @@ impl fmt::Debug for SearchQuery {
}
}
#[derive(Debug, Clone, Default, PartialEq, Deserr)]
#[derive(Debug, Clone, Default, PartialEq, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError<InvalidHybridQuery>, rename_all = camelCase, deny_unknown_fields)]
pub struct HybridQuery {
#[deserr(default, error = DeserrJsonError<InvalidSearchSemanticRatio>, default)]
#[schema(value_type = f32, default)]
pub semantic_ratio: SemanticRatio,
#[deserr(error = DeserrJsonError<InvalidEmbedder>)]
pub embedder: String,
@ -381,8 +391,9 @@ impl SearchQuery {
// This struct contains the fields of `SearchQuery` inline.
// This is because neither deserr nor serde supports `flatten` when using `deny_unknown_fields`.
// The `From<SearchQueryWithIndex>` implementation ensures both structs remain up to date.
#[derive(Debug, Clone, PartialEq, Deserr)]
#[derive(Debug, Clone, PartialEq, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct SearchQueryWithIndex {
#[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)]
pub index_uid: IndexUid,
@ -530,10 +541,11 @@ impl SearchQueryWithIndex {
}
}
#[derive(Debug, Clone, PartialEq, Deserr)]
#[derive(Debug, Clone, PartialEq, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct SimilarQuery {
#[deserr(error = DeserrJsonError<InvalidSimilarId>)]
#[schema(value_type = String)]
pub id: ExternalDocumentId,
#[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSimilarOffset>)]
pub offset: usize,
@ -552,6 +564,7 @@ pub struct SimilarQuery {
#[deserr(default, error = DeserrJsonError<InvalidSimilarShowRankingScoreDetails>, default)]
pub show_ranking_score_details: bool,
#[deserr(default, error = DeserrJsonError<InvalidSimilarRankingScoreThreshold>, default)]
#[schema(value_type = f64)]
pub ranking_score_threshold: Option<RankingScoreThresholdSimilar>,
}
@ -587,7 +600,7 @@ impl TryFrom<Value> for ExternalDocumentId {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr, ToSchema)]
#[deserr(rename_all = camelCase)]
pub enum MatchingStrategy {
/// Remove query words from last to first
@ -634,11 +647,13 @@ impl From<FacetValuesSort> for OrderBy {
}
}
#[derive(Debug, Clone, Serialize, PartialEq)]
#[derive(Debug, Clone, Serialize, PartialEq, ToSchema)]
pub struct SearchHit {
#[serde(flatten)]
#[schema(additional_properties, inline, value_type = HashMap<String, Value>)]
pub document: Document,
#[serde(rename = "_formatted", skip_serializing_if = "Document::is_empty")]
#[schema(additional_properties, value_type = HashMap<String, Value>)]
pub formatted: Document,
#[serde(rename = "_matchesPosition", skip_serializing_if = "Option::is_none")]
pub matches_position: Option<MatchesPosition>,
@ -648,8 +663,9 @@ pub struct SearchHit {
pub ranking_score_details: Option<serde_json::Map<String, serde_json::Value>>,
}
#[derive(Serialize, Clone, PartialEq)]
#[derive(Serialize, Clone, PartialEq, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct SearchResult {
pub hits: Vec<SearchHit>,
pub query: String,
@ -657,6 +673,7 @@ pub struct SearchResult {
#[serde(flatten)]
pub hits_info: HitsInfo,
#[serde(skip_serializing_if = "Option::is_none")]
#[schema(value_type = Option<BTreeMap<String, Value>>)]
pub facet_distribution: Option<BTreeMap<String, IndexMap<String, u64>>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub facet_stats: Option<BTreeMap<String, FacetStats>>,
@ -711,7 +728,7 @@ impl fmt::Debug for SearchResult {
}
}
#[derive(Serialize, Debug, Clone, PartialEq)]
#[derive(Serialize, Debug, Clone, PartialEq, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct SimilarResult {
pub hits: Vec<SearchHit>,
@ -721,24 +738,27 @@ pub struct SimilarResult {
pub hits_info: HitsInfo,
}
#[derive(Serialize, Debug, Clone, PartialEq)]
#[derive(Serialize, Debug, Clone, PartialEq, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct SearchResultWithIndex {
pub index_uid: String,
#[serde(flatten)]
pub result: SearchResult,
}
#[derive(Serialize, Debug, Clone, PartialEq, Eq)]
#[derive(Serialize, Debug, Clone, PartialEq, Eq, ToSchema)]
#[serde(untagged)]
pub enum HitsInfo {
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
Pagination { hits_per_page: usize, page: usize, total_pages: usize, total_hits: usize },
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
OffsetLimit { limit: usize, offset: usize, estimated_total_hits: usize },
}
#[derive(Serialize, Debug, Clone, PartialEq)]
#[derive(Serialize, Debug, Clone, PartialEq, ToSchema)]
pub struct FacetStats {
pub min: f64,
pub max: f64,
@ -1021,8 +1041,9 @@ pub fn perform_search(
Ok(result)
}
#[derive(Debug, Clone, Default, Serialize)]
#[derive(Debug, Clone, Default, Serialize, ToSchema)]
pub struct ComputedFacets {
#[schema(value_type = BTreeMap<String, BTreeMap<String, u64>>)]
pub distribution: BTreeMap<String, IndexMap<String, u64>>,
pub stats: BTreeMap<String, FacetStats>,
}
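
A compact, self-contained sketch of the annotation pattern this file now applies throughout; the struct and field are hypothetical, and `value_type` maps types utoipa has no schema for (such as `IndexMap`) onto an equivalent shape in the generated OpenAPI document:

use std::collections::BTreeMap;

use indexmap::IndexMap;
use utoipa::ToSchema;

#[derive(serde::Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
struct ExampleFacets {
    // Serialized as-is, but documented as a plain nested BTreeMap.
    #[schema(value_type = BTreeMap<String, BTreeMap<String, u64>>)]
    facet_distribution: BTreeMap<String, IndexMap<String, u64>>,
}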

View file

@ -99,12 +99,12 @@ macro_rules! compute_authorized_search {
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
index
let (task1,_status_code) = index.add_documents(documents, None).await;
index.wait_task(task1.uid()).await.succeeded();
let (task2,_status_code) = index
.update_settings(json!({"filterableAttributes": ["color"]}))
.await;
index.wait_task(1).await;
index.wait_task(task2.uid()).await.succeeded();
drop(index);
for key_content in ACCEPTED_KEYS.iter() {
@ -146,8 +146,8 @@ macro_rules! compute_forbidden_search {
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
drop(index);
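// Note on the change above, which this diff repeats across the test suite:
// enqueuing helpers return the task payload, so tests wait on `task.uid()`
// and assert the terminal status, instead of waiting on hard-coded uids
// that break whenever setup enqueues an extra task.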
for key_content in $parent_keys.iter() {

View file

@ -267,22 +267,22 @@ macro_rules! compute_authorized_single_search {
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
index
let (add_task,_status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
let (update_task,_status_code) = index
.update_settings(json!({"filterableAttributes": ["color"]}))
.await;
index.wait_task(1).await;
index.wait_task(update_task.uid()).await.succeeded();
drop(index);
let index = server.index("products");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(2).await;
index
let (add_task2,_status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task2.uid()).await.succeeded();
let (update_task2,_status_code) = index
.update_settings(json!({"filterableAttributes": ["doggos"]}))
.await;
index.wait_task(3).await;
index.wait_task(update_task2.uid()).await.succeeded();
drop(index);
@ -338,22 +338,22 @@ macro_rules! compute_authorized_multiple_search {
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
index
let (task,_status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (task,_status_code) = index
.update_settings(json!({"filterableAttributes": ["color"]}))
.await;
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
drop(index);
let index = server.index("products");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(2).await;
index
let (task,_status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (task,_status_code) = index
.update_settings(json!({"filterableAttributes": ["doggos"]}))
.await;
index.wait_task(3).await;
index.wait_task(task.uid()).await.succeeded();
drop(index);
@ -422,22 +422,22 @@ macro_rules! compute_forbidden_single_search {
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
index
let (task,_status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (task,_status_code) = index
.update_settings(json!({"filterableAttributes": ["color"]}))
.await;
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
drop(index);
let index = server.index("products");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(2).await;
index
let (task,_status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (task,_status_code) = index
.update_settings(json!({"filterableAttributes": ["doggos"]}))
.await;
index.wait_task(3).await;
index.wait_task(task.uid()).await.succeeded();
drop(index);
assert_eq!($parent_keys.len(), $failed_query_indexes.len(), "keys != query_indexes");
@ -498,22 +498,22 @@ macro_rules! compute_forbidden_multiple_search {
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
index
let (task,_status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (task,_status_code) = index
.update_settings(json!({"filterableAttributes": ["color"]}))
.await;
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
drop(index);
let index = server.index("products");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(2).await;
index
let (task,_status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (task,_status_code) = index
.update_settings(json!({"filterableAttributes": ["doggos"]}))
.await;
index.wait_task(3).await;
index.wait_task(task.uid()).await.succeeded();
drop(index);
assert_eq!($parent_keys.len(), $failed_query_indexes.len(), "keys != query_indexes");

View file

@ -10,8 +10,8 @@ use crate::json;
async fn error_get_unexisting_batch_status() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_batch(1).await;
let expected_response = json!({
@ -29,8 +29,8 @@ async fn error_get_unexisting_batch_status() {
async fn get_batch_status() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get_batch(0).await;
assert_eq!(code, 200);
}
@ -39,8 +39,8 @@ async fn get_batch_status() {
async fn list_batches() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -97,7 +97,7 @@ async fn list_batches_with_star_filters() {
let server = Server::new().await;
let index = server.index("test");
let (batch, _code) = index.create(None).await;
index.wait_task(batch.uid()).await;
index.wait_task(batch.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -141,25 +141,21 @@ async fn list_batches_with_star_filters() {
async fn list_batches_status_filtered() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.failed();
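// The second `create` targets an index that now exists, so its task fails;
// the filters below can then see exactly one succeeded and one failed batch.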
let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
// We can't be sure that the update isn't already processed so we can't test this
// let (response, code) = index.filtered_batches(&[], &["processing"]).await;
// assert_eq!(code, 200, "{}", response);
// assert_eq!(response["results"].as_array().unwrap().len(), 1);
index.wait_task(1).await;
let (response, code) = index.filtered_batches(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index.filtered_batches(&[], &["succeeded", "failed"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
@ -171,23 +167,26 @@ async fn list_batches_type_filtered() {
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.delete().await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.filtered_batches(&["indexCreation"], &[], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) =
index.filtered_batches(&["indexCreation", "indexDeletion"], &[], &[]).await;
index.filtered_batches(&["indexCreation", "IndexDeletion"], &[], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index.filtered_batches(&["indexCreation"], &[], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
}
#[actix_rt::test]
async fn list_batches_invalid_canceled_by_filter() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -201,8 +200,8 @@ async fn list_batches_invalid_canceled_by_filter() {
async fn list_batches_status_and_type_filtered() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -275,8 +274,9 @@ async fn list_batch_filter_error() {
async fn test_summarized_document_addition_or_update() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
index.wait_task(0).await;
let (task, _status_code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -306,8 +306,9 @@ async fn test_summarized_document_addition_or_update() {
}
"#);
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
index.wait_task(1).await;
let (task, _status_code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(1).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -342,8 +343,8 @@ async fn test_summarized_document_addition_or_update() {
async fn test_summarized_delete_documents_by_batch() {
let server = Server::new().await;
let index = server.index("test");
index.delete_batch(vec![1, 2, 3]).await;
index.wait_task(0).await;
let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await;
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -374,8 +375,8 @@ async fn test_summarized_delete_documents_by_batch() {
"#);
index.create(None).await;
index.delete_batch(vec![42]).await;
index.wait_task(2).await;
let (task, _status_code) = index.delete_batch(vec![42]).await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(2).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -411,8 +412,9 @@ async fn test_summarized_delete_documents_by_filter() {
let server = Server::new().await;
let index = server.index("test");
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(0).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -444,8 +446,9 @@ async fn test_summarized_delete_documents_by_filter() {
"#);
index.create(None).await;
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(2).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(2).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -477,8 +480,9 @@ async fn test_summarized_delete_documents_by_filter() {
"#);
index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await;
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(4).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(4).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -514,14 +518,13 @@ async fn test_summarized_delete_documents_by_filter() {
async fn test_summarized_delete_document_by_id() {
let server = Server::new().await;
let index = server.index("test");
index.delete_document(1).await;
index.wait_task(0).await;
let (task, _status_code) = index.delete_document(1).await;
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(batch,
@r#"
{
"uid": 0,
"uid": "[uid]",
"progress": null,
"details": {
"providedIds": 1,
@ -546,8 +549,8 @@ async fn test_summarized_delete_document_by_id() {
"#);
index.create(None).await;
index.delete_document(42).await;
index.wait_task(2).await;
let (task, _status_code) = index.delete_document(42).await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(2).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -594,8 +597,8 @@ async fn test_summarized_settings_update() {
}
"###);
index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
index.wait_task(0).await;
let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -639,8 +642,8 @@ async fn test_summarized_settings_update() {
async fn test_summarized_index_creation() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -667,8 +670,8 @@ async fn test_summarized_index_creation() {
}
"#);
index.create(Some("doggos")).await;
index.wait_task(1).await;
let (task, _status_code) = index.create(Some("doggos")).await;
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(1).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -703,7 +706,7 @@ async fn test_summarized_index_deletion() {
let server = Server::new().await;
let index = server.index("test");
let (ret, _code) = index.delete().await;
let batch = index.wait_task(ret.uid()).await;
let batch = index.wait_task(ret.uid()).await.failed();
snapshot!(batch,
@r###"
{
@ -734,7 +737,7 @@ async fn test_summarized_index_deletion() {
// both batches may get autobatched and the deleted documents count will be wrong.
let (ret, _code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
let batch = index.wait_task(ret.uid()).await;
let batch = index.wait_task(ret.uid()).await.succeeded();
snapshot!(batch,
@r###"
{
@ -757,7 +760,7 @@ async fn test_summarized_index_deletion() {
"###);
let (ret, _code) = index.delete().await;
let batch = index.wait_task(ret.uid()).await;
let batch = index.wait_task(ret.uid()).await.succeeded();
snapshot!(batch,
@r###"
{
@ -780,7 +783,7 @@ async fn test_summarized_index_deletion() {
// What happens when you delete an index that doesn't exist.
let (ret, _code) = index.delete().await;
let batch = index.wait_task(ret.uid()).await;
let batch = index.wait_task(ret.uid()).await.failed();
snapshot!(batch,
@r###"
{
@ -812,8 +815,8 @@ async fn test_summarized_index_update() {
let server = Server::new().await;
let index = server.index("test");
// If the index doesn't exist yet, we should get errors with or without the primary key.
index.update(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.update(None).await;
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -840,8 +843,8 @@ async fn test_summarized_index_update() {
}
"#);
index.update(Some("bones")).await;
index.wait_task(1).await;
let (task, _status_code) = index.update(Some("bones")).await;
index.wait_task(task.uid()).await.failed();
let (batch, _) = index.get_batch(1).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -873,8 +876,8 @@ async fn test_summarized_index_update() {
// And run the same two tests once the index does exist.
index.create(None).await;
index.update(None).await;
index.wait_task(3).await;
let (task, _status_code) = index.update(None).await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(3).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -901,8 +904,8 @@ async fn test_summarized_index_update() {
}
"#);
index.update(Some("bones")).await;
index.wait_task(4).await;
let (task, _status_code) = index.update(Some("bones")).await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(4).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -935,12 +938,12 @@ async fn test_summarized_index_update() {
#[actix_web::test]
async fn test_summarized_index_swap() {
let server = Server::new().await;
server
let (task, _status_code) = server
.index_swap(json!([
{ "indexes": ["doggos", "cattos"] }
]))
.await;
server.wait_task(0).await;
server.wait_task(task.uid()).await.failed();
let (batch, _) = server.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -975,39 +978,32 @@ async fn test_summarized_index_swap() {
"#);
server.index("doggos").create(None).await;
server.index("cattos").create(None).await;
let (task, _status_code) = server.index("cattos").create(None).await;
server
.index_swap(json!([
{ "indexes": ["doggos", "cattos"] }
]))
.await;
server.wait_task(3).await;
let (batch, _) = server.get_batch(3).await;
server.wait_task(task.uid()).await.succeeded();
let (batch, _) = server.get_batch(1).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r#"
{
"uid": 3,
"uid": 1,
"progress": null,
"details": {
"swaps": [
{
"indexes": [
"doggos",
"cattos"
]
}
]
},
"details": {},
"stats": {
"totalNbTasks": 1,
"status": {
"succeeded": 1
},
"types": {
"indexSwap": 1
"indexCreation": 1
},
"indexUids": {}
"indexUids": {
"doggos": 1
}
},
"duration": "[duration]",
"startedAt": "[date]",
@ -1021,10 +1017,10 @@ async fn test_summarized_batch_cancelation() {
let server = Server::new().await;
let index = server.index("doggos");
// to avoid being flaky we're only going to cancel an already finished batch :(
index.create(None).await;
index.wait_task(0).await;
server.cancel_tasks("uids=0").await;
index.wait_task(1).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.cancel_tasks("uids=0").await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(1).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -1059,10 +1055,10 @@ async fn test_summarized_batch_deletion() {
let server = Server::new().await;
let index = server.index("doggos");
// to avoid being flaky we're only going to delete an already finished batch :(
index.create(None).await;
index.wait_task(0).await;
server.delete_tasks("uids=0").await;
index.wait_task(1).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.delete_tasks("uids=0").await;
index.wait_task(task.uid()).await.succeeded();
let (batch, _) = index.get_batch(1).await;
assert_json_snapshot!(batch,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -1095,8 +1091,8 @@ async fn test_summarized_batch_deletion() {
#[actix_web::test]
async fn test_summarized_dump_creation() {
let server = Server::new().await;
server.create_dump().await;
server.wait_task(0).await;
let (task, _status_code) = server.create_dump().await;
server.wait_task(task.uid()).await;
let (batch, _) = server.get_batch(0).await;
assert_json_snapshot!(batch,
{ ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },

View file

@ -46,11 +46,11 @@ impl Value {
// Panic if the json doesn't contain the `status` field set to "succeeded"
#[track_caller]
pub fn succeeded(&self) -> &Self {
pub fn succeeded(&self) -> Self {
if !self.is_success() {
panic!("Called succeeded on {}", serde_json::to_string_pretty(&self.0).unwrap());
}
self
self.clone()
}
/// Return `true` if the `status` field is set to `failed`.
@ -65,11 +65,11 @@ impl Value {
// Panic if the json doesn't contain the `status` field set to "failed"
#[track_caller]
pub fn failed(&self) -> &Self {
pub fn failed(&self) -> Self {
if !self.is_fail() {
panic!("Called failed on {}", serde_json::to_string_pretty(&self.0).unwrap());
}
self
self.clone()
}
}
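
Returning an owned `Value` instead of `&Self` is what lets call sites chain directly off the temporary returned by `wait_task`, as in this sketch (`index` and `response` are stand-ins mirroring the tests above):

let task = index.wait_task(response.uid()).await.succeeded();
assert_eq!(task["status"], "succeeded");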
@ -426,7 +426,7 @@ pub async fn shared_index_with_test_set() -> &'static Index<'static, Shared> {
)
.await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
index
})
.await

View file

@ -980,7 +980,7 @@ async fn add_documents_no_index_creation() {
snapshot!(code, @"202 Accepted");
assert_eq!(response["taskUid"], 0);
index.wait_task(0).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(0).await;
snapshot!(code, @"200 OK");
@ -1059,9 +1059,9 @@ async fn document_addition_with_primary_key() {
}
"###);
index.wait_task(0).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(0).await;
let (response, code) = index.get_task(response.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1169,7 +1169,7 @@ async fn replace_document() {
}
"###);
index.wait_task(0).await;
index.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -1178,12 +1178,12 @@ async fn replace_document() {
}
]);
let (_response, code) = index.add_documents(documents, None).await;
let (task, code) = index.add_documents(documents, None).await;
snapshot!(code,@"202 Accepted");
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(1).await;
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1220,9 +1220,89 @@ async fn replace_document() {
#[actix_rt::test]
async fn add_no_documents() {
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.add_documents(json!([]), None).await;
let index = server.index("kefir");
let (task, code) = index.add_documents(json!([]), None).await;
snapshot!(code, @"202 Accepted");
let task = server.wait_task(task.uid()).await;
let task = task.succeeded();
snapshot!(task, @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "kefir",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 0,
"indexedDocuments": 0
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"#);
let (task, _code) = index.add_documents(json!([]), Some("kefkef")).await;
let task = server.wait_task(task.uid()).await;
let task = task.succeeded();
snapshot!(task, @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "kefir",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 0,
"indexedDocuments": 0
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"#);
let (task, _code) = index.add_documents(json!([{ "kefkef": 1 }]), None).await;
let task = server.wait_task(task.uid()).await;
let task = task.succeeded();
snapshot!(task, @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "kefir",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"#);
let (documents, _status) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
snapshot!(documents, @r#"
{
"results": [
{
"kefkef": 1
}
],
"offset": 0,
"limit": 20,
"total": 1
}
"#);
}
#[actix_rt::test]
@ -1273,9 +1353,9 @@ async fn error_add_documents_bad_document_id() {
"content": "foobar"
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
let (response, code) = index.get_task(value.uid()).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1311,7 +1391,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1348,7 +1428,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1389,9 +1469,9 @@ async fn error_add_documents_missing_document_id() {
"content": "foobar"
}
]);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (response, code) = index.get_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1439,7 +1519,7 @@ async fn error_document_field_limit_reached_in_one_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@ -1701,8 +1781,8 @@ async fn add_documents_with_geo_field() {
},
]);
index.add_documents(documents, None).await;
let response = index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -1740,9 +1820,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(2).await;
let (response, code) = index.get_task(2).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1778,9 +1858,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(3).await;
let (response, code) = index.get_task(3).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1816,9 +1896,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(4).await;
let (response, code) = index.get_task(4).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1854,9 +1934,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(5).await;
let (response, code) = index.get_task(5).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1892,9 +1972,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(6).await;
let (response, code) = index.get_task(6).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1930,9 +2010,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(7).await;
let (response, code) = index.get_task(7).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -1968,9 +2048,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(8).await;
let (response, code) = index.get_task(8).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -2006,9 +2086,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(9).await;
let (response, code) = index.get_task(9).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -2044,9 +2124,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(10).await;
let (response, code) = index.get_task(10).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -2082,9 +2162,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(11).await;
let (response, code) = index.get_task(11).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -2120,9 +2200,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(12).await;
let (response, code) = index.get_task(12).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -2158,9 +2238,9 @@ async fn add_documents_invalid_geo_field() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(13).await;
let (response, code) = index.get_task(13).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
@ -2200,7 +2280,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2237,7 +2317,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2274,7 +2354,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@ -2318,7 +2398,7 @@ async fn add_invalid_geo_and_then_settings() {
]);
let (ret, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await;
let ret = index.wait_task(ret.uid()).await.succeeded();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@ -2341,7 +2421,7 @@ async fn add_invalid_geo_and_then_settings() {
let (ret, code) = index.update_settings(json!({ "sortableAttributes": ["_geo"] })).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await;
let ret = index.wait_task(ret.uid()).await.failed();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@ -2408,9 +2488,9 @@ async fn error_primary_key_inference() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (response, code) = index.get_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2449,9 +2529,9 @@ async fn error_primary_key_inference() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (response, code) = index.get_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2488,9 +2568,9 @@ async fn error_primary_key_inference() {
}
]);
index.add_documents(documents, None).await;
index.wait_task(2).await;
let (response, code) = index.get_task(2).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -2527,14 +2607,14 @@ async fn add_documents_with_primary_key_twice() {
}
]);
index.add_documents(documents.clone(), Some("title")).await;
index.wait_task(0).await;
let (response, _code) = index.get_task(0).await;
let (task, _status_code) = index.add_documents(documents.clone(), Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
index.add_documents(documents, Some("title")).await;
index.wait_task(1).await;
let (response, _code) = index.get_task(1).await;
let (task, _status_code) = index.add_documents(documents, Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}

View file

@ -7,10 +7,10 @@ use crate::json;
async fn delete_one_document_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.delete_document(0).await;
let (task, code) = index.delete_document(0).await;
assert_eq!(code, 202);
let response = index.wait_task(0).await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
}
@ -22,7 +22,7 @@ async fn delete_one_unexisting_document() {
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{}", response);
let update = index.wait_task(0).await;
let update = index.wait_task(response.uid()).await;
assert_eq!(update["status"], "succeeded");
}
@ -30,11 +30,12 @@ async fn delete_one_unexisting_document() {
async fn delete_one_document() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(0).await;
let (_response, code) = server.index("test").delete_document(0).await;
assert_eq!(code, 202);
index.wait_task(1).await;
let (task, _status_code) =
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, status_code) = server.index("test").delete_document(0).await;
assert_eq!(status_code, 202);
index.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get_document(0, None).await;
assert_eq!(code, 404);
@ -44,10 +45,10 @@ async fn delete_one_document() {
async fn clear_all_documents_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.clear_all_documents().await;
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let response = index.wait_task(0).await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
}
@ -56,17 +57,17 @@ async fn clear_all_documents_unexisting_index() {
async fn clear_all_documents() {
let server = Server::new().await;
let index = server.index("test");
index
let (task, _status_code) = index
.add_documents(
json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }]),
None,
)
.await;
index.wait_task(0).await;
let (_response, code) = index.clear_all_documents().await;
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(1).await;
let _update = index.wait_task(task.uid()).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -76,12 +77,12 @@ async fn clear_all_documents() {
async fn clear_all_documents_empty_index() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
let (_response, code) = index.clear_all_documents().await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(0).await;
let _update = index.wait_task(task.uid()).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -91,7 +92,7 @@ async fn clear_all_documents_empty_index() {
async fn error_delete_batch_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.delete_batch(vec![]).await;
let (task, code) = index.delete_batch(vec![]).await;
let expected_response = json!({
"message": "Index `test` not found.",
"code": "index_not_found",
@ -100,7 +101,7 @@ async fn error_delete_batch_unexisting_index() {
});
assert_eq!(code, 202);
let response = index.wait_task(0).await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(response["error"], expected_response);
@ -110,12 +111,12 @@ async fn error_delete_batch_unexisting_index() {
async fn delete_batch() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(0).await;
let (_response, code) = index.delete_batch(vec![1, 0]).await;
let (task, _status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);
let _update = index.wait_task(1).await;
let _update = index.wait_task(task.uid()).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@ -126,12 +127,12 @@ async fn delete_batch() {
async fn delete_no_document_batch() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{}", _response);
let _update = index.wait_task(1).await;
let _update = index.wait_task(_response.uid()).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
@ -142,7 +143,7 @@ async fn delete_document_by_filter() {
let server = Server::new().await;
let index = server.index("doggo");
index.update_settings_filterable_attributes(json!(["color"])).await;
index
let (task, _status_code) = index
.add_documents(
json!([
{ "id": 0, "color": "red" },
@ -153,7 +154,7 @@ async fn delete_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
let (stats, _) = index.stats().await;
snapshot!(json_string!(stats), @r###"
@ -180,7 +181,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(2).await;
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 2,
@ -246,7 +247,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(3).await;
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 3,
@ -302,7 +303,7 @@ async fn delete_document_by_complex_filter() {
let server = Server::new().await;
let index = server.index("doggo");
index.update_settings_filterable_attributes(json!(["color"])).await;
index
let (task, _status_code) = index
.add_documents(
json!([
{ "id": 0, "color": "red" },
@ -314,7 +315,7 @@ async fn delete_document_by_complex_filter() {
Some("id"),
)
.await;
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.delete_document_by_filter(
json!({ "filter": ["color != red", "color != green", "color EXISTS"] }),
@ -331,7 +332,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(2).await;
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 2,
@ -390,7 +391,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(3).await;
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 3,

View file

@ -115,7 +115,7 @@ async fn create_index_with_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = index.wait_task(response.uid()).await.succeeded();
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@ -130,8 +130,7 @@ async fn create_index_with_invalid_primary_key() {
let index = server.unique_index();
let (response, code) = index.add_documents(documents, Some("title")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@ -141,8 +140,7 @@ async fn create_index_with_invalid_primary_key() {
let (response, code) = index.add_documents(documents, Some("id")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);

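Where the enqueued task is expected to error, the bare `wait_task(...)` is tightened to `.failed()` rather than `.succeeded()`, so a task that unexpectedly passes now fails the test. A sketch of that shape, with a hypothetical payload whose `title` field cannot serve as a primary key (same helper assumptions as above):

let server = Server::new().await;
let index = server.unique_index();
// Hypothetical documents: an array value makes "title" an invalid primary key.
let documents = json!([{ "id": 2, "title": ["Pride", "Prejudice"] }]);
let (response, code) = index.add_documents(documents, Some("title")).await;
assert_eq!(code, 202);
// The indexing task itself is expected to fail; .failed() asserts exactly that.
index.wait_task(response.uid()).await.failed();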
View file

@ -12,7 +12,7 @@ async fn create_and_get_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get().await;
@ -79,8 +79,8 @@ async fn get_and_paginate_indexes() {
let server = Server::new().await;
const NB_INDEXES: usize = 50;
for i in 0..NB_INDEXES {
server.index(&format!("test_{i:02}")).create(None).await;
server.index(&format!("test_{i:02}")).wait_task(i as u64).await;
server.index(format!("test_{i:02}")).create(None).await;
server.index(format!("test_{i:02}")).wait_task(i as u64).await;
}
// basic

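The loop above keeps a positional wait: each iteration enqueues exactly one indexCreation task, so `i as u64` happens to line up with the assigned uid. Capturing the returned task would give the same guarantee without leaning on the numbering; a sketch under the same helper assumptions:

for i in 0..NB_INDEXES {
    // Capture the enqueued task instead of assuming uids start at 0 and
    // advance by exactly one per iteration.
    let (task, _code) = server.index(format!("test_{i:02}")).create(None).await;
    server.index(format!("test_{i:02}")).wait_task(task.uid()).await.succeeded();
}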
View file

@ -5,11 +5,11 @@ use crate::json;
async fn stats() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.create(Some("id")).await;
let (task, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
index.wait_task(0).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.stats().await;
@ -33,7 +33,7 @@ async fn stats() {
assert_eq!(code, 202);
assert_eq!(response["taskUid"], 1);
index.wait_task(1).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.stats().await;

View file

@ -13,9 +13,9 @@ async fn update_primary_key() {
assert_eq!(code, 202);
index.update(Some("primary")).await;
let (task, _status_code) = index.update(Some("primary")).await;
let response = index.wait_task(1).await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
@ -46,9 +46,9 @@ async fn create_and_update_with_different_encoding() {
assert_eq!(code, 202);
let index = server.index_with_encoder("test", Encoder::Brotli);
index.update(Some("primary")).await;
let (task, _status_code) = index.update(Some("primary")).await;
let response = index.wait_task(1).await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}
@ -57,17 +57,17 @@ async fn create_and_update_with_different_encoding() {
async fn update_nothing() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.create(None).await;
let (task1, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(0).await;
index.wait_task(task1.uid()).await.succeeded();
let (_, code) = index.update(None).await;
let (task2, code) = index.update(None).await;
assert_eq!(code, 202);
let response = index.wait_task(1).await;
let response = index.wait_task(task2.uid()).await;
assert_eq!(response["status"], "succeeded");
}
@ -88,11 +88,11 @@ async fn error_update_existing_primary_key() {
]);
index.add_documents(documents, None).await;
let (_, code) = index.update(Some("primary")).await;
let (task, code) = index.update(Some("primary")).await;
assert_eq!(code, 202);
let response = index.wait_task(2).await;
let response = index.wait_task(task.uid()).await;
let expected_response = json!({
"message": "Index `test`: Index already has a primary key: `id`.",
@ -107,11 +107,11 @@ async fn error_update_existing_primary_key() {
#[actix_rt::test]
async fn error_update_unexisting_index() {
let server = Server::new().await;
let (_, code) = server.index("test").update(None).await;
let (task, code) = server.index("test").update(None).await;
assert_eq!(code, 202);
let response = server.index("test").wait_task(0).await;
let response = server.index("test").wait_task(task.uid()).await;
let expected_response = json!({
"message": "Index `test` not found.",

View file

@ -94,7 +94,7 @@ async fn basic_test_log_stream_route() {
"enqueuedAt": "[date]"
}
"###);
server.wait_task(ret.uid()).await;
server.wait_task(ret.uid()).await.succeeded();
let req = actix_web::test::TestRequest::delete().uri("/logs/stream");
let req = req.to_request();

View file

@ -151,8 +151,8 @@ async fn distinct_search_with_offset_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(1).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();
@ -210,8 +210,8 @@ async fn distinct_search_with_pagination_no_ranking() {
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(1).await;
let (task, _status_code) = index.update_distinct_attribute(json!(DOCUMENT_DISTINCT_KEY)).await;
index.wait_task(task.uid()).await.succeeded();
fn get_hits(response: &Value) -> Vec<&str> {
let hits_array = response["hits"].as_array().unwrap();

View file

@ -640,7 +640,7 @@ async fn filter_invalid_syntax_object() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(json!({"filter": "title & Glass"}), |response, code| {
@ -663,7 +663,7 @@ async fn filter_invalid_syntax_array() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(json!({"filter": ["title & Glass"]}), |response, code| {
@ -686,7 +686,7 @@ async fn filter_invalid_syntax_string() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "Found unexpected characters at the end of the filter: `XOR title = Glass`. You probably forgot an `OR` or an `AND` rule.\n15:32 title = Glass XOR title = Glass",
@ -708,7 +708,7 @@ async fn filter_invalid_attribute_array() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", index.uid),
@ -730,7 +730,7 @@ async fn filter_invalid_attribute_string() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", index.uid),
@ -752,7 +752,7 @@ async fn filter_reserved_geo_attribute_array() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:13 _geo = Glass",
@ -774,7 +774,7 @@ async fn filter_reserved_geo_attribute_string() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:13 _geo = Glass",
@ -796,7 +796,7 @@ async fn filter_reserved_attribute_array() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:21 _geoDistance = Glass",
@ -818,7 +818,7 @@ async fn filter_reserved_attribute_string() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:21 _geoDistance = Glass",
@ -840,7 +840,7 @@ async fn filter_reserved_geo_point_array() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:18 _geoPoint = Glass",
@ -862,7 +862,7 @@ async fn filter_reserved_geo_point_string() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:18 _geoPoint = Glass",
@ -884,7 +884,7 @@ async fn sort_geo_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.",
@ -911,7 +911,7 @@ async fn sort_reserved_attribute() {
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"sortableAttributes": ["id"]})).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.",
@ -1095,7 +1095,7 @@ async fn distinct_at_search_time() {
assert_eq!(code, 400);
let (task, _) = index.update_settings_filterable_attributes(json!(["color", "machin"])).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, machin`.", index.uid),
@ -1109,7 +1109,7 @@ async fn distinct_at_search_time() {
assert_eq!(code, 400);
let (task, _) = index.update_settings_displayed_attributes(json!(["color"])).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
let expected_response = json!({
"message": format!("Index `{}`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, <..hidden-attributes>`.", index.uid),

View file

@ -41,8 +41,8 @@ async fn simple_facet_search() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -175,8 +175,8 @@ async fn advanced_facet_search() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "enabled": false })).await;
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;
@ -199,8 +199,8 @@ async fn more_advanced_facet_search() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
index.update_settings_typo_tolerance(json!({ "disableOnWords": ["adventre"] })).await;
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventre"})).await;
@ -223,8 +223,8 @@ async fn simple_facet_search_with_max_values() {
let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -245,8 +245,8 @@ async fn simple_facet_search_by_count_with_max_values() {
)
.await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -261,8 +261,8 @@ async fn non_filterable_facet_search_error() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
@ -280,8 +280,8 @@ async fn facet_search_dont_support_words() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;
@ -298,8 +298,8 @@ async fn simple_facet_search_with_sort_by_count() {
let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
index.update_settings_filterable_attributes(json!(["genres"])).await;
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;

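Each facet-search fixture enqueues at least two tasks (a settings update, then a document addition), and the old code awaited a single hardcoded uid, which breaks as soon as the numbering shifts. A condensed sketch of the fixture with every returned uid awaited, including the settings task the hunks above still fire and forget (same helper assumptions; `DOCUMENTS` is the shared fixture in these tests):

let (settings_task, _) = index.update_settings_filterable_attributes(json!(["genres"])).await;
index.wait_task(settings_task.uid()).await.succeeded();
let (doc_task, _) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(doc_task.uid()).await.succeeded();
let (response, code) =
    index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{}", response);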
View file

@ -65,7 +65,7 @@ async fn formatted_contain_wildcard() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
index.search(json!({ "q": "pésti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }),
|response, code|
@ -398,7 +398,7 @@ async fn displayedattr_2_smol() {
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
index
.search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }),
@ -596,7 +596,7 @@ async fn test_cjk_highlight() {
{ "id": 1, "title": "大卫到了扫罗那里" },
]);
let (response, _) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
index
.search(json!({"q": "", "attributesToHighlight": ["title"]}), |response, code| {

View file

@ -46,8 +46,8 @@ async fn geo_sort_with_geo_strings() {
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["_geo"])).await;
index.update_settings_sortable_attributes(json!(["_geo"])).await;
index.add_documents(documents, None).await;
index.wait_task(2).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -128,7 +128,7 @@ async fn bug_4640() {
index.add_documents(documents, None).await;
index.update_settings_filterable_attributes(json!(["_geo"])).await;
let (ret, _code) = index.update_settings_sortable_attributes(json!(["_geo"])).await;
index.wait_task(ret.uid()).await;
index.wait_task(ret.uid()).await.succeeded();
// Sort the documents, expecting the second one first
index

View file

@ -30,11 +30,11 @@ async fn index_with_documents_user_provided<'a>(
"dimensions": 2}}} ))
.await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
index
}
@ -63,11 +63,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> I
}}} ))
.await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
index
}
@ -260,7 +260,7 @@ async fn distribution_shift() {
snapshot!(code, @"202 Accepted");
let response = server.wait_task(response.uid()).await;
snapshot!(response["details"], @r###"{"embedders":{"default":{"distribution":{"mean":0.998,"sigma":0.01}}}}"###);
snapshot!(response["details"], @r#"{"embedders":{"default":{"distribution":{"mean":0.998,"sigma":0.01}}}}"#);
let (response, code) = index.search_post(search).await;
snapshot!(code, @"200 OK");
@ -573,7 +573,7 @@ async fn retrieve_vectors() {
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(
@ -623,7 +623,7 @@ async fn retrieve_vectors() {
let (response, code) =
index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.search_post(

View file

@ -98,8 +98,8 @@ async fn simple_search() {
json!({"searchableAttributes": ["name_en", "name_ja", "name_zh", "author_en", "author_ja", "author_zh", "description_en", "description_ja", "description_zh"]}),
)
.await;
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
// english
index
@ -220,8 +220,8 @@ async fn force_locales() {
"enqueuedAt": "[date]"
}
"###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
// chinese detection
index
@ -298,8 +298,8 @@ async fn force_locales_with_pattern() {
"enqueuedAt": "[date]"
}
"###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
// chinese detection
index
@ -374,8 +374,8 @@ async fn force_locales_with_pattern_nested() {
"enqueuedAt": "[date]"
}
"###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
// chinese
index
@ -449,8 +449,8 @@ async fn force_different_locales_with_pattern() {
"enqueuedAt": "[date]"
}
"###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
// force chinese
index
@ -527,8 +527,8 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
"enqueuedAt": "[date]"
}
"###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
// auto infer any language
index
@ -601,8 +601,8 @@ async fn auto_infer_locales_at_search() {
"enqueuedAt": "[date]"
}
"###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -700,8 +700,8 @@ async fn force_different_locales_with_pattern_nested() {
"enqueuedAt": "[date]"
}
"###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
// chinese
index
@ -778,8 +778,8 @@ async fn settings_change() {
let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.update_settings(json!({
"searchableAttributes": ["document_en", "document_ja", "document_zh"],
@ -798,7 +798,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
index.wait_task(1).await;
index.wait_task(response.uid()).await.succeeded();
// chinese
index
@ -861,7 +861,7 @@ async fn settings_change() {
"enqueuedAt": "[date]"
}
"###);
index.wait_task(2).await;
index.wait_task(response.uid()).await.succeeded();
// chinese
index
@ -915,8 +915,8 @@ async fn invalid_locales() {
json!({"searchableAttributes": ["name_en", "name_ja", "name_zh", "author_en", "author_ja", "author_zh", "description_en", "description_ja", "description_zh"]}),
)
.await;
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.search_post(json!({"q": "Atta", "locales": ["invalid"]})).await;
snapshot!(code, @"400 Bad Request");
@ -1033,8 +1033,8 @@ async fn simple_facet_search() {
"enqueuedAt": "[date]"
}
"###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "進撃", "locales": ["cmn"]}))
@ -1095,8 +1095,8 @@ async fn facet_search_with_localized_attributes() {
"enqueuedAt": "[date]"
}
"###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _) = index
.facet_search(json!({"facetName": "name_zh", "facetQuery": "进击", "locales": ["cmn"]}))
@ -1165,7 +1165,7 @@ async fn swedish_search() {
]
}))
.await;
index.wait_task(1).await;
index.wait_task(_response.uid()).await.succeeded();
// infer swedish
index
@ -1286,7 +1286,7 @@ async fn german_search() {
]
}))
.await;
index.wait_task(1).await;
index.wait_task(_response.uid()).await.succeeded();
// infer german
index

View file

@ -8,8 +8,8 @@ use crate::json;
async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
let index = server.index("test");
index.add_documents(documents.clone(), None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
index
}

View file

@ -138,8 +138,8 @@ async fn phrase_search_with_stop_word() {
meili_snap::snapshot!(code, @"202 Accepted");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "how \"to\" train \"the" }), |response, code| {
@ -218,11 +218,12 @@ async fn negative_special_cases_search() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.update_settings(json!({"synonyms": { "escape": ["gläss"] }})).await;
index.wait_task(1).await;
let (task, _status_code) =
index.update_settings(json!({"synonyms": { "escape": ["gläss"] }})).await;
index.wait_task(task.uid()).await.succeeded();
// There is a synonym for escape -> glass but we don't want "escape", only the derivatives: glass
index
@ -247,8 +248,8 @@ async fn test_kanji_language_detection() {
{ "id": 1, "title": "東京のお寿司。" },
{ "id": 2, "title": "הַשּׁוּעָל הַמָּהִיר (״הַחוּם״) לֹא יָכוֹל לִקְפֹּץ 9.94 מֶטְרִים, נָכוֹן? ברר, 1.5°C- בַּחוּץ!" }
]);
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "東京"}), |response, code| {
@ -270,11 +271,11 @@ async fn test_thai_language() {
{ "id": 1, "title": "สบู่สมุนไพรชาเขียว 100 กรัม จำนวน 6 ก้อน" },
{ "id": 2, "title": "สบู่สมุนไพรฝางแดงผสมว่านหางจรเข้ 100 กรัม จำนวน 6 ก้อน" }
]);
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index.update_settings(json!({"rankingRules": ["exactness"]})).await;
index.wait_task(1).await;
let (task, _status_code) = index.update_settings(json!({"rankingRules": ["exactness"]})).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "สบู"}), |response, code| {
@ -329,9 +330,9 @@ async fn search_with_filter_string_notation() {
meili_snap::snapshot!(code, @"202 Accepted");
let documents = DOCUMENTS.clone();
let (_, code) = index.add_documents(documents, None).await;
let (task, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(1).await;
let res = index.wait_task(task.uid()).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);
index
@ -353,9 +354,9 @@ async fn search_with_filter_string_notation() {
meili_snap::snapshot!(code, @"202 Accepted");
let documents = NESTED_DOCUMENTS.clone();
let (_, code) = index.add_documents(documents, None).await;
let (task, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(3).await;
let res = index.wait_task(task.uid()).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);
index
@ -607,8 +608,8 @@ async fn displayed_attributes() {
index.update_settings(json!({ "displayedAttributes": ["title"] })).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await;
@ -622,8 +623,8 @@ async fn placeholder_search_is_hard_limited() {
let index = server.index("test");
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
index.add_documents(documents.into(), None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents.into(), None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -650,8 +651,9 @@ async fn placeholder_search_is_hard_limited() {
)
.await;
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(1).await;
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -685,8 +687,8 @@ async fn search_is_hard_limited() {
let index = server.index("test");
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
index.add_documents(documents.into(), None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents.into(), None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -715,8 +717,9 @@ async fn search_is_hard_limited() {
)
.await;
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(1).await;
let (task, _status_code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -754,8 +757,8 @@ async fn faceting_max_values_per_facet() {
index.update_settings(json!({ "filterableAttributes": ["number"] })).await;
let documents: Vec<_> = (0..10_000).map(|id| json!({ "id": id, "number": id * 10 })).collect();
index.add_documents(json!(documents), None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(json!(documents), None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -770,8 +773,9 @@ async fn faceting_max_values_per_facet() {
)
.await;
index.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })).await;
index.wait_task(2).await;
let (task, _status_code) =
index.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -795,7 +799,7 @@ async fn test_score_details() {
let documents = DOCUMENTS.clone();
let res = index.add_documents(json!(documents), None).await;
index.wait_task(res.0.uid()).await;
index.wait_task(res.0.uid()).await.succeeded();
index
.search(
@ -868,7 +872,7 @@ async fn test_score() {
let documents = SCORE_DOCUMENTS.clone();
let res = index.add_documents(json!(documents), None).await;
index.wait_task(res.0.uid()).await;
index.wait_task(res.0.uid()).await.succeeded();
index
.search(
@ -921,7 +925,7 @@ async fn test_score_threshold() {
let documents = SCORE_DOCUMENTS.clone();
let res = index.add_documents(json!(documents), None).await;
index.wait_task(res.0.uid()).await;
index.wait_task(res.0.uid()).await.succeeded();
index
.search(
@ -1077,7 +1081,7 @@ async fn test_degraded_score_details() {
index.add_documents(json!(documents), None).await;
// We can't really use anything other than 0ms here; otherwise, the test will get flaky.
let (res, _code) = index.update_settings(json!({ "searchCutoffMs": 0 })).await;
index.wait_task(res.uid()).await;
index.wait_task(res.uid()).await.succeeded();
index
.search(
@ -1162,8 +1166,8 @@ async fn experimental_feature_vector_store() {
let documents = DOCUMENTS.clone();
index.add_documents(json!(documents), None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(json!(documents), None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.search_post(json!({
@ -1369,8 +1373,8 @@ async fn camelcased_words() {
{ "id": 3, "title": "TestAb" },
{ "id": 4, "title": "testab" },
]);
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "deLonghi"}), |response, code| {
@ -1587,13 +1591,14 @@ async fn simple_search_with_strange_synonyms() {
let server = Server::new().await;
let index = server.index("test");
index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await;
let r = index.wait_task(0).await;
let (task, _status_code) =
index.update_settings(json!({ "synonyms": {"&": ["to"], "to": ["&"]} })).await;
let r = index.wait_task(task.uid()).await;
meili_snap::snapshot!(r["status"], @r###""succeeded""###);
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "How to train"}), |response, code| {
@ -1679,11 +1684,12 @@ async fn change_attributes_settings() {
index.update_settings(json!({ "searchableAttributes": ["father", "mother"] })).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(json!(documents), None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(json!(documents), None).await;
index.wait_task(task.uid()).await.succeeded();
index.update_settings(json!({ "searchableAttributes": ["father", "mother", "doggos"], "filterableAttributes": ["doggos"] })).await;
index.wait_task(2).await;
let (task, _status_code) =
index.update_settings(json!({ "searchableAttributes": ["father", "mother", "doggos"], "filterableAttributes": ["doggos"] })).await;
index.wait_task(task.uid()).await.succeeded();
// search
index

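The multi-search hunks that follow repeat the migration across two indexes, which is where the hardcoded uids were most fragile: task uids are assigned server-wide, not per index, so `test` happened to get uid 0 and `nested` uid 1 only because nothing else was enqueued first. A sketch of the uid-agnostic setup, same helper assumptions (`DOCUMENTS` and `NESTED_DOCUMENTS` are the shared fixtures):

let test = server.index("test");
let (task, _) = test.add_documents(DOCUMENTS.clone(), None).await;
test.wait_task(task.uid()).await.succeeded();
let nested = server.index("nested");
// Server-wide numbering: this task is uid 1 today, but it would shift if
// the fixture ever enqueued another task first.
let (task, _) = nested.add_documents(NESTED_DOCUMENTS.clone(), None).await;
nested.wait_task(task.uid()).await.succeeded();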
View file

@ -89,8 +89,8 @@ async fn simple_search_single_index() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -161,8 +161,8 @@ async fn federation_single_search_single_index() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -208,8 +208,8 @@ async fn federation_multiple_search_single_index() {
let index = server.index("test");
let documents = SCORE_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -283,8 +283,8 @@ async fn federation_two_search_single_index() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -351,8 +351,8 @@ async fn simple_search_missing_index_uid() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -376,8 +376,8 @@ async fn federation_simple_search_missing_index_uid() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -401,8 +401,8 @@ async fn simple_search_illegal_index_uid() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -426,8 +426,8 @@ async fn federation_search_illegal_index_uid() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -451,13 +451,13 @@ async fn simple_search_two_indexes() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -558,13 +558,13 @@ async fn federation_two_search_two_indexes() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -666,18 +666,18 @@ async fn federation_multiple_search_multiple_indexes() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("score");
let documents = SCORE_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(2).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -924,8 +924,8 @@ async fn search_one_index_doesnt_exist() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -950,8 +950,8 @@ async fn federation_one_index_doesnt_exist() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -1021,13 +1021,13 @@ async fn search_one_query_error() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -1053,13 +1053,13 @@ async fn federation_one_query_error() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -1085,13 +1085,13 @@ async fn federation_one_query_sort_error() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -1117,13 +1117,13 @@ async fn search_multiple_query_errors() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -1149,13 +1149,13 @@ async fn federation_multiple_query_errors() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -1181,13 +1181,13 @@ async fn federation_multiple_query_sort_errors() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -1213,13 +1213,13 @@ async fn federation_multiple_query_errors_interleaved() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -1246,13 +1246,13 @@ async fn federation_multiple_query_sort_errors_interleaved() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"queries": [
@ -1280,14 +1280,14 @@ async fn federation_filter() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
)
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -1348,7 +1348,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -1363,7 +1363,7 @@ async fn federation_sort_same_indexes_same_criterion_same_direction() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// two identical placeholder searches should have all results from the first query
let (response, code) = server
@ -1611,7 +1611,7 @@ async fn federation_sort_same_indexes_same_criterion_opposite_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -1626,7 +1626,7 @@ async fn federation_sort_same_indexes_same_criterion_opposite_direction() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// two identical placeholder searches should have all results from the first query
let (response, code) = server
@ -1671,7 +1671,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -1686,7 +1686,7 @@ async fn federation_sort_same_indexes_different_criterion_same_direction() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// return mothers and fathers ordered across fields.
let (response, code) = server
@ -1935,7 +1935,7 @@ async fn federation_sort_same_indexes_different_criterion_opposite_direction() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -1950,7 +1950,7 @@ async fn federation_sort_same_indexes_different_criterion_opposite_direction() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// two identical placeholder searches should have all results from the first query
let (response, code) = server
@ -1995,7 +1995,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
let documents = DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2010,13 +2010,13 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("batman");
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2031,7 +2031,7 @@ async fn federation_sort_different_indexes_same_criterion_same_direction() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// return titles ordered across indexes
let (response, code) = server
@ -2307,7 +2307,7 @@ async fn federation_sort_different_ranking_rules() {
let documents = DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2322,13 +2322,13 @@ async fn federation_sort_different_ranking_rules() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("batman");
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2343,7 +2343,7 @@ async fn federation_sort_different_ranking_rules() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// return titles ordered across indexes
let (response, code) = server
@ -2546,7 +2546,7 @@ async fn federation_sort_different_indexes_same_criterion_opposite_direction() {
let documents = DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2561,13 +2561,13 @@ async fn federation_sort_different_indexes_same_criterion_opposite_direction() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("batman");
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2582,7 +2582,7 @@ async fn federation_sort_different_indexes_same_criterion_opposite_direction() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// all results from query 0
let (response, code) = server
@ -2628,7 +2628,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
let documents = DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2643,13 +2643,13 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("batman");
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2664,7 +2664,7 @@ async fn federation_sort_different_indexes_different_criterion_same_direction()
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// return titles ordered across indexes
let (response, code) = server
@ -2940,7 +2940,7 @@ async fn federation_sort_different_indexes_different_criterion_opposite_directio
let documents = DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2955,13 +2955,13 @@ async fn federation_sort_different_indexes_different_criterion_opposite_directio
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("batman");
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -2976,7 +2976,7 @@ async fn federation_sort_different_indexes_different_criterion_opposite_directio
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// all results from query 0 first
let (response, code) = server
@ -3020,18 +3020,18 @@ async fn federation_limit_offset() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("score");
let documents = SCORE_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(2).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
{
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -3338,18 +3338,18 @@ async fn federation_formatting() {
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("nested");
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = server.index("score");
let documents = SCORE_DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(2).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
{
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -3685,14 +3685,14 @@ async fn federation_invalid_weight() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
)
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -3719,14 +3719,14 @@ async fn federation_null_weight() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(
json!({"searchableAttributes": ["name"], "filterableAttributes": ["BOOST"]}),
)
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -3787,7 +3787,7 @@ async fn federation_federated_contains_pagination() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// fail when a federated query contains "limit"
let (response, code) = server
@ -3867,11 +3867,11 @@ async fn federation_federated_contains_facets() {
)
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// empty facets are actually OK
let (response, code) = server
@ -3951,7 +3951,7 @@ async fn federation_non_faceted_for_an_index() {
)
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("fruits-no-name");
@ -3961,17 +3961,17 @@ async fn federation_non_faceted_for_an_index() {
)
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("fruits-no-facets");
let (value, _) = index.update_settings(json!({"searchableAttributes": ["name"]})).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// fails
let (response, code) = server
@ -4071,7 +4071,7 @@ async fn federation_non_federated_contains_federation_option() {
let documents = FRUITS_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// fail when a non-federated query contains "federationOptions"
let (response, code) = server
@ -4116,12 +4116,12 @@ async fn federation_vector_single_index() {
}
}}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// same embedder
let (response, code) = server
@ -4320,12 +4320,12 @@ async fn federation_vector_two_indexes() {
},
}}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("vectors-sentiment");
@ -4337,12 +4337,12 @@ async fn federation_vector_two_indexes() {
}
}}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let documents = VECTOR_DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {}, "queries": [
@ -4802,7 +4802,7 @@ async fn federation_facets_different_indexes_same_facet() {
let documents = DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -4818,13 +4818,13 @@ async fn federation_facets_different_indexes_same_facet() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("batman");
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -4840,13 +4840,13 @@ async fn federation_facets_different_indexes_same_facet() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("batman-2");
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -4862,7 +4862,7 @@ async fn federation_facets_different_indexes_same_facet() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// return titles ordered across indexes
let (response, code) = server
@ -5369,7 +5369,7 @@ async fn federation_facets_same_indexes() {
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -5384,13 +5384,13 @@ async fn federation_facets_same_indexes() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("doggos-2");
let documents = NESTED_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -5405,7 +5405,7 @@ async fn federation_facets_same_indexes() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (response, code) = server
.multi_search(json!({"federation": {
@ -5670,7 +5670,7 @@ async fn federation_inconsistent_merge_order() {
let documents = DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -5686,13 +5686,13 @@ async fn federation_inconsistent_merge_order() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("movies-2");
let documents = DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -5711,13 +5711,13 @@ async fn federation_inconsistent_merge_order() {
}
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let index = server.index("batman");
let documents = SCORE_DOCUMENTS.clone();
let (value, _) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (value, _) = index
.update_settings(json!({
@ -5733,7 +5733,7 @@ async fn federation_inconsistent_merge_order() {
]
}))
.await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// without merging, it works
let (response, code) = server

View file
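
Editorial aside: every file in this diff makes the same two-step change: capture the task from the enqueue call instead of relying on a hard-coded uid, then assert the terminal status after waiting. Hard-coded uids break as soon as a test gains or loses a task, which is exactly the brittleness these hunks remove. A minimal sketch of how a chainable `.succeeded()` / `.failed()` assertion could be written over the JSON task value (hypothetical; the actual helper lives in the shared test-support code):

use serde_json::Value;

pub trait TaskStatusExt {
    /// Assert the awaited task reached the `succeeded` status, then pass it through.
    fn succeeded(self) -> Self;
    /// Assert the awaited task reached the `failed` status, then pass it through.
    fn failed(self) -> Self;
}

impl TaskStatusExt for Value {
    fn succeeded(self) -> Self {
        assert_eq!(self["status"], "succeeded", "task did not succeed: {self}");
        self
    }
    fn failed(self) -> Self {
        assert_eq!(self["status"], "failed", "task did not fail: {self}");
        self
    }
}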

@ -64,8 +64,8 @@ async fn search_no_searchable_attribute_set() {
)
.await;
index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(1).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -77,8 +77,8 @@ async fn search_no_searchable_attribute_set() {
)
.await;
index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(2).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["*"])).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(
@ -108,8 +108,8 @@ async fn search_on_all_attributes() {
async fn search_on_all_attributes_restricted_set() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
index.update_settings_searchable_attributes(json!(["title"])).await;
index.wait_task(1).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
index.wait_task(task.uid()).await.succeeded();
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@ -191,8 +191,10 @@ async fn word_ranking_rule_order() {
async fn word_ranking_rule_order_exact_words() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
index.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]})).await;
index.wait_task(1).await;
let (task, _status_code) = index
.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
.await;
index.wait_task(task.uid()).await.succeeded();
// simple search should return 2 documents (ids: 2 and 3).
index
@ -358,7 +360,7 @@ async fn search_on_exact_field() {
let (response, code) =
index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(1).await;
index.wait_task(response.uid()).await.succeeded();
// Searching on an exact attribute should only return the document that matches without a typo.
index
.search(json!({"q": "Marvel", "attributesToSearchOn": ["exact"]}), |response, code| {

View file
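
Editorial aside: `task.uid()` in the rewritten lines presumably pulls the uid out of the `202 Accepted` body, so tests no longer depend on the global task counter. A hypothetical equivalent over the raw JSON (the `taskUid` field name matches what these tests assert on):

fn uid_of(response: &serde_json::Value) -> u64 {
    // Index routes answer with `taskUid`; task objects themselves carry `uid`.
    response["taskUid"]
        .as_u64()
        .or_else(|| response["uid"].as_u64())
        .expect("response should contain a task uid")
}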

@ -6,16 +6,16 @@ async fn set_and_reset_distinct_attribute() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(0).await;
let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(task1.uid()).await.succeeded();
let (response, _) = index.settings().await;
assert_eq!(response["distinctAttribute"], "test");
index.update_settings(json!({ "distinctAttribute": null })).await;
let (task2, _status_code) = index.update_settings(json!({ "distinctAttribute": null })).await;
index.wait_task(1).await;
index.wait_task(task2.uid()).await.succeeded();
let (response, _) = index.settings().await;
@ -27,16 +27,16 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(0).await;
let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(update_task1.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
assert_eq!(response, "test");
index.update_distinct_attribute(json!(null)).await;
let (update_task2, _status_code) = index.update_distinct_attribute(json!(null)).await;
index.wait_task(1).await;
index.wait_task(update_task2.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;

View file

@ -193,7 +193,7 @@ async fn get_settings() {
let server = Server::new().await;
let index = server.index("test");
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
let settings = response.as_object().unwrap();
@ -250,7 +250,7 @@ async fn secrets_are_hidden_in_settings() {
let index = server.index("test");
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -285,7 +285,7 @@ async fn secrets_are_hidden_in_settings() {
let (response, code) = index.settings().await;
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
meili_snap::snapshot!(meili_snap::json_string!(response), @r#"
{
"displayedAttributes": [
"*"
@ -346,11 +346,11 @@ async fn secrets_are_hidden_in_settings() {
"facetSearch": true,
"prefixSearch": "indexingTime"
}
"###);
"#);
let (response, code) = server.get_task(settings_update_uid).await;
meili_snap::snapshot!(code, @"200 OK");
meili_snap::snapshot!(meili_snap::json_string!(response["details"]), @r###"
meili_snap::snapshot!(meili_snap::json_string!(response["details"]), @r#"
{
"embedders": {
"default": {
@ -363,7 +363,7 @@ async fn secrets_are_hidden_in_settings() {
}
}
}
"###);
"#);
}
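// (Editorial sketch, not part of this diff.) The snapshot-delimiter churn above,
// from r###"..."### to r#"..."#, is purely cosmetic Rust: one `#` suffices
// whenever the literal contains no `"#` sequence, and extra hashes only exist
// to escape such sequences, e.g.:
//
//     let plain = r#"{"embedders": {"default": {}}}"#; // no `"#` inside
//     let tricky = r##"contains "# inside"##; // needs two hashes to close safely
//     assert_ne!(plain, tricky);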
#[actix_rt::test]
@ -378,15 +378,15 @@ async fn error_update_settings_unknown_field() {
async fn test_partial_update() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(0).await;
let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["*"]));
let (_response, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
index.wait_task(1).await;
let (task, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
@ -398,10 +398,10 @@ async fn test_partial_update() {
async fn error_delete_settings_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.delete_settings().await;
let (task, code) = index.delete_settings().await;
assert_eq!(code, 202);
let response = index.wait_task(0).await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
}
@ -422,12 +422,12 @@ async fn reset_all_settings() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["taskUid"], 0);
index.wait_task(0).await;
index.wait_task(response.uid()).await.succeeded();
index
let (update_task, _status_code) = index
.update_settings(json!({"displayedAttributes": ["name", "age"], "searchableAttributes": ["name"], "stopWords": ["the"], "filterableAttributes": ["age"], "synonyms": {"puppy": ["dog", "doggo", "potat"] }}))
.await;
index.wait_task(1).await;
index.wait_task(update_task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
@ -436,8 +436,8 @@ async fn reset_all_settings() {
assert_eq!(response["synonyms"], json!({"puppy": ["dog", "doggo", "potat"] }));
assert_eq!(response["filterableAttributes"], json!(["age"]));
index.delete_settings().await;
index.wait_task(2).await;
let (delete_task, _status_code) = index.delete_settings().await;
index.wait_task(delete_task.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200);
@ -456,14 +456,14 @@ async fn reset_all_settings() {
async fn update_setting_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.update_settings(json!({})).await;
let (task, code) = index.update_settings(json!({})).await;
assert_eq!(code, 202);
let response = index.wait_task(0).await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
let (_response, code) = index.get().await;
assert_eq!(code, 200);
index.delete_settings().await;
let response = index.wait_task(1).await;
let (task, _status_code) = index.delete_settings().await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}
@ -506,16 +506,16 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(0).await;
let (task, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;
assert_eq!(response, "test");
index.update_distinct_attribute(json!(null)).await;
let (task, _status_code) = index.update_distinct_attribute(json!(null)).await;
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _) = index.get_distinct_attribute().await;

View file
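
Editorial aside: a self-contained sketch of what `wait_task` amounts to over the public API (hypothetical; the in-repo helper drives the test server directly and also enforces a timeout). The route and terminal statuses follow the Meilisearch tasks API:

async fn wait_task_sketch(base_url: &str, uid: u64) -> serde_json::Value {
    let client = reqwest::Client::new();
    loop {
        let task: serde_json::Value = client
            .get(format!("{base_url}/tasks/{uid}"))
            .send()
            .await
            .unwrap()
            .json()
            .await
            .unwrap();
        match task["status"].as_str() {
            // Terminal statuses: hand the task back so `.succeeded()` can assert on it.
            Some("succeeded") | Some("failed") | Some("canceled") => return task,
            _ => tokio::time::sleep(std::time::Duration::from_millis(50)).await,
        }
    }
}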

@ -29,8 +29,8 @@ async fn attribute_scale_search() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -39,7 +39,7 @@ async fn attribute_scale_search() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(1).await;
index.wait_task(response.uid()).await.succeeded();
// the expected order is [1, 3, 2] instead of [3, 1, 2]
// because the attribute scale doesn't distinguish between documents 1 and 3.
@ -102,16 +102,16 @@ async fn attribute_scale_phrase_search() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
let (_response, _code) = index
let (task, _code) = index
.update_settings(json!({
"proximityPrecision": "byAttribute",
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
// the expected order is [1, 3] instead of [3, 1]
// because the attribute scale doesn't distinguish between documents 1 and 3.
@ -170,25 +170,25 @@ async fn word_scale_set_and_reset() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
// Set and reset the setting ensuring the swap between the 2 settings is applied.
let (_response, _code) = index
let (update_task1, _code) = index
.update_settings(json!({
"proximityPrecision": "byAttribute",
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(1).await;
index.wait_task(update_task1.uid()).await.succeeded();
let (_response, _code) = index
let (update_task2, _code) = index
.update_settings(json!({
"proximityPrecision": "byWord",
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
index.wait_task(2).await;
index.wait_task(update_task2.uid()).await.succeeded();
// [3, 1, 2]
index
@ -285,8 +285,8 @@ async fn attribute_scale_default_ranking_rules() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(0).await;
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -294,7 +294,7 @@ async fn attribute_scale_default_ranking_rules() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(1).await;
index.wait_task(response.uid()).await.succeeded();
// the expected order is [3, 1, 2]
index

View file

@ -8,14 +8,14 @@ async fn set_and_reset() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index
let (task, _code) = index
.update_settings(json!({
"nonSeparatorTokens": ["#", "&"],
"separatorTokens": ["&sep", "<br/>"],
"dictionary": ["J.R.R.", "J. R. R."],
}))
.await;
index.wait_task(0).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @r###"
@ -37,7 +37,7 @@ async fn set_and_reset() {
]
"###);
index
let (task, _status_code) = index
.update_settings(json!({
"nonSeparatorTokens": null,
"separatorTokens": null,
@ -45,7 +45,7 @@ async fn set_and_reset() {
}))
.await;
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
let (response, _) = index.settings().await;
snapshot!(json_string!(response["nonSeparatorTokens"]), @"[]");
@ -73,17 +73,17 @@ async fn set_and_search() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
let (_response, _code) = index
let (update_task, _code) = index
.update_settings(json!({
"nonSeparatorTokens": ["#", "&"],
"separatorTokens": ["<br/>", "&sep"],
"dictionary": ["#", "A#", "B#", "C#", "D#", "E#", "F#", "G#"],
}))
.await;
index.wait_task(1).await;
index.wait_task(update_task.uid()).await.succeeded();
index
.search(json!({"q": "&", "attributesToHighlight": ["content"]}), |response, code| {
@ -227,10 +227,10 @@ async fn advanced_synergies() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
let (_response, _code) = index
let (update_task, _code) = index
.update_settings(json!({
"dictionary": ["J.R.R.", "J. R. R."],
"synonyms": {
@ -243,7 +243,7 @@ async fn advanced_synergies() {
}
}))
.await;
index.wait_task(1).await;
index.wait_task(update_task.uid()).await.succeeded();
index
.search(json!({"q": "J.R.R.", "attributesToHighlight": ["content"]}), |response, code| {
@ -353,7 +353,7 @@ async fn advanced_synergies() {
"dictionary": ["J.R.R.", "J. R. R.", "J.K.", "J. K."],
}))
.await;
index.wait_task(2).await;
index.wait_task(_response.uid()).await.succeeded();
index
.search(json!({"q": "jk", "attributesToHighlight": ["content"]}), |response, code| {

View file

@ -324,7 +324,7 @@ async fn similar_bad_filter() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (response, code) =
index.similar_post(json!({ "id": 287947, "filter": true, "embedder": "manual" })).await;
@ -362,7 +362,7 @@ async fn filter_invalid_syntax_object() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
index
.similar(json!({"id": 287947, "filter": "title & Glass", "embedder": "manual"}), |response, code| {
@ -401,7 +401,7 @@ async fn filter_invalid_syntax_array() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
index
.similar(json!({"id": 287947, "filter": ["title & Glass"], "embedder": "manual"}), |response, code| {
@ -440,7 +440,7 @@ async fn filter_invalid_syntax_string() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let expected_response = json!({
"message": "Found unexpected characters at the end of the filter: `XOR title = Glass`. You probably forgot an `OR` or an `AND` rule.\n15:32 title = Glass XOR title = Glass",
@ -481,7 +481,7 @@ async fn filter_invalid_attribute_array() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let expected_response = json!({
"message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
@ -522,7 +522,7 @@ async fn filter_invalid_attribute_string() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let expected_response = json!({
"message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
@ -563,7 +563,7 @@ async fn filter_reserved_geo_attribute_array() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:13 _geo = Glass",
@ -604,7 +604,7 @@ async fn filter_reserved_geo_attribute_string() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:13 _geo = Glass",
@ -645,7 +645,7 @@ async fn filter_reserved_attribute_array() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:21 _geoDistance = Glass",
@ -686,7 +686,7 @@ async fn filter_reserved_attribute_string() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:21 _geoDistance = Glass",
@ -727,7 +727,7 @@ async fn filter_reserved_geo_point_array() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:18 _geoPoint = Glass",
@ -768,7 +768,7 @@ async fn filter_reserved_geo_point_string() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let expected_response = json!({
"message": "`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.\n1:18 _geoPoint = Glass",

View file

@ -77,7 +77,7 @@ async fn basic() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -274,7 +274,7 @@ async fn ranking_score_threshold() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -555,7 +555,7 @@ async fn filter() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -684,7 +684,7 @@ async fn limit_and_offset() {
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
index
.similar(

View file

@ -56,7 +56,7 @@ async fn perform_snapshot() {
let (task, code) = server.index("test1").create(Some("prim")).await;
meili_snap::snapshot!(code, @"202 Accepted");
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
// wait for the _next task_ to process, aka the snapshot that should be enqueued at some point
@ -129,10 +129,10 @@ async fn perform_on_demand_snapshot() {
index.load_test_set().await;
let (task, _) = server.index("doggo").create(Some("bone")).await;
let (task, _status_code) = server.index("doggo").create(Some("bone")).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = server.index("doggo").create(Some("bone")).await;
let (task, _status_code) = server.index("doggo").create(Some("bone")).await;
index.wait_task(task.uid()).await.failed();
let (task, code) = server.create_snapshot().await;

View file

@ -28,10 +28,10 @@ async fn test_healthyness() {
async fn stats() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.create(Some("id")).await;
let (task, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
index.wait_task(0).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = server.stats().await;
@ -57,7 +57,7 @@ async fn stats() {
assert_eq!(code, 202, "{}", response);
assert_eq!(response["taskUid"], 1);
index.wait_task(1).await;
index.wait_task(response.uid()).await.succeeded();
let timestamp = OffsetDateTime::now_utc();
let (response, code) = server.stats().await;

View file
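
Editorial aside: index swaps are server-level tasks, which is why the hunks below wait with `server.wait_task(res.uid())` rather than going through an index handle. The payload shape, taken from the tests themselves:

let _swap = json!([
    { "indexes": ["a", "b"] },
    { "indexes": ["c", "d"] }
]);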

@ -15,7 +15,7 @@ async fn swap_indexes() {
let (res, code) = b.add_documents(json!({ "id": 1, "index": "b"}), None).await;
snapshot!(code, @"202 Accepted");
snapshot!(res["taskUid"], @"1");
server.wait_task(1).await;
server.wait_task(res.uid()).await;
let (tasks, code) = server.tasks().await;
snapshot!(code, @"200 OK");
@ -67,7 +67,7 @@ async fn swap_indexes() {
let (res, code) = server.index_swap(json!([{ "indexes": ["a", "b"] }])).await;
snapshot!(code, @"202 Accepted");
snapshot!(res["taskUid"], @"2");
server.wait_task(2).await;
server.wait_task(res.uid()).await;
let (tasks, code) = server.tasks().await;
snapshot!(code, @"200 OK");
@ -159,7 +159,7 @@ async fn swap_indexes() {
let (res, code) = d.add_documents(json!({ "id": 1, "index": "d"}), None).await;
snapshot!(code, @"202 Accepted");
snapshot!(res["taskUid"], @"4");
server.wait_task(4).await;
server.wait_task(res.uid()).await;
// ensure the index creation worked properly
let (tasks, code) = server.tasks_filter("limit=2").await;
@ -215,7 +215,7 @@ async fn swap_indexes() {
server.index_swap(json!([{ "indexes": ["a", "b"] }, { "indexes": ["c", "d"] } ])).await;
snapshot!(res["taskUid"], @"5");
snapshot!(code, @"202 Accepted");
server.wait_task(5).await;
server.wait_task(res.uid()).await;
// ensure the index creation worked properly
let (tasks, code) = server.tasks().await;

View file
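
Editorial aside: `filtered_tasks(...)` appears to map its slices onto the `/tasks` query string (an assumption about the helper; the parameter names below are the documented Meilisearch ones). A hand-rolled equivalent:

fn tasks_query(statuses: &[&str], types: &[&str]) -> String {
    // Empty slices simply omit the corresponding parameter.
    let mut params = Vec::new();
    if !statuses.is_empty() {
        params.push(format!("statuses={}", statuses.join(",")));
    }
    if !types.is_empty() {
        params.push(format!("types={}", types.join(",")));
    }
    format!("/tasks?{}", params.join("&"))
}

For example, `tasks_query(&["succeeded", "failed"], &[])` yields `/tasks?statuses=succeeded,failed`, mirroring the two-status assertion in `list_tasks_status_filtered` below.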

@ -13,8 +13,8 @@ use crate::json;
async fn error_get_unexisting_task_status() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(1).await;
let expected_response = json!({
@ -32,8 +32,8 @@ async fn error_get_unexisting_task_status() {
async fn get_task_status() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index
let (create_task, _status_code) = index.create(None).await;
let (add_task, _status_code) = index
.add_documents(
json!([{
"id": 1,
@ -42,8 +42,8 @@ async fn get_task_status() {
None,
)
.await;
index.wait_task(0).await;
let (_response, code) = index.get_task(1).await;
index.wait_task(create_task.uid()).await.succeeded();
let (_response, code) = index.get_task(add_task.uid()).await;
assert_eq!(code, 200);
// TODO check response format, as per #48
}
@ -52,8 +52,8 @@ async fn get_task_status() {
async fn list_tasks() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -105,7 +105,7 @@ async fn list_tasks_with_star_filters() {
let server = Server::new().await;
let index = server.index("test");
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await;
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -149,25 +149,21 @@ async fn list_tasks_with_star_filters() {
async fn list_tasks_status_filtered() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.failed();
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
// We can't be sure that the update isn't already processed, so we can't test this
// let (response, code) = index.filtered_tasks(&[], &["processing"]).await;
// assert_eq!(code, 200, "{}", response);
// assert_eq!(response["results"].as_array().unwrap().len(), 1);
index.wait_task(1).await;
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index.filtered_tasks(&[], &["succeeded", "failed"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
@ -175,8 +171,8 @@ async fn list_tasks_status_filtered() {
async fn list_tasks_type_filtered() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -195,8 +191,8 @@ async fn list_tasks_type_filtered() {
async fn list_tasks_invalid_canceled_by_filter() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -210,8 +206,8 @@ async fn list_tasks_invalid_canceled_by_filter() {
async fn list_tasks_status_and_type_filtered() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -278,8 +274,9 @@ async fn test_summarized_task_view() {
async fn test_summarized_document_addition_or_update() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
index.wait_task(0).await;
let (task, _status_code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(0).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -303,8 +300,9 @@ async fn test_summarized_document_addition_or_update() {
}
"###);
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
index.wait_task(1).await;
let (task, _status_code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(1).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -333,8 +331,8 @@ async fn test_summarized_document_addition_or_update() {
async fn test_summarized_delete_documents_by_batch() {
let server = Server::new().await;
let index = server.index("test");
index.delete_batch(vec![1, 2, 3]).await;
index.wait_task(0).await;
let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await;
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(0).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@ -365,9 +363,9 @@ async fn test_summarized_delete_documents_by_batch() {
"###);
index.create(None).await;
index.delete_batch(vec![42]).await;
index.wait_task(2).await;
let (task, _) = index.get_task(2).await;
let (del_task, _status_code) = index.delete_batch(vec![42]).await;
index.wait_task(del_task.uid()).await.succeeded();
let (task, _) = index.get_task(del_task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -397,9 +395,10 @@ async fn test_summarized_delete_documents_by_filter() {
let server = Server::new().await;
let index = server.index("test");
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(0).await;
let (task, _) = index.get_task(0).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -429,9 +428,10 @@ async fn test_summarized_delete_documents_by_filter() {
"###);
index.create(None).await;
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(2).await;
let (task, _) = index.get_task(2).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -461,9 +461,10 @@ async fn test_summarized_delete_documents_by_filter() {
"###);
index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await;
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(4).await;
let (task, _) = index.get_task(4).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -492,9 +493,9 @@ async fn test_summarized_delete_documents_by_filter() {
async fn test_summarized_delete_document_by_id() {
let server = Server::new().await;
let index = server.index("test");
index.delete_document(1).await;
index.wait_task(0).await;
let (task, _) = index.get_task(0).await;
let (task, _status_code) = index.delete_document(1).await;
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -524,9 +525,9 @@ async fn test_summarized_delete_document_by_id() {
"###);
index.create(None).await;
index.delete_document(42).await;
index.wait_task(2).await;
let (task, _) = index.get_task(2).await;
let (task, _status_code) = index.delete_document(42).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -567,9 +568,9 @@ async fn test_summarized_settings_update() {
}
"###);
index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
index.wait_task(0).await;
let (task, _) = index.get_task(0).await;
let (task, _status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -606,9 +607,9 @@ async fn test_summarized_settings_update() {
async fn test_summarized_index_creation() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let (task, _) = index.get_task(0).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -630,9 +631,9 @@ async fn test_summarized_index_creation() {
}
"###);
index.create(Some("doggos")).await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
let (task, _status_code) = index.create(Some("doggos")).await;
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -774,9 +775,9 @@ async fn test_summarized_index_update() {
let server = Server::new().await;
let index = server.index("test");
// If the index doesn't exist yet, we should get errors with or without the primary key.
index.update(None).await;
index.wait_task(0).await;
let (task, _) = index.get_task(0).await;
let (task, _status_code) = index.update(None).await;
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -803,9 +804,9 @@ async fn test_summarized_index_update() {
}
"###);
index.update(Some("bones")).await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
let (task, _status_code) = index.update(Some("bones")).await;
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -835,9 +836,9 @@ async fn test_summarized_index_update() {
// And run the same two tests once the index does exist.
index.create(None).await;
index.update(None).await;
index.wait_task(3).await;
let (task, _) = index.get_task(3).await;
let (task, _status_code) = index.update(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -859,9 +860,9 @@ async fn test_summarized_index_update() {
}
"###);
index.update(Some("bones")).await;
index.wait_task(4).await;
let (task, _) = index.get_task(4).await;
let (task, _status_code) = index.update(Some("bones")).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -887,13 +888,13 @@ async fn test_summarized_index_update() {
#[actix_web::test]
async fn test_summarized_index_swap() {
let server = Server::new().await;
server
let (task, _status_code) = server
.index_swap(json!([
{ "indexes": ["doggos", "cattos"] }
]))
.await;
server.wait_task(0).await;
let (task, _) = server.get_task(0).await;
server.wait_task(task.uid()).await.failed();
let (task, _) = server.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -927,15 +928,17 @@ async fn test_summarized_index_swap() {
}
"###);
server.index("doggos").create(None).await;
server.index("cattos").create(None).await;
server
let (task, _code) = server.index("doggos").create(None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = server.index("cattos").create(None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = server
.index_swap(json!([
{ "indexes": ["doggos", "cattos"] }
]))
.await;
server.wait_task(3).await;
let (task, _) = server.get_task(3).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _) = server.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -970,11 +973,11 @@ async fn test_summarized_task_cancelation() {
let server = Server::new().await;
let index = server.index("doggos");
// to avoid being flaky we're only going to cancel an already finished task :(
index.create(None).await;
index.wait_task(0).await;
server.cancel_tasks("uids=0").await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.cancel_tasks("uids=0").await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -1004,11 +1007,11 @@ async fn test_summarized_task_deletion() {
let server = Server::new().await;
let index = server.index("doggos");
// to avoid being flaky we're only going to delete an already finished task :(
index.create(None).await;
index.wait_task(0).await;
server.delete_tasks("uids=0").await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.delete_tasks("uids=0").await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
@ -1036,9 +1039,9 @@ async fn test_summarized_task_deletion() {
#[actix_web::test]
async fn test_summarized_dump_creation() {
let server = Server::new().await;
server.create_dump().await;
server.wait_task(0).await;
let (task, _) = server.get_task(0).await;
let (task, _status_code) = server.create_dump().await;
server.wait_task(task.uid()).await;
let (task, _) = server.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"

View file

@ -35,7 +35,7 @@ async fn retrieve_binary_quantize_status_in_the_settings() {
let (settings, code) = index.settings().await;
snapshot!(code, @"200 OK");
snapshot!(settings["embedders"]["manual"], @r###"{"source":"userProvided","dimensions":3}"###);
snapshot!(settings["embedders"]["manual"], @r#"{"source":"userProvided","dimensions":3}"#);
let (response, code) = index
.update_settings(json!({
@ -53,7 +53,7 @@ async fn retrieve_binary_quantize_status_in_the_settings() {
let (settings, code) = index.settings().await;
snapshot!(code, @"200 OK");
snapshot!(settings["embedders"]["manual"], @r###"{"source":"userProvided","dimensions":3,"binaryQuantized":false}"###);
snapshot!(settings["embedders"]["manual"], @r#"{"source":"userProvided","dimensions":3,"binaryQuantized":false}"#);
let (response, code) = index
.update_settings(json!({
@ -71,7 +71,7 @@ async fn retrieve_binary_quantize_status_in_the_settings() {
let (settings, code) = index.settings().await;
snapshot!(code, @"200 OK");
snapshot!(settings["embedders"]["manual"], @r###"{"source":"userProvided","dimensions":3,"binaryQuantized":true}"###);
snapshot!(settings["embedders"]["manual"], @r#"{"source":"userProvided","dimensions":3,"binaryQuantized":true}"#);
}
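The only change in this file's first three hunks is the snapshot delimiter: r###"…"### becomes r#"…"#. In Rust the number of #s around a raw string literal is arbitrary as long as the closing quote-plus-hashes sequence never occurs inside the content, so a single # suffices for these one-line JSON payloads. A standalone illustration (plain Rust, nothing Meilisearch-specific):

    fn main() {
        // Both literals denote the same string; extra `#`s are only needed
        // when the content itself contains the sequence `"#`.
        let heavy = r###"{"source":"userProvided","dimensions":3}"###;
        let light = r#"{"source":"userProvided","dimensions":3}"#;
        assert_eq!(heavy, light);
    }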
#[actix_rt::test]
@ -300,7 +300,7 @@ async fn try_to_disable_binary_quantization() {
.await;
snapshot!(code, @"202 Accepted");
let ret = server.wait_task(response.uid()).await;
snapshot!(ret, @r###"
snapshot!(ret, @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
@ -328,7 +328,7 @@ async fn try_to_disable_binary_quantization() {
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);
"#);
}
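These inline snapshots pass a redaction map ahead of the @-prefixed expected value, so volatile fields compare against stable placeholders rather than real durations and timestamps. A minimal sketch, assuming the snapshot!/assert_json_snapshot! macros used here behave like insta's (insta with its json feature; the task payload below is made up for illustration):

    use serde_json::json;

    fn main() {
        let task = json!({
            "duration": "PT0.012S",
            "enqueuedAt": "2025-01-16T11:17:17Z",
            "status": "succeeded",
        });
        // The selectors rewrite matching fields to fixed placeholders before
        // the value is compared against the inline snapshot.
        insta::assert_json_snapshot!(task,
            { ".duration" => "[duration]", ".enqueuedAt" => "[date]" },
            @r#"
        {
          "duration": "[duration]",
          "enqueuedAt": "[date]",
          "status": "succeeded"
        }
        "#);
    }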
#[actix_rt::test]

View file

@ -55,7 +55,7 @@ async fn add_remove_user_provided() {
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [0, 0, 0] }},
@ -63,7 +63,7 @@ async fn add_remove_user_provided() {
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
@ -116,7 +116,7 @@ async fn add_remove_user_provided() {
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
@ -159,7 +159,7 @@ async fn add_remove_user_provided() {
let (value, code) = index.delete_document(0).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
@ -221,7 +221,7 @@ async fn generate_default_user_provided_documents(server: &Server) -> Index {
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
index
}
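The pattern repeated through this file, index.wait_task(value.uid()).await.succeeded(), upgrades a bare wait into an assertion: a document addition is accepted with 202 long before it is processed, so a test that only waits can pass even when the task later fails. A hedged sketch of what such a chainable assertion can look like; the repo's test support defines its own wrapper type, and this standalone newtype is only illustrative:

    use serde_json::{json, Value};

    // Hypothetical wrapper around the JSON a task endpoint returns.
    struct Task(Value);

    impl Task {
        // Panics (failing the test) unless the task reached the expected
        // terminal status; returns self so further checks can chain.
        fn succeeded(self) -> Self {
            assert_eq!(self.0["status"], "succeeded", "task did not succeed: {}", self.0);
            self
        }
        fn failed(self) -> Self {
            assert_eq!(self.0["status"], "failed", "task did not fail: {}", self.0);
            self
        }
    }

    fn main() {
        let task = Task(json!({ "status": "succeeded" }));
        task.succeeded(); // passes; a task that ended in "failed" would panic here
    }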
@ -618,7 +618,7 @@ async fn clear_documents() {
let index = generate_default_user_provided_documents(&server).await;
let (value, _code) = index.clear_all_documents().await;
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// Make sure the documents DB has been cleared
let (documents, _code) = index

View file

@ -73,7 +73,7 @@ async fn update_embedder() {
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({