Merge with main

F. Levi 2025-06-20 09:59:09 +03:00
commit 90056c64f5
264 changed files with 8879 additions and 5892 deletions


@@ -13,50 +13,50 @@ license.workspace = true
default-run = "meilisearch"
[dependencies]
actix-cors = "0.7.0"
actix-http = { version = "3.9.0", default-features = false, features = [
actix-cors = "0.7.1"
actix-http = { version = "3.11.0", default-features = false, features = [
"compress-brotli",
"compress-gzip",
"rustls-0_23",
] }
actix-utils = "3.0.1"
actix-web = { version = "4.9.0", default-features = false, features = [
actix-web = { version = "4.11.0", default-features = false, features = [
"macros",
"compress-brotli",
"compress-gzip",
"cookies",
"rustls-0_23",
] }
anyhow = { version = "1.0.95", features = ["backtrace"] }
async-trait = "0.1.85"
bstr = "1.11.3"
anyhow = { version = "1.0.98", features = ["backtrace"] }
bstr = "1.12.0"
byte-unit = { version = "5.1.6", features = ["serde"] }
bytes = "1.9.0"
clap = { version = "4.5.24", features = ["derive", "env"] }
bytes = "1.10.1"
bumpalo = "3.18.1"
clap = { version = "4.5.40", features = ["derive", "env"] }
crossbeam-channel = "0.5.15"
deserr = { version = "0.6.3", features = ["actix-web"] }
dump = { path = "../dump" }
either = "1.13.0"
either = "1.15.0"
file-store = { path = "../file-store" }
flate2 = "1.0.35"
flate2 = "1.1.2"
fst = "0.4.7"
futures = "0.3.31"
futures-util = "0.3.31"
index-scheduler = { path = "../index-scheduler" }
indexmap = { version = "2.7.0", features = ["serde"] }
is-terminal = "0.4.13"
indexmap = { version = "2.9.0", features = ["serde"] }
is-terminal = "0.4.16"
itertools = "0.14.0"
jsonwebtoken = "9.3.0"
jsonwebtoken = "9.3.1"
lazy_static = "1.5.0"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
mimalloc = { version = "0.1.43", default-features = false }
mimalloc = { version = "0.1.47", default-features = false }
mime = "0.3.17"
num_cpus = "1.16.0"
num_cpus = "1.17.0"
obkv = "0.3.0"
once_cell = "1.20.2"
ordered-float = "4.6.0"
parking_lot = "0.12.3"
once_cell = "1.21.3"
ordered-float = "5.0.0"
parking_lot = "0.12.4"
permissive-json-pointer = { path = "../permissive-json-pointer" }
pin-project-lite = "0.2.16"
platform-dirs = "0.3.0"
@@ -64,44 +64,44 @@ prometheus = { version = "0.14.0", features = ["process"] }
rand = "0.8.5"
rayon = "1.10.0"
regex = "1.11.1"
reqwest = { version = "0.12.12", features = [
reqwest = { version = "0.12.20", features = [
"rustls-tls",
"json",
], default-features = false }
rustls = { version = "0.23.20", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.10.1", features = ["alloc"] }
rustls = { version = "0.23.28", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.12.0", features = ["alloc"] }
rustls-pemfile = "2.2.0"
segment = { version = "0.2.5" }
serde = { version = "1.0.217", features = ["derive"] }
serde_json = { version = "1.0.135", features = ["preserve_order"] }
sha2 = "0.10.8"
segment = { version = "0.2.6" }
serde = { version = "1.0.219", features = ["derive"] }
serde_json = { version = "1.0.140", features = ["preserve_order"] }
sha2 = "0.10.9"
siphasher = "1.0.1"
slice-group-by = "0.3.1"
static-files = { version = "0.2.4", optional = true }
sysinfo = "0.33.1"
tar = "0.4.43"
tempfile = "3.15.0"
thiserror = "2.0.9"
time = { version = "0.3.37", features = [
static-files = { version = "0.2.5", optional = true }
sysinfo = "0.35.2"
tar = "0.4.44"
tempfile = "3.20.0"
thiserror = "2.0.12"
time = { version = "0.3.41", features = [
"serde-well-known",
"formatting",
"parsing",
"macros",
] }
tokio = { version = "1.43.1", features = ["full"] }
toml = "0.8.19"
uuid = { version = "1.11.0", features = ["serde", "v4"] }
tokio = { version = "1.45.1", features = ["full"] }
toml = "0.8.23"
uuid = { version = "1.17.0", features = ["serde", "v4"] }
serde_urlencoded = "0.7.1"
termcolor = "1.4.1"
url = { version = "2.5.4", features = ["serde"] }
tracing = "0.1.41"
tracing-subscriber = { version = "0.3.19", features = ["json"] }
tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
tracing-actix-web = "0.7.15"
tracing-actix-web = "0.7.18"
build-info = { version = "1.7.0", path = "../build-info" }
roaring = "0.10.10"
roaring = "0.10.12"
mopa-maintained = "0.2.3"
utoipa = { version = "5.3.1", features = [
utoipa = { version = "5.4.0", features = [
"actix_extras",
"macros",
"non_strict_integers",
@@ -111,32 +111,35 @@ utoipa = { version = "5.3.1", features = [
"openapi_extensions",
] }
utoipa-scalar = { version = "0.3.0", optional = true, features = ["actix-web"] }
async-openai = { git = "https://github.com/meilisearch/async-openai", branch = "better-error-handling" }
secrecy = "0.10.3"
actix-web-lab = { version = "0.24.1", default-features = false }
[dev-dependencies]
actix-rt = "2.10.0"
brotli = "6.0.0"
brotli = "8.0.1"
# fixed version due to format breakages in v1.40
insta = "=1.39.0"
insta = { version = "=1.39.0", features = ["redactions"] }
manifest-dir-macros = "0.1.18"
maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
temp-env = "0.3.6"
urlencoding = "2.1.3"
wiremock = "0.6.2"
wiremock = "0.6.3"
yaup = "0.3.1"
[build-dependencies]
anyhow = { version = "1.0.95", optional = true }
cargo_toml = { version = "0.21.0", optional = true }
anyhow = { version = "1.0.98", optional = true }
cargo_toml = { version = "0.22.1", optional = true }
hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.12.12", features = [
reqwest = { version = "0.12.20", features = [
"blocking",
"rustls-tls",
], default-features = false, optional = true }
sha-1 = { version = "0.10.1", optional = true }
static-files = { version = "0.2.4", optional = true }
tempfile = { version = "3.15.0", optional = true }
zip = { version = "2.3.0", optional = true }
static-files = { version = "0.2.5", optional = true }
tempfile = { version = "3.20.0", optional = true }
zip = { version = "4.1.0", optional = true }
[features]
default = ["meilisearch-types/all-tokenizations", "mini-dashboard"]


@@ -197,9 +197,11 @@ struct Infos {
experimental_max_number_of_batched_tasks: usize,
experimental_limit_batched_tasks_total_size: u64,
experimental_network: bool,
experimental_chat_completions: bool,
experimental_get_task_documents_route: bool,
experimental_composite_embedders: bool,
experimental_embedding_cache_entries: usize,
experimental_no_snapshot_compaction: bool,
gpu_enabled: bool,
db_path: bool,
import_dump: bool,
@@ -248,6 +250,7 @@ impl Infos {
experimental_max_number_of_batched_tasks,
experimental_limit_batched_tasks_total_size,
experimental_embedding_cache_entries,
experimental_no_snapshot_compaction,
http_addr,
master_key: _,
env,
@@ -294,6 +297,7 @@ impl Infos {
network,
get_task_documents_route,
composite_embedders,
chat_completions,
} = features;
// We're going to override every piece of sensitive information.
@@ -312,9 +316,11 @@ impl Infos {
experimental_enable_logs_route: experimental_enable_logs_route | logs_route,
experimental_reduce_indexing_memory_usage,
experimental_network: network,
experimental_chat_completions: chat_completions,
experimental_get_task_documents_route: get_task_documents_route,
experimental_composite_embedders: composite_embedders,
experimental_embedding_cache_entries,
experimental_no_snapshot_compaction,
gpu_enabled: meilisearch_types::milli::vector::is_cuda_enabled(),
db_path: db_path != PathBuf::from("./data.ms"),
import_dump: import_dump.is_some(),


@@ -4,6 +4,7 @@ use std::marker::PhantomData;
use std::ops::Deref;
use std::pin::Pin;
use actix_web::http::header::AUTHORIZATION;
use actix_web::web::Data;
use actix_web::FromRequest;
pub use error::AuthenticationError;
@@ -94,36 +95,44 @@ impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D>
_payload: &mut actix_web::dev::Payload,
) -> Self::Future {
match req.app_data::<Data<AuthController>>().cloned() {
Some(auth) => match req
.headers()
.get("Authorization")
.map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
{
Some(mut type_token) => match type_token.next() {
Some("Bearer") => {
// TODO: find a less hardcoded way?
let index = req.match_info().get("index_uid");
match type_token.next() {
Some(token) => Box::pin(Self::auth_bearer(
auth,
token.to_string(),
index.map(String::from),
req.app_data::<D>().cloned(),
)),
None => Box::pin(err(AuthenticationError::InvalidToken.into())),
}
}
_otherwise => {
Box::pin(err(AuthenticationError::MissingAuthorizationHeader.into()))
}
},
None => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
Some(auth) => match extract_token_from_request(req) {
Ok(Some(token)) => {
// TODO: find a less hardcoded way?
let index = req.match_info().get("index_uid");
Box::pin(Self::auth_bearer(
auth,
token.to_string(),
index.map(String::from),
req.app_data::<D>().cloned(),
))
}
Ok(None) => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
Err(e) => Box::pin(err(e.into())),
},
None => Box::pin(err(AuthenticationError::IrretrievableState.into())),
}
}
}
pub fn extract_token_from_request(
req: &actix_web::HttpRequest,
) -> Result<Option<&str>, AuthenticationError> {
match req
.headers()
.get(AUTHORIZATION)
.map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
{
Some(mut type_token) => match type_token.next() {
Some("Bearer") => match type_token.next() {
Some(token) => Ok(Some(token)),
None => Err(AuthenticationError::InvalidToken),
},
_otherwise => Err(AuthenticationError::MissingAuthorizationHeader),
},
None => Ok(None),
}
}
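// Editor's usage sketch (hypothetical handler, not part of this commit) showing how
// the extracted helper above can be reused outside of the `FromRequest` implementation.
// It assumes `AuthenticationError` implements `Display`.
async fn whoami(req: actix_web::HttpRequest) -> actix_web::HttpResponse {
    match extract_token_from_request(&req) {
        // A `Bearer <token>` header was present.
        Ok(Some(token)) => {
            actix_web::HttpResponse::Ok().body(format!("got a {}-byte token", token.len()))
        }
        // No Authorization header at all: the caller is anonymous.
        Ok(None) => actix_web::HttpResponse::Ok().body("anonymous"),
        // Malformed header: `Bearer` without a value, or a non-Bearer scheme.
        Err(e) => actix_web::HttpResponse::Unauthorized().body(e.to_string()),
    }
}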
pub trait Policy {
fn authenticate(
auth: Data<AuthController>,
@@ -299,8 +308,8 @@ pub mod policies {
auth: &AuthController,
token: &str,
) -> Result<TenantTokenOutcome, AuthError> {
// Only search action can be accessed by a tenant token.
if A != actions::SEARCH {
// Only search and chat actions can be accessed by a tenant token.
if A != actions::SEARCH && A != actions::CHAT_COMPLETIONS {
return Ok(TenantTokenOutcome::NotATenantToken);
}


@@ -37,7 +37,9 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
use meilisearch_auth::{open_auth_store_env, AuthController};
use meilisearch_types::milli::constants::VERSION_MAJOR;
use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
use meilisearch_types::milli::update::{
default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig,
};
use meilisearch_types::settings::apply_settings_to_builder;
use meilisearch_types::tasks::KindWithContent;
use meilisearch_types::versioning::{
@@ -234,6 +236,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc<IndexScheduler>, Arc<
instance_features: opt.to_instance_features(),
auto_upgrade: opt.experimental_dumpless_upgrade,
embedding_cache_cap: opt.experimental_embedding_cache_entries,
experimental_no_snapshot_compaction: opt.experimental_no_snapshot_compaction,
};
let binary_version = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH);
@@ -500,7 +503,19 @@ fn import_dump(
let network = dump_reader.network()?.cloned().unwrap_or_default();
index_scheduler.put_network(network)?;
let indexer_config = index_scheduler.indexer_config();
// 3.1 Use all CPUs to process the dump if `max_indexing_threads` is not configured
let backup_config;
let base_config = index_scheduler.indexer_config();
let indexer_config = if base_config.max_threads.is_none() {
let (thread_pool, _) = default_thread_pool_and_threads();
let _config = IndexerConfig { thread_pool, ..*base_config };
backup_config = _config;
&backup_config
} else {
base_config
};
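// Editor's sketch (generic names, not Meilisearch APIs) of the same borrow-or-build
// pattern used above: the owned fallback lives in a longer-lived local so that both
// branches can hand back a plain reference of the same type.
struct ThreadConfig { max_threads: Option<usize> }

fn effective_threads(base: &ThreadConfig) -> usize {
    let fallback;
    let effective = if base.max_threads.is_none() {
        // Build a tweaked copy only when needed and keep it alive in `fallback`...
        fallback = ThreadConfig {
            max_threads: std::thread::available_parallelism().ok().map(|n| n.get()),
        };
        &fallback
    } else {
        // ...otherwise keep borrowing the caller's configuration untouched.
        base
    };
    effective.max_threads.unwrap_or(1)
}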
// /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
// try to process tasks while we're trying to import the indexes.


@@ -65,6 +65,7 @@ const MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE: &str =
"MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_SIZE";
const MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES: &str =
"MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES";
const MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION: &str = "MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION";
const DEFAULT_CONFIG_FILE_PATH: &str = "./config.toml";
const DEFAULT_DB_PATH: &str = "./data.ms";
const DEFAULT_HTTP_ADDR: &str = "localhost:7700";
@@ -455,6 +456,15 @@ pub struct Opt {
#[serde(default = "default_embedding_cache_entries")]
pub experimental_embedding_cache_entries: usize,
/// Experimental no snapshot compaction feature.
///
/// When enabled, Meilisearch will not compact snapshots during creation.
///
/// For more information, see <https://github.com/orgs/meilisearch/discussions/833>.
#[clap(long, env = MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION)]
#[serde(default)]
pub experimental_no_snapshot_compaction: bool,
#[serde(flatten)]
#[clap(flatten)]
pub indexer_options: IndexerOpts,
@@ -559,6 +569,7 @@ impl Opt {
experimental_max_number_of_batched_tasks,
experimental_limit_batched_tasks_total_size,
experimental_embedding_cache_entries,
experimental_no_snapshot_compaction,
} = self;
export_to_env_if_not_present(MEILI_DB_PATH, db_path);
export_to_env_if_not_present(MEILI_HTTP_ADDR, http_addr);
@@ -655,6 +666,10 @@ impl Opt {
MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES,
experimental_embedding_cache_entries.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION,
experimental_no_snapshot_compaction.to_string(),
);
indexer_options.export_to_env();
}
@@ -746,10 +761,12 @@ impl IndexerOpts {
max_indexing_memory.to_string(),
);
}
export_to_env_if_not_present(
MEILI_MAX_INDEXING_THREADS,
max_indexing_threads.0.to_string(),
);
if let Some(max_indexing_threads) = max_indexing_threads.0 {
export_to_env_if_not_present(
MEILI_MAX_INDEXING_THREADS,
max_indexing_threads.to_string(),
);
}
}
}
@@ -757,15 +774,15 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
type Error = anyhow::Error;
fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> {
let thread_pool = ThreadPoolNoAbortBuilder::new()
.thread_name(|index| format!("indexing-thread:{index}"))
.num_threads(*other.max_indexing_threads)
let thread_pool = ThreadPoolNoAbortBuilder::new_for_indexing()
.num_threads(other.max_indexing_threads.unwrap_or_else(|| num_cpus::get() / 2))
.build()?;
Ok(Self {
thread_pool,
log_every_n: Some(DEFAULT_LOG_EVERY_N),
max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize),
thread_pool: Some(thread_pool),
max_threads: *other.max_indexing_threads,
max_positions_per_attributes: None,
skip_index_budget: other.skip_index_budget,
..Default::default()
@@ -828,31 +845,31 @@ fn total_memory_bytes() -> Option<u64> {
}
}
#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct MaxThreads(usize);
#[derive(Default, Debug, Clone, Copy, Deserialize, Serialize)]
pub struct MaxThreads(Option<usize>);
impl FromStr for MaxThreads {
type Err = ParseIntError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
usize::from_str(s).map(Self)
}
}
impl Default for MaxThreads {
fn default() -> Self {
MaxThreads(num_cpus::get() / 2)
fn from_str(s: &str) -> Result<MaxThreads, Self::Err> {
if s.is_empty() || s == "unlimited" {
return Ok(MaxThreads::default());
}
usize::from_str(s).map(Some).map(MaxThreads)
}
}
impl fmt::Display for MaxThreads {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
match self.0 {
Some(threads) => write!(f, "{}", threads),
None => write!(f, "unlimited"),
}
}
}
impl Deref for MaxThreads {
type Target = usize;
type Target = Option<usize>;
fn deref(&self) -> &Self::Target {
&self.0
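// Editor's sketch (hypothetical test, not part of this commit) of the new
// `MaxThreads` parsing rules introduced above.
#[cfg(test)]
mod max_threads_sketch {
    use std::str::FromStr;

    use super::*;

    #[test]
    fn parses_numbers_and_unlimited() {
        // An explicit number pins the indexing thread count.
        assert_eq!(*MaxThreads::from_str("8").unwrap(), Some(8));
        // "unlimited" and the empty string both mean "let Meilisearch decide"
        // (the indexer then falls back to roughly half the available cores).
        assert_eq!(*MaxThreads::from_str("unlimited").unwrap(), None);
        assert_eq!(*MaxThreads::from_str("").unwrap(), None);
        // Anything else is rejected with a `ParseIntError`.
        assert!(MaxThreads::from_str("half").is_err());
    }
}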


@@ -0,0 +1,744 @@
use std::collections::HashMap;
use std::fmt::Write as _;
use std::mem;
use std::ops::ControlFlow;
use std::time::Duration;
use actix_web::web::{self, Data};
use actix_web::{Either, HttpRequest, HttpResponse, Responder};
use actix_web_lab::sse::{Event, Sse};
use async_openai::types::{
ChatCompletionMessageToolCall, ChatCompletionMessageToolCallChunk,
ChatCompletionRequestAssistantMessageArgs, ChatCompletionRequestDeveloperMessage,
ChatCompletionRequestDeveloperMessageContent, ChatCompletionRequestMessage,
ChatCompletionRequestSystemMessage, ChatCompletionRequestSystemMessageContent,
ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent,
ChatCompletionStreamResponseDelta, ChatCompletionToolArgs, ChatCompletionToolType,
CreateChatCompletionRequest, CreateChatCompletionStreamResponse, FinishReason, FunctionCall,
FunctionCallStream, FunctionObjectArgs,
};
use async_openai::Client;
use bumpalo::Bump;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::features::{
ChatCompletionPrompts as DbChatCompletionPrompts,
ChatCompletionSource as DbChatCompletionSource, SystemRole,
};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::index::ChatConfig;
use meilisearch_types::milli::{all_obkv_to_json, obkv_to_json, TimeBudget};
use meilisearch_types::{Document, Index};
use serde::Deserialize;
use serde_json::json;
use tokio::runtime::Handle;
use tokio::sync::mpsc::error::SendError;
use super::config::Config;
use super::errors::{MistralError, OpenAiOutsideError, StreamErrorEvent};
use super::utils::format_documents;
use super::{
ChatsParam, MEILI_APPEND_CONVERSATION_MESSAGE_NAME, MEILI_SEARCH_IN_INDEX_FUNCTION_NAME,
MEILI_SEARCH_PROGRESS_NAME, MEILI_SEARCH_SOURCES_NAME,
};
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
use crate::routes::chats::utils::SseEventSender;
use crate::routes::indexes::search::search_kind;
use crate::search::{add_search_rules, prepare_search, search_from_kind, SearchQuery};
use crate::search_queue::SearchQueue;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(chat)));
}
/// Create a chat completion
async fn chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_COMPLETIONS }>, Data<IndexScheduler>>,
auth_ctrl: web::Data<AuthController>,
chats_param: web::Path<ChatsParam>,
req: HttpRequest,
search_queue: web::Data<SearchQueue>,
web::Json(chat_completion): web::Json<CreateChatCompletionRequest>,
) -> impl Responder {
let ChatsParam { workspace_uid } = chats_param.into_inner();
if chat_completion.stream.unwrap_or(false) {
Either::Right(
streamed_chat(
index_scheduler,
auth_ctrl,
search_queue,
&workspace_uid,
req,
chat_completion,
)
.await,
)
} else {
Either::Left(
non_streamed_chat(
index_scheduler,
auth_ctrl,
search_queue,
&workspace_uid,
req,
chat_completion,
)
.await,
)
}
}
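// Editor's sketch of the OpenAI-compatible body this route accepts (the model name
// is a placeholder; only the streamed path is implemented today, the non-streamed
// path below returns `UnimplementedNonStreamingChatCompletions`).
fn example_chat_completion_body() -> serde_json::Value {
    serde_json::json!({
        "model": "gpt-4o-mini", // placeholder, forwarded as-is to the configured LLM source
        "stream": true,         // required in practice
        "messages": [
            { "role": "user", "content": "Which movies talk about space travel?" }
        ]
    })
}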
#[derive(Default, Debug, Clone, Copy)]
pub struct FunctionSupport {
/// Defines if we can call the _meiliSearchProgress function
/// to inform the front-end about what we are searching for.
report_progress: bool,
/// Defines if we can call the _meiliSearchSources function
/// to inform the front-end about the sources of the search.
report_sources: bool,
/// Defines if we can call the _meiliAppendConversationMessage
/// function to provide the messages to append into the conversation.
append_to_conversation: bool,
}
/// Setup search tool in chat completion request
fn setup_search_tool(
index_scheduler: &Data<IndexScheduler>,
filters: &meilisearch_auth::AuthFilter,
chat_completion: &mut CreateChatCompletionRequest,
prompts: &DbChatCompletionPrompts,
system_role: SystemRole,
) -> Result<FunctionSupport, ResponseError> {
let tools = chat_completion.tools.get_or_insert_default();
for tool in &tools[..] {
match tool.function.name.as_str() {
MEILI_SEARCH_IN_INDEX_FUNCTION_NAME => {
return Err(ResponseError::from_msg(
format!("{MEILI_SEARCH_IN_INDEX_FUNCTION_NAME} function is already defined."),
Code::BadRequest,
));
}
MEILI_SEARCH_PROGRESS_NAME
| MEILI_SEARCH_SOURCES_NAME
| MEILI_APPEND_CONVERSATION_MESSAGE_NAME => (),
external_function_name => {
return Err(ResponseError::from_msg(
format!("{external_function_name}: External functions are not supported yet."),
Code::UnimplementedExternalFunctionCalling,
));
}
}
}
// Remove internal tools used for front-end notifications as they should be hidden from the LLM.
let mut report_progress = false;
let mut report_sources = false;
let mut append_to_conversation = false;
tools.retain(|tool| {
match tool.function.name.as_str() {
MEILI_SEARCH_PROGRESS_NAME => {
report_progress = true;
false
}
MEILI_SEARCH_SOURCES_NAME => {
report_sources = true;
false
}
MEILI_APPEND_CONVERSATION_MESSAGE_NAME => {
append_to_conversation = true;
false
}
_ => true, // keep other tools
}
});
let mut index_uids = Vec::new();
let mut function_description = prompts.search_description.clone();
index_scheduler.try_for_each_index::<_, ()>(|name, index| {
// Make sure to skip unauthorized indexes
if !filters.is_index_authorized(name) {
return Ok(());
}
let rtxn = index.read_txn()?;
let chat_config = index.chat_config(&rtxn)?;
let index_description = chat_config.description;
let _ = writeln!(&mut function_description, "\n\n - {name}: {index_description}\n");
index_uids.push(name.to_string());
Ok(())
})?;
let tool = ChatCompletionToolArgs::default()
.r#type(ChatCompletionToolType::Function)
.function(
FunctionObjectArgs::default()
.name(MEILI_SEARCH_IN_INDEX_FUNCTION_NAME)
.description(&function_description)
.parameters(json!({
"type": "object",
"properties": {
"index_uid": {
"type": "string",
"enum": index_uids,
"description": prompts.search_index_uid_param,
},
"q": {
// Unfortunately, Mistral does not support an array of types, here.
// "type": ["string", "null"],
"type": "string",
"description": prompts.search_q_param,
}
},
"required": ["index_uid", "q"],
"additionalProperties": false,
}))
.strict(true)
.build()
.unwrap(),
)
.build()
.unwrap();
tools.push(tool);
let system_message = match system_role {
SystemRole::System => {
ChatCompletionRequestMessage::System(ChatCompletionRequestSystemMessage {
content: ChatCompletionRequestSystemMessageContent::Text(prompts.system.clone()),
name: None,
})
}
SystemRole::Developer => {
ChatCompletionRequestMessage::Developer(ChatCompletionRequestDeveloperMessage {
content: ChatCompletionRequestDeveloperMessageContent::Text(prompts.system.clone()),
name: None,
})
}
};
chat_completion.messages.insert(0, system_message);
Ok(FunctionSupport { report_progress, report_sources, append_to_conversation })
}
/// Process search request and return formatted results
async fn process_search_request(
index_scheduler: &GuardedData<
ActionPolicy<{ actions::CHAT_COMPLETIONS }>,
Data<IndexScheduler>,
>,
auth_ctrl: web::Data<AuthController>,
search_queue: &web::Data<SearchQueue>,
auth_token: &str,
index_uid: String,
q: Option<String>,
) -> Result<(Index, Vec<Document>, String), ResponseError> {
let index = index_scheduler.index(&index_uid)?;
let rtxn = index.static_read_txn()?;
let ChatConfig { description: _, prompt: _, search_parameters } = index.chat_config(&rtxn)?;
let mut query = SearchQuery { q, ..SearchQuery::from(search_parameters) };
let auth_filter = ActionPolicy::<{ actions::SEARCH }>::authenticate(
auth_ctrl,
auth_token,
Some(index_uid.as_str()),
)?;
// Tenant token search_rules.
if let Some(search_rules) = auth_filter.get_index_search_rules(&index_uid) {
add_search_rules(&mut query.filter, search_rules);
}
let search_kind =
search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
let permit = search_queue.try_get_search_permit().await?;
let features = index_scheduler.features();
let index_cloned = index.clone();
let output = tokio::task::spawn_blocking(move || -> Result<_, ResponseError> {
let time_budget = match index_cloned
.search_cutoff(&rtxn)
.map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.clone())))?
{
Some(cutoff) => TimeBudget::new(Duration::from_millis(cutoff)),
None => TimeBudget::default(),
};
let (search, _is_finite_pagination, _max_total_hits, _offset) =
prepare_search(&index_cloned, &rtxn, &query, &search_kind, time_budget, features)?;
search_from_kind(index_uid, search_kind, search)
.map(|(search_results, _)| (rtxn, search_results))
.map_err(ResponseError::from)
})
.await;
permit.drop().await;
let output = output?;
let mut documents = Vec::new();
if let Ok((ref rtxn, ref search_result)) = output {
// aggregate.succeed(search_result);
if search_result.degraded {
MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
}
let fields_ids_map = index.fields_ids_map(rtxn)?;
let displayed_fields = index.displayed_fields_ids(rtxn)?;
for &document_id in &search_result.documents_ids {
let obkv = index.document(rtxn, document_id)?;
let document = match displayed_fields {
Some(ref fields) => obkv_to_json(fields, &fields_ids_map, obkv)?,
None => all_obkv_to_json(obkv, &fields_ids_map)?,
};
documents.push(document);
}
}
let (rtxn, search_result) = output?;
let render_alloc = Bump::new();
let formatted = format_documents(&rtxn, &index, &render_alloc, search_result.documents_ids)?;
let text = formatted.join("\n");
drop(rtxn);
Ok((index, documents, text))
}
#[allow(unreachable_code, unused_variables)] // will be correctly implemented in the future
async fn non_streamed_chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_COMPLETIONS }>, Data<IndexScheduler>>,
auth_ctrl: web::Data<AuthController>,
search_queue: web::Data<SearchQueue>,
workspace_uid: &str,
req: HttpRequest,
chat_completion: CreateChatCompletionRequest,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
if let Some(n) = chat_completion.n.filter(|&n| n != 1) {
return Err(ResponseError::from_msg(
format!("You tried to specify n = {n} but only single choices are supported (n = 1)."),
Code::UnimplementedMultiChoiceChatCompletions,
));
}
return Err(ResponseError::from_msg(
"Non-streamed chat completions is not implemented".to_string(),
Code::UnimplementedNonStreamingChatCompletions,
));
let filters = index_scheduler.filters();
let chat_settings = match index_scheduler.chat_settings(workspace_uid).unwrap() {
Some(settings) => settings,
None => {
return Err(ResponseError::from_msg(
format!("Chat `{workspace_uid}` not found"),
Code::ChatNotFound,
))
}
};
let config = Config::new(&chat_settings);
let client = Client::with_config(config);
let auth_token = extract_token_from_request(&req)?.unwrap();
let system_role = chat_settings.source.system_role(&chat_completion.model);
// TODO do function support later
let _function_support = setup_search_tool(
&index_scheduler,
filters,
&mut chat_completion,
&chat_settings.prompts,
system_role,
)?;
let mut response;
loop {
response = client.chat().create(chat_completion.clone()).await.unwrap();
let choice = &mut response.choices[0];
match choice.finish_reason {
Some(FinishReason::ToolCalls) => {
let tool_calls = mem::take(&mut choice.message.tool_calls).unwrap_or_default();
let (meili_calls, other_calls): (Vec<_>, Vec<_>) = tool_calls
.into_iter()
.partition(|call| call.function.name == MEILI_SEARCH_IN_INDEX_FUNCTION_NAME);
chat_completion.messages.push(
ChatCompletionRequestAssistantMessageArgs::default()
.tool_calls(meili_calls.clone())
.build()
.unwrap()
.into(),
);
for call in meili_calls {
let result = match serde_json::from_str(&call.function.arguments) {
Ok(SearchInIndexParameters { index_uid, q }) => process_search_request(
&index_scheduler,
auth_ctrl.clone(),
&search_queue,
auth_token,
index_uid,
q,
)
.await
.map_err(|e| e.to_string()),
Err(err) => Err(err.to_string()),
};
// TODO report documents sources later
let answer = match result {
Ok((_, _documents, text)) => text,
Err(err) => err,
};
chat_completion.messages.push(ChatCompletionRequestMessage::Tool(
ChatCompletionRequestToolMessage {
tool_call_id: call.id.clone(),
content: ChatCompletionRequestToolMessageContent::Text(answer),
},
));
}
// Let the client call other tools by themselves
if !other_calls.is_empty() {
response.choices[0].message.tool_calls = Some(other_calls);
break;
}
}
_ => break,
}
}
Ok(HttpResponse::Ok().json(response))
}
async fn streamed_chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT_COMPLETIONS }>, Data<IndexScheduler>>,
auth_ctrl: web::Data<AuthController>,
search_queue: web::Data<SearchQueue>,
workspace_uid: &str,
req: HttpRequest,
mut chat_completion: CreateChatCompletionRequest,
) -> Result<impl Responder, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
let filters = index_scheduler.filters();
if let Some(n) = chat_completion.n.filter(|&n| n != 1) {
return Err(ResponseError::from_msg(
format!("You tried to specify n = {n} but only single choices are supported (n = 1)."),
Code::UnimplementedMultiChoiceChatCompletions,
));
}
let chat_settings = match index_scheduler.chat_settings(workspace_uid)? {
Some(settings) => settings,
None => {
return Err(ResponseError::from_msg(
format!("Chat `{workspace_uid}` not found"),
Code::ChatNotFound,
))
}
};
let config = Config::new(&chat_settings);
let auth_token = extract_token_from_request(&req)?.unwrap().to_string();
let system_role = chat_settings.source.system_role(&chat_completion.model);
let function_support = setup_search_tool(
&index_scheduler,
filters,
&mut chat_completion,
&chat_settings.prompts,
system_role,
)?;
tracing::debug!("Conversation function support: {function_support:?}");
let (tx, rx) = tokio::sync::mpsc::channel(10);
let tx = SseEventSender::new(tx);
let _join_handle = Handle::current().spawn(async move {
let client = Client::with_config(config.clone());
let mut global_tool_calls = HashMap::<u32, Call>::new();
// Limit the number of internal calls to satisfy the search requests of the LLM
for _ in 0..20 {
let output = run_conversation(
&index_scheduler,
&auth_ctrl,
&search_queue,
&auth_token,
&client,
chat_settings.source,
&mut chat_completion,
&tx,
&mut global_tool_calls,
function_support,
);
match output.await {
Ok(ControlFlow::Continue(())) => (),
Ok(ControlFlow::Break(_finish_reason)) => break,
// If the connection is closed we must stop
Err(SendError(_)) => return,
}
}
let _ = tx.stop().await;
});
Ok(Sse::from_infallible_receiver(rx).with_retry_duration(Duration::from_secs(10)))
}
/// Updates the chat completion with the new messages, streams the LLM tokens,
/// and reports progress and errors.
#[allow(clippy::too_many_arguments)]
async fn run_conversation<C: async_openai::config::Config>(
index_scheduler: &GuardedData<
ActionPolicy<{ actions::CHAT_COMPLETIONS }>,
Data<IndexScheduler>,
>,
auth_ctrl: &web::Data<AuthController>,
search_queue: &web::Data<SearchQueue>,
auth_token: &str,
client: &Client<C>,
source: DbChatCompletionSource,
chat_completion: &mut CreateChatCompletionRequest,
tx: &SseEventSender,
global_tool_calls: &mut HashMap<u32, Call>,
function_support: FunctionSupport,
) -> Result<ControlFlow<Option<FinishReason>, ()>, SendError<Event>> {
let mut finish_reason = None;
// safety: unwrap: can only happen if `stream` was set to `false`
let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
while let Some(result) = response.next().await {
match result {
Ok(resp) => {
let choice = &resp.choices[0];
finish_reason = choice.finish_reason;
let ChatCompletionStreamResponseDelta { ref tool_calls, .. } = &choice.delta;
match tool_calls {
Some(tool_calls) => {
for chunk in tool_calls {
let ChatCompletionMessageToolCallChunk {
index,
id,
r#type: _,
function,
} = chunk;
let FunctionCallStream { name, arguments } = function.as_ref().unwrap();
global_tool_calls
.entry(*index)
.and_modify(|call| {
if call.is_internal() {
call.append(arguments.as_ref().unwrap())
}
})
.or_insert_with(|| {
if name.as_deref() == Some(MEILI_SEARCH_IN_INDEX_FUNCTION_NAME)
{
Call::Internal {
id: id.as_ref().unwrap().clone(),
function_name: name.as_ref().unwrap().clone(),
arguments: arguments.as_ref().unwrap().clone(),
}
} else {
Call::External
}
});
}
}
None => {
if !global_tool_calls.is_empty() {
let (meili_calls, _other_calls): (Vec<_>, Vec<_>) =
mem::take(global_tool_calls)
.into_values()
.flat_map(|call| match call {
Call::Internal { id, function_name: name, arguments } => {
Some(ChatCompletionMessageToolCall {
id,
r#type: Some(ChatCompletionToolType::Function),
function: FunctionCall { name, arguments },
})
}
Call::External => None,
})
.partition(|call| {
call.function.name == MEILI_SEARCH_IN_INDEX_FUNCTION_NAME
});
chat_completion.messages.push(
ChatCompletionRequestAssistantMessageArgs::default()
.tool_calls(meili_calls.clone())
.build()
.unwrap()
.into(),
);
handle_meili_tools(
index_scheduler,
auth_ctrl,
search_queue,
auth_token,
tx,
meili_calls,
chat_completion,
&resp,
function_support,
)
.await?;
} else {
tx.forward_response(&resp).await?;
}
}
}
}
Err(error) => {
let result = match source {
DbChatCompletionSource::Mistral => {
StreamErrorEvent::from_openai_error::<MistralError>(error).await
}
_ => StreamErrorEvent::from_openai_error::<OpenAiOutsideError>(error).await,
};
let error = result.unwrap_or_else(StreamErrorEvent::from_reqwest_error);
tx.send_error(&error).await?;
return Ok(ControlFlow::Break(None));
}
}
}
// We must stop if the finish reason is not something we can solve with Meilisearch
match finish_reason {
Some(FinishReason::ToolCalls) => Ok(ControlFlow::Continue(())),
otherwise => Ok(ControlFlow::Break(otherwise)),
}
}
#[allow(clippy::too_many_arguments)]
async fn handle_meili_tools(
index_scheduler: &GuardedData<
ActionPolicy<{ actions::CHAT_COMPLETIONS }>,
Data<IndexScheduler>,
>,
auth_ctrl: &web::Data<AuthController>,
search_queue: &web::Data<SearchQueue>,
auth_token: &str,
tx: &SseEventSender,
meili_calls: Vec<ChatCompletionMessageToolCall>,
chat_completion: &mut CreateChatCompletionRequest,
resp: &CreateChatCompletionStreamResponse,
FunctionSupport { report_progress, report_sources, append_to_conversation, .. }: FunctionSupport,
) -> Result<(), SendError<Event>> {
for call in meili_calls {
if report_progress {
tx.report_search_progress(
resp.clone(),
&call.id,
&call.function.name,
&call.function.arguments,
)
.await?;
}
if append_to_conversation {
tx.append_tool_call_conversation_message(
resp.clone(),
call.id.clone(),
call.function.name.clone(),
call.function.arguments.clone(),
)
.await?;
}
let mut error = None;
let result = match serde_json::from_str(&call.function.arguments) {
Ok(SearchInIndexParameters { index_uid, q }) => match process_search_request(
index_scheduler,
auth_ctrl.clone(),
search_queue,
auth_token,
index_uid,
q,
)
.await
{
Ok(output) => Ok(output),
Err(err) => {
let error_text = format!("the search tool call failed with {err}");
error = Some(err);
Err(error_text)
}
},
Err(err) => Err(err.to_string()),
};
let answer = match result {
Ok((_index, documents, text)) => {
if report_sources {
tx.report_sources(resp.clone(), &call.id, &documents).await?;
}
text
}
Err(err) => err,
};
let tool = ChatCompletionRequestMessage::Tool(ChatCompletionRequestToolMessage {
tool_call_id: call.id.clone(),
content: ChatCompletionRequestToolMessageContent::Text(answer),
});
if append_to_conversation {
tx.append_conversation_message(resp.clone(), &tool).await?;
}
chat_completion.messages.push(tool);
if let Some(error) = error {
tx.send_error(&StreamErrorEvent::from_response_error(error)).await?;
}
}
Ok(())
}
/// The structure used to aggregate the function calls to make.
#[derive(Debug)]
enum Call {
/// Tool calls to tools that must be managed by Meilisearch internally.
/// Typically the search functions.
Internal { id: String, function_name: String, arguments: String },
/// Tool calls that we track only to know that they are not our functions.
/// We return the function calls as-is to the end-user.
External,
}
impl Call {
fn is_internal(&self) -> bool {
matches!(self, Call::Internal { .. })
}
/// # Panics
///
/// - if called on external calls
fn append(&mut self, more: &str) {
match self {
Call::Internal { arguments, .. } => arguments.push_str(more),
Call::External => panic!("Cannot append argument chunks to an external function"),
}
}
}
#[derive(Deserialize)]
struct SearchInIndexParameters {
/// The index uid to search in.
index_uid: String,
/// The query parameter to use.
q: Option<String>,
}
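// Editor's sketch (not part of this commit) tying the pieces above together:
// streamed argument chunks are accumulated into a `Call::Internal`, then parsed
// as `SearchInIndexParameters` once the tool call is complete.
fn accumulate_and_parse_sketch() {
    let mut call = Call::Internal {
        id: "call_1".to_string(),
        function_name: MEILI_SEARCH_IN_INDEX_FUNCTION_NAME.to_string(),
        arguments: String::new(),
    };
    // Chunks arrive piecewise over the stream and are appended in order.
    call.append(r#"{"index_uid":"movies","#);
    call.append(r#""q":"space opera"}"#);
    if let Call::Internal { arguments, .. } = &call {
        let SearchInIndexParameters { index_uid, q } =
            serde_json::from_str(arguments).expect("arguments form valid JSON once complete");
        assert_eq!(index_uid, "movies");
        assert_eq!(q.as_deref(), Some("space opera"));
    }
}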


@@ -0,0 +1,88 @@
use async_openai::config::{AzureConfig, OpenAIConfig};
use meilisearch_types::features::ChatCompletionSettings as DbChatSettings;
use reqwest::header::HeaderMap;
use secrecy::SecretString;
#[derive(Debug, Clone)]
pub enum Config {
OpenAiCompatible(OpenAIConfig),
AzureOpenAiCompatible(AzureConfig),
}
impl Config {
pub fn new(chat_settings: &DbChatSettings) -> Self {
use meilisearch_types::features::ChatCompletionSource::*;
match chat_settings.source {
OpenAi | Mistral | Gemini | VLlm => {
let mut config = OpenAIConfig::default();
if let Some(org_id) = chat_settings.org_id.as_ref() {
config = config.with_org_id(org_id);
}
if let Some(project_id) = chat_settings.project_id.as_ref() {
config = config.with_project_id(project_id);
}
if let Some(api_key) = chat_settings.api_key.as_ref() {
config = config.with_api_key(api_key);
}
let base_url = chat_settings.base_url.as_deref();
if let Some(base_url) = chat_settings.source.base_url().or(base_url) {
config = config.with_api_base(base_url);
}
Self::OpenAiCompatible(config)
}
AzureOpenAi => {
let mut config = AzureConfig::default();
if let Some(version) = chat_settings.api_version.as_ref() {
config = config.with_api_version(version);
}
if let Some(deployment_id) = chat_settings.deployment_id.as_ref() {
config = config.with_deployment_id(deployment_id);
}
if let Some(api_key) = chat_settings.api_key.as_ref() {
config = config.with_api_key(api_key);
}
if let Some(base_url) = chat_settings.base_url.as_ref() {
config = config.with_api_base(base_url);
}
Self::AzureOpenAiCompatible(config)
}
}
}
}
impl async_openai::config::Config for Config {
fn headers(&self) -> HeaderMap {
match self {
Config::OpenAiCompatible(config) => config.headers(),
Config::AzureOpenAiCompatible(config) => config.headers(),
}
}
fn url(&self, path: &str) -> String {
match self {
Config::OpenAiCompatible(config) => config.url(path),
Config::AzureOpenAiCompatible(config) => config.url(path),
}
}
fn query(&self) -> Vec<(&str, &str)> {
match self {
Config::OpenAiCompatible(config) => config.query(),
Config::AzureOpenAiCompatible(config) => config.query(),
}
}
fn api_base(&self) -> &str {
match self {
Config::OpenAiCompatible(config) => config.api_base(),
Config::AzureOpenAiCompatible(config) => config.api_base(),
}
}
fn api_key(&self) -> &SecretString {
match self {
Config::OpenAiCompatible(config) => config.api_key(),
Config::AzureOpenAiCompatible(config) => config.api_key(),
}
}
}


@@ -0,0 +1,250 @@
use async_openai::error::{ApiError, OpenAIError};
use async_openai::reqwest_eventsource::Error as EventSourceError;
use meilisearch_types::error::ResponseError;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
/// The error type which is always `error`.
const ERROR_TYPE: &str = "error";
/// The error struct returned by the Mistral API.
///
/// ```json
/// {
/// "object": "error",
/// "message": "Service tier capacity exceeded for this model.",
/// "type": "invalid_request_error",
/// "param": null,
/// "code": null
/// }
/// ```
#[derive(Debug, Clone, Deserialize)]
pub struct MistralError {
message: String,
r#type: String,
param: Option<String>,
code: Option<String>,
}
impl From<MistralError> for StreamErrorEvent {
fn from(error: MistralError) -> Self {
let MistralError { message, r#type, param, code } = error;
StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_owned(),
error: StreamError { r#type, code, message, param, event_id: None },
}
}
}
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAiOutsideError {
/// Emitted when an error occurs.
error: OpenAiInnerError,
}
/// Emitted when an error occurs.
#[derive(Debug, Clone, Deserialize)]
pub struct OpenAiInnerError {
/// The error code.
code: Option<String>,
/// The error message.
message: String,
/// The error parameter.
param: Option<String>,
/// The type of the event. Always `error`.
r#type: String,
}
impl From<OpenAiOutsideError> for StreamErrorEvent {
fn from(error: OpenAiOutsideError) -> Self {
let OpenAiOutsideError { error: OpenAiInnerError { code, message, param, r#type } } = error;
StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError { r#type, code, message, param, event_id: None },
}
}
}
/// An error that occurs during the streaming process.
///
/// It directly comes from the OpenAI API and you can
/// read more about error events on their website:
/// <https://platform.openai.com/docs/api-reference/realtime-server-events/error>
#[derive(Debug, Serialize, Deserialize)]
pub struct StreamErrorEvent {
/// The unique ID of the server event.
pub event_id: String,
/// The event type, must be error.
pub r#type: String,
/// Details of the error.
pub error: StreamError,
}
/// Details of the error.
#[derive(Debug, Serialize, Deserialize)]
pub struct StreamError {
/// The type of error (e.g., "invalid_request_error", "server_error").
pub r#type: String,
/// Error code, if any.
pub code: Option<String>,
/// A human-readable error message.
pub message: String,
/// Parameter related to the error, if any.
pub param: Option<String>,
/// The event_id of the client event that caused the error, if applicable.
pub event_id: Option<String>,
}
impl StreamErrorEvent {
pub async fn from_openai_error<E>(error: OpenAIError) -> Result<Self, reqwest::Error>
where
E: serde::de::DeserializeOwned,
Self: From<E>,
{
match error {
OpenAIError::Reqwest(e) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "internal_reqwest_error".to_string(),
code: Some("internal".to_string()),
message: e.to_string(),
param: None,
event_id: None,
},
}),
OpenAIError::ApiError(ApiError { message, r#type, param, code }) => {
Ok(StreamErrorEvent {
r#type: ERROR_TYPE.to_string(),
event_id: Uuid::new_v4().to_string(),
error: StreamError {
r#type: r#type.unwrap_or_else(|| "unknown".to_string()),
code,
message,
param,
event_id: None,
},
})
}
OpenAIError::JSONDeserialize(error) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "json_deserialize_error".to_string(),
code: Some("internal".to_string()),
message: error.to_string(),
param: None,
event_id: None,
},
}),
OpenAIError::FileSaveError(_) | OpenAIError::FileReadError(_) => unreachable!(),
OpenAIError::StreamError(error) => match error {
EventSourceError::InvalidStatusCode(_status_code, response) => {
let error = response.json::<E>().await?;
Ok(StreamErrorEvent::from(error))
}
EventSourceError::InvalidContentType(_header_value, response) => {
let error = response.json::<E>().await?;
Ok(StreamErrorEvent::from(error))
}
EventSourceError::Utf8(error) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "invalid_utf8_error".to_string(),
code: None,
message: error.to_string(),
param: None,
event_id: None,
},
}),
EventSourceError::Parser(error) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "parser_error".to_string(),
code: None,
message: error.to_string(),
param: None,
event_id: None,
},
}),
EventSourceError::Transport(error) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "transport_error".to_string(),
code: None,
message: error.to_string(),
param: None,
event_id: None,
},
}),
EventSourceError::InvalidLastEventId(message) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "invalid_last_event_id".to_string(),
code: None,
message,
param: None,
event_id: None,
},
}),
EventSourceError::StreamEnded => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "stream_ended".to_string(),
code: None,
message: "Stream ended".to_string(),
param: None,
event_id: None,
},
}),
},
OpenAIError::InvalidArgument(message) => Ok(StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "invalid_argument".to_string(),
code: None,
message,
param: None,
event_id: None,
},
}),
}
}
pub fn from_response_error(error: ResponseError) -> Self {
let ResponseError { code, message, .. } = error;
StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "response_error".to_string(),
code: Some(code.as_str().to_string()),
message,
param: None,
event_id: None,
},
}
}
pub fn from_reqwest_error(error: reqwest::Error) -> Self {
StreamErrorEvent {
event_id: Uuid::new_v4().to_string(),
r#type: ERROR_TYPE.to_string(),
error: StreamError {
r#type: "reqwest_error".to_string(),
code: None,
message: error.to_string(),
param: None,
event_id: None,
},
}
}
}
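// Editor's sketch (not part of this commit) of the payload produced for a plain
// Meilisearch `ResponseError`; the exact `code` string shown in the comment is an
// assumption about how `Code::ChatNotFound` renders.
fn response_error_event_sketch() {
    let error = ResponseError::from_msg(
        "Chat `demo` not found".to_string(),
        meilisearch_types::error::Code::ChatNotFound,
    );
    let event = StreamErrorEvent::from_response_error(error);
    // Roughly (event_id is a fresh UUID v4, code string assumed to be "chat_not_found"):
    // {"event_id":"...","type":"error","error":{"type":"response_error",
    //  "code":"chat_not_found","message":"Chat `demo` not found","param":null,"event_id":null}}
    println!("{}", serde_json::to_string(&event).unwrap());
}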


@@ -0,0 +1,134 @@
use actix_web::web::{self, Data};
use actix_web::HttpResponse;
use deserr::actix_web::AwebQueryParameter;
use deserr::Deserr;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::deserr_codes::{InvalidIndexLimit, InvalidIndexOffset};
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::actions;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tracing::debug;
use utoipa::{IntoParams, ToSchema};
use super::Pagination;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::routes::PAGINATION_DEFAULT_LIMIT;
pub mod chat_completions;
mod config;
mod errors;
pub mod settings;
mod utils;
/// The function name to report search progress.
/// This function reports what Meilisearch is currently doing and must be
/// used on the frontend to display progress.
const MEILI_SEARCH_PROGRESS_NAME: &str = "_meiliSearchProgress";
/// The function name to append a conversation message in the user conversation.
/// This must be used on the frontend to keep track of what happened on the
/// Meilisearch side and preserve context for follow-up questions.
const MEILI_APPEND_CONVERSATION_MESSAGE_NAME: &str = "_meiliAppendConversationMessage";
/// The function name to report sources to the frontend.
/// The call id is associated with the one used by the search progress function.
const MEILI_SEARCH_SOURCES_NAME: &str = "_meiliSearchSources";
/// The *internal* function name to provide to the LLM to search in indexes.
/// This function must not leak to the user as the LLM will call it and the
/// main goal of Meilisearch is to provide an answer to these calls.
const MEILI_SEARCH_IN_INDEX_FUNCTION_NAME: &str = "_meiliSearchInIndex";
#[derive(Deserialize)]
pub struct ChatsParam {
workspace_uid: String,
}
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::get().to(list_workspaces))).service(
web::scope("/{workspace_uid}")
.service(
web::resource("")
.route(web::get().to(get_chat))
.route(web::delete().to(delete_chat)),
)
.service(web::scope("/chat/completions").configure(chat_completions::configure))
.service(web::scope("/settings").configure(settings::configure)),
);
}
pub async fn get_chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHATS_GET }>, Data<IndexScheduler>>,
workspace_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("displaying a chat")?;
let workspace_uid = IndexUid::try_from(workspace_uid.into_inner())?;
if index_scheduler.chat_workspace_exists(&workspace_uid)? {
Ok(HttpResponse::Ok().json(json!({ "uid": workspace_uid })))
} else {
Err(ResponseError::from_msg(format!("chat {workspace_uid} not found"), Code::ChatNotFound))
}
}
pub async fn delete_chat(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHATS_DELETE }>, Data<IndexScheduler>>,
workspace_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("deleting a chat")?;
let workspace_uid = workspace_uid.into_inner();
if index_scheduler.delete_chat_settings(&workspace_uid)? {
Ok(HttpResponse::NoContent().finish())
} else {
Err(ResponseError::from_msg(format!("chat {workspace_uid} not found"), Code::ChatNotFound))
}
}
#[derive(Deserr, Debug, Clone, Copy, IntoParams)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[into_params(rename_all = "camelCase", parameter_in = Query)]
pub struct ListChats {
/// The number of chat workspaces to skip before starting to retrieve anything
#[param(value_type = Option<usize>, default, example = 100)]
#[deserr(default, error = DeserrQueryParamError<InvalidIndexOffset>)]
pub offset: Param<usize>,
/// The number of chat workspaces to retrieve
#[param(value_type = Option<usize>, default = 20, example = 1)]
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidIndexLimit>)]
pub limit: Param<usize>,
}
impl ListChats {
fn as_pagination(self) -> Pagination {
Pagination { offset: self.offset.0, limit: self.limit.0 }
}
}
#[derive(Debug, Serialize, Clone, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ChatWorkspaceView {
/// Unique identifier for the chat workspace
pub uid: String,
}
pub async fn list_workspaces(
index_scheduler: GuardedData<ActionPolicy<{ actions::CHATS_GET }>, Data<IndexScheduler>>,
paginate: AwebQueryParameter<ListChats, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("listing the chats")?;
debug!(parameters = ?paginate, "List chat workspaces");
let (total, workspaces) =
index_scheduler.paginated_chat_workspace_uids(*paginate.offset, *paginate.limit)?;
let workspaces =
workspaces.into_iter().map(|uid| ChatWorkspaceView { uid }).collect::<Vec<_>>();
let ret = paginate.as_pagination().format_with(total, workspaces);
debug!(returns = ?ret, "List chat workspaces");
Ok(HttpResponse::Ok().json(ret))
}
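// Editor's wiring sketch (hypothetical; the real mounting point lives elsewhere in
// the routes module) spelling out the endpoints exposed by `configure` above,
// assuming the scope is mounted under `/chats`:
//   GET    /chats                                    -> list_workspaces
//   GET    /chats/{workspace_uid}                    -> get_chat
//   DELETE /chats/{workspace_uid}                    -> delete_chat
//   POST   /chats/{workspace_uid}/chat/completions   -> chat_completions
//   GET/PATCH/DELETE /chats/{workspace_uid}/settings -> chat settings handlers
fn mount_chats_sketch(cfg: &mut web::ServiceConfig) {
    cfg.service(web::scope("/chats").configure(configure));
}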


@@ -0,0 +1,260 @@
use actix_web::web::{self, Data};
use actix_web::HttpResponse;
use deserr::Deserr;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::features::{
ChatCompletionPrompts as DbChatCompletionPrompts, ChatCompletionSettings,
ChatCompletionSource as DbChatCompletionSource, DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT,
DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT, DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT,
DEFAULT_CHAT_SYSTEM_PROMPT,
};
use meilisearch_types::keys::actions;
use meilisearch_types::milli::update::Setting;
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
use super::ChatsParam;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
.route(web::get().to(SeqHandler(get_settings)))
.route(web::patch().to(SeqHandler(patch_settings)))
.route(web::delete().to(SeqHandler(reset_settings))),
);
}
async fn get_settings(
index_scheduler: GuardedData<
ActionPolicy<{ actions::CHATS_SETTINGS_GET }>,
Data<IndexScheduler>,
>,
chats_param: web::Path<ChatsParam>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats/settings route")?;
let ChatsParam { workspace_uid } = chats_param.into_inner();
let mut settings = match index_scheduler.chat_settings(&workspace_uid)? {
Some(settings) => settings,
None => {
return Err(ResponseError::from_msg(
format!("Chat `{workspace_uid}` not found"),
Code::ChatNotFound,
))
}
};
settings.hide_secrets();
Ok(HttpResponse::Ok().json(settings))
}
async fn patch_settings(
index_scheduler: GuardedData<
ActionPolicy<{ actions::CHATS_SETTINGS_UPDATE }>,
Data<IndexScheduler>,
>,
chats_param: web::Path<ChatsParam>,
web::Json(new): web::Json<ChatWorkspaceSettings>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats/settings route")?;
let ChatsParam { workspace_uid } = chats_param.into_inner();
let old_settings = index_scheduler.chat_settings(&workspace_uid)?.unwrap_or_default();
let prompts = match new.prompts {
Setting::Set(new_prompts) => DbChatCompletionPrompts {
system: match new_prompts.system {
Setting::Set(new_system) => new_system,
Setting::Reset => DEFAULT_CHAT_SYSTEM_PROMPT.to_string(),
Setting::NotSet => old_settings.prompts.system,
},
search_description: match new_prompts.search_description {
Setting::Set(new_description) => new_description,
Setting::Reset => DEFAULT_CHAT_SEARCH_DESCRIPTION_PROMPT.to_string(),
Setting::NotSet => old_settings.prompts.search_description,
},
search_q_param: match new_prompts.search_q_param {
Setting::Set(new_description) => new_description,
Setting::Reset => DEFAULT_CHAT_SEARCH_Q_PARAM_PROMPT.to_string(),
Setting::NotSet => old_settings.prompts.search_q_param,
},
search_index_uid_param: match new_prompts.search_index_uid_param {
Setting::Set(new_description) => new_description,
Setting::Reset => DEFAULT_CHAT_SEARCH_INDEX_UID_PARAM_PROMPT.to_string(),
Setting::NotSet => old_settings.prompts.search_index_uid_param,
},
},
Setting::Reset => DbChatCompletionPrompts::default(),
Setting::NotSet => old_settings.prompts,
};
let mut settings = ChatCompletionSettings {
source: match new.source {
Setting::Set(new_source) => new_source.into(),
Setting::Reset => DbChatCompletionSource::default(),
Setting::NotSet => old_settings.source,
},
org_id: match new.org_id {
Setting::Set(new_org_id) => Some(new_org_id),
Setting::Reset => None,
Setting::NotSet => old_settings.org_id,
},
project_id: match new.project_id {
Setting::Set(new_project_id) => Some(new_project_id),
Setting::Reset => None,
Setting::NotSet => old_settings.project_id,
},
api_version: match new.api_version {
Setting::Set(new_api_version) => Some(new_api_version),
Setting::Reset => None,
Setting::NotSet => old_settings.api_version,
},
deployment_id: match new.deployment_id {
Setting::Set(new_deployment_id) => Some(new_deployment_id),
Setting::Reset => None,
Setting::NotSet => old_settings.deployment_id,
},
base_url: match new.base_url {
Setting::Set(new_base_url) => Some(new_base_url),
Setting::Reset => None,
Setting::NotSet => old_settings.base_url,
},
api_key: match new.api_key {
Setting::Set(new_api_key) => Some(new_api_key),
Setting::Reset => None,
Setting::NotSet => old_settings.api_key,
},
prompts,
};
// TODO send analytics
// analytics.publish(
// PatchNetworkAnalytics {
// network_size: merged_remotes.len(),
// network_has_self: merged_self.is_some(),
// },
// &req,
// );
settings.validate()?;
index_scheduler.put_chat_settings(&workspace_uid, &settings)?;
settings.hide_secrets();
Ok(HttpResponse::Ok().json(settings))
}
async fn reset_settings(
index_scheduler: GuardedData<
ActionPolicy<{ actions::CHATS_SETTINGS_UPDATE }>,
Data<IndexScheduler>,
>,
chats_param: web::Path<ChatsParam>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats/settings route")?;
let ChatsParam { workspace_uid } = chats_param.into_inner();
if index_scheduler.chat_settings(&workspace_uid)?.is_some() {
let settings = Default::default();
index_scheduler.put_chat_settings(&workspace_uid, &settings)?;
Ok(HttpResponse::Ok().json(settings))
} else {
Err(ResponseError::from_msg(
format!("Chat `{workspace_uid}` not found"),
Code::ChatNotFound,
))
}
}
#[derive(Debug, Clone, Deserialize, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct ChatWorkspaceSettings {
#[serde(default)]
#[deserr(default)]
#[schema(value_type = Option<ChatCompletionSource>)]
pub source: Setting<ChatCompletionSource>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionOrgId>)]
#[schema(value_type = Option<String>, example = json!("dcba4321..."))]
pub org_id: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionProjectId>)]
#[schema(value_type = Option<String>, example = json!("4321dcba..."))]
pub project_id: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionApiVersion>)]
#[schema(value_type = Option<String>, example = json!("2024-02-01"))]
pub api_version: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionDeploymentId>)]
#[schema(value_type = Option<String>, example = json!("1234abcd..."))]
pub deployment_id: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionBaseApi>)]
#[schema(value_type = Option<String>, example = json!("https://api.mistral.ai/v1"))]
pub base_url: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionApiKey>)]
#[schema(value_type = Option<String>, example = json!("abcd1234..."))]
pub api_key: Setting<String>,
#[serde(default)]
#[deserr(default)]
#[schema(inline, value_type = Option<ChatPrompts>)]
pub prompts: Setting<ChatPrompts>,
}
#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, Deserr, ToSchema)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub enum ChatCompletionSource {
#[default]
OpenAi,
Mistral,
Gemini,
AzureOpenAi,
VLlm,
}
impl From<ChatCompletionSource> for DbChatCompletionSource {
fn from(source: ChatCompletionSource) -> Self {
use ChatCompletionSource::*;
match source {
OpenAi => DbChatCompletionSource::OpenAi,
Mistral => DbChatCompletionSource::Mistral,
Gemini => DbChatCompletionSource::Gemini,
AzureOpenAi => DbChatCompletionSource::AzureOpenAi,
VLlm => DbChatCompletionSource::VLlm,
}
}
}
#[derive(Debug, Clone, Deserialize, Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct ChatPrompts {
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionSystemPrompt>)]
#[schema(value_type = Option<String>, example = json!("You are a helpful assistant..."))]
pub system: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionSearchDescriptionPrompt>)]
#[schema(value_type = Option<String>, example = json!("This is the search function..."))]
pub search_description: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionSearchQueryParamPrompt>)]
#[schema(value_type = Option<String>, example = json!("This is query parameter..."))]
pub search_q_param: Setting<String>,
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidChatCompletionSearchIndexUidParamPrompt>)]
#[schema(value_type = Option<String>, example = json!("This is index you want to search in..."))]
pub search_index_uid_param: Setting<String>,
}
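For orientation, here is a minimal sketch of a workspace settings payload matching the structures above, expressed with serde_json's `json!` macro; every value is invented and only illustrates the camelCase field names produced by the serde/deserr attributes.
use serde_json::{json, Value};
// Hedged example only: an illustrative settings body for the chat workspace route.
fn example_chat_workspace_settings_body() -> Value {
    json!({
        "source": "openAi",
        "orgId": "dcba4321...",
        "apiKey": "abcd1234...",
        "prompts": {
            "system": "You are a helpful assistant...",
            "searchQParam": "This is the query parameter..."
        }
    })
}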

View file

@ -0,0 +1,253 @@
use std::cell::RefCell;
use std::sync::RwLock;
use actix_web_lab::sse::{self, Event};
use async_openai::types::{
ChatChoiceStream, ChatCompletionMessageToolCall, ChatCompletionMessageToolCallChunk,
ChatCompletionRequestAssistantMessage, ChatCompletionRequestMessage,
ChatCompletionStreamResponseDelta, ChatCompletionToolType, CreateChatCompletionStreamResponse,
FunctionCall, FunctionCallStream, Role,
};
use bumpalo::Bump;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::heed::RoTxn;
use meilisearch_types::milli::index::ChatConfig;
use meilisearch_types::milli::prompt::{Prompt, PromptData};
use meilisearch_types::milli::update::new::document::DocumentFromDb;
use meilisearch_types::milli::{
DocumentId, FieldIdMapWithMetadata, GlobalFieldsIdsMap, MetadataBuilder,
};
use meilisearch_types::{Document, Index};
use serde::Serialize;
use tokio::sync::mpsc::error::SendError;
use tokio::sync::mpsc::Sender;
use super::errors::StreamErrorEvent;
use super::MEILI_APPEND_CONVERSATION_MESSAGE_NAME;
use crate::routes::chats::{MEILI_SEARCH_PROGRESS_NAME, MEILI_SEARCH_SOURCES_NAME};
pub struct SseEventSender(Sender<Event>);
impl SseEventSender {
pub fn new(sender: Sender<Event>) -> Self {
Self(sender)
}
/// Ask the front-end user to append this tool *call* to the conversation
pub async fn append_tool_call_conversation_message(
&self,
resp: CreateChatCompletionStreamResponse,
call_id: String,
function_name: String,
function_arguments: String,
) -> Result<(), SendError<Event>> {
#[allow(deprecated)] // function_call
let message =
ChatCompletionRequestMessage::Assistant(ChatCompletionRequestAssistantMessage {
content: None,
refusal: None,
name: None,
audio: None,
tool_calls: Some(vec![ChatCompletionMessageToolCall {
id: call_id,
r#type: Some(ChatCompletionToolType::Function),
function: FunctionCall { name: function_name, arguments: function_arguments },
}]),
function_call: None,
});
self.append_conversation_message(resp, &message).await
}
/// Ask the front-end user to append this message to the conversation
pub async fn append_conversation_message(
&self,
mut resp: CreateChatCompletionStreamResponse,
message: &ChatCompletionRequestMessage,
) -> Result<(), SendError<Event>> {
let call_text = serde_json::to_string(message).unwrap();
let tool_call = ChatCompletionMessageToolCallChunk {
index: 0,
id: Some(uuid::Uuid::new_v4().to_string()),
r#type: Some(ChatCompletionToolType::Function),
function: Some(FunctionCallStream {
name: Some(MEILI_APPEND_CONVERSATION_MESSAGE_NAME.to_string()),
arguments: Some(call_text),
}),
};
resp.choices[0] = ChatChoiceStream {
index: 0,
#[allow(deprecated)] // function_call
delta: ChatCompletionStreamResponseDelta {
content: None,
function_call: None,
tool_calls: Some(vec![tool_call]),
role: Some(Role::Assistant),
refusal: None,
},
finish_reason: None,
logprobs: None,
};
self.send_json(&resp).await
}
pub async fn report_search_progress(
&self,
mut resp: CreateChatCompletionStreamResponse,
call_id: &str,
function_name: &str,
function_arguments: &str,
) -> Result<(), SendError<Event>> {
#[derive(Debug, Clone, Serialize)]
/// Provides information about the current Meilisearch search operation.
struct MeiliSearchProgress<'a> {
/// The call ID to track the sources of the search.
call_id: &'a str,
/// The name of the function we are executing.
function_name: &'a str,
/// The arguments of the function we are executing, encoded in JSON.
function_arguments: &'a str,
}
let progress = MeiliSearchProgress { call_id, function_name, function_arguments };
let call_text = serde_json::to_string(&progress).unwrap();
let tool_call = ChatCompletionMessageToolCallChunk {
index: 0,
id: Some(uuid::Uuid::new_v4().to_string()),
r#type: Some(ChatCompletionToolType::Function),
function: Some(FunctionCallStream {
name: Some(MEILI_SEARCH_PROGRESS_NAME.to_string()),
arguments: Some(call_text),
}),
};
resp.choices[0] = ChatChoiceStream {
index: 0,
#[allow(deprecated)] // function_call
delta: ChatCompletionStreamResponseDelta {
content: None,
function_call: None,
tool_calls: Some(vec![tool_call]),
role: Some(Role::Assistant),
refusal: None,
},
finish_reason: None,
logprobs: None,
};
self.send_json(&resp).await
}
pub async fn report_sources(
&self,
mut resp: CreateChatCompletionStreamResponse,
call_id: &str,
documents: &[Document],
) -> Result<(), SendError<Event>> {
#[derive(Debug, Clone, Serialize)]
/// Provides sources of the search.
struct MeiliSearchSources<'a> {
/// The call ID to track the original search associated to those sources.
call_id: &'a str,
/// The documents associated with the search (call_id).
/// Only the displayed attributes of the documents are returned.
sources: &'a [Document],
}
let sources = MeiliSearchSources { call_id, sources: documents };
let call_text = serde_json::to_string(&sources).unwrap();
let tool_call = ChatCompletionMessageToolCallChunk {
index: 0,
id: Some(uuid::Uuid::new_v4().to_string()),
r#type: Some(ChatCompletionToolType::Function),
function: Some(FunctionCallStream {
name: Some(MEILI_SEARCH_SOURCES_NAME.to_string()),
arguments: Some(call_text),
}),
};
resp.choices[0] = ChatChoiceStream {
index: 0,
#[allow(deprecated)] // function_call
delta: ChatCompletionStreamResponseDelta {
content: None,
function_call: None,
tool_calls: Some(vec![tool_call]),
role: Some(Role::Assistant),
refusal: None,
},
finish_reason: None,
logprobs: None,
};
self.send_json(&resp).await
}
pub async fn forward_response(
&self,
resp: &CreateChatCompletionStreamResponse,
) -> Result<(), SendError<Event>> {
self.send_json(resp).await
}
pub async fn send_error(&self, error: &StreamErrorEvent) -> Result<(), SendError<Event>> {
self.send_json(error).await
}
pub async fn stop(self) -> Result<(), SendError<Event>> {
// This is how OpenAI signals a correct end of stream
// <https://platform.openai.com/docs/api-reference/assistants-streaming/events>
const DONE_DATA: &str = "[DONE]";
self.0.send(Event::Data(sse::Data::new(DONE_DATA))).await
}
async fn send_json<S: Serialize>(&self, data: &S) -> Result<(), SendError<Event>> {
self.0.send(Event::Data(sse::Data::new_json(data).unwrap())).await
}
}
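A minimal usage sketch for the sender above, assuming a tokio runtime; the channel capacity and the way the receiver is drained are invented and stand in for the real SSE response plumbing.
// Hedged sketch: forward one chunk, then terminate the stream with `[DONE]`.
async fn stream_one_chunk(
    resp: CreateChatCompletionStreamResponse,
) -> Result<(), SendError<Event>> {
    let (tx, mut rx) = tokio::sync::mpsc::channel::<Event>(32);
    let events = SseEventSender::new(tx);
    events.forward_response(&resp).await?;
    events.stop().await?;
    // In the real route the receiver is handed to the SSE response machinery;
    // draining it here only keeps the sketch self-contained.
    while let Some(_event) = rx.recv().await {}
    Ok(())
}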
/// Format documents based on the provided template and maximum bytes.
///
/// This formatting function is usually used to generate a summary of the documents for LLMs.
pub fn format_documents<'doc>(
rtxn: &RoTxn<'_>,
index: &Index,
doc_alloc: &'doc Bump,
internal_docids: Vec<DocumentId>,
) -> Result<Vec<&'doc str>, ResponseError> {
let ChatConfig { prompt: PromptData { template, max_bytes }, .. } = index.chat_config(rtxn)?;
let prompt = Prompt::new(template, max_bytes).unwrap();
let fid_map = index.fields_ids_map(rtxn)?;
let metadata_builder = MetadataBuilder::from_index(index, rtxn)?;
let fid_map_with_meta = FieldIdMapWithMetadata::new(fid_map.clone(), metadata_builder);
let global = RwLock::new(fid_map_with_meta);
let gfid_map = RefCell::new(GlobalFieldsIdsMap::new(&global));
let external_ids: Vec<String> = index
.external_id_of(rtxn, internal_docids.iter().copied())?
.into_iter()
.collect::<Result<_, _>>()?;
let mut renders = Vec::new();
for (docid, external_docid) in internal_docids.into_iter().zip(external_ids) {
let document = match DocumentFromDb::new(docid, rtxn, index, &fid_map)? {
Some(doc) => doc,
None => unreachable!("Document with internal ID {docid} not found"),
};
let text = match prompt.render_document(&external_docid, document, &gfid_map, doc_alloc) {
Ok(text) => text,
Err(err) => {
return Err(ResponseError::from_msg(
err.to_string(),
Code::InvalidChatSettingDocumentTemplate,
))
}
};
renders.push(text);
}
Ok(renders)
}
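A hedged usage sketch of the function above, assuming an already opened `Index`; the internal document ids are invented and must exist in the index for the call to succeed.
// Sketch only: render two documents and copy them out of the bump allocator.
fn render_two_documents(index: &Index) -> Result<Vec<String>, ResponseError> {
    let rtxn = index.read_txn()?;
    let doc_alloc = Bump::new();
    let rendered = format_documents(&rtxn, index, &doc_alloc, vec![0, 1])?;
    Ok(rendered.into_iter().map(str::to_string).collect())
}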

View file

@ -53,6 +53,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
network: Some(false),
get_task_documents_route: Some(false),
composite_embedders: Some(false),
chat_completions: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
@ -97,6 +98,8 @@ pub struct RuntimeTogglableFeatures {
pub get_task_documents_route: Option<bool>,
#[deserr(default)]
pub composite_embedders: Option<bool>,
#[deserr(default)]
pub chat_completions: Option<bool>,
}
impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogglableFeatures {
@ -109,6 +112,7 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
network,
get_task_documents_route,
composite_embedders,
chat_completions,
} = value;
Self {
@ -119,6 +123,7 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
network: Some(network),
get_task_documents_route: Some(get_task_documents_route),
composite_embedders: Some(composite_embedders),
chat_completions: Some(chat_completions),
}
}
}
@ -132,6 +137,7 @@ pub struct PatchExperimentalFeatureAnalytics {
network: bool,
get_task_documents_route: bool,
composite_embedders: bool,
chat_completions: bool,
}
impl Aggregate for PatchExperimentalFeatureAnalytics {
@ -148,6 +154,7 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
network: new.network,
get_task_documents_route: new.get_task_documents_route,
composite_embedders: new.composite_embedders,
chat_completions: new.chat_completions,
})
}
@ -173,6 +180,7 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
network: Some(false),
get_task_documents_route: Some(false),
composite_embedders: Some(false),
chat_completions: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
@ -214,6 +222,7 @@ async fn patch_features(
.0
.composite_embedders
.unwrap_or(old_features.composite_embedders),
chat_completions: new_features.0.chat_completions.unwrap_or(old_features.chat_completions),
};
// explicitly destructure for analytics rather than using the `Serialize` implementation, because
@ -227,6 +236,7 @@ async fn patch_features(
network,
get_task_documents_route,
composite_embedders,
chat_completions,
} = new_features;
analytics.publish(
@ -238,6 +248,7 @@ async fn patch_features(
network,
get_task_documents_route,
composite_embedders,
chat_completions,
},
&req,
);

View file

@ -172,7 +172,7 @@ pub async fn list_indexes(
debug!(parameters = ?paginate, "List indexes");
let filters = index_scheduler.filters();
let (total, indexes) =
index_scheduler.get_paginated_indexes_stats(filters, *paginate.offset, *paginate.limit)?;
index_scheduler.paginated_indexes_stats(filters, *paginate.offset, *paginate.limit)?;
let indexes = indexes
.into_iter()
.map(|(name, stats)| IndexView {

View file

@ -5,8 +5,9 @@ use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::update::Setting;
use meilisearch_types::settings::{
settings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
settings, ChatSettings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
};
use meilisearch_types::tasks::KindWithContent;
use tracing::debug;
@ -508,6 +509,17 @@ make_setting_routes!(
camelcase_attr: "prefixSearch",
analytics: PrefixSearchAnalytics
},
{
route: "/chat",
update_verb: put,
value_type: ChatSettings,
err_type: meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsIndexChat,
>,
attr: chat,
camelcase_attr: "chat",
analytics: ChatAnalytics
},
);
#[utoipa::path(
@ -597,6 +609,7 @@ pub async fn update_all(
),
facet_search: FacetSearchAnalytics::new(new_settings.facet_search.as_ref().set()),
prefix_search: PrefixSearchAnalytics::new(new_settings.prefix_search.as_ref().set()),
chat: ChatAnalytics::new(new_settings.chat.as_ref().set()),
},
&req,
);
@ -651,7 +664,11 @@ pub async fn get_all(
let index = index_scheduler.index(&index_uid)?;
let rtxn = index.read_txn()?;
let new_settings = settings(&index, &rtxn, SecretPolicy::HideSecrets)?;
let mut new_settings = settings(&index, &rtxn, SecretPolicy::HideSecrets)?;
if index_scheduler.features().check_chat_completions("showing index `chat` settings").is_err() {
new_settings.chat = Setting::NotSet;
}
debug!(returns = ?new_settings, "Get all settings");
Ok(HttpResponse::Ok().json(new_settings))
}
@ -741,5 +758,9 @@ fn validate_settings(
}
}
if let Setting::Set(_chat) = &settings.chat {
features.check_chat_completions("setting `chat` in the index settings")?;
}
Ok(settings.validate()?)
}

View file

@ -10,8 +10,8 @@ use meilisearch_types::locales::{Locale, LocalizedAttributesRuleView};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::FilterableAttributesRule;
use meilisearch_types::settings::{
FacetingSettings, PaginationSettings, PrefixSearchSettings, ProximityPrecisionView,
RankingRuleView, SettingEmbeddingSettings, TypoSettings,
ChatSettings, FacetingSettings, PaginationSettings, PrefixSearchSettings,
ProximityPrecisionView, RankingRuleView, SettingEmbeddingSettings, TypoSettings,
};
use serde::Serialize;
@ -39,6 +39,7 @@ pub struct SettingsAnalytics {
pub non_separator_tokens: NonSeparatorTokensAnalytics,
pub facet_search: FacetSearchAnalytics,
pub prefix_search: PrefixSearchAnalytics,
pub chat: ChatAnalytics,
}
impl Aggregate for SettingsAnalytics {
@ -198,6 +199,7 @@ impl Aggregate for SettingsAnalytics {
set: new.prefix_search.set | self.prefix_search.set,
value: new.prefix_search.value.or(self.prefix_search.value),
},
chat: ChatAnalytics { set: new.chat.set | self.chat.set },
})
}
@ -454,7 +456,9 @@ pub struct PaginationAnalytics {
impl PaginationAnalytics {
pub fn new(setting: Option<&PaginationSettings>) -> Self {
Self { max_total_hits: setting.as_ref().and_then(|s| s.max_total_hits.set()) }
Self {
max_total_hits: setting.as_ref().and_then(|s| s.max_total_hits.set().map(|x| x.into())),
}
}
pub fn into_settings(self) -> SettingsAnalytics {
@ -674,3 +678,18 @@ impl PrefixSearchAnalytics {
SettingsAnalytics { prefix_search: self, ..Default::default() }
}
}
#[derive(Serialize, Default)]
pub struct ChatAnalytics {
pub set: bool,
}
impl ChatAnalytics {
pub fn new(settings: Option<&ChatSettings>) -> Self {
Self { set: settings.is_some() }
}
pub fn into_settings(self) -> SettingsAnalytics {
SettingsAnalytics { chat: self, ..Default::default() }
}
}

View file

@ -52,6 +52,7 @@ const PAGINATION_DEFAULT_LIMIT_FN: fn() -> usize = || 20;
mod api_key;
pub mod batches;
pub mod chats;
mod dump;
pub mod features;
pub mod indexes;
@ -113,7 +114,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/swap-indexes").configure(swap_indexes::configure))
.service(web::scope("/metrics").configure(metrics::configure))
.service(web::scope("/experimental-features").configure(features::configure))
.service(web::scope("/network").configure(network::configure));
.service(web::scope("/network").configure(network::configure))
.service(web::scope("/chats").configure(chats::configure));
#[cfg(feature = "swagger")]
{

View file

@ -17,6 +17,7 @@ use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::heed::RoTxn;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::locales::Locale;
use meilisearch_types::milli::index::{self, SearchParameters};
use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy};
use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors;
use meilisearch_types::milli::vector::Embedder;
@ -122,9 +123,58 @@ pub struct SearchQuery {
pub locales: Option<Vec<Locale>>,
}
impl From<SearchParameters> for SearchQuery {
fn from(parameters: SearchParameters) -> Self {
let SearchParameters {
hybrid,
limit,
sort,
distinct,
matching_strategy,
attributes_to_search_on,
ranking_score_threshold,
} = parameters;
SearchQuery {
hybrid: hybrid.map(|index::HybridQuery { semantic_ratio, embedder }| HybridQuery {
semantic_ratio: SemanticRatio::try_from(semantic_ratio)
.ok()
.unwrap_or_else(DEFAULT_SEMANTIC_RATIO),
embedder,
}),
limit: limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT),
sort,
distinct,
matching_strategy: matching_strategy.map(MatchingStrategy::from).unwrap_or_default(),
attributes_to_search_on,
ranking_score_threshold: ranking_score_threshold.map(RankingScoreThreshold::from),
q: None,
vector: None,
offset: DEFAULT_SEARCH_OFFSET(),
page: None,
hits_per_page: None,
attributes_to_retrieve: None,
retrieve_vectors: false,
attributes_to_crop: None,
crop_length: DEFAULT_CROP_LENGTH(),
attributes_to_highlight: None,
show_matches_position: false,
show_ranking_score: false,
show_ranking_score_details: false,
filter: None,
facets: None,
highlight_pre_tag: DEFAULT_HIGHLIGHT_PRE_TAG(),
highlight_post_tag: DEFAULT_HIGHLIGHT_POST_TAG(),
crop_marker: DEFAULT_CROP_MARKER(),
locales: None,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Deserr, ToSchema, Serialize)]
#[deserr(try_from(f64) = TryFrom::try_from -> InvalidSearchRankingScoreThreshold)]
pub struct RankingScoreThreshold(f64);
impl std::convert::TryFrom<f64> for RankingScoreThreshold {
type Error = InvalidSearchRankingScoreThreshold;
@ -139,6 +189,14 @@ impl std::convert::TryFrom<f64> for RankingScoreThreshold {
}
}
impl From<index::RankingScoreThreshold> for RankingScoreThreshold {
fn from(threshold: index::RankingScoreThreshold) -> Self {
let threshold = threshold.as_f64();
assert!((0.0..=1.0).contains(&threshold));
RankingScoreThreshold(threshold)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Deserr)]
#[deserr(try_from(f64) = TryFrom::try_from -> InvalidSimilarRankingScoreThreshold)]
pub struct RankingScoreThresholdSimilar(f64);
@ -282,8 +340,8 @@ impl fmt::Debug for SearchQuery {
#[deserr(error = DeserrJsonError<InvalidSearchHybridQuery>, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct HybridQuery {
#[deserr(default, error = DeserrJsonError<InvalidSearchSemanticRatio>, default)]
#[schema(value_type = f32, default)]
#[deserr(default, error = DeserrJsonError<InvalidSearchSemanticRatio>)]
#[schema(default, value_type = f32)]
#[serde(default)]
pub semantic_ratio: SemanticRatio,
#[deserr(error = DeserrJsonError<InvalidSearchEmbedder>)]
@ -720,6 +778,16 @@ impl From<MatchingStrategy> for TermsMatchingStrategy {
}
}
impl From<index::MatchingStrategy> for MatchingStrategy {
fn from(other: index::MatchingStrategy) -> Self {
match other {
index::MatchingStrategy::Last => Self::Last,
index::MatchingStrategy::All => Self::All,
index::MatchingStrategy::Frequency => Self::Frequency,
}
}
}
#[derive(Debug, Default, Clone, PartialEq, Eq, Deserr)]
#[deserr(rename_all = camelCase)]
pub enum FacetValuesSort {
@ -883,7 +951,7 @@ pub fn add_search_rules(filter: &mut Option<Value>, rules: IndexSearchRules) {
}
}
fn prepare_search<'t>(
pub fn prepare_search<'t>(
index: &'t Index,
rtxn: &'t RoTxn,
query: &'t SearchQuery,
@ -1266,7 +1334,7 @@ struct HitMaker<'a> {
vectors_fid: Option<FieldId>,
retrieve_vectors: RetrieveVectors,
to_retrieve_ids: BTreeSet<FieldId>,
embedding_configs: Vec<milli::index::IndexEmbeddingConfig>,
embedding_configs: Vec<index::IndexEmbeddingConfig>,
matcher_builder: MatcherBuilder<'a>,
formatted_options: BTreeMap<FieldId, FormatOptions>,
show_ranking_score: bool,

View file

@ -1,5 +1,5 @@
//! This file implements a queue of searches to process and the ability to control how many searches can be run in parallel.
//! We need this because we don't want to process more search requests than we have cores.
//! We need this because we don't want to process more search requests than the available CPU cores.
//! That slows down everything and consumes RAM for no reason.
//! The steps to do a search are to get the `SearchQueue` data structure and try to get a search permit.
//! This can fail if the queue is full, and we need to drop your search request to register a new one.
@ -8,7 +8,7 @@
//!
//! In order to do a search request you should try to get a search permit.
//! Retrieve the `SearchQueue` structure from actix-web (`search_queue: Data<SearchQueue>`)
//! and right before processing the search, calls the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
//! and right before processing the search, call the `SearchQueue::try_get_search_permit` method: `search_queue.try_get_search_permit().await?;`
//!
//! What is going to happen at this point is that you're going to send a oneshot::Sender over an async mpsc channel.
//! Then, the queue/scheduler is going to either:
@ -121,12 +121,12 @@ impl SearchQueue {
let mut queue: Vec<oneshot::Sender<Permit>> = Default::default();
let mut rng: StdRng = StdRng::from_entropy();
let mut searches_running: usize = 0;
// By having a capacity of parallelism we ensures that every time a search finish it can release its RAM asap
// By having a capacity of parallelism we ensure that every time a search finishes it can release its RAM asap
let (sender, mut search_finished) = mpsc::channel(parallelism.into());
loop {
tokio::select! {
// biased select because we wants to free up space before trying to register new tasks
// biased select because we want to free up space before trying to register new tasks
biased;
_ = search_finished.recv() => {
searches_running = searches_running.saturating_sub(1);
@ -148,11 +148,11 @@ impl SearchQueue {
if searches_running < usize::from(parallelism) && queue.is_empty() {
searches_running += 1;
// if the search requests die it's not a hard error on our side
// if the search requests die, it's not a hard error on our side
let _ = search_request.send(Permit { sender: sender.clone() });
continue;
} else if capacity == 0 {
// in the very specific case where we have a capacity of zero
// in the very specific case where we have a capacity of zero,
// we must refuse the request straight away without going through
// the queue stuff.
drop(search_request);
@ -183,7 +183,7 @@ impl SearchQueue {
.map_err(|_| MeilisearchHttpError::TooManySearchRequests(self.capacity))?;
// If we've been waiting for more than one minute to get a search permit, it's better to simply
// abort the search request than spending time processing something were the client
// abort the search request than spending time processing something where the client
// most certainly exited or got a timeout a long time ago.
// We may find a better solution in https://github.com/actix/actix-web/issues/3462.
if now.elapsed() > self.time_to_abort {
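A hedged sketch of the permit pattern described in the module documentation above; only `try_get_search_permit` comes from this file, while the handler shape and the `perform_search` helper are illustrative.
// Assumes actix-web's `Data` wrapper and a hypothetical `perform_search` helper.
async fn search_with_permit(
    search_queue: Data<SearchQueue>,
    query: SearchQuery,
) -> Result<HttpResponse, ResponseError> {
    // The permit bounds the number of concurrent searches; dropping it at the end
    // of this scope tells the queue that a slot is free again.
    let _permit = search_queue.try_get_search_permit().await?;
    let results = perform_search(query).await?; // hypothetical helper
    Ok(HttpResponse::Ok().json(results))
}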

View file

@ -421,7 +421,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
{
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`",
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
@ -538,7 +538,7 @@ async fn error_add_api_key_parameters_uid_already_exist() {
let (response, code) = server.add_api_key(content).await;
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
{
"message": "`uid` field value `4bc0887a-0e41-4f3b-935d-0c451dcee9c8` is already an existing API key.",
"message": "`uid` field value `[uuid]` is already an existing API key.",
"code": "api_key_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#api_key_already_exists"
@ -849,11 +849,27 @@ async fn list_api_keys() {
"expiresAt": null,
"createdAt": "[ignored]",
"updatedAt": "[ignored]"
},
{
"name": "Default Chat API Key",
"description": "Use it to chat and search from the frontend",
"key": "[ignored]",
"uid": "[ignored]",
"actions": [
"chatCompletions",
"search"
],
"indexes": [
"*"
],
"expiresAt": null,
"createdAt": "[ignored]",
"updatedAt": "[ignored]"
}
],
"offset": 0,
"limit": 20,
"total": 3
"total": 4
}
"###);
meili_snap::snapshot!(code, @"200 OK");

View file

@ -93,7 +93,7 @@ async fn create_api_key_bad_actions() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`",
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"

View file

@ -310,7 +310,7 @@ async fn test_summarized_document_addition_or_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -353,7 +353,7 @@ async fn test_summarized_document_addition_or_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -398,7 +398,7 @@ async fn test_summarized_delete_documents_by_batch() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -440,7 +440,7 @@ async fn test_summarized_delete_documents_by_batch() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -488,7 +488,7 @@ async fn test_summarized_delete_documents_by_filter() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -532,7 +532,7 @@ async fn test_summarized_delete_documents_by_filter() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -576,7 +576,7 @@ async fn test_summarized_delete_documents_by_filter() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -622,7 +622,7 @@ async fn test_summarized_delete_document_by_id() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
@ -664,7 +664,7 @@ async fn test_summarized_delete_document_by_id() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -731,7 +731,7 @@ async fn test_summarized_settings_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
}
"###);
}
@ -773,7 +773,7 @@ async fn test_summarized_index_creation() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 0 of type `indexCreation` cannot be batched"
"batchStrategy": "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task."
}
"###);
@ -812,7 +812,7 @@ async fn test_summarized_index_creation() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 1 of type `indexCreation` cannot be batched"
"batchStrategy": "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task."
}
"###);
}
@ -964,7 +964,7 @@ async fn test_summarized_index_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 0 of type `indexUpdate` cannot be batched"
"batchStrategy": "created batch containing only task with id 0 of type `indexUpdate` that cannot be batched with any other task."
}
"###);
@ -1003,7 +1003,7 @@ async fn test_summarized_index_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 1 of type `indexUpdate` cannot be batched"
"batchStrategy": "created batch containing only task with id 1 of type `indexUpdate` that cannot be batched with any other task."
}
"###);
@ -1043,7 +1043,7 @@ async fn test_summarized_index_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 3 of type `indexUpdate` cannot be batched"
"batchStrategy": "created batch containing only task with id 3 of type `indexUpdate` that cannot be batched with any other task."
}
"###);
@ -1082,7 +1082,7 @@ async fn test_summarized_index_update() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 4 of type `indexUpdate` cannot be batched"
"batchStrategy": "created batch containing only task with id 4 of type `indexUpdate` that cannot be batched with any other task."
}
"###);
}
@ -1134,7 +1134,7 @@ async fn test_summarized_index_swap() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 0 of type `indexSwap` cannot be batched"
"batchStrategy": "created batch containing only task with id 0 of type `indexSwap` that cannot be batched with any other task."
}
"###);
@ -1177,7 +1177,7 @@ async fn test_summarized_index_swap() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 1 of type `indexCreation` cannot be batched"
"batchStrategy": "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task."
}
"###);
}
@ -1224,7 +1224,7 @@ async fn test_summarized_batch_cancelation() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 1 of type `taskCancelation` cannot be batched"
"batchStrategy": "created batch containing only task with id 1 of type `taskCancelation` that cannot be batched with any other task."
}
"###);
}
@ -1271,7 +1271,7 @@ async fn test_summarized_batch_deletion() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `taskDeletion` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `taskDeletion` because they cannot be batched with tasks of any other type."
}
"###);
}
@ -1313,7 +1313,7 @@ async fn test_summarized_dump_creation() {
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "task with id 0 of type `dumpCreation` cannot be batched"
"batchStrategy": "created batch containing only task with id 0 of type `dumpCreation` that cannot be batched with any other task."
}
"###);
}

View file

@ -29,6 +29,10 @@ impl<'a> Index<'a, Owned> {
}
}
pub fn with_encoder(&self, encoder: Encoder) -> Index<'a, Owned> {
Index { uid: self.uid.clone(), service: self.service, encoder, marker: PhantomData }
}
pub async fn load_test_set(&self) -> u64 {
let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
let (response, code) = self
@ -290,6 +294,20 @@ impl Index<'_, Shared> {
}
(task, code)
}
pub async fn update_index_fail(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
let (mut task, code) = self._update(primary_key).await;
if code.is_success() {
task = self.wait_task(task.uid()).await;
if task.is_success() {
panic!(
"`update_index_fail` succeeded: {}",
serde_json::to_string_pretty(&task).unwrap()
);
}
}
(task, code)
}
}
#[allow(dead_code)]
@ -333,6 +351,14 @@ impl<State> Index<'_, State> {
self.service.post_encoded("/indexes", body, self.encoder).await
}
pub(super) async fn _update(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
let body = json!({
"primaryKey": primary_key,
});
let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, body, self.encoder).await
}
pub(super) async fn _delete(&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
self.service.delete(url).await

View file

@ -128,7 +128,8 @@ impl Display for Value {
".finishedAt" => "[date]",
".duration" => "[duration]",
".processingTimeMs" => "[duration]",
".details.embedders.*.url" => "[url]"
".details.embedders.*.url" => "[url]",
".details.dumpUid" => "[dump_uid]",
})
)
}
@ -264,6 +265,24 @@ pub static SCORE_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
])
});
pub async fn shared_index_with_score_documents() -> &'static Index<'static, Shared> {
static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
INDEX.get_or_init(|| async {
let server = Server::new_shared();
let index = server._index("SHARED_SCORE_DOCUMENTS").to_shared();
let documents = SCORE_DOCUMENTS.clone();
let (response, _code) = index._add_documents(documents, None).await;
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
._update_settings(
json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id", "title"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
index
}).await
}
pub static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
@ -333,7 +352,7 @@ pub async fn shared_index_with_nested_documents() -> &'static Index<'static, Sha
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
._update_settings(
json!({"filterableAttributes": ["father", "doggos"], "sortableAttributes": ["doggos"]}),
json!({"filterableAttributes": ["father", "doggos", "cattos"], "sortableAttributes": ["doggos"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
@ -435,3 +454,57 @@ pub async fn shared_index_with_test_set() -> &'static Index<'static, Shared> {
})
.await
}
pub static GEO_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
"id": 1,
"name": "Taco Truck",
"address": "444 Salsa Street, Burritoville",
"type": "Mexican",
"rating": 9,
"_geo": {
"lat": 34.0522,
"lng": -118.2437
}
},
{
"id": 2,
"name": "La Bella Italia",
"address": "456 Elm Street, Townsville",
"type": "Italian",
"rating": 9,
"_geo": {
"lat": "45.4777599",
"lng": "9.1967508"
}
},
{
"id": 3,
"name": "Crêpe Truck",
"address": "2 Billig Avenue, Rouenville",
"type": "French",
"rating": 10
}
])
});
pub async fn shared_index_with_geo_documents() -> &'static Index<'static, Shared> {
static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
INDEX
.get_or_init(|| async {
let server = Server::new_shared();
let index = server._index("SHARED_GEO_DOCUMENTS").to_shared();
let (response, _code) = index._add_documents(GEO_DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index
._update_settings(
json!({"filterableAttributes": ["_geo"], "sortableAttributes": ["_geo"]}),
)
.await;
index.wait_task(response.uid()).await.succeeded();
index
})
.await
}

View file

@ -347,6 +347,16 @@ impl<State> Server<State> {
}
}
pub fn unique_index_with_prefix(&self, prefix: &str) -> Index<'_> {
let uuid = Uuid::new_v4();
Index {
uid: format!("{prefix}-{}", uuid),
service: &self.service,
encoder: Encoder::Plain,
marker: PhantomData,
}
}
pub fn unique_index_with_encoder(&self, encoder: Encoder) -> Index<'_> {
let uuid = Uuid::new_v4();
Index { uid: uuid.to_string(), service: &self.service, encoder, marker: PhantomData }
@ -399,18 +409,9 @@ impl<State> Server<State> {
pub async fn wait_task(&self, update_id: u64) -> Value {
// try several times to get status, or panic to not wait forever
let url = format!("/tasks/{}", update_id);
// Increase timeout for vector-related tests
let max_attempts = if url.contains("/tasks/") {
if update_id > 1000 {
400 // 200 seconds for vector tests
} else {
100 // 50 seconds for other tests
}
} else {
100 // 50 seconds for other tests
};
let max_attempts = 400; // 200 seconds total, 0.5s per attempt
for _ in 0..max_attempts {
for i in 0..max_attempts {
let (response, status_code) = self.service.get(&url).await;
assert_eq!(200, status_code, "response: {}", response);
@ -420,6 +421,10 @@ impl<State> Server<State> {
// wait 0.5 second.
sleep(Duration::from_millis(500)).await;
if i == max_attempts - 1 {
dbg!(response);
}
}
panic!("Timeout waiting for update id");
}

File diff suppressed because it is too large

View file

@ -1,39 +1,35 @@
use meili_snap::{json_string, snapshot};
use crate::common::{GetAllDocumentsOptions, Server};
use crate::common::{shared_does_not_exists_index, GetAllDocumentsOptions, Server};
use crate::json;
#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let (task, code) = index.delete_document(0).await;
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
index.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
async fn delete_one_unexisting_document() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{}", response);
let update = index.wait_task(response.uid()).await;
assert_eq!(update["status"], "succeeded");
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
}
#[actix_rt::test]
async fn delete_one_document() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) =
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, status_code) = server.index("test").delete_document(0).await;
let (task, status_code) = index.delete_document(0).await;
assert_eq!(status_code, 202);
index.wait_task(task.uid()).await.succeeded();
@ -43,20 +39,18 @@ async fn delete_one_document() {
#[actix_rt::test]
async fn clear_all_documents_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
index.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
async fn clear_all_documents() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index
.add_documents(
json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }]),
@ -67,7 +61,7 @@ async fn clear_all_documents() {
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await;
let _update = index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -75,14 +69,14 @@ async fn clear_all_documents() {
#[actix_rt::test]
async fn clear_all_documents_empty_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await;
let _update = index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@ -90,33 +84,31 @@ async fn clear_all_documents_empty_index() {
#[actix_rt::test]
async fn error_delete_batch_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.delete_batch(vec![]).await;
let expected_response = json!({
"message": "Index `test` not found.",
"message": format!("Index `{}` not found.", index.uid),
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
let response = index.wait_task(task.uid()).await.failed();
assert_eq!(response["error"], expected_response);
}
#[actix_rt::test]
async fn delete_batch() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await;
let _update = index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@ -125,14 +117,14 @@ async fn delete_batch() {
#[actix_rt::test]
async fn delete_no_document_batch() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{}", _response);
let (response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{response}");
let _update = index.wait_task(_response.uid()).await;
let _update = index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
@ -140,8 +132,8 @@ async fn delete_no_document_batch() {
#[actix_rt::test]
async fn delete_document_by_filter() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
index.update_settings_filterable_attributes(json!(["color"])).await;
let (task, _status_code) = index
.add_documents(
@ -178,22 +170,22 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter(json!({ "filter": "color = blue"})).await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 2,
"indexUid": "doggo",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "doggo",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -251,22 +243,22 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter(json!({ "filter": "color NOT EXISTS"})).await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"taskUid": 3,
"indexUid": "doggo",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 3,
"batchUid": 3,
"indexUid": "doggo",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -321,8 +313,8 @@ async fn delete_document_by_filter() {
#[actix_rt::test]
async fn delete_document_by_complex_filter() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
index.update_settings_filterable_attributes(json!(["color"])).await;
let (task, _status_code) = index
.add_documents(
@ -343,22 +335,22 @@ async fn delete_document_by_complex_filter() {
)
.await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 2,
"indexUid": "doggo",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "doggo",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -402,22 +394,22 @@ async fn delete_document_by_complex_filter() {
.delete_document_by_filter(json!({ "filter": [["color = green", "color NOT EXISTS"]] }))
.await;
snapshot!(code, @"202 Accepted");
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"taskUid": 3,
"indexUid": "doggo",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "documentDeletion",
"enqueuedAt": "[date]"
}
"###);
let response = index.wait_task(response.uid()).await;
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
let response = index.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 3,
"batchUid": 3,
"indexUid": "doggo",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,

View file

@ -621,7 +621,7 @@ async fn delete_document_by_filter() {
let (response, code) =
index.delete_document_by_filter_fail(json!({ "filter": "catto = jorts"})).await;
snapshot!(code, @"202 Accepted");
let response = server.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await.failed();
snapshot!(response, @r###"
{
"uid": "[uid]",
@ -665,7 +665,7 @@ async fn fetch_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!(null)).await;
snapshot!(code, @"400 Bad Request");

View file

@ -832,8 +832,8 @@ async fn get_document_by_ids_and_filter() {
#[actix_rt::test]
async fn get_document_with_vectors() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({

View file

@ -6,19 +6,18 @@ use crate::json;
#[actix_rt::test]
async fn error_document_update_create_index_bad_uid() {
let server = Server::new().await;
let index = server.index("883 fj!");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("883 fj!");
let (response, code) = index.update_documents(json!([{"id": 1}]), None).await;
let expected_response = json!({
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
});
assert_eq!(code, 400);
assert_eq!(response, expected_response);
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "`883 fj!-[uuid]` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}"###);
}
#[actix_rt::test]

View file

@ -2187,7 +2187,8 @@ async fn import_dump_v6_containing_experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false
}
"###);
@ -2312,7 +2313,8 @@ async fn import_dump_v6_containing_batches_and_enqueued_tasks() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false
}
"###);
@ -2417,7 +2419,8 @@ async fn generate_and_import_dump_containing_vectors() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false
}
"###);

View file

@ -27,7 +27,7 @@ source: crates/meilisearch/tests/dumps/mod.rs
"duration": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "batched all enqueued tasks"
"batchStrategy": "batched all enqueued tasks"
},
{
"uid": 1,
@ -51,7 +51,7 @@ source: crates/meilisearch/tests/dumps/mod.rs
"duration": "PT0.144827890S",
"startedAt": "2025-02-04T10:15:21.275640274Z",
"finishedAt": "2025-02-04T10:15:21.420468164Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@ -72,7 +72,7 @@ source: crates/meilisearch/tests/dumps/mod.rs
"duration": "PT0.032902186S",
"startedAt": "2025-02-04T10:14:43.559526162Z",
"finishedAt": "2025-02-04T10:14:43.592428348Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 3,

View file

@ -24,7 +24,8 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false
}
"###);
@ -39,7 +40,8 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false
}
"###);
@ -54,7 +56,8 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false
}
"###);
@ -70,7 +73,8 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false
}
"###);
@ -86,7 +90,8 @@ async fn experimental_features() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false
}
"###);
}
@ -109,7 +114,8 @@ async fn experimental_feature_metrics() {
"containsFilter": false,
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false
"compositeEmbedders": false,
"chatCompletions": false
}
"###);
@ -156,7 +162,7 @@ async fn errors() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`",
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"

View file

@ -46,8 +46,10 @@ async fn create_index_with_gzip_encoded_request_and_receiving_brotli_encoded_res
let server = Server::new_shared();
let app = server.init_web_app().await;
let index = server.unique_index_with_prefix("test");
let body = serde_json::to_string(&json!({
"uid": "test",
"uid": index.uid.clone(),
"primaryKey": None::<&str>,
}))
.unwrap();
@ -68,7 +70,7 @@ async fn create_index_with_gzip_encoded_request_and_receiving_brotli_encoded_res
let parsed_response =
serde_json::from_slice::<Value>(decoded.into().as_ref()).expect("Expecting valid json");
assert_eq!(parsed_response["indexUid"], "test");
assert_eq!(parsed_response["indexUid"], index.uid);
}
#[actix_rt::test]

View file

@ -1,8 +1,8 @@
use crate::json;
use meili_snap::{json_string, snapshot};
use serde_json::Value;
use crate::common::{shared_does_not_exists_index, Server};
use crate::json;
#[actix_rt::test]
async fn create_and_get_index() {

View file

@ -2,28 +2,26 @@ use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use crate::common::encoder::Encoder;
use crate::common::Server;
use crate::common::{shared_does_not_exists_index, shared_index_with_documents, Server};
use crate::json;
#[actix_rt::test]
async fn update_primary_key() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.create(None).await;
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update(Some("primary")).await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get().await;
assert_eq!(code, 200);
assert_eq!(response["uid"], "test");
assert_eq!(response["uid"], index.uid);
assert!(response.get("createdAt").is_some());
assert!(response.get("updatedAt").is_some());
@ -39,24 +37,23 @@ async fn update_primary_key() {
#[actix_rt::test]
async fn create_and_update_with_different_encoding() {
let server = Server::new().await;
let index = server.index_with_encoder("test", Encoder::Gzip);
let (_, code) = index.create(None).await;
let server = Server::new_shared();
let index = server.unique_index_with_encoder(Encoder::Gzip);
let (create_task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(create_task.uid()).await.succeeded();
let index = server.index_with_encoder("test", Encoder::Brotli);
let index = index.with_encoder(Encoder::Brotli);
let (task, _status_code) = index.update(Some("primary")).await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
async fn update_nothing() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task1, code) = index.create(None).await;
assert_eq!(code, 202);
@ -67,35 +64,20 @@ async fn update_nothing() {
assert_eq!(code, 202);
let response = index.wait_task(task2.uid()).await;
assert_eq!(response["status"], "succeeded");
index.wait_task(task2.uid()).await.succeeded();
}
#[actix_rt::test]
async fn error_update_existing_primary_key() {
let server = Server::new().await;
let index = server.index("test");
let (_response, code) = index.create(Some("id")).await;
let index = shared_index_with_documents().await;
let (update_task, code) = index.update_index_fail(Some("primary")).await;
assert_eq!(code, 202);
let documents = json!([
{
"id": "11",
"content": "foobar"
}
]);
index.add_documents(documents, None).await;
let (task, code) = index.update(Some("primary")).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await;
let response = index.wait_task(update_task.uid()).await.failed();
let expected_response = json!({
"message": "Index `test`: Index already has a primary key: `id`.",
"message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),
"code": "index_primary_key_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists"
@ -106,15 +88,15 @@ async fn error_update_existing_primary_key() {
#[actix_rt::test]
async fn error_update_unexisting_index() {
let server = Server::new().await;
let (task, code) = server.index("test").update(None).await;
let index = shared_does_not_exists_index().await;
let (task, code) = index.update_index_fail(Some("my-primary-key")).await;
assert_eq!(code, 202);
let response = server.index("test").wait_task(task.uid()).await;
let response = index.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": "Index `test` not found.",
"message": format!("Index `{}` not found.", index.uid),
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"

View file

@ -146,8 +146,8 @@ static DOCUMENT_DISTINCT_KEY: &str = "product_id";
/// testing: https://github.com/meilisearch/meilisearch/issues/4078
#[actix_rt::test]
async fn distinct_search_with_offset_no_ranking() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@ -163,50 +163,50 @@ async fn distinct_search_with_offset_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
snapshot!(response["estimatedTotalHits"] , @"11");
let (response, code) = index.search_post(json!({"offset": 2, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
snapshot!(response["estimatedTotalHits"], @"10");
let (response, code) = index.search_post(json!({"offset": 4, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");
let (response, code) = index.search_post(json!({"offset": 5, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"1");
snapshot!(format!("{:?}", hits), @r#"["345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["345678"]"#);
snapshot!(response["estimatedTotalHits"], @"6");
let (response, code) = index.search_post(json!({"offset": 6, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");
let (response, code) = index.search_post(json!({"offset": 7, "limit": 2})).await;
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["estimatedTotalHits"], @"6");
}
/// testing: https://github.com/meilisearch/meilisearch/issues/4130
#[actix_rt::test]
async fn distinct_search_with_pagination_no_ranking() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
@ -222,7 +222,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["page"], @"0");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -231,7 +231,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["123456", "789012"]"#);
snapshot!(format!("{hits:?}"), @r#"["123456", "789012"]"#);
snapshot!(response["page"], @"1");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -240,7 +240,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["456789", "987654"]"#);
snapshot!(format!("{hits:?}"), @r#"["456789", "987654"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -249,7 +249,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"2");
snapshot!(format!("{:?}", hits), @r#"["234567", "345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["234567", "345678"]"#);
snapshot!(response["page"], @"3");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -258,7 +258,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"0");
snapshot!(format!("{:?}", hits), @r#"[]"#);
snapshot!(format!("{hits:?}"), @r#"[]"#);
snapshot!(response["page"], @"4");
snapshot!(response["totalPages"], @"3");
snapshot!(response["totalHits"], @"6");
@ -267,7 +267,7 @@ async fn distinct_search_with_pagination_no_ranking() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"3");
snapshot!(format!("{:?}", hits), @r#"["987654", "234567", "345678"]"#);
snapshot!(format!("{hits:?}"), @r#"["987654", "234567", "345678"]"#);
snapshot!(response["page"], @"2");
snapshot!(response["totalPages"], @"2");
snapshot!(response["totalHits"], @"6");
@ -275,13 +275,13 @@ async fn distinct_search_with_pagination_no_ranking() {
#[actix_rt::test]
async fn distinct_at_search_time() {
let server = Server::new().await;
let index = server.index("tamo");
let server = Server::new_shared();
let index = server.unique_index();
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
let task = index.wait_task(task.uid()).await;
let task = index.wait_task(task.uid()).await.succeeded();
snapshot!(task, name: "succeed");
fn get_hits(response: &Value) -> Vec<String> {
@ -299,7 +299,7 @@ async fn distinct_at_search_time() {
let hits = get_hits(&response);
snapshot!(code, @"200 OK");
snapshot!(hits.len(), @"3");
snapshot!(format!("{:?}", hits), @r###"["1", "2", "3"]"###);
snapshot!(format!("{hits:?}"), @r###"["1", "2", "3"]"###);
snapshot!(response["page"], @"1");
snapshot!(response["totalPages"], @"1");
snapshot!(response["totalHits"], @"3");
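// The snapshot edits above only switch the format strings to captured
// identifiers; a quick standalone check that both spellings are equivalent:
fn main() {
    let hits = vec!["123456", "789012"];
    assert_eq!(format!("{hits:?}"), format!("{:?}", hits));
    println!("{hits:?}");
}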

View file

@ -1,10 +1,9 @@
use meili_snap::*;
use super::test_settings_documents_indexing_swapping_and_search;
use crate::common::{shared_does_not_exists_index, Server, DOCUMENTS, NESTED_DOCUMENTS};
use crate::json;
use super::test_settings_documents_indexing_swapping_and_search;
#[actix_rt::test]
async fn search_unexisting_index() {
let index = shared_does_not_exists_index().await;
@ -708,7 +707,7 @@ async fn filter_invalid_attribute_array() {
|response, code| {
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -729,7 +728,7 @@ async fn filter_invalid_attribute_string() {
|response, code| {
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"message": "Index `[uuid]`: Attribute `many` is not filterable. Available filterable attribute patterns are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -886,7 +885,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -912,7 +911,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -933,7 +932,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -959,7 +958,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -985,7 +984,7 @@ async fn search_with_pattern_filter_settings_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -1144,7 +1143,7 @@ async fn search_on_unknown_field() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"code": "invalid_search_attributes_to_search_on",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@ -1165,7 +1164,7 @@ async fn search_on_unknown_field_plus_joker() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"code": "invalid_search_attributes_to_search_on",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@ -1183,7 +1182,7 @@ async fn search_on_unknown_field_plus_joker() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"message": "Index `[uuid]`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.",
"code": "invalid_search_attributes_to_search_on",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on"
@ -1196,10 +1195,8 @@ async fn search_on_unknown_field_plus_joker() {
#[actix_rt::test]
async fn distinct_at_search_time() {
let server = Server::new().await;
let index = server.index("test");
let (task, _) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) =
index.add_documents(json!([{"id": 1, "color": "Doggo", "machin": "Action"}]), None).await;
index.wait_task(response.uid()).await.succeeded();
@ -1209,7 +1206,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@ -1224,7 +1221,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, machin`.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
@ -1239,7 +1236,7 @@ async fn distinct_at_search_time() {
snapshot!(code, @"400 Bad Request");
snapshot!(response, @r###"
{
"message": "Index `test`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
"message": "Index `[uuid]`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes patterns are: `color, <..hidden-attributes>`.",
"code": "invalid_search_distinct",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_distinct"

View file

@ -50,13 +50,11 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@ -65,21 +63,18 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
let response = server.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
server.wait_task(task.uid()).await.succeeded();
eprintln!("Settings -> Documents -> test");
let index = server.index("test");
let (task, code) = index.update_settings(settings.clone()).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.facet_search(query.clone()).await;
insta::allow_duplicates! {
@ -88,14 +83,13 @@ async fn test_settings_documents_indexing_swapping_and_facet_search(
let (task, code) = server.delete_index("test").await;
assert_eq!(code, 202, "{}", task);
let response = server.wait_task(task.uid()).await;
assert!(response.is_success(), "{:?}", response);
server.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
async fn simple_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@ -105,20 +99,20 @@ async fn simple_facet_search() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adventure"})).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}
#[actix_rt::test]
async fn simple_facet_search_on_movies() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = json!([
{
@ -212,23 +206,23 @@ async fn simple_facet_search_on_movies() {
]);
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "", "facetName": "genres", "q": "" })).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Action","count":2},{"value":"Adventure","count":3},{"value":"Drama","count":3},{"value":"Fantasy","count":1},{"value":"Romance","count":1},{"value":"Science Fiction","count":1}]"###);
}
#[actix_rt::test]
async fn advanced_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@ -251,8 +245,8 @@ async fn advanced_facet_search() {
#[actix_rt::test]
async fn more_advanced_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@ -275,8 +269,8 @@ async fn more_advanced_facet_search() {
#[actix_rt::test]
async fn simple_facet_search_with_max_values() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
@ -287,14 +281,14 @@ async fn simple_facet_search_with_max_values() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}
#[actix_rt::test]
async fn simple_facet_search_by_count_with_max_values() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
index
@ -309,14 +303,14 @@ async fn simple_facet_search_by_count_with_max_values() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 1);
}
#[actix_rt::test]
async fn non_filterable_facet_search_error() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
@ -324,17 +318,17 @@ async fn non_filterable_facet_search_error() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "adv"})).await;
assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");
}
#[actix_rt::test]
async fn facet_search_dont_support_words() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["genres"])).await;
@ -344,14 +338,14 @@ async fn facet_search_dont_support_words() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "words"})).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 0);
}
#[actix_rt::test]
async fn simple_facet_search_with_sort_by_count() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await;
@ -362,7 +356,7 @@ async fn simple_facet_search_with_sort_by_count() {
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
let hits = response["facetHits"].as_array().unwrap();
assert_eq!(hits.len(), 2);
assert_eq!(hits[0], json!({ "value": "Action", "count": 3 }));
@ -371,25 +365,25 @@ async fn simple_facet_search_with_sort_by_count() {
#[actix_rt::test]
async fn add_documents_and_deactivate_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"facetSearch": false,
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");
snapshot!(response, @r###"
{
"message": "The facet search is disabled for this index",
@ -402,8 +396,8 @@ async fn add_documents_and_deactivate_facet_search() {
#[actix_rt::test]
async fn deactivate_facet_search_and_add_documents() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -411,16 +405,16 @@ async fn deactivate_facet_search_and_add_documents() {
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");
snapshot!(response, @r###"
{
"message": "The facet search is disabled for this index",
@ -433,8 +427,8 @@ async fn deactivate_facet_search_and_add_documents() {
#[actix_rt::test]
async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -442,31 +436,31 @@ async fn deactivate_facet_search_add_documents_and_activate_facet_search() {
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"facetSearch": true,
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -474,25 +468,25 @@ async fn deactivate_facet_search_add_documents_and_reset_facet_search() {
"filterableAttributes": ["genres"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"facetSearch": serde_json::Value::Null,
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 2);
assert_eq!(code, 200, "{response}");
assert_eq!(response["facetHits"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
@ -618,8 +612,8 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
#[actix_rt::test]
async fn distinct_facet_search_on_movies() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = json!([
{
@ -925,26 +919,26 @@ async fn distinct_facet_search_on_movies() {
]);
let (response, code) =
index.update_settings_filterable_attributes(json!(["genres", "color"])).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.update_settings_distinct_attribute(json!("color")).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(documents, None).await;
index.wait_task(response.uid()).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "" })).await;
// non-exhaustive facet count is counting 27 documents with the facet query "blob" but there are only 23 documents with a distinct color.
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":27}]"###);
let (response, code) =
index.facet_search(json!({"facetQuery": "blob", "facetName": "genres", "q": "", "exhaustiveFacetCount": true })).await;
// exhaustive facet count is counting 23 documents with the facet query "blob" which is the number of distinct colors.
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(response["facetHits"], @r###"[{"value":"Blob","count":23}]"###);
}
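// Sketch of the two facet-search payloads contrasted above, built with
// serde_json (assumed dependency); the request itself is not sent here.
// Without `exhaustiveFacetCount`, "Blob" counts every matching document (27);
// with it, duplicates under the `color` distinct attribute collapse (23).
use serde_json::json;

fn main() {
    let default_payload = json!({ "facetQuery": "blob", "facetName": "genres", "q": "" });
    let exhaustive_payload = json!({
        "facetQuery": "blob",
        "facetName": "genres",
        "q": "",
        "exhaustiveFacetCount": true
    });
    println!("{default_payload}\n{exhaustive_payload}");
}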

View file

@ -3,24 +3,15 @@ use meilisearch::Opt;
use tempfile::TempDir;
use super::test_settings_documents_indexing_swapping_and_search;
use crate::{
common::{default_settings, shared_index_with_documents, Server, DOCUMENTS, NESTED_DOCUMENTS},
json,
use crate::common::{
default_settings, shared_index_with_documents, shared_index_with_nested_documents, Server,
DOCUMENTS, NESTED_DOCUMENTS,
};
use crate::json;
#[actix_rt::test]
async fn search_with_filter_string_notation() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
meili_snap::snapshot!(code, @"202 Accepted");
let documents = DOCUMENTS.clone();
let (task, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(task.uid()).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);
let index = shared_index_with_documents().await;
index
.search(
@ -28,44 +19,34 @@ async fn search_with_filter_string_notation() {
"filter": "title = Gläss"
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
},
)
.await;
let index = server.index("nested");
let nested_index = shared_index_with_nested_documents().await;
let (_, code) =
index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
meili_snap::snapshot!(code, @"202 Accepted");
let documents = NESTED_DOCUMENTS.clone();
let (task, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(task.uid()).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);
index
nested_index
.search(
json!({
"filter": "cattos = pésti"
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
assert_eq!(response["hits"][0]["id"], json!(852));
},
)
.await;
index
nested_index
.search(
json!({
"filter": "doggos.age > 5"
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
assert_eq!(response["hits"][0]["id"], json!(654));
assert_eq!(response["hits"][1]["id"], json!(951));
@ -82,7 +63,7 @@ async fn search_with_filter_array_notation() {
"filter": ["title = Gläss"]
}))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
let (response, code) = index
@ -90,7 +71,7 @@ async fn search_with_filter_array_notation() {
"filter": [["title = Gläss", "title = \"Shazam!\"", "title = \"Escape Room\""]]
}))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 3);
}
@ -116,7 +97,7 @@ async fn search_with_contains_filter() {
"filter": "title CONTAINS cap"
}))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 2);
}
@ -269,16 +250,14 @@ async fn search_with_pattern_filter_settings() {
#[actix_rt::test]
async fn search_with_pattern_filter_settings_scenario_1() {
let temp = TempDir::new().unwrap();
let server = Server::new_with_options(Opt { ..default_settings(temp.path()) }).await.unwrap();
let server = Server::new_shared();
eprintln!("Documents -> Settings -> test");
let index = server.index("test");
let index = server.unique_index();
let (task, code) = index.add_documents(NESTED_DOCUMENTS.clone(), None).await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
let (task, code) = index
.update_settings(json!({"filterableAttributes": [{
@ -289,9 +268,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@ -335,7 +313,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -355,9 +333,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@ -467,9 +444,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter returns an error
index
@ -481,7 +457,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -567,9 +543,8 @@ async fn search_with_pattern_filter_settings_scenario_1() {
}
}]}))
.await;
assert_eq!(code, 202, "{}", task);
let response = index.wait_task(task.uid()).await;
snapshot!(response["status"], @r###""succeeded""###);
assert_eq!(code, 202, "{task}");
index.wait_task(task.uid()).await.succeeded();
// Check if the Equality filter works
index
@ -613,7 +588,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"message": "Index `[uuid]`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -720,7 +695,7 @@ async fn test_filterable_attributes_priority() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
"message": "Index `[uuid]`: Attribute `doggos.age` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:11 doggos.age > 2",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@ -746,7 +721,7 @@ async fn test_filterable_attributes_priority() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
"message": "Index `[uuid]`: Attribute `doggos` is not filterable. Available filterable attribute patterns are: `doggos.*`.\n1:7 doggos EXISTS",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"

View file

@ -1,56 +1,13 @@
use meili_snap::{json_string, snapshot};
use meilisearch_types::milli::constants::RESERVED_GEO_FIELD_NAME;
use once_cell::sync::Lazy;
use crate::common::{Server, Value};
use crate::json;
use super::test_settings_documents_indexing_swapping_and_search;
static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
"id": 1,
"name": "Taco Truck",
"address": "444 Salsa Street, Burritoville",
"type": "Mexican",
"rating": 9,
"_geo": {
"lat": 34.0522,
"lng": -118.2437
}
},
{
"id": 2,
"name": "La Bella Italia",
"address": "456 Elm Street, Townsville",
"type": "Italian",
"rating": 9,
"_geo": {
"lat": "45.4777599",
"lng": "9.1967508"
}
},
{
"id": 3,
"name": "Crêpe Truck",
"address": "2 Billig Avenue, Rouenville",
"type": "French",
"rating": 10
}
])
});
use crate::common::shared_index_with_geo_documents;
use crate::json;
#[actix_rt::test]
async fn geo_sort_with_geo_strings() {
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["_geo"])).await;
index.update_settings_sortable_attributes(json!(["_geo"])).await;
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
let index = shared_index_with_geo_documents().await;
index
.search(
@ -59,7 +16,7 @@ async fn geo_sort_with_geo_strings() {
"sort": ["_geoPoint(0.0, 0.0):asc"]
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
},
)
.await;
@ -67,14 +24,7 @@ async fn geo_sort_with_geo_strings() {
#[actix_rt::test]
async fn geo_bounding_box_with_string_and_number() {
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.update_settings_filterable_attributes(json!(["_geo"])).await;
index.update_settings_sortable_attributes(json!(["_geo"])).await;
let (ret, _code) = index.add_documents(documents, None).await;
index.wait_task(ret.uid()).await.succeeded();
let index = shared_index_with_geo_documents().await;
index
.search(
@ -82,7 +32,7 @@ async fn geo_bounding_box_with_string_and_number() {
"filter": "_geoBoundingBox([89, 179], [-89, -179])",
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -124,14 +74,7 @@ async fn geo_bounding_box_with_string_and_number() {
#[actix_rt::test]
async fn bug_4640() {
// https://github.com/meilisearch/meilisearch/issues/4640
let server = Server::new().await;
let index = server.index("test");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.update_settings_filterable_attributes(json!(["_geo"])).await;
let (ret, _code) = index.update_settings_sortable_attributes(json!(["_geo"])).await;
index.wait_task(ret.uid()).await.succeeded();
let index = shared_index_with_geo_documents().await;
// Sort the document with the second one first
index
@ -140,7 +83,7 @@ async fn bug_4640() {
"sort": ["_geoPoint(45.4777599, 9.1967508):asc"],
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -203,7 +146,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "jean"}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -248,7 +191,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "bob"}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -285,7 +228,7 @@ async fn geo_asc_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "geo:asc"]}),
&json!({"q": "intel"}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [
@ -325,7 +268,7 @@ async fn geo_sort_with_words() {
&json!({"searchableAttributes": ["id", "doggo"], "rankingRules": ["words", "sort"], "sortableAttributes": [RESERVED_GEO_FIELD_NAME]}),
&json!({"q": "jean", "sort": ["_geoPoint(0.0, 0.0):asc"]}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
{
"hits": [

View file

@ -2,31 +2,31 @@ use meili_snap::snapshot;
use once_cell::sync::Lazy;
use crate::common::index::Index;
use crate::common::{Server, Value};
use crate::common::{Server, Shared, Value};
use crate::json;
async fn index_with_documents_user_provided<'a>(
server: &'a Server,
server: &'a Server<Shared>,
documents: &Value,
) -> Index<'a> {
let index = server.index("test");
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({ "embedders": {"default": {
"source": "userProvided",
"dimensions": 2}}} ))
.await;
assert_eq!(202, code, "{:?}", response);
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{:?}", response);
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
index
}
async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
let index = server.index("test");
async fn index_with_documents_hf<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({ "embedders": {"default": {
@ -36,11 +36,11 @@ async fn index_with_documents_hf<'a>(server: &'a Server, documents: &Value) -> I
"documentTemplate": "{{doc.title}}, {{doc.desc}}"
}}} ))
.await;
assert_eq!(202, code, "{:?}", response);
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{:?}", response);
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
index
}
@ -76,6 +76,48 @@ static SINGLE_DOCUMENT_VEC: Lazy<Value> = Lazy::new(|| {
}])
});
static TEST_DISTINCT_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
// for query "Captain Marvel" and vector [1.0, 1.0]
json!([
{
"id": 0,
"search": "Captain Planet",
"desc": "#2 for keyword search, #3 for hybrid search",
"_vectors": {
"default": [-1.0, 0.0],
},
"distinct": 0
},
{
"id": 1,
"search": "Captain Marvel",
"desc": "#1 for keyword search, #4 for hybrid search",
"_vectors": {
"default": [-1.0, -1.0],
},
"distinct": 1
},
{
"id": 2,
"search": "Some Captain at least",
"desc": "#3 for keyword search, #1 for hybrid search",
"_vectors": {
"default": [1.0, 1.0],
},
"distinct": 0
},
{
"id": 3,
"search": "Irrelevant Capitaine",
"desc": "#4 for keyword search, #2 for hybrid search",
"_vectors": {
"default": [1.0, 0.0],
},
"distinct": 1
},
])
});
static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
@ -97,8 +139,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn simple_search() {
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(
@ -130,8 +172,8 @@ async fn simple_search() {
#[actix_rt::test]
async fn limit_offset() {
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(
@ -143,8 +185,8 @@ async fn limit_offset() {
snapshot!(response["semanticHitCount"], @"0");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(
@ -159,8 +201,8 @@ async fn limit_offset() {
#[actix_rt::test]
async fn simple_search_hf() {
let server = Server::new().await;
let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (response, code) = index
.search_post(
@ -211,8 +253,8 @@ async fn simple_search_hf() {
#[actix_rt::test]
async fn distribution_shift() {
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"embedder": "default", "semanticRatio": 1.0}, "retrieveVectors": true});
let (response, code) = index.search_post(search.clone()).await;
@ -233,7 +275,7 @@ async fn distribution_shift() {
.await;
snapshot!(code, @"202 Accepted");
let response = server.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(response["details"], @r#"{"embedders":{"default":{"distribution":{"mean":0.998,"sigma":0.01}}}}"#);
let (response, code) = index.search_post(search).await;
@ -243,8 +285,8 @@ async fn distribution_shift() {
#[actix_rt::test]
async fn highlighter() {
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
@ -298,8 +340,8 @@ async fn highlighter() {
#[actix_rt::test]
async fn invalid_semantic_ratio() {
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let (response, code) = index
.search_post(
@ -370,8 +412,8 @@ async fn invalid_semantic_ratio() {
#[actix_rt::test]
async fn single_document() {
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SINGLE_DOCUMENT_VEC).await;
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SINGLE_DOCUMENT_VEC).await;
let (response, code) = index
.search_post(
@ -386,8 +428,8 @@ async fn single_document() {
#[actix_rt::test]
async fn query_combination() {
let server = Server::new().await;
let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
// search without query and vector, but with hybrid => still placeholder
let (response, code) = index
@ -493,10 +535,54 @@ async fn query_combination() {
snapshot!(response["semanticHitCount"], @"0");
}
// see <https://github.com/meilisearch/meilisearch/issues/5526>
#[actix_rt::test]
async fn distinct_is_applied() {
let server = Server::new_shared();
let index = index_with_documents_user_provided(server, &TEST_DISTINCT_DOCUMENTS).await;
let (response, code) = index.update_settings(json!({ "distinctAttribute": "distinct" } )).await;
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
// pure keyword
let (response, code) = index
.search_post(
json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.0, "embedder": "default"}}),
)
.await;
snapshot!(code, @"200 OK");
snapshot!(response["hits"], @r###"[{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1},{"id":0,"search":"Captain Planet","desc":"#2 for keyword search, #3 for hybrid search","distinct":0}]"###);
snapshot!(response["semanticHitCount"], @"null");
snapshot!(response["estimatedTotalHits"], @"2");
// pure semantic
let (response, code) = index
.search_post(
json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 1.0, "embedder": "default"}}),
)
.await;
snapshot!(code, @"200 OK");
snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":3,"search":"Irrelevant Capitaine","desc":"#4 for keyword search, #2 for hybrid search","distinct":1}]"###);
snapshot!(response["semanticHitCount"], @"2");
snapshot!(response["estimatedTotalHits"], @"2");
// hybrid
let (response, code) = index
.search_post(
json!({"q": "Captain Marvel", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5, "embedder": "default"}}),
)
.await;
snapshot!(code, @"200 OK");
snapshot!(response["hits"], @r###"[{"id":2,"search":"Some Captain at least","desc":"#3 for keyword search, #1 for hybrid search","distinct":0},{"id":1,"search":"Captain Marvel","desc":"#1 for keyword search, #4 for hybrid search","distinct":1}]"###);
snapshot!(response["semanticHitCount"], @"1");
snapshot!(response["estimatedTotalHits"], @"2");
}
#[actix_rt::test]
async fn retrieve_vectors() {
let server = Server::new().await;
let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents_hf(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (response, code) = index
.search_post(
@ -546,7 +632,7 @@ async fn retrieve_vectors() {
let (response, code) = index
.update_settings(json!({ "displayedAttributes": ["id", "title", "desc", "_vectors"]} ))
.await;
assert_eq!(202, code, "{:?}", response);
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index
@ -596,7 +682,7 @@ async fn retrieve_vectors() {
// remove `_vectors` from displayed attributes
let (response, code) =
index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
assert_eq!(202, code, "{:?}", response);
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index

View file

@ -89,9 +89,9 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn simple_search() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = DOCUMENTS.clone();
index
.update_settings(
@ -147,23 +147,20 @@ async fn simple_search() {
.search(
json!({"q": "進撃", "locales": ["jpn"], "attributesToRetrieve": ["id"]}),
|response, code| {
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"hits": [
{
"id": 852
},
{
"id": 853
}
],
"query": "進撃",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
"estimatedTotalHits": 1
}
"###);
"#);
snapshot!(code, @"200 OK");
},
)
@ -172,23 +169,20 @@ async fn simple_search() {
// chinese
index
.search(json!({"q": "进击", "attributesToRetrieve": ["id"]}), |response, code| {
snapshot!(response, @r###"
snapshot!(response, @r#"
{
"hits": [
{
"id": 853
},
{
"id": 852
}
],
"query": "进击",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
"estimatedTotalHits": 1
}
"###);
"#);
snapshot!(code, @"200 OK");
})
.await;
@ -196,9 +190,9 @@ async fn simple_search() {
#[actix_rt::test]
async fn force_locales() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -211,10 +205,10 @@ async fn force_locales() {
}),
)
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 0,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -274,9 +268,9 @@ async fn force_locales() {
#[actix_rt::test]
async fn force_locales_with_pattern() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -289,10 +283,10 @@ async fn force_locales_with_pattern() {
}),
)
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 0,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -352,9 +346,9 @@ async fn force_locales_with_pattern() {
#[actix_rt::test]
async fn force_locales_with_pattern_nested() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
@ -365,10 +359,10 @@ async fn force_locales_with_pattern_nested() {
]
}))
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 0,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -423,9 +417,9 @@ async fn force_locales_with_pattern_nested() {
}
#[actix_rt::test]
async fn force_different_locales_with_pattern() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -440,10 +434,10 @@ async fn force_different_locales_with_pattern() {
}),
)
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 0,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -499,9 +493,9 @@ async fn force_different_locales_with_pattern() {
#[actix_rt::test]
async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -518,10 +512,10 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
}),
)
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 0,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -577,9 +571,9 @@ async fn auto_infer_locales_at_search_with_attributes_to_search_on() {
#[actix_rt::test]
async fn auto_infer_locales_at_search() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(
@ -592,10 +586,10 @@ async fn auto_infer_locales_at_search() {
}),
)
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 0,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -676,9 +670,9 @@ async fn auto_infer_locales_at_search() {
#[actix_rt::test]
async fn force_different_locales_with_pattern_nested() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
@ -691,10 +685,10 @@ async fn force_different_locales_with_pattern_nested() {
]
}))
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 0,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -774,9 +768,9 @@ async fn force_different_locales_with_pattern_nested() {
#[actix_rt::test]
async fn settings_change() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = NESTED_DOCUMENTS.clone();
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
@ -789,10 +783,10 @@ async fn settings_change() {
]
}))
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 1,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -852,10 +846,10 @@ async fn settings_change() {
]
}))
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 2,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -906,9 +900,9 @@ async fn settings_change() {
#[actix_rt::test]
async fn invalid_locales() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = DOCUMENTS.clone();
index
.update_settings(
@ -945,9 +939,9 @@ async fn invalid_locales() {
#[actix_rt::test]
async fn invalid_localized_attributes_rules() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let (response, _) = index
.update_settings(json!({
"localizedAttributes": [
@ -1015,19 +1009,19 @@ async fn invalid_localized_attributes_rules() {
#[actix_rt::test]
async fn simple_facet_search() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
"filterableAttributes": ["name_en", "name_ja", "name_zh"],
}))
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 0,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -1073,9 +1067,9 @@ async fn simple_facet_search() {
#[actix_rt::test]
async fn facet_search_with_localized_attributes() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = DOCUMENTS.clone();
let (response, _) = index
.update_settings(json!({
@ -1086,10 +1080,10 @@ async fn facet_search_with_localized_attributes() {
]
}))
.await;
snapshot!(response, @r###"
snapshot!(json_string!(response, { ".taskUid" => "[task_uid]", ".enqueuedAt" => "[date]" }), @r###"
{
"taskUid": 0,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -1146,9 +1140,9 @@ async fn facet_search_with_localized_attributes() {
#[actix_rt::test]
async fn swedish_search() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = json!([
{"id": "tra1-1", "product": "trä"},
{"id": "tra2-1", "product": "traktor"},
@ -1269,9 +1263,9 @@ async fn swedish_search() {
#[actix_rt::test]
async fn german_search() {
let server = Server::new().await;
let server = Server::new_shared();
let index = server.unique_index();
let index = server.index("test");
let documents = json!([
{"id": 1, "product": "Interkulturalität"},
{"id": 2, "product": "Wissensorganisation"},

View file

@ -2,11 +2,11 @@ use meili_snap::snapshot;
use once_cell::sync::Lazy;
use crate::common::index::Index;
use crate::common::{Server, Value};
use crate::common::{Server, Shared, Value};
use crate::json;
async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
let index = server.index("test");
async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
let index = server.unique_index();
let (task, _status_code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -48,8 +48,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn simple_search() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(json!({"q": "Captain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@ -75,8 +75,8 @@ async fn simple_search() {
#[actix_rt::test]
async fn search_with_typo() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(json!({"q": "Capitain Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {
@ -102,8 +102,8 @@ async fn search_with_typo() {
#[actix_rt::test]
async fn search_with_unknown_word() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(json!({"q": "Captain Supercopter Marvel", "matchingStrategy": "last", "attributesToRetrieve": ["id"]}), |response, code| {

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -2296,6 +2296,7 @@ async fn error_remote_500_once() {
}
#[actix_rt::test]
#[ignore]
async fn error_remote_timeout() {
let ms0 = Server::new().await;
let ms1 = Server::new().await;

View file

@ -7,7 +7,7 @@ async fn default_search_should_return_estimated_total_hit() {
let index = shared_index_with_documents().await;
index
.search(json!({}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert!(response.get("estimatedTotalHits").is_some());
assert!(response.get("limit").is_some());
assert!(response.get("offset").is_some());
@ -25,7 +25,7 @@ async fn simple_search() {
let index = shared_index_with_documents().await;
index
.search(json!({"page": 1}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 5);
assert!(response.get("totalHits").is_some());
assert_eq!(response["page"], 1);
@ -44,7 +44,7 @@ async fn page_zero_should_not_return_any_result() {
let index = shared_index_with_documents().await;
index
.search(json!({"page": 0}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 0);
assert!(response.get("totalHits").is_some());
assert_eq!(response["page"], 0);
@ -58,7 +58,7 @@ async fn hits_per_page_1() {
let index = shared_index_with_documents().await;
index
.search(json!({"hitsPerPage": 1}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 1);
assert_eq!(response["totalHits"], 5);
assert_eq!(response["page"], 1);
@ -72,7 +72,7 @@ async fn hits_per_page_0_should_not_return_any_result() {
let index = shared_index_with_documents().await;
index
.search(json!({"hitsPerPage": 0}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["hits"].as_array().unwrap().len(), 0);
assert_eq!(response["totalHits"], 5);
assert_eq!(response["page"], 1);
@ -126,7 +126,7 @@ async fn ensure_placeholder_search_hit_count_valid() {
for page in 0..=4 {
index
.search(json!({"page": page, "hitsPerPage": 1}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["totalHits"], 4);
assert_eq!(response["totalPages"], 4);
})

View file

@ -2,11 +2,11 @@ use meili_snap::{json_string, snapshot};
use once_cell::sync::Lazy;
use crate::common::index::Index;
use crate::common::{Server, Value};
use crate::common::{Server, Shared, Value};
use crate::json;
async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Index<'a> {
let index = server.index("test");
async fn index_with_documents<'a>(server: &'a Server<Shared>, documents: &Value) -> Index<'a> {
let index = server.unique_index();
let (task, _code) = index.add_documents(documents.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -34,8 +34,8 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn simple_search_on_title() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search should return 2 documents (ids: 2 and 3).
index
@ -51,8 +51,8 @@ async fn simple_search_on_title() {
#[actix_rt::test]
async fn search_no_searchable_attribute_set() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(
@ -93,8 +93,8 @@ async fn search_no_searchable_attribute_set() {
#[actix_rt::test]
async fn search_on_all_attributes() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["*"]}), |response, code| {
@ -106,8 +106,8 @@ async fn search_on_all_attributes() {
#[actix_rt::test]
async fn search_on_all_attributes_restricted_set() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index.update_settings_searchable_attributes(json!(["title"])).await;
index.wait_task(task.uid()).await.succeeded();
@ -121,8 +121,8 @@ async fn search_on_all_attributes_restricted_set() {
#[actix_rt::test]
async fn simple_prefix_search_on_title() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search should return 2 documents (ids: 2 and 3).
index
@ -135,8 +135,8 @@ async fn simple_prefix_search_on_title() {
#[actix_rt::test]
async fn simple_search_on_title_matching_strategy_all() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search matching strategy all should only return 1 document (ids: 2).
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": ["title"], "matchingStrategy": "all"}), |response, code| {
@ -148,8 +148,8 @@ async fn simple_search_on_title_matching_strategy_all() {
#[actix_rt::test]
async fn simple_search_on_no_field() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
// simple search on no field shouldn't return any document.
index
.search(json!({"q": "Captain Marvel", "attributesToSearchOn": []}), |response, code| {
@ -161,8 +161,8 @@ async fn simple_search_on_no_field() {
#[actix_rt::test]
async fn word_ranking_rule_order() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
// Document 3 should appear before document 2.
index
@ -189,8 +189,8 @@ async fn word_ranking_rule_order() {
#[actix_rt::test]
async fn word_ranking_rule_order_exact_words() {
let server = Server::new().await;
let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &SIMPLE_SEARCH_DOCUMENTS).await;
let (task, _status_code) = index
.update_settings_typo_tolerance(json!({"disableOnWords": ["Captain", "Marvel"]}))
.await;
@ -221,9 +221,9 @@ async fn word_ranking_rule_order_exact_words() {
#[actix_rt::test]
async fn typo_ranking_rule_order() {
let server = Server::new().await;
let server = Server::new_shared();
let index = index_with_documents(
&server,
server,
&json!([
{
"title": "Capitain Marivel",
@ -260,9 +260,9 @@ async fn typo_ranking_rule_order() {
#[actix_rt::test]
async fn attributes_ranking_rule_order() {
let server = Server::new().await;
let server = Server::new_shared();
let index = index_with_documents(
&server,
server,
&json!([
{
"title": "Captain Marvel",
@ -301,9 +301,9 @@ async fn attributes_ranking_rule_order() {
#[actix_rt::test]
async fn exactness_ranking_rule_order() {
let server = Server::new().await;
let server = Server::new_shared();
let index = index_with_documents(
&server,
server,
&json!([
{
"title": "Captain Marvel",
@ -340,9 +340,9 @@ async fn exactness_ranking_rule_order() {
#[actix_rt::test]
async fn search_on_exact_field() {
let server = Server::new().await;
let server = Server::new_shared();
let index = index_with_documents(
&server,
server,
&json!([
{
"title": "Captain Marvel",
@ -359,7 +359,7 @@ async fn search_on_exact_field() {
let (response, code) =
index.update_settings_typo_tolerance(json!({ "disableOnAttributes": ["exact"] })).await;
assert_eq!(202, code, "{:?}", response);
assert_eq!(202, code, "{response:?}");
index.wait_task(response.uid()).await.succeeded();
// Searching on an exact attribute should only return the document matching without typo.
index
@ -372,7 +372,7 @@ async fn search_on_exact_field() {
#[actix_rt::test]
async fn phrase_search_on_title() {
let server = Server::new().await;
let server = Server::new_shared();
let documents = json!([
{ "id": 8, "desc": "Document Review", "title": "Document Review Specialist II" },
{ "id": 5, "desc": "Document Review", "title": "Document Review Attorney" },
@ -383,7 +383,7 @@ async fn phrase_search_on_title() {
{ "id": 7, "desc": "Document Review", "title": "Document Review Specialist II" },
{ "id": 6, "desc": "Document Review", "title": "Document Review (Entry Level)" }
]);
let index = index_with_documents(&server, &documents).await;
let index = index_with_documents(server, &documents).await;
index
.search(
@ -460,8 +460,8 @@ static NESTED_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn nested_search_on_title_with_prefix_wildcard() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Wildcard should match to 'details.' attribute
index
@ -486,8 +486,8 @@ async fn nested_search_on_title_with_prefix_wildcard() {
#[actix_rt::test]
async fn nested_search_with_suffix_wildcard() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Wildcard should match to any attribute inside 'details.'
// It's worth noting the difference between 'details.*' and '*.title'
@ -553,8 +553,8 @@ async fn nested_search_with_suffix_wildcard() {
#[actix_rt::test]
async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
let (task, _status_code) =
index.update_settings_searchable_attributes(json!(["details.title"])).await;
index.wait_task(task.uid()).await.succeeded();
@ -581,8 +581,8 @@ async fn nested_search_on_title_restricted_set_with_suffix_wildcard() {
#[actix_rt::test]
async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
index
.search(
@ -632,8 +632,8 @@ async fn nested_search_no_searchable_attribute_set_with_any_wildcard() {
#[actix_rt::test]
async fn nested_prefix_search_on_title_with_prefix_wildcard() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Nested prefix search with prefix wildcard should return 2 documents (ids: 2 and 3).
index
@ -658,8 +658,8 @@ async fn nested_prefix_search_on_title_with_prefix_wildcard() {
#[actix_rt::test]
async fn nested_prefix_search_on_details_with_suffix_wildcard() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
index
.search(
@ -686,8 +686,8 @@ async fn nested_prefix_search_on_details_with_suffix_wildcard() {
#[actix_rt::test]
async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Wildcard search on nested weaknesses should return 2 documents (ids: 1 and 3)
index
@ -712,8 +712,8 @@ async fn nested_prefix_search_on_weaknesses_with_suffix_wildcard() {
#[actix_rt::test]
async fn nested_search_on_title_matching_strategy_all() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Nested search matching strategy all should only return 1 document (ids: 3)
index
@ -735,8 +735,8 @@ async fn nested_search_on_title_matching_strategy_all() {
#[actix_rt::test]
async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Document 3 should appear before documents 1 and 2
index
@ -766,8 +766,8 @@ async fn nested_attributes_ranking_rule_order_with_prefix_wildcard() {
#[actix_rt::test]
async fn nested_attributes_ranking_rule_order_with_suffix_wildcard() {
let server = Server::new().await;
let index = index_with_documents(&server, &NESTED_SEARCH_DOCUMENTS).await;
let server = Server::new_shared();
let index = index_with_documents(server, &NESTED_SEARCH_DOCUMENTS).await;
// Document 3 should appear before documents 1 and 2
index

View file

@ -4,7 +4,7 @@ source: crates/meilisearch/tests/search/distinct.rs
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "tamo",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "settingsUpdate",
"canceledBy": null,

View file

@ -3,8 +3,8 @@ use crate::json;
#[actix_rt::test]
async fn set_and_reset_distinct_attribute() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task1, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(task1.uid()).await.succeeded();
@ -24,8 +24,8 @@ async fn set_and_reset_distinct_attribute() {
#[actix_rt::test]
async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (update_task1, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(update_task1.uid()).await.succeeded();

View file

@ -338,6 +338,47 @@ async fn settings_bad_pagination() {
"###);
}
#[actix_rt::test]
async fn settings_bad_max_total_hits() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) =
index.update_settings(json!({ "pagination": { "maxTotalHits": "doggo" } })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.pagination.maxTotalHits`: expected a positive integer, but found a string: `\"doggo\"`",
"code": "invalid_settings_pagination",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_pagination"
}
"###);
let (response, code) =
index.update_settings_pagination(json!({ "maxTotalHits": "doggo" } )).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value type at `.maxTotalHits`: expected a positive integer, but found a string: `\"doggo\"`",
"code": "invalid_settings_pagination",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_pagination"
}
"#);
let (response, code) = index.update_settings_pagination(json!({ "maxTotalHits": 0 } )).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value at `.maxTotalHits`: a non-zero integer value lower than `18446744073709551615` was expected, but found a zero",
"code": "invalid_settings_pagination",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_pagination"
}
"#);
}
#[actix_rt::test]
async fn settings_bad_search_cutoff_ms() {
let server = Server::new_shared();

View file

@ -11,59 +11,62 @@ macro_rules! test_setting_routes {
#[actix_rt::test]
async fn get_unexisting_index() {
let server = Server::new().await;
let url = format!("/indexes/test/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (_response, code) = server.service.get(url).await;
assert_eq!(code, 404);
}
#[actix_rt::test]
async fn update_unexisting_index() {
let server = Server::new().await;
let url = format!("/indexes/test/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
assert_eq!(code, 202, "{}", response);
server.index("").wait_task(0).await;
let (response, code) = server.index("test").get().await;
assert_eq!(code, 200, "{}", response);
}
#[actix_rt::test]
async fn delete_unexisting_index() {
let server = Server::new().await;
let url = format!("/indexes/test/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (_, code) = server.service.delete(url).await;
assert_eq!(code, 202);
let response = server.index("").wait_task(0).await;
assert_eq!(response["status"], "failed");
}
#[actix_rt::test]
async fn get_default() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.create(None).await;
assert_eq!(code, 202, "{}", response);
index.wait_task(0).await;
let url = format!("/indexes/test/settings/{}",
let server = Server::new_shared();
let index_name = uuid::Uuid::new_v4().to_string();
let url = format!("/indexes/{index_name}/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.get(url).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 404, "{response}");
}
#[actix_rt::test]
async fn update_unexisting_index() {
let server = Server::new_shared();
let index_name = uuid::Uuid::new_v4().to_string();
let url = format!("/indexes/{index_name}/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.$update_verb(url, serde_json::Value::Null.into()).await;
assert_eq!(code, 202, "{response}");
let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
assert_eq!(code, 404, "{response}");
}
#[actix_rt::test]
async fn delete_unexisting_index() {
let server = Server::new_shared();
let index_name = uuid::Uuid::new_v4().to_string();
let url = format!("/indexes/{index_name}/settings/{}",
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.delete(url).await;
assert_eq!(code, 202, "{response}");
let (response, code) = server.service.get(format!("/indexes/{index_name}")).await;
assert_eq!(code, 404, "{response}");
}
#[actix_rt::test]
async fn get_default() {
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index.create(None).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
let url = format!("/indexes/{}/settings/{}",
index.uid,
stringify!($setting)
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.get(url).await;
assert_eq!(code, 200, "{response}");
let expected = crate::json!($default_value);
assert_eq!(expected, response);
}
@ -181,19 +184,30 @@ test_setting_routes!(
update_verb: patch,
default_value: {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [], "disableOnNumbers": false}
},
{
setting: chat,
update_verb: put,
default_value: {
"description": "",
"documentTemplate": "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
"documentTemplateMaxBytes": 400,
"searchParameters": {}
}
},
);
#[actix_rt::test]
async fn get_settings_unexisting_index() {
let server = Server::new().await;
let (response, code) = server.index("test").settings().await;
assert_eq!(code, 404, "{}", response)
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index.settings().await;
assert_eq!(code, 404, "{response}")
}
#[actix_rt::test]
async fn get_settings() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
@ -237,9 +251,8 @@ async fn get_settings() {
#[actix_rt::test]
async fn secrets_are_hidden_in_settings() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, _code) = index.create(None).await;
index.wait_task(response.uid()).await.succeeded();
@ -259,11 +272,11 @@ async fn secrets_are_hidden_in_settings() {
.await;
meili_snap::snapshot!(code, @"202 Accepted");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
meili_snap::snapshot!(meili_snap::json_string!(response, { ".taskUid" => "[task_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
"taskUid": 1,
"indexUid": "test",
"taskUid": "[task_uid]",
"indexUid": "[uuid]",
"status": "enqueued",
"type": "settingsUpdate",
"enqueuedAt": "[date]"
@ -272,7 +285,7 @@ async fn secrets_are_hidden_in_settings() {
let settings_update_uid = response.uid();
index.wait_task(settings_update_uid).await;
index.wait_task(settings_update_uid).await.succeeded();
let (response, code) = index.settings().await;
meili_snap::snapshot!(code, @"200 OK");
@ -360,16 +373,16 @@ async fn secrets_are_hidden_in_settings() {
#[actix_rt::test]
async fn error_update_settings_unknown_field() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (_response, code) = index.update_settings(json!({"foo": 12})).await;
assert_eq!(code, 400);
}
#[actix_rt::test]
async fn test_partial_update() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.settings().await;
@ -388,20 +401,18 @@ async fn test_partial_update() {
#[actix_rt::test]
async fn error_delete_settings_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.delete_settings().await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
index.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
async fn reset_all_settings() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let documents = json!([
{
@ -413,7 +424,6 @@ async fn reset_all_settings() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["taskUid"], 0);
index.wait_task(response.uid()).await.succeeded();
let (update_task,_status_code) = index
@ -446,17 +456,15 @@ async fn reset_all_settings() {
#[actix_rt::test]
async fn update_setting_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, code) = index.update_settings(json!({})).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get().await;
assert_eq!(code, 200);
let (task, _status_code) = index.delete_settings().await;
let response = index.wait_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
index.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
@ -477,8 +485,8 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
#[actix_rt::test]
async fn error_set_invalid_ranking_rules() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
index.create(None).await;
let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
@ -495,8 +503,8 @@ async fn error_set_invalid_ranking_rules() {
#[actix_rt::test]
async fn set_and_reset_distinct_attribute_with_dedicated_route() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.update_distinct_attribute(json!("test")).await;
index.wait_task(task.uid()).await.succeeded();
@ -516,8 +524,8 @@ async fn set_and_reset_distinct_attribute_with_dedicated_route() {
#[actix_rt::test]
async fn granular_filterable_attributes() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
index.create(None).await;
let (response, code) =
@ -535,7 +543,7 @@ async fn granular_filterable_attributes() {
index.wait_task(response.uid()).await.succeeded();
let (response, code) = index.settings().await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
snapshot!(json_string!(response["filterableAttributes"]), @r###"
[
{

View file

@ -26,11 +26,11 @@ static DOCUMENTS: Lazy<crate::common::Value> = Lazy::new(|| {
#[actix_rt::test]
async fn add_docs_and_disable() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -38,8 +38,8 @@ async fn add_docs_and_disable() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
// only 1 document should match
index
@ -86,8 +86,8 @@ async fn add_docs_and_disable() {
#[actix_rt::test]
async fn disable_and_add_docs() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -95,11 +95,11 @@ async fn disable_and_add_docs() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
// only 1 document should match
index
@ -145,8 +145,8 @@ async fn disable_and_add_docs() {
#[actix_rt::test]
async fn disable_add_docs_and_enable() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -154,11 +154,11 @@ async fn disable_add_docs_and_enable() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -166,8 +166,8 @@ async fn disable_add_docs_and_enable() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(2).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
// all documents should match
index
@ -253,8 +253,8 @@ async fn disable_add_docs_and_enable() {
#[actix_rt::test]
async fn disable_add_docs_and_reset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -262,11 +262,11 @@ async fn disable_add_docs_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
@ -274,8 +274,8 @@ async fn disable_add_docs_and_reset() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(2).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
// all documents should match
index
@ -361,19 +361,19 @@ async fn disable_add_docs_and_reset() {
#[actix_rt::test]
async fn default_behavior() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await;
assert_eq!("202", code.as_str(), "{response:?}");
server.wait_task(response.uid()).await.succeeded();
let (response, _code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
// all documents should match
index

View file

@ -26,8 +26,8 @@ static DOCUMENTS: Lazy<crate::common::Value> = Lazy::new(|| {
#[actix_rt::test]
async fn attribute_scale_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -38,7 +38,7 @@ async fn attribute_scale_search() {
"rankingRules": ["words", "typo", "proximity"],
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
// the expected order is [1, 3, 2] instead of [3, 1, 2]
@ -99,8 +99,8 @@ async fn attribute_scale_search() {
#[actix_rt::test]
async fn attribute_scale_phrase_search() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -167,8 +167,8 @@ async fn attribute_scale_phrase_search() {
#[actix_rt::test]
async fn word_scale_set_and_reset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -282,8 +282,8 @@ async fn word_scale_set_and_reset() {
#[actix_rt::test]
async fn attribute_scale_default_ranking_rules() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(task.uid()).await.succeeded();
@ -293,7 +293,7 @@ async fn attribute_scale_default_ranking_rules() {
"proximityPrecision": "byAttribute"
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
assert_eq!("202", code.as_str(), "{response:?}");
index.wait_task(response.uid()).await.succeeded();
// the expected order is [3, 1, 2]

View file

@ -5,8 +5,8 @@ use crate::json;
#[actix_rt::test]
async fn set_and_reset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index
.update_settings(json!({
@ -70,8 +70,8 @@ async fn set_and_search() {
},
]);
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();
@ -224,8 +224,8 @@ async fn advanced_synergies() {
},
]);
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (add_task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(add_task.uid()).await.succeeded();

View file

@ -6,11 +6,11 @@ use crate::json;
#[actix_rt::test]
async fn similar_unexisting_index() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let expected_response = json!({
"message": "Index `test` not found.",
"message": format!("Index `{}` not found.", index.uid),
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -26,12 +26,12 @@ async fn similar_unexisting_index() {
#[actix_rt::test]
async fn similar_unexisting_parameter() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
index
.similar(json!({"id": 287947, "marin": "hello"}), |response, code| {
assert_eq!(code, 400, "{}", response);
assert_eq!(code, 400, "{response}");
assert_eq!(response["code"], "bad_request");
})
.await;
@ -39,8 +39,8 @@ async fn similar_unexisting_parameter() {
#[actix_rt::test]
async fn similar_bad_id() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -53,7 +53,7 @@ async fn similar_bad_id() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.similar_post(json!({"id": ["doggo"], "embedder": "manual"})).await;
snapshot!(code, @"400 Bad Request");
@ -69,8 +69,8 @@ async fn similar_bad_id() {
#[actix_rt::test]
async fn similar_bad_ranking_score_threshold() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -83,7 +83,7 @@ async fn similar_bad_ranking_score_threshold() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.similar_post(json!({"rankingScoreThreshold": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
@ -99,8 +99,8 @@ async fn similar_bad_ranking_score_threshold() {
#[actix_rt::test]
async fn similar_invalid_ranking_score_threshold() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -113,7 +113,7 @@ async fn similar_invalid_ranking_score_threshold() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.similar_post(json!({"rankingScoreThreshold": 42})).await;
snapshot!(code, @"400 Bad Request");
@ -129,8 +129,8 @@ async fn similar_invalid_ranking_score_threshold() {
#[actix_rt::test]
async fn similar_invalid_id() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -143,7 +143,7 @@ async fn similar_invalid_id() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.similar_post(json!({"id": "http://invalid-docid/", "embedder": "manual"})).await;
@ -160,8 +160,8 @@ async fn similar_invalid_id() {
#[actix_rt::test]
async fn similar_not_found_id() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -174,7 +174,7 @@ async fn similar_not_found_id() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.similar_post(json!({"id": "definitely-doesnt-exist", "embedder": "manual"})).await;
@ -191,8 +191,8 @@ async fn similar_not_found_id() {
#[actix_rt::test]
async fn similar_bad_offset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -205,7 +205,7 @@ async fn similar_bad_offset() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.similar_post(json!({"id": 287947, "offset": "doggo", "embedder": "manual"})).await;
@ -233,8 +233,8 @@ async fn similar_bad_offset() {
#[actix_rt::test]
async fn similar_bad_limit() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -247,7 +247,7 @@ async fn similar_bad_limit() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let (response, code) =
index.similar_post(json!({"id": 287947, "limit": "doggo", "embedder": "manual"})).await;
@ -277,8 +277,8 @@ async fn similar_bad_limit() {
async fn similar_bad_filter() {
// Since a filter is deserialized as a json Value, it will never fail to deserialize.
// Thus the error message is not generated by deserr but written by us.
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -291,7 +291,7 @@ async fn similar_bad_filter() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
@ -316,8 +316,8 @@ async fn similar_bad_filter() {
#[actix_rt::test]
async fn filter_invalid_syntax_object() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -330,7 +330,7 @@ async fn filter_invalid_syntax_object() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -354,8 +354,8 @@ async fn filter_invalid_syntax_object() {
#[actix_rt::test]
async fn filter_invalid_syntax_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -368,7 +368,7 @@ async fn filter_invalid_syntax_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -392,8 +392,8 @@ async fn filter_invalid_syntax_array() {
#[actix_rt::test]
async fn filter_invalid_syntax_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -406,7 +406,7 @@ async fn filter_invalid_syntax_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -432,8 +432,8 @@ async fn filter_invalid_syntax_string() {
#[actix_rt::test]
async fn filter_invalid_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -446,7 +446,7 @@ async fn filter_invalid_attribute_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -473,8 +473,8 @@ async fn filter_invalid_attribute_array() {
#[actix_rt::test]
async fn filter_invalid_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -487,7 +487,7 @@ async fn filter_invalid_attribute_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -514,8 +514,8 @@ async fn filter_invalid_attribute_string() {
#[actix_rt::test]
async fn filter_reserved_geo_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -528,7 +528,7 @@ async fn filter_reserved_geo_attribute_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -554,8 +554,8 @@ async fn filter_reserved_geo_attribute_array() {
#[actix_rt::test]
async fn filter_reserved_geo_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -568,7 +568,7 @@ async fn filter_reserved_geo_attribute_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -594,8 +594,8 @@ async fn filter_reserved_geo_attribute_string() {
#[actix_rt::test]
async fn filter_reserved_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -608,7 +608,7 @@ async fn filter_reserved_attribute_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -634,8 +634,8 @@ async fn filter_reserved_attribute_array() {
#[actix_rt::test]
async fn filter_reserved_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -648,7 +648,7 @@ async fn filter_reserved_attribute_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -674,8 +674,8 @@ async fn filter_reserved_attribute_string() {
#[actix_rt::test]
async fn filter_reserved_geo_point_array() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -688,7 +688,7 @@ async fn filter_reserved_geo_point_array() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -714,8 +714,8 @@ async fn filter_reserved_geo_point_array() {
#[actix_rt::test]
async fn filter_reserved_geo_point_string() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -728,7 +728,7 @@ async fn filter_reserved_geo_point_string() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
@ -754,8 +754,8 @@ async fn filter_reserved_geo_point_string() {
#[actix_rt::test]
async fn similar_bad_retrieve_vectors() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) =
index.similar_post(json!({"retrieveVectors": "doggo", "embedder": "manual"})).await;
@ -806,8 +806,8 @@ async fn similar_bad_retrieve_vectors() {
#[actix_rt::test]
async fn similar_bad_embedder() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -820,7 +820,7 @@ async fn similar_bad_embedder() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;

View file

@ -47,8 +47,8 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
#[actix_rt::test]
async fn basic() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -61,12 +61,12 @@ async fn basic() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -233,8 +233,8 @@ async fn basic() {
#[actix_rt::test]
async fn ranking_score_threshold() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -247,12 +247,12 @@ async fn ranking_score_threshold() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -503,8 +503,8 @@ async fn ranking_score_threshold() {
#[actix_rt::test]
async fn filter() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -517,12 +517,12 @@ async fn filter() {
"filterableAttributes": ["title", "release_year"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
index
.similar(
@ -621,8 +621,8 @@ async fn filter() {
#[actix_rt::test]
async fn limit_and_offset() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index_with_prefix("test");
let (response, code) = index
.update_settings(json!({
@ -635,12 +635,12 @@ async fn limit_and_offset() {
"filterableAttributes": ["title"]}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = DOCUMENTS.clone();
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
index
.similar(

View file

@ -6,8 +6,8 @@ use crate::common::Server;
use crate::json;
#[actix_rt::test]
async fn get_settings_unexisting_index() {
let server = Server::new().await;
async fn get_version() {
let server = Server::new_shared();
let (response, code) = server.version().await;
assert_eq!(code, 200);
let version = response.as_object().unwrap();
@ -18,7 +18,7 @@ async fn get_settings_unexisting_index() {
#[actix_rt::test]
async fn test_healthyness() {
let server = Server::new().await;
let server = Server::new_shared();
let (response, status_code) = server.service.get("/health").await;
assert_eq!(status_code, 200);
@ -55,7 +55,7 @@ async fn stats() {
]);
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202, "{}", response);
assert_eq!(code, 202, "{response}");
assert_eq!(response["taskUid"], 1);
index.wait_task(response.uid()).await.succeeded();
@ -78,8 +78,8 @@ async fn stats() {
#[actix_rt::test]
async fn add_remove_embeddings() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -216,8 +216,8 @@ async fn add_remove_embeddings() {
#[actix_rt::test]
async fn add_remove_embedded_documents() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@ -293,8 +293,8 @@ async fn add_remove_embedded_documents() {
#[actix_rt::test]
async fn update_embedder_settings() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
// 2 embedded documents for 3 embeddings in total
// but no embedders are added in the settings yet, so we expect 0 embedded documents for 0 embeddings in total

View file

@ -1,8 +1,7 @@
mod errors;
mod webhook;
use meili_snap::insta::assert_json_snapshot;
use meili_snap::snapshot;
use meili_snap::{json_string, snapshot};
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
@ -11,14 +10,12 @@ use crate::json;
#[actix_rt::test]
async fn error_get_unexisting_task_status() {
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(1).await;
let server = Server::new_shared();
let index = server.unique_index();
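// Task uids are not predictable on the shared server, so query a uid that can never be
// allocated (u32::MAX) to reliably hit the `task_not_found` error.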
let (response, code) = index.get_task(u32::MAX as u64).await;
let expected_response = json!({
"message": "Task `1` not found.",
"message": "Task `4294967295` not found.",
"code": "task_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#task_not_found"
@ -30,8 +27,8 @@ async fn error_get_unexisting_task_status() {
#[actix_rt::test]
async fn get_task_status() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (create_task, _status_code) = index.create(None).await;
let (add_task, _status_code) = index
.add_documents(
@ -42,7 +39,7 @@ async fn get_task_status() {
None,
)
.await;
index.wait_task(create_task.uid()).await.succeeded();
server.wait_task(create_task.uid()).await.succeeded();
let (_response, code) = index.get_task(add_task.uid()).await;
assert_eq!(code, 200);
// TODO check response format, as per #48
@ -50,10 +47,11 @@ async fn get_task_status() {
#[actix_rt::test]
async fn list_tasks() {
// Do not use a shared server because we want to assert stuff against the global list of tasks
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
@ -64,6 +62,7 @@ async fn list_tasks() {
#[actix_rt::test]
async fn list_tasks_pagination_and_reverse() {
// do not use a shared server here, as we want to assert tasks ids and we need them to be stable
let server = Server::new().await;
// First of all we want to create a lot of tasks very quickly. The fastest way is to delete a lot of non-existing indexes
let mut last_task = None;
@ -71,7 +70,7 @@ async fn list_tasks_pagination_and_reverse() {
let index = server.index(format!("test-{i}"));
last_task = Some(index.create(None).await.0.uid());
}
server.wait_task(last_task.unwrap()).await;
server.wait_task(last_task.unwrap()).await.succeeded();
let (response, code) = server.tasks_filter("limit=3").await;
assert_eq!(code, 200);
@ -103,13 +102,14 @@ async fn list_tasks_pagination_and_reverse() {
#[actix_rt::test]
async fn list_tasks_with_star_filters() {
let server = Server::new().await;
// Do not use a unique index here, as we want to test the `indexUids=*` filter.
let index = server.index("test");
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.service.get("/tasks?indexUids=test").await;
let (response, code) = index.service.get(format!("/tasks?indexUids={}", index.uid)).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
@ -127,93 +127,102 @@ async fn list_tasks_with_star_filters() {
let (response, code) =
index.service.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*").await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(code, 200, "{response:?}");
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test")
.get(format!(
"/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids={}",
index.uid
))
.await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(code, 200, "{response:?}");
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*")
.await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(code, 200, "{response:?}");
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_status_filtered() {
// Do not use a shared server because we want to assert stuff against the global list of tasks
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index.filtered_tasks(&[], &["succeeded", "failed"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_type_filtered() {
// Do not use a shared server because we want to assert stuff against the global list of tasks
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &[], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) =
index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_invalid_canceled_by_filter() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
index
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await;
assert_eq!(code, 200, "{}", response);
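// Filtering `canceledBy` on the uid of the document-addition task (which canceled nothing)
// is expected to return an empty result set.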
let (response, code) =
index.filtered_tasks(&[], &[], &[format!("{}", task.uid()).as_str()]).await;
assert_eq!(code, 200, "{response}");
assert_eq!(response["results"].as_array().unwrap().len(), 0);
}
#[actix_rt::test]
async fn list_tasks_status_and_type_filtered() {
// Do not use a shared server because we want to assert stuff against the global list of tasks
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["results"].as_array().unwrap().len(), 0);
let (response, code) = index
@ -223,12 +232,12 @@ async fn list_tasks_status_and_type_filtered() {
&[],
)
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(code, 200, "{response}");
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
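// `$index` is matched as `tt` instead of `literal` below so call sites can pass either a
// string literal or a runtime value such as `index_uid`.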
macro_rules! assert_valid_summarized_task {
($response:expr, $task_type:literal, $index:literal) => {{
($response:expr, $task_type:literal, $index:tt) => {{
assert_eq!($response.as_object().unwrap().len(), 5);
assert!($response["taskUid"].as_u64().is_some());
assert_eq!($response["indexUid"], $index);
@ -242,49 +251,49 @@ macro_rules! assert_valid_summarized_task {
#[actix_web::test]
async fn test_summarized_task_view() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let index_uid = index.uid.clone();
let (response, _) = index.create(None).await;
assert_valid_summarized_task!(response, "indexCreation", "test");
assert_valid_summarized_task!(response, "indexCreation", index_uid);
let (response, _) = index.update(None).await;
assert_valid_summarized_task!(response, "indexUpdate", "test");
assert_valid_summarized_task!(response, "indexUpdate", index_uid);
let (response, _) = index.update_settings(json!({})).await;
assert_valid_summarized_task!(response, "settingsUpdate", "test");
assert_valid_summarized_task!(response, "settingsUpdate", index_uid);
let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", index_uid);
let (response, _) = index.delete_document(1).await;
assert_valid_summarized_task!(response, "documentDeletion", "test");
assert_valid_summarized_task!(response, "documentDeletion", index_uid);
let (response, _) = index.clear_all_documents().await;
assert_valid_summarized_task!(response, "documentDeletion", "test");
assert_valid_summarized_task!(response, "documentDeletion", index_uid);
let (response, _) = index.delete().await;
assert_valid_summarized_task!(response, "indexDeletion", "test");
assert_valid_summarized_task!(response, "indexDeletion", index_uid);
}
#[actix_web::test]
async fn test_summarized_document_addition_or_update() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(0).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
@ -302,15 +311,14 @@ async fn test_summarized_document_addition_or_update() {
let (task, _status_code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(1).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
@r###"
{
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
@ -329,18 +337,22 @@ async fn test_summarized_document_addition_or_update() {
#[actix_web::test]
async fn test_summarized_delete_documents_by_batch() {
let server = Server::new().await;
let index = server.index("test");
let (task, _status_code) = index.delete_batch(vec![1, 2, 3]).await;
index.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(0).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
let server = Server::new_shared();
let index = server.unique_index();
let non_existing_task_id1 = u32::MAX as u64;
let non_existing_task_id2 = non_existing_task_id1 - 1;
let non_existing_task_id3 = non_existing_task_id1 - 2;
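// `delete_batch` takes document ids; the exact values do not matter here because the index
// has not been created yet, so the task fails with `index_not_found` (see the snapshot below).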
let (task, _status_code) = index
.delete_batch(vec![non_existing_task_id1, non_existing_task_id2, non_existing_task_id3])
.await;
server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
snapshot!(task,
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
@ -350,7 +362,7 @@ async fn test_summarized_delete_documents_by_batch() {
"originalFilter": null
},
"error": {
"message": "Index `test` not found.",
"message": "Index `[uuid]` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -364,15 +376,14 @@ async fn test_summarized_delete_documents_by_batch() {
index.create(None).await;
let (del_task, _status_code) = index.delete_batch(vec![42]).await;
index.wait_task(del_task.uid()).await.succeeded();
server.wait_task(del_task.uid()).await.succeeded();
let (task, _) = index.get_task(del_task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -392,20 +403,19 @@ async fn test_summarized_delete_documents_by_batch() {
#[actix_web::test]
async fn test_summarized_delete_documents_by_filter() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
@ -415,7 +425,7 @@ async fn test_summarized_delete_documents_by_filter() {
"originalFilter": "\"doggo = bernese\""
},
"error": {
"message": "Index `test` not found.",
"message": "Index `[uuid]` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -430,15 +440,14 @@ async fn test_summarized_delete_documents_by_filter() {
index.create(None).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
@ -448,7 +457,7 @@ async fn test_summarized_delete_documents_by_filter() {
"originalFilter": "\"doggo = bernese\""
},
"error": {
"message": "Index `test`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
"message": "Index `[uuid]`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese",
"code": "invalid_document_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_document_filter"
@ -463,15 +472,14 @@ async fn test_summarized_delete_documents_by_filter() {
index.update_settings(json!({ "filterableAttributes": ["doggo"] })).await;
let (task, _status_code) =
index.delete_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 4,
"batchUid": 4,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -491,18 +499,17 @@ async fn test_summarized_delete_documents_by_filter() {
#[actix_web::test]
async fn test_summarized_delete_document_by_id() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.delete_document(1).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "failed",
"type": "documentDeletion",
"canceledBy": null,
@ -512,7 +519,7 @@ async fn test_summarized_delete_document_by_id() {
"originalFilter": null
},
"error": {
"message": "Index `test` not found.",
"message": "Index `[uuid]` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -526,15 +533,14 @@ async fn test_summarized_delete_document_by_id() {
index.create(None).await;
let (task, _status_code) = index.delete_document(42).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentDeletion",
"canceledBy": null,
@ -554,12 +560,12 @@ async fn test_summarized_delete_document_by_id() {
#[actix_web::test]
async fn test_summarized_settings_update() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
// here we should find my payload even in the failed task.
let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await;
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
"code": "invalid_settings_ranking_rules",
@ -569,15 +575,14 @@ async fn test_summarized_settings_update() {
"###);
let (task,_status_code) = index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "settingsUpdate",
"canceledBy": null,
@ -605,18 +610,17 @@ async fn test_summarized_settings_update() {
#[actix_web::test]
async fn test_summarized_index_creation() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "indexCreation",
"canceledBy": null,
@ -632,15 +636,14 @@ async fn test_summarized_index_creation() {
"###);
let (task, _status_code) = index.create(Some("doggos")).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "failed",
"type": "indexCreation",
"canceledBy": null,
@ -648,7 +651,7 @@ async fn test_summarized_index_creation() {
"primaryKey": "doggos"
},
"error": {
"message": "Index `test` already exists.",
"message": "Index `[uuid]` already exists.",
"code": "index_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_already_exists"
@ -663,16 +666,16 @@ async fn test_summarized_index_creation() {
#[actix_web::test]
async fn test_summarized_index_deletion() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
let (ret, _code) = index.delete().await;
let task = index.wait_task(ret.uid()).await;
let task = server.wait_task(ret.uid()).await;
snapshot!(task,
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"indexUid": "[uuid]",
"status": "failed",
"type": "indexDeletion",
"canceledBy": null,
@ -680,7 +683,7 @@ async fn test_summarized_index_deletion() {
"deletedDocuments": 0
},
"error": {
"message": "Index `test` not found.",
"message": "Index `[uuid]` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -697,13 +700,13 @@ async fn test_summarized_index_deletion() {
// both tasks may get autobatched and the deleted documents count will be wrong.
let (ret, _code) =
index.add_documents(json!({ "id": 42, "content": "doggos & fluff" }), Some("id")).await;
let task = index.wait_task(ret.uid()).await;
let task = server.wait_task(ret.uid()).await;
snapshot!(task,
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
@ -720,13 +723,13 @@ async fn test_summarized_index_deletion() {
"###);
let (ret, _code) = index.delete().await;
let task = index.wait_task(ret.uid()).await;
let task = server.wait_task(ret.uid()).await;
snapshot!(task,
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "indexDeletion",
"canceledBy": null,
@ -743,13 +746,13 @@ async fn test_summarized_index_deletion() {
// What happens when you delete an index that doesn't exist.
let (ret, _code) = index.delete().await;
let task = index.wait_task(ret.uid()).await;
let task = server.wait_task(ret.uid()).await;
snapshot!(task,
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"indexUid": "[uuid]",
"status": "failed",
"type": "indexDeletion",
"canceledBy": null,
@ -757,7 +760,7 @@ async fn test_summarized_index_deletion() {
"deletedDocuments": 0
},
"error": {
"message": "Index `test` not found.",
"message": "Index `[uuid]` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -772,19 +775,18 @@ async fn test_summarized_index_deletion() {
#[actix_web::test]
async fn test_summarized_index_update() {
let server = Server::new().await;
let index = server.index("test");
let server = Server::new_shared();
let index = server.unique_index();
// If the index doesn't exist yet, we should get errors with or without the primary key.
let (task, _status_code) = index.update(None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "failed",
"type": "indexUpdate",
"canceledBy": null,
@ -792,7 +794,7 @@ async fn test_summarized_index_update() {
"primaryKey": null
},
"error": {
"message": "Index `test` not found.",
"message": "Index `[uuid]` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -805,15 +807,14 @@ async fn test_summarized_index_update() {
"###);
let (task, _status_code) = index.update(Some("bones")).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "failed",
"type": "indexUpdate",
"canceledBy": null,
@ -821,7 +822,7 @@ async fn test_summarized_index_update() {
"primaryKey": "bones"
},
"error": {
"message": "Index `test` not found.",
"message": "Index `[uuid]` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -837,15 +838,14 @@ async fn test_summarized_index_update() {
index.create(None).await;
let (task, _status_code) = index.update(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 3,
"batchUid": 3,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "indexUpdate",
"canceledBy": null,
@ -861,15 +861,14 @@ async fn test_summarized_index_update() {
"###);
let (task, _status_code) = index.update(Some("bones")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 4,
"batchUid": 4,
"indexUid": "test",
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "[uuid]",
"status": "succeeded",
"type": "indexUpdate",
"canceledBy": null,
@ -887,7 +886,7 @@ async fn test_summarized_index_update() {
#[actix_web::test]
async fn test_summarized_index_swap() {
let server = Server::new().await;
let server = Server::new_shared();
let (task, _status_code) = server
.index_swap(json!([
{ "indexes": ["doggos", "cattos"] }
@ -895,12 +894,11 @@ async fn test_summarized_index_swap() {
.await;
server.wait_task(task.uid()).await.failed();
let (task, _) = server.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 0,
"batchUid": 0,
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": null,
"status": "failed",
"type": "indexSwap",
@ -928,23 +926,25 @@ async fn test_summarized_index_swap() {
}
"###);
let (task, _code) = server.index("doggos").create(None).await;
let doggos_index = server.unique_index();
let (task, _code) = doggos_index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = server.index("cattos").create(None).await;
let cattos_index = server.unique_index();
let (task, _code) = cattos_index.create(None).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _code) = server
.index_swap(json!([
{ "indexes": ["doggos", "cattos"] }
{ "indexes": [doggos_index.uid, cattos_index.uid] }
]))
.await;
server.wait_task(task.uid()).await.succeeded();
let (task, _) = server.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(json_string!(task,
{ ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.indexes[0]" => "doggos", ".**.indexes[1]" => "cattos", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
"uid": 3,
"batchUid": 3,
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": null,
"status": "succeeded",
"type": "indexSwap",
@ -970,20 +970,21 @@ async fn test_summarized_index_swap() {
#[actix_web::test]
async fn test_summarized_task_cancelation() {
let server = Server::new().await;
let index = server.index("doggos");
let server = Server::new_shared();
let index = server.unique_index();
// to avoid being flaky we're only going to cancel an already finished task :(
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.cancel_tasks("uids=0").await;
index.wait_task(task.uid()).await.succeeded();
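// On the shared server the created task's uid is not 0, so the cancelation filter must
// target the actual uid rather than a hard-coded `uids=0`.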
let task_uid = task.uid();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.cancel_tasks(format!("uids={task_uid}").as_str()).await;
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(json_string!(task,
{ ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".**.originalFilter" => "[of]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
"uid": 1,
"batchUid": 1,
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": null,
"status": "succeeded",
"type": "taskCancelation",
@ -991,7 +992,7 @@ async fn test_summarized_task_cancelation() {
"details": {
"matchedTasks": 1,
"canceledTasks": 0,
"originalFilter": "?uids=0"
"originalFilter": "[of]"
},
"error": null,
"duration": "[duration]",
@ -1004,20 +1005,19 @@ async fn test_summarized_task_cancelation() {
#[actix_web::test]
async fn test_summarized_task_deletion() {
let server = Server::new().await;
let index = server.index("doggos");
let server = Server::new_shared();
let index = server.unique_index();
// to avoid being flaky we're only going to delete an already finished task :(
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = server.delete_tasks("uids=0").await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _) = index.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 1,
"batchUid": 1,
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": null,
"status": "succeeded",
"type": "taskDeletion",
@ -1038,22 +1038,22 @@ async fn test_summarized_task_deletion() {
#[actix_web::test]
async fn test_summarized_dump_creation() {
// Do not use a shared server because it takes too long to create a dump
let server = Server::new().await;
let (task, _status_code) = server.create_dump().await;
server.wait_task(task.uid()).await;
let (task, _) = server.get_task(task.uid()).await;
assert_json_snapshot!(task,
{ ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
snapshot!(task,
@r###"
{
"uid": 0,
"batchUid": 0,
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": null,
"status": "succeeded",
"type": "dumpCreation",
"canceledBy": null,
"details": {
"dumpUid": "[dumpUid]"
"dumpUid": "[dump_uid]"
},
"error": null,
"duration": "[duration]",

View file

@ -43,7 +43,7 @@ async fn version_too_old() {
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.0");
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.2");
}
#[actix_rt::test]
@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
snapshot!(err, @"Database version 1.15.1 is higher than the Meilisearch version 1.15.0. Downgrade is not supported");
snapshot!(err, @"Database version 1.15.3 is higher than the Meilisearch version 1.15.2. Downgrade is not supported");
}
#[actix_rt::test]

View file

@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.15.2"
},
"stats": {
"totalNbTasks": 1,
@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
},
{
"uid": 23,
@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.004146631S",
"startedAt": "2025-01-23T11:38:57.012591321Z",
"finishedAt": "2025-01-23T11:38:57.016737952Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 22,
@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.102738497S",
"startedAt": "2025-01-23T11:36:22.551906856Z",
"finishedAt": "2025-01-23T11:36:22.654645353Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 21,
@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.005108474S",
"startedAt": "2025-01-23T11:36:04.132670526Z",
"finishedAt": "2025-01-23T11:36:04.137779Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 20,
@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.027954894S",
"startedAt": "2025-01-23T11:35:53.631082795Z",
"finishedAt": "2025-01-23T11:35:53.659037689Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 19,
@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.006903297S",
"startedAt": "2025-01-20T11:50:52.874106134Z",
"finishedAt": "2025-01-20T11:50:52.881009431Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 18,
@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000481257S",
"startedAt": "2025-01-20T11:48:04.92820416Z",
"finishedAt": "2025-01-20T11:48:04.928685417Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 17,
@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000407005S",
"startedAt": "2025-01-20T11:47:53.509403957Z",
"finishedAt": "2025-01-20T11:47:53.509810962Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 16,
@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000403716S",
"startedAt": "2025-01-20T11:47:48.430653005Z",
"finishedAt": "2025-01-20T11:47:48.431056721Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 15,
@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000417016S",
"startedAt": "2025-01-20T11:47:42.429678617Z",
"finishedAt": "2025-01-20T11:47:42.430095633Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 14,
@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT12.086284842S",
"startedAt": "2025-01-20T11:47:03.092181576Z",
"finishedAt": "2025-01-20T11:47:15.178466418Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 13,
@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.011506614S",
"startedAt": "2025-01-16T17:18:43.29334923Z",
"finishedAt": "2025-01-16T17:18:43.304855844Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 12,
@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007640163S",
"startedAt": "2025-01-16T17:02:52.539749853Z",
"finishedAt": "2025-01-16T17:02:52.547390016Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 11,
@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007307840S",
"startedAt": "2025-01-16T17:01:14.112756687Z",
"finishedAt": "2025-01-16T17:01:14.120064527Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 10,
@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007391353S",
"startedAt": "2025-01-16T17:00:29.201180268Z",
"finishedAt": "2025-01-16T17:00:29.208571621Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 9,
@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007445825S",
"startedAt": "2025-01-16T17:00:15.77629445Z",
"finishedAt": "2025-01-16T17:00:15.783740275Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 8,
@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.012020083S",
"startedAt": "2025-01-16T16:59:42.744086671Z",
"finishedAt": "2025-01-16T16:59:42.756106754Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 7,
@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007440092S",
"startedAt": "2025-01-16T16:58:41.2155771Z",
"finishedAt": "2025-01-16T16:58:41.223017192Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 6,
@@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007565161S",
"startedAt": "2025-01-16T16:54:51.940332781Z",
"finishedAt": "2025-01-16T16:54:51.947897942Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 5,
@@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.016307263S",
"startedAt": "2025-01-16T16:53:19.913351957Z",
"finishedAt": "2025-01-16T16:53:19.92965922Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 23,

View file

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.15.2"
},
"stats": {
"totalNbTasks": 1,
@@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
},
{
"uid": 23,
@@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.004146631S",
"startedAt": "2025-01-23T11:38:57.012591321Z",
"finishedAt": "2025-01-23T11:38:57.016737952Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 22,
@@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.102738497S",
"startedAt": "2025-01-23T11:36:22.551906856Z",
"finishedAt": "2025-01-23T11:36:22.654645353Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 21,
@@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.005108474S",
"startedAt": "2025-01-23T11:36:04.132670526Z",
"finishedAt": "2025-01-23T11:36:04.137779Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 20,
@@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.027954894S",
"startedAt": "2025-01-23T11:35:53.631082795Z",
"finishedAt": "2025-01-23T11:35:53.659037689Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 19,
@@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.006903297S",
"startedAt": "2025-01-20T11:50:52.874106134Z",
"finishedAt": "2025-01-20T11:50:52.881009431Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 18,
@@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000481257S",
"startedAt": "2025-01-20T11:48:04.92820416Z",
"finishedAt": "2025-01-20T11:48:04.928685417Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 17,
@@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000407005S",
"startedAt": "2025-01-20T11:47:53.509403957Z",
"finishedAt": "2025-01-20T11:47:53.509810962Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 16,
@@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000403716S",
"startedAt": "2025-01-20T11:47:48.430653005Z",
"finishedAt": "2025-01-20T11:47:48.431056721Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 15,
@@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000417016S",
"startedAt": "2025-01-20T11:47:42.429678617Z",
"finishedAt": "2025-01-20T11:47:42.430095633Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 14,
@@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT12.086284842S",
"startedAt": "2025-01-20T11:47:03.092181576Z",
"finishedAt": "2025-01-20T11:47:15.178466418Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 13,
@@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.011506614S",
"startedAt": "2025-01-16T17:18:43.29334923Z",
"finishedAt": "2025-01-16T17:18:43.304855844Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 12,
@@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007640163S",
"startedAt": "2025-01-16T17:02:52.539749853Z",
"finishedAt": "2025-01-16T17:02:52.547390016Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 11,
@@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007307840S",
"startedAt": "2025-01-16T17:01:14.112756687Z",
"finishedAt": "2025-01-16T17:01:14.120064527Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 10,
@@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007391353S",
"startedAt": "2025-01-16T17:00:29.201180268Z",
"finishedAt": "2025-01-16T17:00:29.208571621Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 9,
@@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007445825S",
"startedAt": "2025-01-16T17:00:15.77629445Z",
"finishedAt": "2025-01-16T17:00:15.783740275Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 8,
@@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.012020083S",
"startedAt": "2025-01-16T16:59:42.744086671Z",
"finishedAt": "2025-01-16T16:59:42.756106754Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 7,
@@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007440092S",
"startedAt": "2025-01-16T16:58:41.2155771Z",
"finishedAt": "2025-01-16T16:58:41.223017192Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 6,
@@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007565161S",
"startedAt": "2025-01-16T16:54:51.940332781Z",
"finishedAt": "2025-01-16T16:54:51.947897942Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 5,
@@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.016307263S",
"startedAt": "2025-01-16T16:53:19.913351957Z",
"finishedAt": "2025-01-16T16:53:19.92965922Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 23,

View file

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.15.2"
},
"stats": {
"totalNbTasks": 1,
@@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
},
{
"uid": 23,
@@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.004146631S",
"startedAt": "2025-01-23T11:38:57.012591321Z",
"finishedAt": "2025-01-23T11:38:57.016737952Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 22,
@@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.102738497S",
"startedAt": "2025-01-23T11:36:22.551906856Z",
"finishedAt": "2025-01-23T11:36:22.654645353Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 21,
@@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.005108474S",
"startedAt": "2025-01-23T11:36:04.132670526Z",
"finishedAt": "2025-01-23T11:36:04.137779Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 20,
@@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.027954894S",
"startedAt": "2025-01-23T11:35:53.631082795Z",
"finishedAt": "2025-01-23T11:35:53.659037689Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 19,
@@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.006903297S",
"startedAt": "2025-01-20T11:50:52.874106134Z",
"finishedAt": "2025-01-20T11:50:52.881009431Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 18,
@@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000481257S",
"startedAt": "2025-01-20T11:48:04.92820416Z",
"finishedAt": "2025-01-20T11:48:04.928685417Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 17,
@@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000407005S",
"startedAt": "2025-01-20T11:47:53.509403957Z",
"finishedAt": "2025-01-20T11:47:53.509810962Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 16,
@@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000403716S",
"startedAt": "2025-01-20T11:47:48.430653005Z",
"finishedAt": "2025-01-20T11:47:48.431056721Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 15,
@@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000417016S",
"startedAt": "2025-01-20T11:47:42.429678617Z",
"finishedAt": "2025-01-20T11:47:42.430095633Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 14,
@@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT12.086284842S",
"startedAt": "2025-01-20T11:47:03.092181576Z",
"finishedAt": "2025-01-20T11:47:15.178466418Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 13,
@@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.011506614S",
"startedAt": "2025-01-16T17:18:43.29334923Z",
"finishedAt": "2025-01-16T17:18:43.304855844Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 12,
@@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007640163S",
"startedAt": "2025-01-16T17:02:52.539749853Z",
"finishedAt": "2025-01-16T17:02:52.547390016Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 11,
@@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007307840S",
"startedAt": "2025-01-16T17:01:14.112756687Z",
"finishedAt": "2025-01-16T17:01:14.120064527Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 10,
@@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007391353S",
"startedAt": "2025-01-16T17:00:29.201180268Z",
"finishedAt": "2025-01-16T17:00:29.208571621Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 9,
@@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007445825S",
"startedAt": "2025-01-16T17:00:15.77629445Z",
"finishedAt": "2025-01-16T17:00:15.783740275Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 8,
@@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.012020083S",
"startedAt": "2025-01-16T16:59:42.744086671Z",
"finishedAt": "2025-01-16T16:59:42.756106754Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 7,
@@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007440092S",
"startedAt": "2025-01-16T16:58:41.2155771Z",
"finishedAt": "2025-01-16T16:58:41.223017192Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 6,
@@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007565161S",
"startedAt": "2025-01-16T16:54:51.940332781Z",
"finishedAt": "2025-01-16T16:54:51.947897942Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 5,
@@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.016307263S",
"startedAt": "2025-01-16T16:53:19.913351957Z",
"finishedAt": "2025-01-16T16:53:19.92965922Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 23,

View file

@@ -29,7 +29,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 1,

View file

@@ -25,7 +25,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@@ -49,7 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.111055654S",
"startedAt": "2025-01-16T16:45:16.020248085Z",
"finishedAt": "2025-01-16T16:45:16.131303739Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 2,

View file

@@ -25,7 +25,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@@ -49,7 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.111055654S",
"startedAt": "2025-01-16T16:45:16.020248085Z",
"finishedAt": "2025-01-16T16:45:16.131303739Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 2,

View file

@@ -25,7 +25,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@@ -49,7 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.111055654S",
"startedAt": "2025-01-16T16:45:16.020248085Z",
"finishedAt": "2025-01-16T16:45:16.131303739Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 2,

View file

@@ -30,7 +30,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 1,

View file

@@ -30,7 +30,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 1,

View file

@@ -29,7 +29,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 1,

View file

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.15.2"
},
"error": null,
"duration": "[duration]",

View file

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.15.2"
},
"error": null,
"duration": "[duration]",

View file

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.15.2"
},
"error": null,
"duration": "[duration]",

View file

@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"progress": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.15.2"
},
"stats": {
"totalNbTasks": 1,
@@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "[duration]",
"startedAt": "[date]",
"finishedAt": "[date]",
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
"batchStrategy": "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type."
},
{
"uid": 23,
@@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.004146631S",
"startedAt": "2025-01-23T11:38:57.012591321Z",
"finishedAt": "2025-01-23T11:38:57.016737952Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 22,
@@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.102738497S",
"startedAt": "2025-01-23T11:36:22.551906856Z",
"finishedAt": "2025-01-23T11:36:22.654645353Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 21,
@@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.005108474S",
"startedAt": "2025-01-23T11:36:04.132670526Z",
"finishedAt": "2025-01-23T11:36:04.137779Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 20,
@@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.027954894S",
"startedAt": "2025-01-23T11:35:53.631082795Z",
"finishedAt": "2025-01-23T11:35:53.659037689Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 19,
@@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.006903297S",
"startedAt": "2025-01-20T11:50:52.874106134Z",
"finishedAt": "2025-01-20T11:50:52.881009431Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 18,
@@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000481257S",
"startedAt": "2025-01-20T11:48:04.92820416Z",
"finishedAt": "2025-01-20T11:48:04.928685417Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 17,
@@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000407005S",
"startedAt": "2025-01-20T11:47:53.509403957Z",
"finishedAt": "2025-01-20T11:47:53.509810962Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 16,
@@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000403716S",
"startedAt": "2025-01-20T11:47:48.430653005Z",
"finishedAt": "2025-01-20T11:47:48.431056721Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 15,
@@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000417016S",
"startedAt": "2025-01-20T11:47:42.429678617Z",
"finishedAt": "2025-01-20T11:47:42.430095633Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 14,
@@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT12.086284842S",
"startedAt": "2025-01-20T11:47:03.092181576Z",
"finishedAt": "2025-01-20T11:47:15.178466418Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 13,
@@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.011506614S",
"startedAt": "2025-01-16T17:18:43.29334923Z",
"finishedAt": "2025-01-16T17:18:43.304855844Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 12,
@@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007640163S",
"startedAt": "2025-01-16T17:02:52.539749853Z",
"finishedAt": "2025-01-16T17:02:52.547390016Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 11,
@@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007307840S",
"startedAt": "2025-01-16T17:01:14.112756687Z",
"finishedAt": "2025-01-16T17:01:14.120064527Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 10,
@@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007391353S",
"startedAt": "2025-01-16T17:00:29.201180268Z",
"finishedAt": "2025-01-16T17:00:29.208571621Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 9,
@@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007445825S",
"startedAt": "2025-01-16T17:00:15.77629445Z",
"finishedAt": "2025-01-16T17:00:15.783740275Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 8,
@@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.012020083S",
"startedAt": "2025-01-16T16:59:42.744086671Z",
"finishedAt": "2025-01-16T16:59:42.756106754Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 7,
@@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007440092S",
"startedAt": "2025-01-16T16:58:41.2155771Z",
"finishedAt": "2025-01-16T16:58:41.223017192Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 6,
@@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007565161S",
"startedAt": "2025-01-16T16:54:51.940332781Z",
"finishedAt": "2025-01-16T16:54:51.947897942Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 5,
@@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.016307263S",
"startedAt": "2025-01-16T16:53:19.913351957Z",
"finishedAt": "2025-01-16T16:53:19.92965922Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 4,
@@ -540,7 +540,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.087655941S",
"startedAt": "2025-01-16T16:52:32.631145531Z",
"finishedAt": "2025-01-16T16:52:32.718801472Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 3,
@@ -565,7 +565,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007593573S",
"startedAt": "2025-01-16T16:47:53.677901409Z",
"finishedAt": "2025-01-16T16:47:53.685494982Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 2,
@@ -591,7 +591,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.017769760S",
"startedAt": "2025-01-16T16:47:41.211587682Z",
"finishedAt": "2025-01-16T16:47:41.229357442Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 1,
@@ -615,7 +615,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.066095506S",
"startedAt": "2025-01-16T16:47:10.217299609Z",
"finishedAt": "2025-01-16T16:47:10.283395115Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
},
{
"uid": 0,
@@ -639,7 +639,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.111055654S",
"startedAt": "2025-01-16T16:45:16.020248085Z",
"finishedAt": "2025-01-16T16:45:16.131303739Z",
"batchCreationComplete": "unspecified"
"batchStrategy": "unspecified"
}
],
"total": 25,

View file

@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"canceledBy": null,
"details": {
"upgradeFrom": "v1.12.0",
"upgradeTo": "v1.15.0"
"upgradeTo": "v1.15.2"
},
"error": null,
"duration": "[duration]",

View file

@@ -6,8 +6,8 @@ use crate::vector::generate_default_user_provided_documents;
#[actix_rt::test]
async fn retrieve_binary_quantize_status_in_the_settings() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@@ -65,8 +65,8 @@ async fn retrieve_binary_quantize_status_in_the_settings() {
#[actix_rt::test]
async fn binary_quantize_before_sending_documents() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@@ -139,8 +139,8 @@ async fn binary_quantize_before_sending_documents() {
#[actix_rt::test]
async fn binary_quantize_after_sending_documents() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@@ -226,8 +226,8 @@ async fn binary_quantize_after_sending_documents() {
#[actix_rt::test]
async fn try_to_disable_binary_quantization() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
let (response, code) = index
.update_settings(json!({
@@ -256,11 +256,11 @@ async fn try_to_disable_binary_quantization() {
.await;
snapshot!(code, @"202 Accepted");
let ret = server.wait_task(response.uid()).await;
snapshot!(ret, @r#"
snapshot!(json_string!(ret, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".finishedAt" => "[date]", ".startedAt" => "[date]" }), @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "doggo",
"indexUid": "[uuid]",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
@@ -274,7 +274,7 @@ async fn try_to_disable_binary_quantization() {
}
},
"error": {
"message": "Index `doggo`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"message": "Index `[uuid]`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"code": "invalid_settings_embedders",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_embedders"