Merge branch 'main' into indexer-edition-2024

This commit is contained in:
Louis Dureuil 2024-11-20 16:59:58 +01:00
commit 6e6acfcf1b
No known key found for this signature in database
330 changed files with 10063 additions and 1499 deletions

View file

@ -49,4 +49,18 @@ lazy_static! {
pub static ref MEILISEARCH_IS_INDEXING: IntGauge =
register_int_gauge!(opts!("meilisearch_is_indexing", "Meilisearch Is Indexing"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_SEARCH_QUEUE_SIZE: IntGauge = register_int_gauge!(opts!(
"meilisearch_search_queue_size",
"Meilisearch Search Queue Size"
))
.expect("Can't create a metric");
pub static ref MEILISEARCH_SEARCHES_RUNNING: IntGauge =
register_int_gauge!(opts!("meilisearch_searches_running", "Meilisearch Searches Running"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_SEARCHES_WAITING_TO_BE_PROCESSED: IntGauge =
register_int_gauge!(opts!(
"meilisearch_searches_waiting_to_be_processed",
"Meilisearch Searches Being Processed"
))
.expect("Can't create a metric");
}

View file

@ -0,0 +1,80 @@
use actix_web::{
web::{self, Data},
HttpResponse,
};
use deserr::actix_web::AwebQueryParameter;
use index_scheduler::{IndexScheduler, Query};
use meilisearch_types::{
batch_view::BatchView, batches::BatchId, deserr::DeserrQueryParamError, error::ResponseError,
keys::actions,
};
use serde::Serialize;
use crate::extractors::{authentication::GuardedData, sequential_extractor::SeqHandler};
use super::{tasks::TasksFilterQuery, ActionPolicy};
/// Registers the `/batches` endpoints: the listing route at the root and the
/// single-batch route at `/{batch_id}`, both GET-only and sequentialized.
pub fn configure(cfg: &mut web::ServiceConfig) {
    let list = web::resource("").route(web::get().to(SeqHandler(get_batches)));
    let single = web::resource("/{batch_id}").route(web::get().to(SeqHandler(get_batch)));
    cfg.service(list).service(single);
}
async fn get_batch(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
batch_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let batch_uid_string = batch_uid.into_inner();
let batch_uid: BatchId = match batch_uid_string.parse() {
Ok(id) => id,
Err(_e) => {
return Err(
index_scheduler::Error::InvalidBatchUid { batch_uid: batch_uid_string }.into()
)
}
};
let query = index_scheduler::Query { uids: Some(vec![batch_uid]), ..Query::default() };
let filters = index_scheduler.filters();
let (batches, _) = index_scheduler.get_batches_from_authorized_indexes(query, filters)?;
if let Some(batch) = batches.first() {
let task_view = BatchView::from_batch(batch);
Ok(HttpResponse::Ok().json(task_view))
} else {
Err(index_scheduler::Error::BatchNotFound(batch_uid).into())
}
}
/// Paginated response body for `GET /batches`.
/// NOTE(review): field order is the JSON key order under serde — do not reorder.
#[derive(Debug, Serialize)]
pub struct AllBatches {
    // Batch views for the current page.
    results: Vec<BatchView>,
    // Total number of batches matching the filters, ignoring pagination.
    total: u64,
    // Page size the client asked for (the internal +1 probe already subtracted).
    limit: u32,
    // Uid of the first batch in `results`, if any.
    from: Option<u32>,
    // Uid to pass back as `from` for the next page; `None` on the last page.
    next: Option<u32>,
}
/// GET `/batches`: lists batches matching the task-style filter parameters,
/// paginated with `limit`/`from`/`next` in the same way as the `/tasks` route.
async fn get_batches(
    index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
    params: AwebQueryParameter<TasksFilterQuery, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
    let mut params = params.into_inner();
    // Request one extra element so we can tell whether another page exists.
    params.limit.0 = params.limit.0.saturating_add(1);
    let probe_limit = params.limit.0;

    let filters = index_scheduler.filters();
    let (batches, total) =
        index_scheduler.get_batches_from_authorized_indexes(params.into_query(), filters)?;

    let mut results: Vec<_> = batches.iter().map(BatchView::from_batch).collect();
    // Receiving exactly `probe_limit` elements means the probe matched: pop it
    // and expose its uid as the cursor for the next page.
    let next =
        if results.len() == probe_limit as usize { results.pop().map(|b| b.uid) } else { None };
    let from = results.first().map(|b| b.uid);

    let body = AllBatches { results, limit: probe_limit.saturating_sub(1), total, from, next };
    Ok(HttpResponse::Ok().json(body))
}

View file

@ -10,6 +10,7 @@ use prometheus::{Encoder, TextEncoder};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::routes::create_all_stats;
use crate::search_queue::SearchQueue;
pub fn configure(config: &mut web::ServiceConfig) {
config.service(web::resource("").route(web::get().to(get_metrics)));
@ -18,6 +19,7 @@ pub fn configure(config: &mut web::ServiceConfig) {
pub async fn get_metrics(
index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
auth_controller: Data<AuthController>,
search_queue: web::Data<SearchQueue>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_metrics()?;
let auth_filters = index_scheduler.filters();
@ -35,6 +37,11 @@ pub async fn get_metrics(
crate::metrics::MEILISEARCH_USED_DB_SIZE_BYTES.set(response.used_database_size as i64);
crate::metrics::MEILISEARCH_INDEX_COUNT.set(response.indexes.len() as i64);
crate::metrics::MEILISEARCH_SEARCH_QUEUE_SIZE.set(search_queue.capacity() as i64);
crate::metrics::MEILISEARCH_SEARCHES_RUNNING.set(search_queue.searches_running() as i64);
crate::metrics::MEILISEARCH_SEARCHES_WAITING_TO_BE_PROCESSED
.set(search_queue.searches_waiting() as i64);
for (index, value) in response.indexes.iter() {
crate::metrics::MEILISEARCH_INDEX_DOCS_COUNT
.with_label_values(&[index])

View file

@ -19,6 +19,7 @@ use crate::Opt;
const PAGINATION_DEFAULT_LIMIT: usize = 20;
mod api_key;
pub mod batches;
mod dump;
pub mod features;
pub mod indexes;
@ -32,6 +33,7 @@ pub mod tasks;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::scope("/tasks").configure(tasks::configure))
.service(web::scope("/batches").configure(batches::configure))
.service(web::resource("/health").route(web::get().to(get_health)))
.service(web::scope("/logs").configure(logs::configure))
.service(web::scope("/keys").configure(api_key::configure))

View file

@ -3,6 +3,7 @@ use actix_web::{web, HttpRequest, HttpResponse};
use deserr::actix_web::AwebQueryParameter;
use deserr::Deserr;
use index_scheduler::{IndexScheduler, Query, TaskId};
use meilisearch_types::batches::BatchId;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::deserr_codes::*;
@ -17,15 +18,13 @@ use time::macros::format_description;
use time::{Date, Duration, OffsetDateTime, Time};
use tokio::task;
use super::{get_task_id, is_dry_run, SummarizedTaskView};
use super::{get_task_id, is_dry_run, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use crate::analytics::{Aggregate, AggregateMethod, Analytics};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::{aggregate_methods, Opt};
const DEFAULT_LIMIT: u32 = 20;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
@ -35,13 +34,19 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::resource("/cancel").route(web::post().to(SeqHandler(cancel_tasks))))
.service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
}
#[derive(Debug, Deserr)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct TasksFilterQuery {
#[deserr(default = Param(DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidTaskLimit>)]
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT as u32), error = DeserrQueryParamError<InvalidTaskLimit>)]
pub limit: Param<u32>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskFrom>)]
pub from: Option<Param<TaskId>>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskReverse>)]
pub reverse: Option<Param<bool>>,
#[deserr(default, error = DeserrQueryParamError<InvalidBatchUids>)]
pub batch_uids: OptionStarOrList<BatchId>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
pub uids: OptionStarOrList<u32>,
@ -69,10 +74,12 @@ pub struct TasksFilterQuery {
}
impl TasksFilterQuery {
fn into_query(self) -> Query {
pub(crate) fn into_query(self) -> Query {
Query {
limit: Some(self.limit.0),
from: self.from.as_deref().copied(),
reverse: self.reverse.as_deref().copied(),
batch_uids: self.batch_uids.merge_star_and_none(),
statuses: self.statuses.merge_star_and_none(),
types: self.types.merge_star_and_none(),
index_uids: self.index_uids.map(|x| x.to_string()).merge_star_and_none(),
@ -94,6 +101,7 @@ impl TaskDeletionOrCancelationQuery {
self,
TaskDeletionOrCancelationQuery {
uids: OptionStarOrList::None,
batch_uids: OptionStarOrList::None,
canceled_by: OptionStarOrList::None,
types: OptionStarOrList::None,
statuses: OptionStarOrList::None,
@ -113,9 +121,11 @@ impl TaskDeletionOrCancelationQuery {
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct TaskDeletionOrCancelationQuery {
#[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
pub uids: OptionStarOrList<u32>,
pub uids: OptionStarOrList<TaskId>,
#[deserr(default, error = DeserrQueryParamError<InvalidBatchUids>)]
pub batch_uids: OptionStarOrList<BatchId>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskCanceledBy>)]
pub canceled_by: OptionStarOrList<u32>,
pub canceled_by: OptionStarOrList<TaskId>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskTypes>)]
pub types: OptionStarOrList<Kind>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskStatuses>)]
@ -142,6 +152,8 @@ impl TaskDeletionOrCancelationQuery {
Query {
limit: None,
from: None,
reverse: None,
batch_uids: self.batch_uids.merge_star_and_none(),
statuses: self.statuses.merge_star_and_none(),
types: self.types.merge_star_and_none(),
index_uids: self.index_uids.map(|x| x.to_string()).merge_star_and_none(),
@ -359,7 +371,7 @@ async fn get_task(
let task_uid: TaskId = match task_uid_string.parse() {
Ok(id) => id,
Err(_e) => {
return Err(index_scheduler::Error::InvalidTaskUids { task_uid: task_uid_string }.into())
return Err(index_scheduler::Error::InvalidTaskUid { task_uid: task_uid_string }.into())
}
};
@ -481,7 +493,7 @@ mod tests {
// Stars are allowed in date fields as well
let params = "afterEnqueuedAt=*&beforeStartedAt=*&afterFinishedAt=*&beforeFinishedAt=*&afterStartedAt=*&beforeEnqueuedAt=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }");
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }");
}
{
let params = "afterFinishedAt=2021";
@ -701,20 +713,20 @@ mod tests {
{
let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
}
{
// Stars should translate to `None` in the query
// Verify value of the default limit
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Stars should also translate to `None` in task deletion/cancelation queries
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Star in from not allowed
@ -735,7 +747,7 @@ mod tests {
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"message": "Unknown parameter `from`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
@ -748,7 +760,7 @@ mod tests {
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"message": "Unknown parameter `limit`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
@ -768,7 +780,7 @@ mod tests {
let params = "statuses=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
assert!(!query.is_empty());
snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
}
}

View file

@ -1735,46 +1735,51 @@ fn format_fields(
// select the attributes to retrieve
let displayable_names =
displayable_ids.iter().map(|&fid| field_ids_map.name(fid).expect("Missing field name"));
permissive_json_pointer::map_leaf_values(&mut document, displayable_names, |key, value| {
// To get the formatting option of each key we need to see all the rules that applies
// to the value and merge them together. eg. If a user said he wanted to highlight `doggo`
// and crop `doggo.name`. `doggo.name` needs to be highlighted + cropped while `doggo.age` is only
// highlighted.
// Warn: The time to compute the format list scales with the number of fields to format;
// cumulated with map_leaf_values that iterates over all the nested fields, it gives a quadratic complexity:
// d*f where d is the total number of fields to display and f is the total number of fields to format.
let format = formatting_fields_options
.iter()
.filter(|(name, _option)| {
milli::is_faceted_by(name, key) || milli::is_faceted_by(key, name)
})
.map(|(_, option)| **option)
.reduce(|acc, option| acc.merge(option));
let mut infos = Vec::new();
// if no locales has been provided, we try to find the locales in the localized_attributes.
let locales = locales.or_else(|| {
localized_attributes
permissive_json_pointer::map_leaf_values(
&mut document,
displayable_names,
|key, array_indices, value| {
// To get the formatting option of each key we need to see all the rules that applies
// to the value and merge them together. eg. If a user said he wanted to highlight `doggo`
// and crop `doggo.name`. `doggo.name` needs to be highlighted + cropped while `doggo.age` is only
// highlighted.
// Warn: The time to compute the format list scales with the number of fields to format;
// cumulated with map_leaf_values that iterates over all the nested fields, it gives a quadratic complexity:
// d*f where d is the total number of fields to display and f is the total number of fields to format.
let format = formatting_fields_options
.iter()
.find(|rule| rule.match_str(key))
.map(LocalizedAttributesRule::locales)
});
.filter(|(name, _option)| {
milli::is_faceted_by(name, key) || milli::is_faceted_by(key, name)
})
.map(|(_, option)| **option)
.reduce(|acc, option| acc.merge(option));
let mut infos = Vec::new();
*value = format_value(
std::mem::take(value),
builder,
format,
&mut infos,
compute_matches,
locales,
);
// if no locales has been provided, we try to find the locales in the localized_attributes.
let locales = locales.or_else(|| {
localized_attributes
.iter()
.find(|rule| rule.match_str(key))
.map(LocalizedAttributesRule::locales)
});
if let Some(matches) = matches_position.as_mut() {
if !infos.is_empty() {
matches.insert(key.to_owned(), infos);
*value = format_value(
std::mem::take(value),
builder,
format,
&mut infos,
compute_matches,
array_indices,
locales,
);
if let Some(matches) = matches_position.as_mut() {
if !infos.is_empty() {
matches.insert(key.to_owned(), infos);
}
}
}
});
},
);
let selectors = formatted_options
.keys()
@ -1792,13 +1797,14 @@ fn format_value(
format_options: Option<FormatOptions>,
infos: &mut Vec<MatchBounds>,
compute_matches: bool,
array_indices: &[usize],
locales: Option<&[Language]>,
) -> Value {
match value {
Value::String(old_string) => {
let mut matcher = builder.build(&old_string, locales);
if compute_matches {
let matches = matcher.matches();
let matches = matcher.matches(array_indices);
infos.extend_from_slice(&matches[..]);
}
@ -1810,51 +1816,15 @@ fn format_value(
None => Value::String(old_string),
}
}
Value::Array(values) => Value::Array(
values
.into_iter()
.map(|v| {
format_value(
v,
builder,
format_options.map(|format_options| FormatOptions {
highlight: format_options.highlight,
crop: None,
}),
infos,
compute_matches,
locales,
)
})
.collect(),
),
Value::Object(object) => Value::Object(
object
.into_iter()
.map(|(k, v)| {
(
k,
format_value(
v,
builder,
format_options.map(|format_options| FormatOptions {
highlight: format_options.highlight,
crop: None,
}),
infos,
compute_matches,
locales,
),
)
})
.collect(),
),
// `map_leaf_values` makes sure this is only called for leaf fields
Value::Array(_) => unreachable!(),
Value::Object(_) => unreachable!(),
Value::Number(number) => {
let s = number.to_string();
let mut matcher = builder.build(&s, locales);
if compute_matches {
let matches = matcher.matches();
let matches = matcher.matches(array_indices);
infos.extend_from_slice(&matches[..]);
}

View file

@ -18,6 +18,8 @@
//! And should drop the Permit only once you have freed all the RAM consumed by the method.
use std::num::NonZeroUsize;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::time::Duration;
use rand::rngs::StdRng;
@ -33,6 +35,8 @@ pub struct SearchQueue {
/// If we have waited longer than this to get a permit, we should abort the search request entirely.
/// The client probably already closed the connection, but we have no way to find out.
time_to_abort: Duration,
searches_running: Arc<AtomicUsize>,
searches_waiting_to_be_processed: Arc<AtomicUsize>,
}
/// You should only run search requests while holding this permit.
@ -68,14 +72,41 @@ impl SearchQueue {
// so let's not allocate any RAM and keep a capacity of 1.
let (sender, receiver) = mpsc::channel(1);
tokio::task::spawn(Self::run(capacity, paralellism, receiver));
Self { sender, capacity, time_to_abort: Duration::from_secs(60) }
let instance = Self {
sender,
capacity,
time_to_abort: Duration::from_secs(60),
searches_running: Default::default(),
searches_waiting_to_be_processed: Default::default(),
};
tokio::task::spawn(Self::run(
capacity,
paralellism,
receiver,
Arc::clone(&instance.searches_running),
Arc::clone(&instance.searches_waiting_to_be_processed),
));
instance
}
pub fn with_time_to_abort(self, time_to_abort: Duration) -> Self {
Self { time_to_abort, ..self }
}
pub fn capacity(&self) -> usize {
self.capacity
}
pub fn searches_running(&self) -> usize {
self.searches_running.load(Ordering::Relaxed)
}
pub fn searches_waiting(&self) -> usize {
self.searches_waiting_to_be_processed.load(Ordering::Relaxed)
}
/// This function is the main loop, it's in charge on scheduling which search request should execute first and
/// how many should executes at the same time.
///
@ -84,6 +115,8 @@ impl SearchQueue {
capacity: usize,
parallelism: NonZeroUsize,
mut receive_new_searches: mpsc::Receiver<oneshot::Sender<Permit>>,
metric_searches_running: Arc<AtomicUsize>,
metric_searches_waiting: Arc<AtomicUsize>,
) {
let mut queue: Vec<oneshot::Sender<Permit>> = Default::default();
let mut rng: StdRng = StdRng::from_entropy();
@ -133,6 +166,9 @@ impl SearchQueue {
queue.push(search_request);
},
}
metric_searches_running.store(searches_running, Ordering::Relaxed);
metric_searches_waiting.store(queue.len(), Ordering::Relaxed);
}
}

View file

@ -0,0 +1,238 @@
use meili_snap::*;
use crate::common::Server;
// `/batches` reuses the task filter deserialization, so a non-integer `uids`
// value is rejected with the task-uid error code.
#[actix_rt::test]
async fn batch_bad_uids() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("uids=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"#);
}
// `canceledBy` must be a list of positive-integer task uids.
#[actix_rt::test]
async fn batch_bad_canceled_by() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("canceledBy=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer",
"code": "invalid_task_canceled_by",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_canceled_by"
}
"#);
}
// An unknown `types` value lists every accepted task type in the message.
#[actix_rt::test]
async fn batch_bad_types() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("types=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
}
"#);
}
// An unknown `statuses` value lists every accepted task status in the message.
#[actix_rt::test]
async fn batch_bad_statuses() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("statuses=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
"code": "invalid_task_statuses",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
}
"#);
}
// `indexUids` values must be valid index uids; `%20` decodes to a space,
// which is not an allowed character.
#[actix_rt::test]
async fn batch_bad_index_uids() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("indexUids=the%20good%20doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}
"###);
}
// `limit` must parse as a positive integer.
#[actix_rt::test]
async fn batch_bad_limit() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("limit=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer",
"code": "invalid_task_limit",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_limit"
}
"#);
}
// `from` must parse as a positive integer (it is a batch-uid cursor).
#[actix_rt::test]
async fn batch_bad_from() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("from=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `from`: could not parse `doggo` as a positive integer",
"code": "invalid_task_from",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_from"
}
"#);
}
// Renamed from `bask_bad_reverse`: typo in the test name — every sibling test
// in this file is `batch_bad_*`. Test functions have no callers, so renaming
// is safe. `reverse` must be a strict boolean; neither arbitrary strings nor
// the `*` wildcard accepted by other parameters are valid here.
#[actix_rt::test]
async fn batch_bad_reverse() {
    let server = Server::new_shared();
    let (response, code) = server.batches_filter("reverse=doggo").await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(response, @r###"
    {
      "message": "Invalid value in parameter `reverse`: could not parse `doggo` as a boolean, expected either `true` or `false`",
      "code": "invalid_task_reverse",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_task_reverse"
    }
    "###);
    // `*` is a wildcard for list parameters but is rejected for booleans.
    let (response, code) = server.batches_filter("reverse=*").await;
    snapshot!(code, @"400 Bad Request");
    snapshot!(response, @r###"
    {
      "message": "Invalid value in parameter `reverse`: could not parse `*` as a boolean, expected either `true` or `false`",
      "code": "invalid_task_reverse",
      "type": "invalid_request",
      "link": "https://docs.meilisearch.com/errors#invalid_task_reverse"
    }
    "###);
}
// The six date filters below all share the same validation: values must be
// YYYY-MM-DD or RFC 3339 date-times, each rejected with its own error code.
#[actix_rt::test]
async fn batch_bad_after_enqueued_at() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("afterEnqueuedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
}
"#);
}
#[actix_rt::test]
async fn batch_bad_before_enqueued_at() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("beforeEnqueuedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
}
"#);
}
#[actix_rt::test]
async fn batch_bad_after_started_at() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("afterStartedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
}
"#);
}
#[actix_rt::test]
async fn batch_bad_before_started_at() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("beforeStartedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
}
"#);
}
#[actix_rt::test]
async fn batch_bad_after_finished_at() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("afterFinishedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
}
"#);
}
#[actix_rt::test]
async fn batch_bad_before_finished_at() {
let server = Server::new_shared();
let (response, code) = server.batches_filter("beforeFinishedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
}
"#);
}

File diff suppressed because it is too large Load diff

View file

@ -136,6 +136,11 @@ impl<'a> Index<'a, Owned> {
self.service.get(url).await
}
/// Lists the batches touching this index (scoped via the `indexUids` filter).
pub async fn list_batches(&self) -> (Value, StatusCode) {
    let url = format!("/batches?indexUids={}", self.uid);
    self.service.get(url).await
}
pub async fn delete_document(&self, id: u64) -> (Value, StatusCode) {
let url = format!("/indexes/{}/documents/{}", urlencode(self.uid.as_ref()), id);
self.service.delete(url).await
@ -374,6 +379,30 @@ impl<State> Index<'_, State> {
self.service.get(url).await
}
/// Fetches a single batch by uid. The `/batches/{id}` route is global,
/// not index-scoped, so `self.uid` is not part of the URL.
pub async fn get_batch(&self, batch_id: u32) -> (Value, StatusCode) {
    let url = format!("/batches/{}", batch_id);
    self.service.get(url).await
}
/// Lists this index's batches with optional filters.
///
/// Always scopes to this index via `indexUids`; each of `types`, `statuses`
/// and `canceled_by` is appended as a comma-separated query parameter only
/// when non-empty.
pub async fn filtered_batches(
    &self,
    types: &[&str],
    statuses: &[&str],
    canceled_by: &[&str],
) -> (Value, StatusCode) {
    let mut url = format!("/batches?indexUids={}", self.uid);
    if !types.is_empty() {
        // write! into a String cannot fail; the result is deliberately ignored.
        let _ = write!(url, "&types={}", types.join(","));
    }
    if !statuses.is_empty() {
        let _ = write!(url, "&statuses={}", statuses.join(","));
    }
    if !canceled_by.is_empty() {
        let _ = write!(url, "&canceledBy={}", canceled_by.join(","));
    }
    self.service.get(url).await
}
pub async fn get_document(&self, id: u64, options: Option<Value>) -> (Value, StatusCode) {
let mut url = format!("/indexes/{}/documents/{}", urlencode(self.uid.as_ref()), id);
if let Some(options) = options {

View file

@ -99,6 +99,7 @@ impl Display for Value {
"{}",
json_string!(self, {
".uid" => "[uid]",
".batchUid" => "[batch_uid]",
".enqueuedAt" => "[date]",
".startedAt" => "[date]",
".finishedAt" => "[date]",
@ -389,3 +390,25 @@ pub static VECTOR_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
},
])
});
/// Returns a process-wide shared index pre-loaded with the JSON test set.
/// The index is created lazily on first call and reused by every test after.
pub async fn shared_index_with_test_set() -> &'static Index<'static, Shared> {
    static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
    INDEX
        .get_or_init(|| async {
            let server = Server::new_shared();
            let index = server._index("SHARED_TEST_SET").to_shared();
            let documents_url =
                format!("/indexes/{}/documents", urlencoding::encode(index.uid.as_ref()));
            let (task, status) = index
                .service
                .post_str(
                    documents_url,
                    include_str!("../assets/test_set.json"),
                    vec![("content-type", "application/json")],
                )
                .await;
            // Document addition is asynchronous: expect 202 then wait for the task.
            assert_eq!(status, 202);
            index.wait_task(task.uid()).await;
            index
        })
        .await
}

View file

@ -330,6 +330,10 @@ impl<State> Server<State> {
self.service.get(format!("/tasks?{}", filter)).await
}
/// GETs `/batches` with the given raw query string appended verbatim.
pub async fn batches_filter(&self, filter: &str) -> (Value, StatusCode) {
    let url = format!("/batches?{}", filter);
    self.service.get(url).await
}
pub async fn version(&self) -> (Value, StatusCode) {
self.service.get("/version").await
}
@ -376,6 +380,11 @@ impl<State> Server<State> {
self.service.get(url).await
}
/// Fetches a single batch by its numeric id, unscoped to any index.
pub async fn get_batch(&self, batch_id: u32) -> (Value, StatusCode) {
    self.service.get(format!("/batches/{}", batch_id)).await
}
pub async fn get_features(&self) -> (Value, StatusCode) {
self.service.get("/experimental-features").await
}

View file

@ -293,6 +293,7 @@ async fn add_csv_document() {
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "pets",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -357,6 +358,7 @@ async fn add_csv_document_with_types() {
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "pets",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -432,6 +434,7 @@ async fn add_csv_document_with_custom_delimiter() {
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "pets",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -985,6 +988,7 @@ async fn add_documents_no_index_creation() {
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -1063,6 +1067,7 @@ async fn document_addition_with_primary_key() {
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -1111,6 +1116,7 @@ async fn document_addition_with_huge_int_primary_key() {
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -1183,6 +1189,7 @@ async fn replace_document() {
@r###"
{
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -1271,6 +1278,7 @@ async fn error_add_documents_bad_document_id() {
@r###"
{
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1312,6 +1320,7 @@ async fn error_add_documents_missing_document_id() {
@r###"
{
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1407,6 +1416,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -1441,6 +1451,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1490,6 +1501,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() {
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -1533,6 +1545,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -1568,6 +1581,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
@r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -1615,6 +1629,7 @@ async fn add_documents_with_geo_field() {
@r###"
{
"uid": 1,
"batchUid": 1,
"indexUid": "doggo",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -1655,6 +1670,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1692,6 +1708,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 3,
"batchUid": 3,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1729,6 +1746,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 4,
"batchUid": 4,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1766,6 +1784,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 5,
"batchUid": 5,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1803,6 +1822,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 6,
"batchUid": 6,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1840,6 +1860,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 7,
"batchUid": 7,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1877,6 +1898,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 8,
"batchUid": 8,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1914,6 +1936,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 9,
"batchUid": 9,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1951,6 +1974,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 10,
"batchUid": 10,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -1988,6 +2012,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 11,
"batchUid": 11,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -2025,6 +2050,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 12,
"batchUid": 12,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -2062,6 +2088,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 13,
"batchUid": 13,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -2100,6 +2127,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 14,
"batchUid": 14,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -2136,6 +2164,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 15,
"batchUid": 15,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -2172,6 +2201,7 @@ async fn add_documents_invalid_geo_field() {
@r###"
{
"uid": 16,
"batchUid": 16,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -2214,6 +2244,7 @@ async fn add_invalid_geo_and_then_settings() {
snapshot!(ret, @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
@ -2236,6 +2267,7 @@ async fn add_invalid_geo_and_then_settings() {
snapshot!(ret, @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "test",
"status": "failed",
"type": "settingsUpdate",
@ -2307,6 +2339,7 @@ async fn error_primary_key_inference() {
@r###"
{
"uid": 0,
"batchUid": 0,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -2347,6 +2380,7 @@ async fn error_primary_key_inference() {
@r###"
{
"uid": 1,
"batchUid": 1,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
@ -2385,6 +2419,7 @@ async fn error_primary_key_inference() {
@r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -184,6 +184,7 @@ async fn delete_document_by_filter() {
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "doggo",
"status": "succeeded",
"type": "documentDeletion",
@ -249,6 +250,7 @@ async fn delete_document_by_filter() {
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 3,
"batchUid": 3,
"indexUid": "doggo",
"status": "succeeded",
"type": "documentDeletion",
@ -333,6 +335,7 @@ async fn delete_document_by_complex_filter() {
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 2,
"batchUid": 2,
"indexUid": "doggo",
"status": "succeeded",
"type": "documentDeletion",
@ -391,6 +394,7 @@ async fn delete_document_by_complex_filter() {
snapshot!(json_string!(response, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": 3,
"batchUid": 3,
"indexUid": "doggo",
"status": "succeeded",
"type": "documentDeletion",

View file

@ -563,6 +563,7 @@ async fn delete_document_by_filter() {
snapshot!(response, @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "DOES_NOT_EXISTS",
"status": "failed",
"type": "documentDeletion",
@ -592,6 +593,7 @@ async fn delete_document_by_filter() {
snapshot!(response, @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "EMPTY_INDEX",
"status": "failed",
"type": "documentDeletion",
@ -623,6 +625,7 @@ async fn delete_document_by_filter() {
snapshot!(response, @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "SHARED_DOCUMENTS",
"status": "failed",
"type": "documentDeletion",

View file

@ -4,24 +4,27 @@ use meili_snap::*;
use urlencoding::encode as urlencode;
use crate::common::encoder::Encoder;
use crate::common::{GetAllDocumentsOptions, Server, Value};
use crate::common::{
shared_does_not_exists_index, shared_empty_index, shared_index_with_test_set,
GetAllDocumentsOptions, Server, Value,
};
use crate::json;
// TODO: partial test since we are testing error, and error is not yet fully implemented in
// transplant
#[actix_rt::test]
async fn get_unexisting_index_single_document() {
let server = Server::new().await;
let (_response, code) = server.index("test").get_document(1, None).await;
let (_response, code) = shared_does_not_exists_index().await.get_document(1, None).await;
assert_eq!(code, 404);
}
#[actix_rt::test]
async fn error_get_unexisting_document() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(1, None).await;
let expected_response = json!({
@ -37,18 +40,19 @@ async fn error_get_unexisting_document() {
#[actix_rt::test]
async fn get_document() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
"id": 0,
"nested": { "content": "foobar" },
}
]);
let (_, code) = index.add_documents(documents, None).await;
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(
@ -81,12 +85,11 @@ async fn get_document() {
#[actix_rt::test]
async fn error_get_unexisting_index_all_documents() {
let server = Server::new().await;
let (response, code) =
server.index("test").get_all_documents(GetAllDocumentsOptions::default()).await;
let index = shared_does_not_exists_index().await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
let expected_response = json!({
"message": "Index `test` not found.",
"message": "Index `DOES_NOT_EXISTS` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
@ -98,12 +101,7 @@ async fn error_get_unexisting_index_all_documents() {
#[actix_rt::test]
async fn get_no_document() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(0).await;
let index = shared_empty_index().await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
@ -112,14 +110,12 @@ async fn get_no_document() {
#[actix_rt::test]
async fn get_all_documents_no_options() {
let server = Server::new().await;
let index = server.index("test");
index.load_test_set().await;
let index = shared_index_with_test_set().await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 20);
let results = response["results"].as_array().unwrap();
assert_eq!(results.len(), 20);
let first = json!({
"id":0,
"isActive":false,
@ -138,19 +134,16 @@ async fn get_all_documents_no_options() {
"longitude":-145.725388,
"tags":["bug"
,"bug"]});
assert_eq!(first, arr[0]);
assert_eq!(first, results[0]);
}
#[actix_rt::test]
async fn get_all_documents_no_options_with_response_compression() {
let server = Server::new().await;
let index_uid = "test";
let index = server.index(index_uid);
index.load_test_set().await;
let index = shared_index_with_test_set().await;
let app = server.init_web_app().await;
let app = Server::new_shared().init_web_app().await;
let req = test::TestRequest::get()
.uri(&format!("/indexes/{}/documents?", urlencode(index_uid)))
.uri(&format!("/indexes/{}/documents?", urlencode(&index.uid)))
.insert_header((ACCEPT_ENCODING, "gzip"))
.to_request();
@ -169,9 +162,7 @@ async fn get_all_documents_no_options_with_response_compression() {
#[actix_rt::test]
async fn test_get_all_documents_limit() {
let server = Server::new().await;
let index = server.index("test");
index.load_test_set().await;
let index = shared_index_with_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions { limit: Some(5), ..Default::default() })
@ -186,9 +177,7 @@ async fn test_get_all_documents_limit() {
#[actix_rt::test]
async fn test_get_all_documents_offset() {
let server = Server::new().await;
let index = server.index("test");
index.load_test_set().await;
let index = shared_index_with_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions { offset: Some(5), ..Default::default() })
@ -203,9 +192,7 @@ async fn test_get_all_documents_offset() {
#[actix_rt::test]
async fn test_get_all_documents_attributes_to_retrieve() {
let server = Server::new().await;
let index = server.index("test");
index.load_test_set().await;
let index = shared_index_with_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
@ -286,9 +273,11 @@ async fn test_get_all_documents_attributes_to_retrieve() {
#[actix_rt::test]
async fn get_document_s_nested_attributes_to_retrieve() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
"id": 0,
@ -302,9 +291,9 @@ async fn get_document_s_nested_attributes_to_retrieve() {
},
},
]);
let (_, code) = index.add_documents(documents, None).await;
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, Some(json!({ "fields": ["content"] }))).await;
assert_eq!(code, 200);
@ -343,10 +332,10 @@ async fn get_document_s_nested_attributes_to_retrieve() {
#[actix_rt::test]
async fn get_documents_displayed_attributes_is_ignored() {
let server = Server::new().await;
let index = server.index("test");
index.update_settings(json!({"displayedAttributes": ["gender"]})).await;
let server = Server::new_shared();
let index = server.unique_index();
index.load_test_set().await;
index.update_settings(json!({"displayedAttributes": ["gender"]})).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
@ -366,10 +355,10 @@ async fn get_documents_displayed_attributes_is_ignored() {
#[actix_rt::test]
async fn get_document_by_filter() {
let server = Server::new().await;
let index = server.index("doggo");
let server = Server::new_shared();
let index = server.unique_index();
index.update_settings_filterable_attributes(json!(["color"])).await;
index
let (task, _code) = index
.add_documents(
json!([
{ "id": 0, "color": "red" },
@ -380,7 +369,7 @@ async fn get_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(1).await;
index.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document_by_filter(json!({})).await;
let (response2, code2) = index.get_all_documents_raw("").await;
@ -552,7 +541,7 @@ async fn get_document_with_vectors() {
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await;
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [0, 0, 0] }},
@ -560,7 +549,7 @@ async fn get_document_with_vectors() {
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await;
index.wait_task(value.uid()).await.succeeded();
// by default you shouldn't see the `_vectors` object
let (documents, _code) = index.get_all_documents(Default::default()).await;

View file

@ -88,7 +88,7 @@ async fn import_dump_v1_movie_raw() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31968 }, "error": null, "duration": "PT9.317060500S", "enqueuedAt": "2021-09-08T09:08:45.153219Z", "startedAt": "2021-09-08T09:08:45.3961665Z", "finishedAt": "2021-09-08T09:08:54.713227Z" }], "total": 1, "limit": 20, "from": 0, "next": null })
json!({ "results": [{"uid": 0, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31968 }, "error": null, "duration": "PT9.317060500S", "enqueuedAt": "2021-09-08T09:08:45.153219Z", "startedAt": "2021-09-08T09:08:45.3961665Z", "finishedAt": "2021-09-08T09:08:54.713227Z" }], "total": 1, "limit": 20, "from": 0, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -251,7 +251,7 @@ async fn import_dump_v1_movie_with_settings() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["genres", "id", "overview", "poster", "release_date", "title"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT7.288826907S", "enqueuedAt": "2021-09-08T09:34:40.882977Z", "startedAt": "2021-09-08T09:34:40.883073093Z", "finishedAt": "2021-09-08T09:34:48.1719Z"}, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31968 }, "error": null, "duration": "PT9.090735774S", "enqueuedAt": "2021-09-08T09:34:16.036101Z", "startedAt": "2021-09-08T09:34:16.261191226Z", "finishedAt": "2021-09-08T09:34:25.351927Z" }], "total": 2, "limit": 20, "from": 1, "next": null })
json!({ "results": [{ "uid": 1, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["genres", "id", "overview", "poster", "release_date", "title"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT7.288826907S", "enqueuedAt": "2021-09-08T09:34:40.882977Z", "startedAt": "2021-09-08T09:34:40.883073093Z", "finishedAt": "2021-09-08T09:34:48.1719Z"}, { "uid": 0, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31968 }, "error": null, "duration": "PT9.090735774S", "enqueuedAt": "2021-09-08T09:34:16.036101Z", "startedAt": "2021-09-08T09:34:16.261191226Z", "finishedAt": "2021-09-08T09:34:25.351927Z" }], "total": 2, "limit": 20, "from": 1, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -400,7 +400,7 @@ async fn import_dump_v1_rubygems_with_settings() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks["results"][0],
json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT1.487793839S", "enqueuedAt": "2021-09-08T09:27:01.465296Z", "startedAt": "2021-09-08T09:28:44.882177161Z", "finishedAt": "2021-09-08T09:28:46.369971Z"})
json!({"uid": 92, "batchUid": null, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT1.487793839S", "enqueuedAt": "2021-09-08T09:27:01.465296Z", "startedAt": "2021-09-08T09:28:44.882177161Z", "finishedAt": "2021-09-08T09:28:46.369971Z"})
);
// finally we're just going to check that we can still get a few documents by id
@ -535,7 +535,7 @@ async fn import_dump_v2_movie_raw() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "total": 1, "limit": 20, "from": 0, "next": null })
json!({ "results": [{"uid": 0, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "total": 1, "limit": 20, "from": 0, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -682,7 +682,7 @@ async fn import_dump_v2_movie_with_settings() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "total": 2, "limit": 20, "from": 1, "next": null })
json!({ "results": [{ "uid": 1, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "total": 2, "limit": 20, "from": 1, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -828,7 +828,7 @@ async fn import_dump_v2_rubygems_with_settings() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks["results"][0],
json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
json!({"uid": 92, "batchUid": null, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
);
// finally we're just going to check that we can still get a few documents by id
@ -963,7 +963,7 @@ async fn import_dump_v3_movie_raw() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "total": 1, "limit": 20, "from": 0, "next": null })
json!({ "results": [{"uid": 0, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "total": 1, "limit": 20, "from": 0, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -1110,7 +1110,7 @@ async fn import_dump_v3_movie_with_settings() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "total": 2, "limit": 20, "from": 1, "next": null })
json!({ "results": [{ "uid": 1, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "total": 2, "limit": 20, "from": 1, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -1256,7 +1256,7 @@ async fn import_dump_v3_rubygems_with_settings() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks["results"][0],
json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
json!({"uid": 92, "batchUid": null, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
);
// finally we're just going to check that we can still get a few documents by id
@ -1391,7 +1391,7 @@ async fn import_dump_v4_movie_raw() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "total": 1, "limit" : 20, "from": 0, "next": null })
json!({ "results": [{"uid": 0, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "total": 1, "limit" : 20, "from": 0, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -1538,7 +1538,7 @@ async fn import_dump_v4_movie_with_settings() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "total": 2, "limit": 20, "from": 1, "next": null })
json!({ "results": [{ "uid": 1, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "batchUid": null, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "total": 2, "limit": 20, "from": 1, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
@ -1684,7 +1684,7 @@ async fn import_dump_v4_rubygems_with_settings() {
snapshot!(code, @"200 OK");
assert_eq!(
tasks["results"][0],
json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
json!({ "uid": 92, "batchUid": null, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
);
// finally we're just going to check that we can still get a few documents by id

View file

@ -1,8 +1,9 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "pets",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,8 +1,9 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "pets",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 1,
"batchUid": null,
"indexUid": "indexUID",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 0,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "settingsUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

View file

@ -1,10 +1,11 @@
---
source: meilisearch/tests/dumps/mod.rs
source: crates/meilisearch/tests/dumps/mod.rs
---
{
"results": [
{
"uid": 92,
"batchUid": null,
"indexUid": "rubygems",
"status": "succeeded",
"type": "documentAdditionOrUpdate",

Some files were not shown because too many files have changed in this diff Show more