Introduce a rustfmt file

Clément Renault 2022-10-20 18:00:07 +02:00
parent dd57e051d7
commit 3f6bd7fb11
No known key found for this signature in database
GPG key ID: 92ADA4E935E71FA4
92 changed files with 1251 additions and 2857 deletions
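The rustfmt.toml itself sits outside the hunks shown below; what the diff shows is its effect: short struct literals, builder chains, and closures collapsed onto single lines, and grouped use statements split into one use per module, ordered std first, then external crates, then crate-local imports. A plausible sketch of the configuration, inferred from those effects (the exact keys and values are an assumption, not confirmed by this page):

# rustfmt.toml -- hypothetical reconstruction inferred from the reformatting below
unstable_features = true            # the two import options below are nightly-only rustfmt features
use_small_heuristics = "Max"        # put short bodies, literals, and call chains on one line
imports_granularity = "Module"      # split use std::{any::Any, sync::Arc} into per-module use lines
group_imports = "StdExternalCrate"  # order import groups: std, external crates, crate-local

With such a file at the repository root, a reformat like this one is reproduced by cargo +nightly fmt and enforced in CI with cargo +nightly fmt -- --check.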


@@ -72,11 +72,8 @@ mod mini_dashboard {
resource_dir(&dashboard_dir).build()?;
// Write the sha1 for the dashboard back to file.
let mut file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(sha1_path)?;
let mut file =
OpenOptions::new().write(true).create(true).truncate(true).open(sha1_path)?;
file.write_all(sha1.as_bytes())?;
file.flush()?;


@@ -1,12 +1,13 @@
use std::{any::Any, sync::Arc};
use std::any::Any;
use std::sync::Arc;
use actix_web::HttpRequest;
use meilisearch_types::InstanceUid;
use serde_json::Value;
use crate::{routes::indexes::documents::UpdateDocumentsQuery, Opt};
use super::{find_user_id, Analytics};
use crate::routes::indexes::documents::UpdateDocumentsQuery;
use crate::Opt;
pub struct MockAnalytics {
instance_uid: Option<InstanceUid>,


@@ -9,14 +9,13 @@ use std::str::FromStr;
use actix_web::HttpRequest;
use meilisearch_types::InstanceUid;
pub use mock_analytics::MockAnalytics;
use once_cell::sync::Lazy;
use platform_dirs::AppDirs;
use serde_json::Value;
use crate::routes::indexes::documents::UpdateDocumentsQuery;
pub use mock_analytics::MockAnalytics;
// if we are in debug mode OR the analytics feature is disabled
// the `SegmentAnalytics` points to the mock instead of the real analytics
#[cfg(any(debug_assertions, not(feature = "analytics")))]
@@ -42,12 +41,7 @@ fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
db_path
.canonicalize()
.ok()
.map(|path| {
path.join("instance-uid")
.display()
.to_string()
.replace('/', "-")
})
.map(|path| path.join("instance-uid").display().to_string().replace('/', "-"))
.zip(MEILISEARCH_CONFIG_PATH.as_ref())
.map(|(filename, config_path)| config_path.join(filename.trim_start_matches('-')))
}


@@ -21,6 +21,7 @@ use tokio::select;
use tokio::sync::mpsc::{self, Receiver, Sender};
use uuid::Uuid;
use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
use crate::analytics::Analytics;
use crate::option::default_http_addr;
use crate::routes::indexes::documents::UpdateDocumentsQuery;
@@ -31,16 +32,13 @@ use crate::search::{
};
use crate::Opt;
use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
const ANALYTICS_HEADER: &str = "X-Meilisearch-Client";
/// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors.
fn write_user_id(db_path: &Path, user_id: &InstanceUid) {
let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes());
if let Some((meilisearch_config_path, user_id_path)) = MEILISEARCH_CONFIG_PATH
.as_ref()
.zip(config_user_id_path(db_path))
if let Some((meilisearch_config_path, user_id_path)) =
MEILISEARCH_CONFIG_PATH.as_ref().zip(config_user_id_path(db_path))
{
let _ = fs::create_dir_all(&meilisearch_config_path);
let _ = fs::write(user_id_path, user_id.to_string());
@@ -84,22 +82,16 @@ impl SegmentAnalytics {
let instance_uid = instance_uid.unwrap_or_else(|| Uuid::new_v4());
write_user_id(&opt.db_path, &instance_uid);
let client = reqwest::Client::builder()
.connect_timeout(Duration::from_secs(10))
.build();
let client = reqwest::Client::builder().connect_timeout(Duration::from_secs(10)).build();
// if reqwest throws an error we won't be able to send analytics
if client.is_err() {
return super::MockAnalytics::new(opt);
}
let client = HttpClient::new(
client.unwrap(),
"https://telemetry.meilisearch.com".to_string(),
);
let user = User::UserId {
user_id: instance_uid.to_string(),
};
let client =
HttpClient::new(client.unwrap(), "https://telemetry.meilisearch.com".to_string());
let user = User::UserId { user_id: instance_uid.to_string() };
let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
// If Meilisearch is Launched for the first time:
@@ -108,9 +100,7 @@ impl SegmentAnalytics {
if first_time_run {
let _ = batcher
.push(Track {
user: User::UserId {
user_id: "total_launch".to_string(),
},
user: User::UserId { user_id: "total_launch".to_string() },
event: "Launched".to_string(),
..Default::default()
})
@@ -139,11 +129,7 @@ impl SegmentAnalytics {
});
tokio::spawn(segment.run(index_scheduler.clone()));
let this = Self {
instance_uid,
sender,
user: user.clone(),
};
let this = Self { instance_uid, sender, user: user.clone() };
Arc::new(this)
}
@@ -164,21 +150,15 @@ impl super::Analytics for SegmentAnalytics {
properties: send,
..Default::default()
};
let _ = self
.sender
.try_send(AnalyticsMsg::BatchMessage(event.into()));
let _ = self.sender.try_send(AnalyticsMsg::BatchMessage(event.into()));
}
fn get_search(&self, aggregate: SearchAggregator) {
let _ = self
.sender
.try_send(AnalyticsMsg::AggregateGetSearch(aggregate));
let _ = self.sender.try_send(AnalyticsMsg::AggregateGetSearch(aggregate));
}
fn post_search(&self, aggregate: SearchAggregator) {
let _ = self
.sender
.try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
let _ = self.sender.try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
}
fn add_documents(
@@ -188,9 +168,7 @@ impl super::Analytics for SegmentAnalytics {
request: &HttpRequest,
) {
let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
let _ = self
.sender
.try_send(AnalyticsMsg::AggregateAddDocuments(aggregate));
let _ = self.sender.try_send(AnalyticsMsg::AggregateAddDocuments(aggregate));
}
fn update_documents(
@@ -200,9 +178,7 @@ impl super::Analytics for SegmentAnalytics {
request: &HttpRequest,
) {
let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
let _ = self
.sender
.try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
let _ = self.sender.try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
}
}
@@ -261,11 +237,8 @@ impl Segment {
infos
};
let number_of_documents = stats
.indexes
.values()
.map(|index| index.number_of_documents)
.collect::<Vec<u64>>();
let number_of_documents =
stats.indexes.values().map(|index| index.number_of_documents).collect::<Vec<u64>>();
json!({
"start_since_days": FIRST_START_TIMESTAMP.elapsed().as_secs() / (60 * 60 * 24), // one day
@@ -412,11 +385,7 @@ impl SearchAggregator {
let syntax = match filter {
Value::String(_) => "string".to_string(),
Value::Array(values) => {
if values
.iter()
.map(|v| v.to_string())
.any(|s| RE.is_match(&s))
{
if values.iter().map(|v| v.to_string()).any(|s| RE.is_match(&s)) {
"mixed".to_string()
} else {
"array".to_string()
@@ -436,8 +405,7 @@ impl SearchAggregator {
ret.max_terms_number = q.split_whitespace().count();
}
ret.matching_strategy
.insert(format!("{:?}", query.matching_strategy), 1);
ret.matching_strategy.insert(format!("{:?}", query.matching_strategy), 1);
ret.max_limit = query.limit;
ret.max_offset = query.offset.unwrap_or_default();
@@ -472,17 +440,14 @@ impl SearchAggregator {
self.time_spent.append(&mut other.time_spent);
// sort
self.sort_with_geo_point |= other.sort_with_geo_point;
self.sort_sum_of_criteria_terms = self
.sort_sum_of_criteria_terms
.saturating_add(other.sort_sum_of_criteria_terms);
self.sort_total_number_of_criteria = self
.sort_total_number_of_criteria
.saturating_add(other.sort_total_number_of_criteria);
self.sort_sum_of_criteria_terms =
self.sort_sum_of_criteria_terms.saturating_add(other.sort_sum_of_criteria_terms);
self.sort_total_number_of_criteria =
self.sort_total_number_of_criteria.saturating_add(other.sort_total_number_of_criteria);
// filter
self.filter_with_geo_radius |= other.filter_with_geo_radius;
self.filter_sum_of_criteria_terms = self
.filter_sum_of_criteria_terms
.saturating_add(other.filter_sum_of_criteria_terms);
self.filter_sum_of_criteria_terms =
self.filter_sum_of_criteria_terms.saturating_add(other.filter_sum_of_criteria_terms);
self.filter_total_number_of_criteria = self
.filter_total_number_of_criteria
.saturating_add(other.filter_total_number_of_criteria);


@@ -33,11 +33,7 @@ impl<P, D> GuardedData<P, D> {
{
match Self::authenticate(auth, token, index).await? {
Some(filters) => match data {
Some(data) => Ok(Self {
data,
filters,
_marker: PhantomData,
}),
Some(data) => Ok(Self { data, filters, _marker: PhantomData }),
None => Err(AuthenticationError::IrretrievableState.into()),
},
None => Err(AuthenticationError::InvalidToken.into()),
@@ -50,11 +46,7 @@ impl<P, D> GuardedData<P, D> {
{
match Self::authenticate(auth, String::new(), None).await? {
Some(filters) => match data {
Some(data) => Ok(Self {
data,
filters,
_marker: PhantomData,
}),
Some(data) => Ok(Self { data, filters, _marker: PhantomData }),
None => Err(AuthenticationError::IrretrievableState.into()),
},
None => Err(AuthenticationError::MissingAuthorizationHeader.into()),
@@ -129,14 +121,14 @@ pub trait Policy {
pub mod policies {
use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
use meilisearch_auth::{AuthController, AuthFilter, SearchRules};
// reexport actions in policies in order to be used in routes configuration.
pub use meilisearch_types::keys::{actions, Action};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use crate::extractors::authentication::Policy;
use meilisearch_auth::{AuthController, AuthFilter, SearchRules};
// reexport actions in policies in order to be used in routes configuration.
pub use meilisearch_types::keys::{actions, Action};
fn tenant_token_validation() -> Validation {
let mut validation = Validation::default();
@@ -174,10 +166,7 @@ pub mod policies {
// authenticate if token is the master key.
// master key can only have access to keys routes.
// if master key is None only keys routes are inaccessible.
if auth
.get_master_key()
.map_or_else(|| !is_keys_action(A), |mk| mk == token)
{
if auth.get_master_key().map_or_else(|| !is_keys_action(A), |mk| mk == token) {
return Some(AuthFilter::default());
}
@@ -235,9 +224,7 @@ pub mod policies {
}
}
return auth
.get_key_filters(uid, Some(data.claims.search_rules))
.ok();
return auth.get_key_filters(uid, Some(data.claims.search_rules)).ok();
}
None


@@ -1,7 +1,10 @@
#![allow(non_snake_case)]
use std::{future::Future, pin::Pin, task::Poll};
use std::future::Future;
use std::pin::Pin;
use std::task::Poll;
use actix_web::{dev::Payload, FromRequest, Handler, HttpRequest};
use actix_web::dev::Payload;
use actix_web::{FromRequest, Handler, HttpRequest};
use pin_project_lite::pin_project;
/// `SeqHandler` is an actix `Handler` that enforces that extractors errors are returned in the


@@ -13,37 +13,32 @@ pub mod metrics;
#[cfg(feature = "metrics")]
pub mod route_metrics;
use std::{
fs::File,
io::{BufReader, BufWriter},
path::Path,
sync::{atomic::AtomicBool, Arc},
};
use std::fs::File;
use std::io::{BufReader, BufWriter};
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use crate::error::MeilisearchHttpError;
use actix_cors::Cors;
use actix_http::body::MessageBody;
use actix_web::{dev::ServiceFactory, error::JsonPayloadError, middleware};
use actix_web::{dev::ServiceResponse, web::Data};
use actix_web::dev::{ServiceFactory, ServiceResponse};
use actix_web::error::JsonPayloadError;
use actix_web::web::Data;
use actix_web::{middleware, web, HttpRequest};
use analytics::Analytics;
use anyhow::bail;
use error::PayloadError;
use http::header::CONTENT_TYPE;
use meilisearch_types::{
milli::{
self,
documents::{DocumentsBatchBuilder, DocumentsBatchReader},
update::{IndexDocumentsConfig, IndexDocumentsMethod},
},
settings::apply_settings_to_builder,
};
pub use option::Opt;
use actix_web::{web, HttpRequest};
use extractors::payload::PayloadConfig;
use http::header::CONTENT_TYPE;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
use meilisearch_types::milli::{self};
use meilisearch_types::settings::apply_settings_to_builder;
pub use option::Opt;
use crate::error::MeilisearchHttpError;
pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);
@@ -103,14 +98,9 @@ pub fn create_app(
)
.wrap(middleware::Logger::default())
.wrap(middleware::Compress::default())
.wrap(middleware::NormalizePath::new(
middleware::TrailingSlash::Trim,
));
.wrap(middleware::NormalizePath::new(middleware::TrailingSlash::Trim));
#[cfg(feature = "metrics")]
let app = app.wrap(Condition::new(
opt.enable_metrics_route,
route_metrics::RouteMetrics,
));
let app = app.wrap(Condition::new(opt.enable_metrics_route, route_metrics::RouteMetrics));
app
}
@@ -154,30 +144,18 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(IndexScheduler, AuthContr
if empty_db && src_path_exists {
let (mut index_scheduler, mut auth_controller) = meilisearch_builder()?;
import_dump(
&opt.db_path,
path,
&mut index_scheduler,
&mut auth_controller,
)?;
import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller)?;
(index_scheduler, auth_controller)
} else if !empty_db && !opt.ignore_dump_if_db_exists {
bail!(
"database already exists at {:?}, try to delete it or rename it",
opt.db_path
.canonicalize()
.unwrap_or_else(|_| opt.db_path.to_owned())
opt.db_path.canonicalize().unwrap_or_else(|_| opt.db_path.to_owned())
)
} else if !src_path_exists && !opt.ignore_missing_dump {
bail!("dump doesn't exist at {:?}", path)
} else {
let (mut index_scheduler, mut auth_controller) = meilisearch_builder()?;
import_dump(
&opt.db_path,
path,
&mut index_scheduler,
&mut auth_controller,
)?;
import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller)?;
(index_scheduler, auth_controller)
}
} else {
@@ -232,10 +210,7 @@ fn import_dump(
// 1. Import the instance-uid.
if let Some(ref instance_uid) = instance_uid {
// we don't want to panic if there is an error with the instance-uid.
let _ = std::fs::write(
db_path.join("instance-uid"),
instance_uid.to_string().as_bytes(),
);
let _ = std::fs::write(db_path.join("instance-uid"), instance_uid.to_string().as_bytes());
};
// 2. Import the `Key`s.
@@ -271,10 +246,7 @@ fn import_dump(
log::info!("Importing the settings.");
let settings = index_reader.settings()?;
apply_settings_to_builder(&settings, &mut builder);
builder.execute(
|indexing_step| log::debug!("update: {:?}", indexing_step),
|| false,
)?;
builder.execute(|indexing_step| log::debug!("update: {:?}", indexing_step), || false)?;
// 3.3 Import the documents.
// 3.3.1 We need to recreate the grenad+obkv format accepted by the index.
@@ -368,9 +340,7 @@ pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
let generated = generated::generate();
// Generate routes for mini-dashboard assets
for (path, resource) in generated.into_iter() {
let Resource {
mime_type, data, ..
} = resource;
let Resource { mime_type, data, .. } = resource;
// Redirect index.html to /
if path == "index.html" {
config.service(web::resource("/").route(web::get().to(move || async move {


@@ -8,8 +8,7 @@ use actix_web::HttpServer;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_http::analytics::Analytics;
use meilisearch_http::{analytics, create_app};
use meilisearch_http::{setup_meilisearch, Opt};
use meilisearch_http::{analytics, create_app, setup_meilisearch, Opt};
#[global_allocator]
static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
@@ -89,24 +88,22 @@ async fn run_http(
.keep_alive(KeepAlive::Os);
if let Some(config) = opt_clone.get_ssl_config()? {
http_server
.bind_rustls(opt_clone.http_addr, config)?
.run()
.await?;
http_server.bind_rustls(opt_clone.http_addr, config)?.run().await?;
} else {
http_server.bind(&opt_clone.http_addr)?.run().await?;
}
Ok(())
}
pub fn print_launch_resume(opt: &Opt, analytics: Arc<dyn Analytics>, config_read_from: Option<PathBuf>) {
pub fn print_launch_resume(
opt: &Opt,
analytics: Arc<dyn Analytics>,
config_read_from: Option<PathBuf>,
) {
let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
let protocol = if opt.ssl_cert_path.is_some() && opt.ssl_key_path.is_some() {
"https"
} else {
"http"
};
let protocol =
if opt.ssl_cert_path.is_some() && opt.ssl_key_path.is_some() { "https" } else { "http" };
let ascii_name = r#"
888b d888 d8b 888 d8b 888
8888b d8888 Y8P 888 Y8P 888
@@ -131,10 +128,7 @@ pub fn print_launch_resume(opt: &Opt, analytics: Arc<dyn Analytics>, config_read
eprintln!("Environment:\t\t{:?}", opt.env);
eprintln!("Commit SHA:\t\t{:?}", commit_sha.to_string());
eprintln!("Commit date:\t\t{:?}", commit_date.to_string());
eprintln!(
"Package version:\t{:?}",
env!("CARGO_PKG_VERSION").to_string()
);
eprintln!("Package version:\t{:?}", env!("CARGO_PKG_VERSION").to_string());
#[cfg(all(not(debug_assertions), feature = "analytics"))]
{


@@ -1,9 +1,8 @@
use lazy_static::lazy_static;
use prometheus::{
opts, register_histogram_vec, register_int_counter_vec, register_int_gauge,
register_int_gauge_vec,
register_int_gauge_vec, HistogramVec, IntCounterVec, IntGauge, IntGaugeVec,
};
use prometheus::{HistogramVec, IntCounterVec, IntGauge, IntGaugeVec};
const HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: &[f64; 14] = &[
0.0005, 0.0008, 0.00085, 0.0009, 0.00095, 0.001, 0.00105, 0.0011, 0.00115, 0.0012, 0.0015,
@@ -16,19 +15,14 @@ lazy_static! {
&["method", "path"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge = register_int_gauge!(opts!(
"meilisearch_db_size_bytes",
"Meilisearch Db Size In Bytes"
))
.expect("Can't create a metric");
pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch Db Size In Bytes"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_INDEX_COUNT: IntGauge =
register_int_gauge!(opts!("meilisearch_index_count", "Meilisearch Index Count"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_INDEX_DOCS_COUNT: IntGaugeVec = register_int_gauge_vec!(
opts!(
"meilisearch_index_docs_count",
"Meilisearch Index Docs Count"
),
opts!("meilisearch_index_docs_count", "Meilisearch Index Docs Count"),
&["index"]
)
.expect("Can't create a metric");


@@ -1,24 +1,21 @@
use std::convert::TryFrom;
use std::env::VarError;
use std::ffi::OsStr;
use std::io::{BufReader, Read};
use std::num::ParseIntError;
use std::ops::Deref;
use std::path::PathBuf;
use std::str::FromStr;
use std::ffi::OsStr;
use std::env::VarError;
use std::sync::Arc;
use std::{env, fmt, fs};
use byte_unit::{Byte, ByteError};
use clap::Parser;
use meilisearch_types::milli::update::IndexerConfig;
use rustls::{
server::{
AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient,
ServerSessionMemoryCache,
},
RootCertStore,
use rustls::server::{
AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient, ServerSessionMemoryCache,
};
use rustls::RootCertStore;
use rustls_pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
use serde::{Deserialize, Serialize};
use sysinfo::{RefreshKind, System, SystemExt};
@@ -502,9 +499,7 @@ pub struct SchedulerConfig {
impl SchedulerConfig {
pub fn export_to_env(self) {
let SchedulerConfig {
disable_auto_batching,
} = self;
let SchedulerConfig { disable_auto_batching } = self;
export_to_env_if_not_present(DISABLE_AUTO_BATCHING, disable_auto_batching.to_string());
}
}
@@ -513,9 +508,8 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
type Error = anyhow::Error;
fn try_from(other: &IndexerOpts) -> Result<Self, Self::Error> {
let thread_pool = rayon::ThreadPoolBuilder::new()
.num_threads(*other.max_indexing_threads)
.build()?;
let thread_pool =
rayon::ThreadPoolBuilder::new().num_threads(*other.max_indexing_threads).build()?;
Ok(Self {
log_every_n: Some(other.log_every_n),
@@ -553,11 +547,7 @@ impl FromStr for MaxMemory {
impl Default for MaxMemory {
fn default() -> MaxMemory {
MaxMemory(
total_memory_bytes()
.map(|bytes| bytes * 2 / 3)
.map(Byte::from_bytes),
)
MaxMemory(total_memory_bytes().map(|bytes| bytes * 2 / 3).map(Byte::from_bytes))
}
}
@@ -757,21 +747,18 @@ mod test {
#[test]
fn test_meilli_config_file_path_invalid() {
temp_env::with_vars(
vec![("MEILI_CONFIG_FILE_PATH", Some("../configgg.toml"))],
|| {
let possible_error_messages = [
temp_env::with_vars(vec![("MEILI_CONFIG_FILE_PATH", Some("../configgg.toml"))], || {
let possible_error_messages = [
"unable to open or read the \"../configgg.toml\" configuration file: No such file or directory (os error 2).",
"unable to open or read the \"../configgg.toml\" configuration file: The system cannot find the file specified. (os error 2).", // Windows
];
let error_message = Opt::try_build().unwrap_err().to_string();
assert!(
possible_error_messages.contains(&error_message.as_str()),
"Expected onf of {:?}, got {:?}.",
possible_error_messages,
error_message
);
},
);
let error_message = Opt::try_build().unwrap_err().to_string();
assert!(
possible_error_messages.contains(&error_message.as_str()),
"Expected onf of {:?}, got {:?}.",
possible_error_messages,
error_message
);
});
}
}


@@ -1,17 +1,13 @@
use std::future::{ready, Ready};
use actix_web::dev::{self, Service, ServiceRequest, ServiceResponse, Transform};
use actix_web::http::header;
use actix_web::HttpResponse;
use actix_web::{
dev::{self, Service, ServiceRequest, ServiceResponse, Transform},
Error,
};
use actix_web::{Error, HttpResponse};
use futures_util::future::LocalBoxFuture;
use meilisearch_auth::actions;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use prometheus::HistogramTimer;
use prometheus::{Encoder, TextEncoder};
use prometheus::{Encoder, HistogramTimer, TextEncoder};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
@@ -33,15 +29,11 @@ pub async fn get_metrics(
let encoder = TextEncoder::new();
let mut buffer = vec![];
encoder
.encode(&prometheus::gather(), &mut buffer)
.expect("Failed to encode metrics");
encoder.encode(&prometheus::gather(), &mut buffer).expect("Failed to encode metrics");
let response = String::from_utf8(buffer).expect("Failed to convert bytes to string");
Ok(HttpResponse::Ok()
.insert_header(header::ContentType(mime::TEXT_PLAIN))
.body(response))
Ok(HttpResponse::Ok().insert_header(header::ContentType(mime::TEXT_PLAIN)).body(response))
}
pub struct RouteMetrics;


@@ -1,19 +1,18 @@
use std::str;
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_auth::error::AuthControllerError;
use meilisearch_auth::AuthController;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::keys::{Action, Key};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
use uuid::Uuid;
use meilisearch_auth::{error::AuthControllerError, AuthController};
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::keys::{Action, Key};
use crate::extractors::{
authentication::{policies::*, GuardedData},
sequential_extractor::SeqHandler,
};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::Pagination;
pub fn configure(cfg: &mut web::ServiceConfig) {
@@ -52,10 +51,8 @@ pub async fn list_api_keys(
) -> Result<HttpResponse, ResponseError> {
let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let keys = auth_controller.list_keys()?;
let page_view = paginate.auto_paginate_sized(
keys.into_iter()
.map(|k| KeyView::from_key(k, &auth_controller)),
);
let page_view = paginate
.auto_paginate_sized(keys.into_iter().map(|k| KeyView::from_key(k, &auth_controller)));
Ok(page_view)
})


@@ -10,7 +10,8 @@ use time::macros::format_description;
use time::OffsetDateTime;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::SummarizedTaskView;
@@ -38,9 +39,7 @@ pub async fn create_dump(
dump_uid,
};
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))


@@ -2,8 +2,7 @@ use std::io::Cursor;
use actix_web::http::header::CONTENT_TYPE;
use actix_web::web::Data;
use actix_web::HttpMessage;
use actix_web::{web, HttpRequest, HttpResponse};
use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
use bstr::ByteSlice;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
@@ -23,17 +22,14 @@ use serde_json::Value;
use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::payload::Payload;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{fold_star_or, PaginationView, SummarizedTaskView};
static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
vec![
"application/json".to_string(),
"application/x-ndjson".to_string(),
"text/csv".to_string(),
]
vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()]
});
/// Extracts the mime type from the content type and return
@ -47,9 +43,7 @@ fn extract_mime_type(req: &HttpRequest) -> Result<Option<Mime>, MeilisearchHttpE
content_type.as_bytes().as_bstr().to_string(),
ACCEPTED_CONTENT_TYPE.clone(),
)),
None => Err(MeilisearchHttpError::MissingContentType(
ACCEPTED_CONTENT_TYPE.clone(),
)),
None => Err(MeilisearchHttpError::MissingContentType(ACCEPTED_CONTENT_TYPE.clone())),
},
}
}
@@ -101,18 +95,10 @@ pub async fn delete_document(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
path: web::Path<DocumentParam>,
) -> Result<HttpResponse, ResponseError> {
let DocumentParam {
document_id,
index_uid,
} = path.into_inner();
let task = KindWithContent::DocumentDeletion {
index_uid,
documents_ids: vec![document_id],
};
let DocumentParam { document_id, index_uid } = path.into_inner();
let task = KindWithContent::DocumentDeletion { index_uid, documents_ids: vec![document_id] };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}
@@ -133,11 +119,7 @@ pub async fn get_all_documents(
params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
let BrowseQuery {
limit,
offset,
fields,
} = params.into_inner();
let BrowseQuery { limit, offset, fields } = params.into_inner();
let attributes_to_retrieve = fields.and_then(fold_star_or);
let index = index_scheduler.index(&index_uid)?;
@@ -220,10 +202,7 @@ async fn document_addition(
method: IndexDocumentsMethod,
allow_index_creation: bool,
) -> Result<SummarizedTaskView, MeilisearchHttpError> {
let format = match mime_type
.as_ref()
.map(|m| (m.type_().as_str(), m.subtype().as_str()))
{
let format = match mime_type.as_ref().map(|m| (m.type_().as_str(), m.subtype().as_str())) {
Some(("application", "json")) => PayloadType::Json,
Some(("application", "x-ndjson")) => PayloadType::Ndjson,
Some(("text", "csv")) => PayloadType::Csv,
@@ -234,9 +213,7 @@ async fn document_addition(
))
}
None => {
return Err(MeilisearchHttpError::MissingContentType(
ACCEPTED_CONTENT_TYPE.clone(),
))
return Err(MeilisearchHttpError::MissingContentType(ACCEPTED_CONTENT_TYPE.clone()))
}
};
@@ -308,21 +285,13 @@ pub async fn delete_documents(
debug!("called with params: {:?}", body);
let ids = body
.iter()
.map(|v| {
v.as_str()
.map(String::from)
.unwrap_or_else(|| v.to_string())
})
.map(|v| v.as_str().map(String::from).unwrap_or_else(|| v.to_string()))
.collect();
let task = KindWithContent::DocumentDeletion {
index_uid: path.into_inner(),
documents_ids: ids,
};
let task =
KindWithContent::DocumentDeletion { index_uid: path.into_inner(), documents_ids: ids };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@@ -332,13 +301,9 @@ pub async fn clear_all_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
path: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let task = KindWithContent::DocumentClear {
index_uid: path.into_inner(),
};
let task = KindWithContent::DocumentClear { index_uid: path.into_inner() };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@@ -352,10 +317,9 @@ fn all_documents<'a>(
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
Ok(index.all_documents(rtxn)?.map(move |ret| {
ret.map_err(ResponseError::from)
.and_then(|(_key, document)| -> Result<_, ResponseError> {
Ok(milli::obkv_to_json(&all_fields, &fields_ids_map, document)?)
})
ret.map_err(ResponseError::from).and_then(|(_key, document)| -> Result<_, ResponseError> {
Ok(milli::obkv_to_json(&all_fields, &fields_ids_map, document)?)
})
}))
}


@@ -9,11 +9,11 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, AuthenticationError, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use super::{Pagination, SummarizedTaskView};
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
pub mod documents;
pub mod search;
@@ -104,14 +104,9 @@ pub async fn create_index(
Some(&req),
);
let task = KindWithContent::IndexCreation {
index_uid: uid,
primary_key,
};
let task = KindWithContent::IndexCreation { index_uid: uid, primary_key };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
Ok(HttpResponse::Accepted().json(task))
} else {
@@ -160,9 +155,7 @@ pub async fn update_index(
};
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@@ -172,13 +165,9 @@ pub async fn delete_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let task = KindWithContent::IndexDeletion {
index_uid: index_uid.into_inner(),
};
let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
Ok(HttpResponse::Accepted().json(task))
}
@@ -189,11 +178,7 @@ pub async fn get_index_stats(
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Stats Seen".to_string(),
json!({ "per_index_uid": true }),
Some(&req),
);
analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": true }), Some(&req));
let stats = IndexStats::new((*index_scheduler).clone(), index_uid.into_inner());


@@ -9,7 +9,8 @@ use serde_cs::vec::CS;
use serde_json::Value;
use crate::analytics::{Analytics, SearchAggregator};
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::search::{
perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
@@ -64,14 +65,10 @@ impl From<SearchQueryGet> for SearchQuery {
q: other.q,
offset: other.offset,
limit: other.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT),
attributes_to_retrieve: other
.attributes_to_retrieve
.map(|o| o.into_iter().collect()),
attributes_to_retrieve: other.attributes_to_retrieve.map(|o| o.into_iter().collect()),
attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
crop_length: other.crop_length,
attributes_to_highlight: other
.attributes_to_highlight
.map(|o| o.into_iter().collect()),
attributes_to_highlight: other.attributes_to_highlight.map(|o| o.into_iter().collect()),
filter,
sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
show_matches_position: other.show_matches_position,
@@ -140,10 +137,8 @@ pub async fn search_with_url_query(
let mut query: SearchQuery = params.into_inner().into();
// Tenant token search_rules.
if let Some(search_rules) = index_scheduler
.filters()
.search_rules
.get_index_search_rules(&index_uid)
if let Some(search_rules) =
index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
{
add_search_rules(&mut query, search_rules);
}
@@ -174,10 +169,8 @@ pub async fn search_with_post(
debug!("search called with params: {:?}", query);
// Tenant token search_rules.
if let Some(search_rules) = index_scheduler
.filters()
.search_rules
.get_index_search_rules(&index_uid)
if let Some(search_rules) =
index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
{
add_search_rules(&mut query, search_rules);
}
@@ -206,13 +199,7 @@ mod test {
let sort = fix_sort_query_parameters("_geoPoint(12, 13):asc");
assert_eq!(sort, vec!["_geoPoint(12,13):asc".to_string()]);
let sort = fix_sort_query_parameters("doggo:asc,_geoPoint(12.45,13.56):desc");
assert_eq!(
sort,
vec![
"doggo:asc".to_string(),
"_geoPoint(12.45,13.56):desc".to_string(),
]
);
assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(12.45,13.56):desc".to_string(),]);
let sort = fix_sort_query_parameters(
"doggo:asc , _geoPoint(12.45, 13.56, 2590352):desc , catto:desc",
);
@@ -226,12 +213,6 @@ mod test {
);
let sort = fix_sort_query_parameters("doggo:asc , _geoPoint(1, 2), catto:desc");
// This is ugly but eh, I don't want to write a full parser just for this unused route
assert_eq!(
sort,
vec![
"doggo:asc".to_string(),
"_geoPoint(1,2),catto:desc".to_string(),
]
);
assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(1,2),catto:desc".to_string(),]);
}
}

View file

@@ -1,15 +1,15 @@
use actix_web::web::Data;
use log::debug;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::settings::{settings, Settings, Unchecked};
use meilisearch_types::tasks::KindWithContent;
use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::routes::SummarizedTaskView;
#[macro_export]
@@ -18,16 +18,15 @@ macro_rules! make_setting_route {
pub mod $attr {
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse, Resource};
use log::debug;
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::milli::update::Setting;
use meilisearch_types::settings::{settings, Settings};
use meilisearch_types::tasks::KindWithContent;
use meilisearch_types::error::ResponseError;
use $crate::analytics::Analytics;
use $crate::extractors::authentication::{policies::*, GuardedData};
use $crate::extractors::authentication::policies::*;
use $crate::extractors::authentication::GuardedData;
use $crate::extractors::sequential_extractor::SeqHandler;
use $crate::routes::SummarizedTaskView;
@@ -38,10 +37,7 @@ macro_rules! make_setting_route {
>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let new_settings = Settings {
$attr: Setting::Reset,
..Default::default()
};
let new_settings = Settings { $attr: Setting::Reset, ..Default::default() };
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let task = KindWithContent::Settings {
@@ -270,13 +266,7 @@ make_setting_route!(
"synonyms"
);
make_setting_route!(
"/distinct-attribute",
put,
String,
distinct_attribute,
"distinctAttribute"
);
make_setting_route!("/distinct-attribute", put, String, distinct_attribute, "distinctAttribute");
make_setting_route!(
"/ranking-rules",
@@ -453,9 +443,7 @@ pub async fn update_all(
allow_index_creation,
};
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@@ -486,9 +474,7 @@ pub async fn delete_all(
allow_index_creation,
};
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
.await??
.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))


@@ -1,8 +1,5 @@
use std::collections::HashSet;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::tasks::TaskView;
use actix_web::web::Data;
use actix_web::{web, HttpResponse};
use index_scheduler::IndexScheduler;
@@ -10,6 +7,11 @@ use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::tasks::KindWithContent;
use serde::Deserialize;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::tasks::TaskView;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(indexes_swap))));
}
@@ -33,10 +35,7 @@ pub async fn indexes_swap(
let mut swaps = vec![];
let mut indexes_set = HashSet::<String>::default();
for IndexesSwapPayload {
indexes: (lhs, rhs),
} in params.into_inner().into_iter()
{
for IndexesSwapPayload { indexes: (lhs, rhs) } in params.into_inner().into_iter() {
if !search_rules.is_index_authorized(&lhs) || !search_rules.is_index_authorized(&rhs) {
return Err(ResponseError::from_msg(
"TODO: error message when we swap with an index we're not allowed to access"


@@ -12,10 +12,10 @@ use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use self::indexes::IndexStats;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
mod api_key;
mod dump;
@@ -102,11 +102,7 @@ impl Pagination {
T: Serialize,
{
let total = content.len();
let content: Vec<_> = content
.into_iter()
.skip(self.offset)
.take(self.limit)
.collect();
let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
self.format_with(total, content)
}
@@ -119,11 +115,7 @@ impl Pagination {
where
T: Serialize,
{
let content: Vec<_> = content
.into_iter()
.skip(self.offset)
.take(self.limit)
.collect();
let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
self.format_with(total, content)
}
@@ -133,23 +125,13 @@ impl Pagination {
where
T: Serialize,
{
PaginationView {
results,
offset: self.offset,
limit: self.limit,
total,
}
PaginationView { results, offset: self.offset, limit: self.limit, total }
}
}
impl<T> PaginationView<T> {
pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
Self {
offset,
limit,
results,
total,
}
Self { offset, limit, results, total }
}
}
@@ -211,10 +193,7 @@ pub struct EnqueuedUpdateResult {
pub update_type: UpdateType,
#[serde(with = "time::serde::rfc3339")]
pub enqueued_at: OffsetDateTime,
#[serde(
skip_serializing_if = "Option::is_none",
with = "time::serde::rfc3339::option"
)]
#[serde(skip_serializing_if = "Option::is_none", with = "time::serde::rfc3339::option")]
pub started_processing_at: Option<OffsetDateTime>,
}
@@ -275,11 +254,7 @@ async fn get_stats(
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Stats Seen".to_string(),
json!({ "per_index_uid": false }),
Some(&req),
);
analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": false }), Some(&req));
let search_rules = &index_scheduler.filters().search_rules;
let stats = create_all_stats((*index_scheduler).clone(), search_rules)?;
@@ -300,9 +275,7 @@ pub fn create_all_stats(
limit: Some(1),
..Query::default()
})?;
let processing_index = processing_task
.first()
.and_then(|task| task.index_uid().clone());
let processing_index = processing_task.first().and_then(|task| task.index_uid().clone());
for (name, index) in index_scheduler.indexes()? {
if !search_rules.is_index_authorized(&name) {
continue;
@@ -313,9 +286,7 @@ pub fn create_all_stats(
let rtxn = index.read_txn()?;
let stats = IndexStats {
number_of_documents: index.number_of_documents(&rtxn)?,
is_indexing: processing_index
.as_deref()
.map_or(false, |index_name| name == index_name),
is_indexing: processing_index.as_deref().map_or(false, |index_name| name == index_name),
field_distribution: index.field_distribution(&rtxn)?,
};
@@ -324,11 +295,7 @@ pub fn create_all_stats(
indexes.insert(name, stats);
}
let stats = Stats {
database_size,
last_update: last_task,
indexes,
};
let stats = Stats { database_size, last_update: last_task, indexes };
Ok(stats)
}


@@ -12,11 +12,11 @@ use serde_json::json;
use time::{Duration, OffsetDateTime};
use tokio::task::block_in_place;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use super::fold_star_or;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
const DEFAULT_LIMIT: fn() -> u32 = || 20;
@@ -80,10 +80,7 @@ impl TaskView {
canceled_by: task.canceled_by,
details: task.details.clone().map(DetailsView::from),
error: task.error.clone(),
duration: task
.started_at
.zip(task.finished_at)
.map(|(start, end)| end - start),
duration: task.started_at.zip(task.finished_at).map(|(start, end)| end - start),
enqueued_at: task.enqueued_at,
started_at: task.started_at,
finished_at: task.finished_at,
@@ -124,62 +121,45 @@ pub struct DetailsView {
impl From<Details> for DetailsView {
fn from(details: Details) -> Self {
match details.clone() {
Details::DocumentAddition {
received_documents,
indexed_documents,
} => DetailsView {
Details::DocumentAddition { received_documents, indexed_documents } => DetailsView {
received_documents: Some(received_documents),
indexed_documents,
..DetailsView::default()
},
Details::Settings { settings } => DetailsView {
settings: Some(settings),
..DetailsView::default()
},
Details::IndexInfo { primary_key } => DetailsView {
primary_key: Some(primary_key),
..DetailsView::default()
},
Details::DocumentDeletion {
received_document_ids,
deleted_documents,
} => DetailsView {
Details::Settings { settings } => {
DetailsView { settings: Some(settings), ..DetailsView::default() }
}
Details::IndexInfo { primary_key } => {
DetailsView { primary_key: Some(primary_key), ..DetailsView::default() }
}
Details::DocumentDeletion { received_document_ids, deleted_documents } => DetailsView {
received_document_ids: Some(received_document_ids),
deleted_documents: Some(deleted_documents),
..DetailsView::default()
},
Details::ClearAll { deleted_documents } => DetailsView {
deleted_documents: Some(deleted_documents),
..DetailsView::default()
},
Details::TaskCancelation {
matched_tasks,
canceled_tasks,
original_query,
} => DetailsView {
matched_tasks: Some(matched_tasks),
canceled_tasks: Some(canceled_tasks),
original_query: Some(original_query),
..DetailsView::default()
},
Details::TaskDeletion {
matched_tasks,
deleted_tasks,
original_query,
} => DetailsView {
Details::ClearAll { deleted_documents } => {
DetailsView { deleted_documents: Some(deleted_documents), ..DetailsView::default() }
}
Details::TaskCancelation { matched_tasks, canceled_tasks, original_query } => {
DetailsView {
matched_tasks: Some(matched_tasks),
canceled_tasks: Some(canceled_tasks),
original_query: Some(original_query),
..DetailsView::default()
}
}
Details::TaskDeletion { matched_tasks, deleted_tasks, original_query } => DetailsView {
matched_tasks: Some(matched_tasks),
deleted_tasks: Some(deleted_tasks),
original_query: Some(original_query),
..DetailsView::default()
},
Details::Dump { dump_uid } => DetailsView {
dump_uid: Some(dump_uid),
..DetailsView::default()
},
Details::IndexSwap { swaps } => DetailsView {
indexes: Some(swaps),
..Default::default()
},
Details::Dump { dump_uid } => {
DetailsView { dump_uid: Some(dump_uid), ..DetailsView::default() }
}
Details::IndexSwap { swaps } => {
DetailsView { indexes: Some(swaps), ..Default::default() }
}
}
}
}
@@ -318,10 +298,8 @@ async fn cancel_tasks(
let filtered_query = filter_out_inaccessible_indexes_from_query(&index_scheduler, &query);
let tasks = index_scheduler.get_task_ids(&filtered_query)?;
let task_cancelation = KindWithContent::TaskCancelation {
query: req.query_string().to_string(),
tasks,
};
let task_cancelation =
KindWithContent::TaskCancelation { query: req.query_string().to_string(), tasks };
let task = block_in_place(|| index_scheduler.register(task_cancelation))?;
let task_view = TaskView::from_task(&task);
@@ -377,10 +355,8 @@ async fn delete_tasks(
let filtered_query = filter_out_inaccessible_indexes_from_query(&index_scheduler, &query);
let tasks = index_scheduler.get_task_ids(&filtered_query)?;
let task_deletion = KindWithContent::TaskDeletion {
query: req.query_string().to_string(),
tasks,
};
let task_deletion =
KindWithContent::TaskDeletion { query: req.query_string().to_string(), tasks };
let task = block_in_place(|| index_scheduler.register(task_deletion))?;
let task_view = TaskView::from_task(&task);
@@ -448,11 +424,8 @@ async fn get_tasks(
};
let query = filter_out_inaccessible_indexes_from_query(&index_scheduler, &query);
let mut tasks_results: Vec<TaskView> = index_scheduler
.get_tasks(query)?
.into_iter()
.map(|t| TaskView::from_task(&t))
.collect();
let mut tasks_results: Vec<TaskView> =
index_scheduler.get_tasks(query)?.into_iter().map(|t| TaskView::from_task(&t)).collect();
// If we were able to fetch the number +1 tasks we asked
// it means that there is more to come.
@@ -483,11 +456,7 @@ async fn get_task(
) -> Result<HttpResponse, ResponseError> {
let task_id = task_id.into_inner();
analytics.publish(
"Tasks Seen".to_string(),
json!({ "per_task_uid": true }),
Some(&req),
);
analytics.publish("Tasks Seen".to_string(), json!({ "per_task_uid": true }), Some(&req));
let search_rules = &index_scheduler.filters().search_rules;
let mut filters = index_scheduler::Query::default();
@@ -541,10 +510,9 @@ fn filter_out_inaccessible_indexes_from_query<const ACTION: u8>(
}
pub(crate) mod date_deserializer {
use time::{
format_description::well_known::Rfc3339, macros::format_description, Date, Duration,
OffsetDateTime, Time,
};
use time::format_description::well_known::Rfc3339;
use time::macros::format_description;
use time::{Date, Duration, OffsetDateTime, Time};
enum DeserializeDateOption {
Before,
@@ -586,10 +554,11 @@ pub(crate) mod date_deserializer {
/// Deserialize an upper bound datetime with RFC3339 or YYYY-MM-DD.
pub(crate) mod before {
use super::{deserialize_date, DeserializeDateOption};
use serde::Deserializer;
use time::OffsetDateTime;
use super::{deserialize_date, DeserializeDateOption};
/// Deserialize an [`Option<OffsetDateTime>`] from its ISO 8601 representation.
pub fn deserialize<'a, D: Deserializer<'a>>(
deserializer: D,
@@ -638,10 +607,11 @@ pub(crate) mod date_deserializer {
///
/// If YYYY-MM-DD is used, the day is incremented by one.
pub(crate) mod after {
use super::{deserialize_date, DeserializeDateOption};
use serde::Deserializer;
use time::OffsetDateTime;
use super::{deserialize_date, DeserializeDateOption};
/// Deserialize an [`Option<OffsetDateTime>`] from its ISO 8601 representation.
pub fn deserialize<'a, D: Deserializer<'a>>(
deserializer: D,
@@ -689,9 +659,10 @@ pub(crate) mod date_deserializer {
#[cfg(test)]
mod tests {
use crate::routes::tasks::TaskDeletionQuery;
use meili_snap::snapshot;
use crate::routes::tasks::TaskDeletionQuery;
#[test]
fn deserialize_task_deletion_query_datetime() {
{


@@ -145,12 +145,7 @@ pub fn perform_search(
search.sort_criteria(sort);
}
let milli::SearchResult {
documents_ids,
matching_words,
candidates,
..
} = search.execute()?;
let milli::SearchResult { documents_ids, matching_words, candidates, .. } = search.execute()?;
let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
@@ -240,11 +235,7 @@ pub fn perform_search(
insert_geo_distance(sort, &mut document);
}
let hit = SearchHit {
document,
formatted,
matches_position,
};
let hit = SearchHit { document, formatted, matches_position };
documents.push(hit);
}
@@ -289,10 +280,7 @@ fn insert_geo_distance(sorts: &[String], document: &mut Document) {
};
if let Some(capture_group) = sorts.iter().find_map(|sort| GEO_REGEX.captures(sort)) {
// TODO: TAMO: milli encountered an internal error, what do we want to do?
let base = [
capture_group[1].parse().unwrap(),
capture_group[2].parse().unwrap(),
];
let base = [capture_group[1].parse().unwrap(), capture_group[2].parse().unwrap()];
let geo_point = &document.get("_geo").unwrap_or(&json!(null));
if let Some((lat, lng)) = geo_point["lat"].as_f64().zip(geo_point["lng"].as_f64()) {
let distance = milli::distance_between_two_points(&base, &[lat, lng]);
@@ -341,10 +329,7 @@ fn add_highlight_to_formatted_options(
displayed_ids: &BTreeSet<FieldId>,
) {
for attr in attr_to_highlight {
let new_format = FormatOptions {
highlight: true,
crop: None,
};
let new_format = FormatOptions { highlight: true, crop: None };
if attr == "*" {
for id in displayed_ids {
@@ -383,10 +368,7 @@ fn add_crop_to_formatted_options(
formatted_options
.entry(*id)
.and_modify(|f| f.crop = Some(attr_len))
.or_insert(FormatOptions {
highlight: false,
crop: Some(attr_len),
});
.or_insert(FormatOptions { highlight: false, crop: Some(attr_len) });
}
}
@@ -395,10 +377,7 @@ fn add_crop_to_formatted_options(
formatted_options
.entry(id)
.and_modify(|f| f.crop = Some(attr_len))
.or_insert(FormatOptions {
highlight: false,
crop: Some(attr_len),
});
.or_insert(FormatOptions { highlight: false, crop: Some(attr_len) });
}
}
}
@@ -409,10 +388,7 @@ fn add_non_formatted_ids_to_formatted_options(
to_retrieve_ids: &BTreeSet<FieldId>,
) {
for id in to_retrieve_ids {
formatted_options.entry(*id).or_insert(FormatOptions {
highlight: false,
crop: None,
});
formatted_options.entry(*id).or_insert(FormatOptions { highlight: false, crop: None });
}
}
@@ -426,10 +402,7 @@ fn make_document(
// recreate the original json
for (key, value) in obkv.iter() {
let value = serde_json::from_slice(value)?;
let key = field_ids_map
.name(key)
.expect("Missing field name")
.to_string();
let key = field_ids_map.name(key).expect("Missing field name").to_string();
document.insert(key, value);
}
@@ -455,9 +428,8 @@ fn format_fields<'a, A: AsRef<[u8]>>(
let mut document = document.clone();
// select the attributes to retrieve
let displayable_names = displayable_ids
.iter()
.map(|&fid| field_ids_map.name(fid).expect("Missing field name"));
let displayable_names =
displayable_ids.iter().map(|&fid| field_ids_map.name(fid).expect("Missing field name"));
permissive_json_pointer::map_leaf_values(&mut document, displayable_names, |key, value| {
// To get the formatting option of each key we need to see all the rules that applies
// to the value and merge them together. eg. If a user said he wanted to highlight `doggo`
@@ -473,13 +445,7 @@ fn format_fields<'a, A: AsRef<[u8]>>(
.reduce(|acc, option| acc.merge(option));
let mut infos = Vec::new();
*value = format_value(
std::mem::take(value),
builder,
format,
&mut infos,
compute_matches,
);
*value = format_value(std::mem::take(value), builder, format, &mut infos, compute_matches);
if let Some(matches) = matches_position.as_mut() {
if !infos.is_empty() {


@@ -1,7 +1,9 @@
use crate::common::Server;
use std::{thread, time};
use assert_json_diff::assert_json_include;
use serde_json::{json, Value};
use std::{thread, time};
use crate::common::Server;
#[actix_rt::test]
async fn add_valid_api_key() {


@@ -1,11 +1,13 @@
use crate::common::Server;
use std::collections::{HashMap, HashSet};
use ::time::format_description::well_known::Rfc3339;
use maplit::{hashmap, hashset};
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::{HashMap, HashSet};
use time::{Duration, OffsetDateTime};
use crate::common::Server;
pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
Lazy::new(|| {
let mut authorizations = hashmap! {
@@ -57,21 +59,14 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
};
if cfg!(feature = "metrics") {
authorizations.insert(
("GET", "/metrics"),
hashset! {"metrics.get", "metrics.*", "*"},
);
authorizations.insert(("GET", "/metrics"), hashset! {"metrics.get", "metrics.*", "*"});
}
authorizations
});
pub static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
AUTHORIZATIONS
.values()
.cloned()
.reduce(|l, r| l.union(&r).cloned().collect())
.unwrap()
AUTHORIZATIONS.values().cloned().reduce(|l, r| l.union(&r).cloned().collect()).unwrap()
});
static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
@@ -109,13 +104,7 @@ async fn error_access_expired_key() {
for (method, route) in AUTHORIZATIONS.keys() {
let (response, code) = server.dummy_request(method, route).await;
assert_eq!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
assert_eq!(403, code, "{:?}", &response);
}
}
@@ -146,13 +135,7 @@ async fn error_access_unauthorized_index() {
{
let (response, code) = server.dummy_request(method, route).await;
assert_eq!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
assert_eq!(403, code, "{:?}", &response);
}
}
@@ -180,13 +163,7 @@ async fn error_access_unauthorized_action() {
server.use_api_key(&key);
let (response, code) = server.dummy_request(method, route).await;
assert_eq!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
assert_eq!(403, code, "{:?}", &response);
}
}
@@ -201,13 +178,7 @@ async fn access_authorized_master_key() {
for ((method, route), _) in AUTHORIZATIONS.iter() {
let (response, code) = server.dummy_request(method, route).await;
assert_ne!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_ne!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
assert_ne!(code, 403);
}
}


@@ -3,11 +3,11 @@ mod authorization;
mod payload;
mod tenant_token;
use crate::common::Server;
use actix_web::http::StatusCode;
use serde_json::{json, Value};
use crate::common::Server;
impl Server {
pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
self.service.api_key = Some(api_key.as_ref().to_string());


@@ -1,7 +1,8 @@
use crate::common::Server;
use actix_web::test;
use serde_json::{json, Value};
use crate::common::Server;
#[actix_rt::test]
async fn error_api_key_bad_content_types() {
let content = json!({
@@ -36,10 +37,7 @@ async fn error_api_key_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
// patch
let req = test::TestRequest::patch()
@ -61,10 +59,7 @@ async fn error_api_key_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
}
#[actix_rt::test]
@ -101,10 +96,7 @@ async fn error_api_key_empty_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
// patch
let req = test::TestRequest::patch()
@ -126,10 +118,7 @@ async fn error_api_key_empty_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
}
#[actix_rt::test]
@ -165,10 +154,7 @@ async fn error_api_key_missing_content_types() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#missing_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
// patch
let req = test::TestRequest::patch()
@ -189,10 +175,7 @@ async fn error_api_key_missing_content_types() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#missing_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
}
#[actix_rt::test]
@ -217,10 +200,7 @@ async fn error_api_key_empty_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
// patch
@ -237,10 +217,7 @@ async fn error_api_key_empty_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
}
@ -266,10 +243,7 @@ async fn error_api_key_malformed_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
assert_eq!(
response["message"],
json!(
@ -291,10 +265,7 @@ async fn error_api_key_malformed_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
assert_eq!(
response["message"],
json!(

View file

@ -1,12 +1,13 @@
use crate::common::Server;
use std::collections::HashMap;
use ::time::format_description::well_known::Rfc3339;
use maplit::hashmap;
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use std::collections::HashMap;
use time::{Duration, OffsetDateTime};
use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
use crate::common::Server;
fn generate_tenant_token(
parent_uid: impl AsRef<str>,
@ -17,12 +18,8 @@ fn generate_tenant_token(
let parent_uid = parent_uid.as_ref();
body.insert("apiKeyUid", json!(parent_uid));
encode(
&Header::default(),
&body,
&EncodingKey::from_secret(parent_key.as_ref().as_bytes()),
)
.unwrap()
encode(&Header::default(), &body, &EncodingKey::from_secret(parent_key.as_ref().as_bytes()))
.unwrap()
}
static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
@ -513,18 +510,14 @@ async fn error_access_expired_parent_key() {
server.use_api_key(&web_token);
// test search request while parent_key is not expired
let (response, code) = server
.dummy_request("POST", "/indexes/products/search")
.await;
let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
assert_ne!(response, INVALID_RESPONSE.clone());
assert_ne!(code, 403);
// wait until the key is expired.
thread::sleep(time::Duration::new(1, 0));
let (response, code) = server
.dummy_request("POST", "/indexes/products/search")
.await;
let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
assert_eq!(response, INVALID_RESPONSE.clone());
assert_eq!(code, 403);
}
@ -556,9 +549,7 @@ async fn error_access_modified_token() {
server.use_api_key(&web_token);
// test search request while web_token is valid
let (response, code) = server
.dummy_request("POST", "/indexes/products/search")
.await;
let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
assert_ne!(response, INVALID_RESPONSE.clone());
assert_ne!(code, 403);
@ -576,9 +567,7 @@ async fn error_access_modified_token() {
.join(".");
server.use_api_key(&altered_token);
let (response, code) = server
.dummy_request("POST", "/indexes/products/search")
.await;
let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
assert_eq!(response, INVALID_RESPONSE.clone());
assert_eq!(code, 403);
}

View file

@ -1,9 +1,10 @@
use std::io::{Read, Write};
use actix_http::header::TryIntoHeaderPair;
use bytes::Bytes;
use flate2::read::{GzDecoder, ZlibDecoder};
use flate2::write::{GzEncoder, ZlibEncoder};
use flate2::Compression;
use std::io::{Read, Write};
#[derive(Clone, Copy)]
pub enum Encoder {
@ -18,24 +19,18 @@ impl Encoder {
match self {
Self::Gzip => {
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
encoder
.write_all(&body.into())
.expect("Failed to encode request body");
encoder.write_all(&body.into()).expect("Failed to encode request body");
encoder.finish().expect("Failed to encode request body")
}
Self::Deflate => {
let mut encoder = ZlibEncoder::new(Vec::new(), Compression::default());
encoder
.write_all(&body.into())
.expect("Failed to encode request body");
encoder.write_all(&body.into()).expect("Failed to encode request body");
encoder.finish().unwrap()
}
Self::Plain => Vec::from(body.into()),
Self::Brotli => {
let mut encoder = brotli::CompressorWriter::new(Vec::new(), 32 * 1024, 3, 22);
encoder
.write_all(&body.into())
.expect("Failed to encode request body");
encoder.write_all(&body.into()).expect("Failed to encode request body");
encoder.flush().expect("Failed to encode request body");
encoder.into_inner()
}
@ -57,9 +52,7 @@ impl Encoder {
.expect("Invalid zlib stream");
}
Self::Plain => {
buffer
.write_all(input.as_ref())
.expect("Unexpected memory copying issue");
buffer.write_all(input.as_ref()).expect("Unexpected memory copying issue");
}
Self::Brotli => {
brotli::Decompressor::new(input.as_ref(), 4096)
@ -80,8 +73,6 @@ impl Encoder {
}
pub fn iterator() -> impl Iterator<Item = Self> {
[Self::Plain, Self::Gzip, Self::Deflate, Self::Brotli]
.iter()
.copied()
[Self::Plain, Self::Gzip, Self::Deflate, Self::Brotli].iter().copied()
}
}
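The import reshuffle at the top of this file is the grouping rule at work: `std` imports come first, then external crates, then `crate`/`super` paths, with each group separated by a blank line. A minimal before/after sketch (the mix of imports is illustrative, taken from files in this commit):

// Before: one mixed, alphabetized block.
use crate::common::Server;
use serde_json::json;
use std::io::Write;

// After group_imports = "StdExternalCrate": three blocks, std → external → crate.
use std::io::Write;

use serde_json::json;

use crate::common::Server;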

View file

@ -1,17 +1,14 @@
use std::{
fmt::Write,
panic::{catch_unwind, resume_unwind, UnwindSafe},
time::Duration,
};
use std::fmt::Write;
use std::panic::{catch_unwind, resume_unwind, UnwindSafe};
use std::time::Duration;
use actix_web::http::StatusCode;
use serde_json::{json, Value};
use tokio::time::sleep;
use urlencoding::encode as urlencode;
use super::service::Service;
use super::encoder::Encoder;
use super::service::Service;
pub struct Index<'a> {
pub uid: String,
@ -28,10 +25,8 @@ impl Index<'_> {
pub async fn load_test_set(&self) -> u64 {
let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
let (response, code) = self
.service
.post_str(url, include_str!("../assets/test_set.json"))
.await;
let (response, code) =
self.service.post_str(url, include_str!("../assets/test_set.json")).await;
assert_eq!(code, 202);
let update_id = response["taskUid"].as_i64().unwrap();
self.wait_task(update_id as u64).await;
@ -43,9 +38,7 @@ impl Index<'_> {
"uid": self.uid,
"primaryKey": primary_key,
});
self.service
.post_encoded("/indexes", body, self.encoder)
.await
self.service.post_encoded("/indexes", body, self.encoder).await
}
pub async fn update(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
@ -68,16 +61,12 @@ impl Index<'_> {
primary_key: Option<&str>,
) -> (Value, StatusCode) {
let url = match primary_key {
Some(key) => format!(
"/indexes/{}/documents?primaryKey={}",
urlencode(self.uid.as_ref()),
key
),
Some(key) => {
format!("/indexes/{}/documents?primaryKey={}", urlencode(self.uid.as_ref()), key)
}
None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
};
self.service
.post_encoded(url, documents, self.encoder)
.await
self.service.post_encoded(url, documents, self.encoder).await
}
pub async fn update_documents(
@ -86,11 +75,9 @@ impl Index<'_> {
primary_key: Option<&str>,
) -> (Value, StatusCode) {
let url = match primary_key {
Some(key) => format!(
"/indexes/{}/documents?primaryKey={}",
urlencode(self.uid.as_ref()),
key
),
Some(key) => {
format!("/indexes/{}/documents?primaryKey={}", urlencode(self.uid.as_ref()), key)
}
None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
};
self.service.put_encoded(url, documents, self.encoder).await
@ -174,13 +161,8 @@ impl Index<'_> {
}
pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/documents/delete-batch",
urlencode(self.uid.as_ref())
);
self.service
.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder)
.await
let url = format!("/indexes/{}/documents/delete-batch", urlencode(self.uid.as_ref()));
self.service.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder).await
}
pub async fn settings(&self) -> (Value, StatusCode) {
@ -190,9 +172,7 @@ impl Index<'_> {
pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
self.service
.patch_encoded(url, settings, self.encoder)
.await
self.service.patch_encoded(url, settings, self.encoder).await
}
pub async fn delete_settings(&self) -> (Value, StatusCode) {
@ -232,29 +212,19 @@ impl Index<'_> {
pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
let params = yaup::to_string(&query).unwrap();
let url = format!(
"/indexes/{}/search?{}",
urlencode(self.uid.as_ref()),
params
);
let url = format!("/indexes/{}/search?{}", urlencode(self.uid.as_ref()), params);
self.service.get(url).await
}
pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/settings/{}",
urlencode(self.uid.as_ref()),
"distinct-attribute"
);
let url =
format!("/indexes/{}/settings/{}", urlencode(self.uid.as_ref()), "distinct-attribute");
self.service.put_encoded(url, value, self.encoder).await
}
pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/settings/{}",
urlencode(self.uid.as_ref()),
"distinct-attribute"
);
let url =
format!("/indexes/{}/settings/{}", urlencode(self.uid.as_ref()), "distinct-attribute");
self.service.get(url).await
}
}
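The helper rewrites above show the width heuristic itself: a call chain stays on one line when the whole statement fits in `max_width` (100 columns by rustfmt's default), and when it does not, rustfmt breaks once after the `=` and indents the continuation instead of stacking every `.method()` on its own line. Both cases appear in this file:

// Fits within the width limit: kept on a single line.
self.service.post_encoded(url, documents, self.encoder).await

// Too wide for one line: one break after `=`, chain left intact.
let (response, code) =
    self.service.post_str(url, include_str!("../assets/test_set.json")).await;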

View file

@ -15,18 +15,10 @@ macro_rules! test_post_get_search {
let get_query: meilisearch_http::routes::search::SearchQuery = post_query.into();
let get_query = ::serde_url_params::to_string(&get_query).unwrap();
let ($response, $status_code) = $server.search_get(&get_query).await;
let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
panic!(
"panic in get route: {:?}",
e.downcast_ref::<&str>().unwrap()
)
});
let _ = ::std::panic::catch_unwind(|| $block)
.map_err(|e| panic!("panic in get route: {:?}", e.downcast_ref::<&str>().unwrap()));
let ($response, $status_code) = $server.search_post($query).await;
let _ = ::std::panic::catch_unwind(|| $block).map_err(|e| {
panic!(
"panic in post route: {:?}",
e.downcast_ref::<&str>().unwrap()
)
});
let _ = ::std::panic::catch_unwind(|| $block)
.map_err(|e| panic!("panic in post route: {:?}", e.downcast_ref::<&str>().unwrap()));
};
}

View file

@ -1,23 +1,22 @@
#![allow(dead_code)]
use actix_http::body::MessageBody;
use actix_web::dev::ServiceResponse;
use clap::Parser;
use std::path::Path;
use std::sync::Arc;
use actix_http::body::MessageBody;
use actix_web::dev::ServiceResponse;
use actix_web::http::StatusCode;
use byte_unit::{Byte, ByteUnit};
use clap::Parser;
use meilisearch_http::option::{IndexerOpts, MaxMemory, Opt};
use meilisearch_http::{analytics, create_app, setup_meilisearch};
use once_cell::sync::Lazy;
use serde_json::Value;
use tempfile::TempDir;
use meilisearch_http::option::{IndexerOpts, MaxMemory, Opt};
use meilisearch_http::{analytics, create_app, setup_meilisearch};
use crate::common::encoder::Encoder;
use super::index::Index;
use super::service::Service;
use crate::common::encoder::Encoder;
pub struct Server {
pub service: Service,
@ -40,17 +39,10 @@ impl Server {
let options = default_settings(dir.path());
let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
let service = Service {
index_scheduler: Arc::new(index_scheduler),
auth,
options,
api_key: None,
};
let service =
Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };
Server {
service,
_dir: Some(dir),
}
Server { service, _dir: Some(dir) }
}
pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
@ -63,17 +55,10 @@ impl Server {
options.master_key = Some("MASTER_KEY".to_string());
let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
let service = Service {
index_scheduler: Arc::new(index_scheduler),
auth,
options,
api_key: None,
};
let service =
Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };
Server {
service,
_dir: Some(dir),
}
Server { service, _dir: Some(dir) }
}
pub async fn new_auth() -> Self {
@ -84,17 +69,10 @@ impl Server {
pub async fn new_with_options(options: Opt) -> Result<Self, anyhow::Error> {
let (index_scheduler, auth) = setup_meilisearch(&options)?;
let service = Service {
index_scheduler: Arc::new(index_scheduler),
auth,
options,
api_key: None,
};
let service =
Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };
Ok(Server {
service,
_dir: None,
})
Ok(Server { service, _dir: None })
}
pub async fn init_web_app(
@ -120,11 +98,7 @@ impl Server {
}
pub fn index_with_encoder(&self, uid: impl AsRef<str>, encoder: Encoder) -> Index<'_> {
Index {
uid: uid.as_ref().to_string(),
service: &self.service,
encoder,
}
Index { uid: uid.as_ref().to_string(), service: &self.service, encoder }
}
pub async fn list_indexes(
@ -142,9 +116,7 @@ impl Server {
.map(|(offset, limit)| format!("{offset}&{limit}"))
.or_else(|| offset.xor(limit));
if let Some(query_parameter) = query_parameter {
self.service
.get(format!("/indexes?{query_parameter}"))
.await
self.service.get(format!("/indexes?{query_parameter}")).await
} else {
self.service.get("/indexes").await
}
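Struct literals follow the same rule: with field-init shorthand they usually fit on one line, and when the full statement is too wide, the break happens once at the `=` while the literal body stays on a single line. Both forms, taken from the hunks above:

// Short enough: one line.
Server { service, _dir: Some(dir) }

// Statement too wide: break after `=`, keep the literal on one line.
let service =
    Service { index_scheduler: Arc::new(index_scheduler), auth, options, api_key: None };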

View file

@ -1,14 +1,15 @@
use std::sync::Arc;
use actix_web::http::header::ContentType;
use actix_web::http::StatusCode;
use actix_web::test;
use actix_web::test::TestRequest;
use actix_web::{http::StatusCode, test};
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_http::{analytics, create_app, Opt};
use serde_json::Value;
use crate::common::encoder::Encoder;
use meilisearch_http::{analytics, create_app, Opt};
pub struct Service {
pub index_scheduler: Arc<IndexScheduler>,

View file

@ -2,10 +2,11 @@
mod common;
use crate::common::Server;
use actix_web::test;
use serde_json::{json, Value};
use crate::common::Server;
enum HttpVerb {
Put,
Patch,
@ -75,11 +76,7 @@ async fn error_json_bad_content_type() {
"calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);
// No content-type.
let req = verb
.test_request()
.uri(route)
.set_payload(document)
.to_request();
let req = verb.test_request().uri(route).set_payload(document).to_request();
let res = test::call_service(&app, req).await;
let status_code = res.status();
let body = test::read_body(res).await;

View file

@ -1,9 +1,10 @@
use crate::common::{GetAllDocumentsOptions, Server};
use actix_web::test;
use serde_json::{json, Value};
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use crate::common::encoder::Encoder;
use serde_json::{json, Value};
use time::{format_description::well_known::Rfc3339, OffsetDateTime};
use crate::common::{GetAllDocumentsOptions, Server};
/// This is the basic usage of our API and every other tests uses the content-type application/json
#[actix_rt::test]
@ -192,10 +193,7 @@ async fn error_add_documents_test_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
// put
let req = test::TestRequest::put()
@ -216,10 +214,7 @@ async fn error_add_documents_test_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#invalid_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
}
/// missing content-type must be refused
@ -253,10 +248,7 @@ async fn error_add_documents_test_no_content_type() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#missing_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
// put
let req = test::TestRequest::put()
@ -276,10 +268,7 @@ async fn error_add_documents_test_no_content_type() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(
response["link"],
"https://docs.meilisearch.com/errors#missing_content_type"
);
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
}
#[actix_rt::test]
@ -308,10 +297,7 @@ async fn error_add_malformed_csv_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// put
let req = test::TestRequest::put()
@ -332,10 +318,7 @@ async fn error_add_malformed_csv_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
}
#[actix_rt::test]
@ -364,10 +347,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// put
let req = test::TestRequest::put()
@ -388,10 +368,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// truncate
@ -416,10 +393,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// add one more char to the long string to test if the truncating works.
let document = format!("\"{}m\"", long);
@ -438,10 +412,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
}
#[actix_rt::test]
@ -470,10 +441,7 @@ async fn error_add_malformed_ndjson_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// put
let req = test::TestRequest::put()
@ -492,10 +460,7 @@ async fn error_add_malformed_ndjson_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#malformed_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
}
#[actix_rt::test]
@ -519,10 +484,7 @@ async fn error_add_missing_payload_csv_documents() {
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
// put
let req = test::TestRequest::put()
@ -538,10 +500,7 @@ async fn error_add_missing_payload_csv_documents() {
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
}
#[actix_rt::test]
@ -565,10 +524,7 @@ async fn error_add_missing_payload_json_documents() {
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
// put
let req = test::TestRequest::put()
@ -584,10 +540,7 @@ async fn error_add_missing_payload_json_documents() {
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
}
#[actix_rt::test]
@ -608,16 +561,10 @@ async fn error_add_missing_payload_ndjson_documents() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 400);
assert_eq!(
response["message"],
json!(r#"A ndjson payload is missing."#)
);
assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
// put
let req = test::TestRequest::put()
@ -630,16 +577,10 @@ async fn error_add_missing_payload_ndjson_documents() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 400);
assert_eq!(
response["message"],
json!(r#"A ndjson payload is missing."#)
);
assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(
response["link"],
json!("https://docs.meilisearch.com/errors#missing_payload")
);
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
}
#[actix_rt::test]
@ -792,10 +733,7 @@ async fn add_larger_dataset() {
assert_eq!(response["details"]["indexedDocuments"], 77);
assert_eq!(response["details"]["receivedDocuments"], 77);
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(1000),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() })
.await;
assert_eq!(code, 200, "failed with `{}`", response);
assert_eq!(response["results"].as_array().unwrap().len(), 77);
@ -900,9 +838,7 @@ async fn add_documents_invalid_geo_field() {
let server = Server::new().await;
let index = server.index("test");
index.create(Some("id")).await;
index
.update_settings(json!({"sortableAttributes": ["_geo"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["_geo"]})).await;
let documents = json!([
{
@ -1045,10 +981,7 @@ async fn batch_several_documents_addition() {
// Check if there are exactly 120 documents (150 - 30) in the index;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(200),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { limit: Some(200), ..Default::default() })
.await;
assert_eq!(code, 200, "failed with `{}`", response);
assert_eq!(response["results"].as_array().unwrap().len(), 120);

View file

@ -29,9 +29,7 @@ async fn delete_one_unexisting_document() {
async fn delete_one_document() {
let server = Server::new().await;
let index = server.index("test");
index
.add_documents(json!([{ "id": 0, "content": "foobar" }]), None)
.await;
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(0).await;
let (_response, code) = server.index("test").delete_document(0).await;
assert_eq!(code, 202);
@ -68,9 +66,7 @@ async fn clear_all_documents() {
assert_eq!(code, 202);
let _update = index.wait_task(1).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
}
@ -85,9 +81,7 @@ async fn clear_all_documents_empty_index() {
assert_eq!(code, 202);
let _update = index.wait_task(0).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
}
@ -121,9 +115,7 @@ async fn delete_batch() {
assert_eq!(code, 202);
let _update = index.wait_task(1).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
assert_eq!(response["results"][0]["id"], json!(3));
@ -139,9 +131,7 @@ async fn delete_no_document_batch() {
assert_eq!(code, 202, "{}", _response);
let _update = index.wait_task(1).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
}

View file

@ -1,11 +1,11 @@
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
use actix_web::test;
use http::header::ACCEPT_ENCODING;
use crate::common::encoder::Encoder;
use serde_json::{json, Value};
use urlencoding::encode as urlencode;
use crate::common::encoder::Encoder;
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
// TODO: partial test since we are testing error, and error is not yet fully implemented in
// transplant
#[actix_rt::test]
@ -58,14 +58,8 @@ async fn get_document() {
})
);
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["id"]),
}),
)
.await;
let (response, code) =
index.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["id"]) })).await;
assert_eq!(code, 200);
assert_eq!(
response,
@ -75,12 +69,7 @@ async fn get_document() {
);
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["nested.content"]),
}),
)
.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["nested.content"]) }))
.await;
assert_eq!(code, 200);
assert_eq!(
@ -94,10 +83,8 @@ async fn get_document() {
#[actix_rt::test]
async fn error_get_unexisting_index_all_documents() {
let server = Server::new().await;
let (response, code) = server
.index("test")
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) =
server.index("test").get_all_documents(GetAllDocumentsOptions::default()).await;
let expected_response = json!({
"message": "Index `test` not found.",
@ -119,9 +106,7 @@ async fn get_no_document() {
index.wait_task(0).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
}
@ -132,9 +117,7 @@ async fn get_all_documents_no_options() {
let index = server.index("test");
index.load_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 20);
@ -192,10 +175,7 @@ async fn test_get_all_documents_limit() {
index.load_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(5),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { limit: Some(5), ..Default::default() })
.await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 5);
@ -212,10 +192,7 @@ async fn test_get_all_documents_offset() {
index.load_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
offset: Some(5),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { offset: Some(5), ..Default::default() })
.await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
@ -338,24 +315,12 @@ async fn get_document_s_nested_attributes_to_retrieve() {
assert_eq!(code, 202);
index.wait_task(1).await;
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["content"]),
}),
)
.await;
let (response, code) =
index.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["content"]) })).await;
assert_eq!(code, 200);
assert_eq!(response, json!({}));
let (response, code) = index
.get_document(
1,
Some(GetDocumentOptions {
fields: Some(vec!["content"]),
}),
)
.await;
let (response, code) =
index.get_document(1, Some(GetDocumentOptions { fields: Some(vec!["content"]) })).await;
assert_eq!(code, 200);
assert_eq!(
response,
@ -368,12 +333,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
);
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["content.truc"]),
}),
)
.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["content.truc"]) }))
.await;
assert_eq!(code, 200);
assert_eq!(
@ -383,12 +343,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
})
);
let (response, code) = index
.get_document(
1,
Some(GetDocumentOptions {
fields: Some(vec!["content.truc"]),
}),
)
.get_document(1, Some(GetDocumentOptions { fields: Some(vec!["content.truc"]) }))
.await;
assert_eq!(code, 200);
assert_eq!(
@ -405,20 +360,13 @@ async fn get_document_s_nested_attributes_to_retrieve() {
async fn get_documents_displayed_attributes_is_ignored() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"displayedAttributes": ["gender"]}))
.await;
index.update_settings(json!({"displayedAttributes": ["gender"]})).await;
index.load_test_set().await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
assert_eq!(
response["results"][0].as_object().unwrap().keys().count(),
16
);
assert_eq!(response["results"][0].as_object().unwrap().keys().count(), 16);
assert!(response["results"][0]["gender"] != json!(null));
assert_eq!(response["offset"], json!(0));

View file

@ -1,7 +1,7 @@
use crate::common::{GetAllDocumentsOptions, Server};
use serde_json::json;
use crate::common::encoder::Encoder;
use serde_json::json;
use crate::common::{GetAllDocumentsOptions, Server};
#[actix_rt::test]
async fn error_document_update_create_index_bad_uid() {
@ -84,10 +84,7 @@ async fn update_document() {
let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
assert_eq!(
response.to_string(),
r##"{"doc_id":1,"content":"foo","other":"bar"}"##
);
assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
}
#[actix_rt::test]
@ -125,10 +122,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
assert_eq!(
response.to_string(),
r##"{"doc_id":1,"content":"foo","other":"bar"}"##
);
assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
}
#[actix_rt::test]
@ -143,10 +137,7 @@ async fn update_larger_dataset() {
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["details"]["indexedDocuments"], 77);
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(1000),
..Default::default()
})
.get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() })
.await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 77);

View file

@ -1,10 +1,10 @@
mod data;
use crate::common::{default_settings, GetAllDocumentsOptions, Server};
use meilisearch_http::Opt;
use serde_json::json;
use self::data::GetDump;
use crate::common::{default_settings, GetAllDocumentsOptions, Server};
// all the following tests are ignored on windows. See #2364
#[actix_rt::test]
@ -17,14 +17,8 @@ async fn import_dump_v1() {
GetDump::MoviesWithSettingsV1.path(),
GetDump::RubyGemsWithSettingsV1.path(),
] {
let options = Opt {
import_dump: Some(path),
..default_settings(temp.path())
};
let error = Server::new_with_options(options)
.await
.map(drop)
.unwrap_err();
let options = Opt { import_dump: Some(path), ..default_settings(temp.path()) };
let error = Server::new_with_options(options).await.map(drop).unwrap_err();
assert_eq!(error.to_string(), "The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.");
}
@ -35,10 +29,8 @@ async fn import_dump_v1() {
async fn import_dump_v2_movie_raw() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesRawV2.path()),
..default_settings(temp.path())
};
let options =
Opt { import_dump: Some(GetDump::MoviesRawV2.path()), ..default_settings(temp.path()) };
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
@ -227,10 +219,8 @@ async fn import_dump_v2_rubygems_with_settings() {
async fn import_dump_v3_movie_raw() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesRawV3.path()),
..default_settings(temp.path())
};
let options =
Opt { import_dump: Some(GetDump::MoviesRawV3.path()), ..default_settings(temp.path()) };
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
@ -419,10 +409,8 @@ async fn import_dump_v3_rubygems_with_settings() {
async fn import_dump_v4_movie_raw() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesRawV4.path()),
..default_settings(temp.path())
};
let options =
Opt { import_dump: Some(GetDump::MoviesRawV4.path()), ..default_settings(temp.path()) };
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
@ -611,10 +599,8 @@ async fn import_dump_v4_rubygems_with_settings() {
async fn import_dump_v5() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::TestV5.path()),
..default_settings(temp.path())
};
let options =
Opt { import_dump: Some(GetDump::TestV5.path()), ..default_settings(temp.path()) };
let mut server = Server::new_auth_with_options(options, temp).await;
server.use_api_key("MASTER_KEY");
@ -654,14 +640,10 @@ async fn import_dump_v5() {
assert_eq!(code, 200);
assert_eq!(stats, expected_stats);
let (docs, code) = index2
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (docs, code) = index2.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(docs["results"].as_array().unwrap().len(), 10);
let (docs, code) = index1
.get_all_documents(GetAllDocumentsOptions::default())
.await;
let (docs, code) = index1.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(docs["results"].as_array().unwrap().len(), 10);

View file

@ -1,10 +1,11 @@
use crate::common::encoder::Encoder;
use crate::common::Server;
use actix_web::http::header::ContentType;
use actix_web::test;
use http::header::ACCEPT_ENCODING;
use serde_json::{json, Value};
use crate::common::encoder::Encoder;
use crate::common::Server;
#[actix_rt::test]
async fn create_index_no_primary_key() {
let server = Server::new().await;

View file

@ -1,6 +1,6 @@
use serde_json::{json, Value};
use crate::common::Server;
use serde_json::json;
use serde_json::Value;
#[actix_rt::test]
async fn create_and_get_index() {
@ -63,12 +63,8 @@ async fn list_multiple_indexes() {
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 2);
assert!(arr
.iter()
.any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
assert!(arr
.iter()
.any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
assert!(arr.iter().any(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null));
assert!(arr.iter().any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
}
#[actix_rt::test]
@ -77,10 +73,7 @@ async fn get_and_paginate_indexes() {
const NB_INDEXES: usize = 50;
for i in 0..NB_INDEXES {
server.index(&format!("test_{i:02}")).create(None).await;
server
.index(&format!("test_{i:02}"))
.wait_task(i as u64)
.await;
server.index(&format!("test_{i:02}")).wait_task(i as u64).await;
}
// basic

View file

@ -17,10 +17,7 @@ async fn stats() {
assert_eq!(code, 200);
assert_eq!(response["numberOfDocuments"], 0);
assert!(response["isIndexing"] == false);
assert!(response["fieldDistribution"]
.as_object()
.unwrap()
.is_empty());
assert!(response["fieldDistribution"].as_object().unwrap().is_empty());
let documents = json!([
{

View file

@ -1,7 +1,9 @@
use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
use crate::common::encoder::Encoder;
use crate::common::Server;
use serde_json::json;
use time::{format_description::well_known::Rfc3339, OffsetDateTime};
#[actix_rt::test]
async fn update_primary_key() {

View file

@ -1,7 +1,7 @@
use crate::common::Server;
use serde_json::json;
use super::DOCUMENTS;
use crate::common::Server;
#[actix_rt::test]
async fn search_unexisting_index() {
@ -45,16 +45,14 @@ async fn search_invalid_highlight_and_crop_tags() {
for field in fields {
// object
let (response, code) = index
.search_post(json!({field.to_string(): {"marker": "<crop>"}}))
.await;
let (response, code) =
index.search_post(json!({field.to_string(): {"marker": "<crop>"}})).await;
assert_eq!(code, 400, "field {} passing object: {}", &field, response);
assert_eq!(response["code"], "bad_request");
// array
let (response, code) = index
.search_post(json!({field.to_string(): ["marker", "<crop>"]}))
.await;
let (response, code) =
index.search_post(json!({field.to_string(): ["marker", "<crop>"]})).await;
assert_eq!(code, 400, "field {} passing array: {}", &field, response);
assert_eq!(response["code"], "bad_request");
}
@ -65,9 +63,7 @@ async fn filter_invalid_syntax_object() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -92,9 +88,7 @@ async fn filter_invalid_syntax_array() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -119,9 +113,7 @@ async fn filter_invalid_syntax_string() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -134,13 +126,10 @@ async fn filter_invalid_syntax_string() {
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(
json!({"filter": "title = Glass XOR title = Glass"}),
|response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
},
)
.search(json!({"filter": "title = Glass XOR title = Glass"}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
})
.await;
}
@ -149,9 +138,7 @@ async fn filter_invalid_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -176,9 +163,7 @@ async fn filter_invalid_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -203,9 +188,7 @@ async fn filter_reserved_geo_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -230,9 +213,7 @@ async fn filter_reserved_geo_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -257,9 +238,7 @@ async fn filter_reserved_attribute_array() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -272,13 +251,10 @@ async fn filter_reserved_attribute_array() {
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(
json!({"filter": ["_geoDistance = Glass"]}),
|response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
},
)
.search(json!({"filter": ["_geoDistance = Glass"]}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
})
.await;
}
@ -287,9 +263,7 @@ async fn filter_reserved_attribute_string() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -302,13 +276,10 @@ async fn filter_reserved_attribute_string() {
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(
json!({"filter": "_geoDistance = Glass"}),
|response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
},
)
.search(json!({"filter": "_geoDistance = Glass"}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
})
.await;
}
@ -317,9 +288,7 @@ async fn sort_geo_reserved_attribute() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -349,9 +318,7 @@ async fn sort_reserved_attribute() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -381,9 +348,7 @@ async fn sort_unsortable_attribute() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -413,9 +378,7 @@ async fn sort_invalid_syntax() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
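The other recurring shape in these test files is a call whose last argument is a closure. Under this configuration rustfmt pulls the short leading arguments up onto the call line and lets the closure body spill over multiple lines, rather than wrapping every argument separately:

// Before: each argument on its own line, closure fully indented.
index
    .search(
        json!({"filter": "_geoDistance = Glass"}),
        |response, code| {
            assert_eq!(code, 400);
        },
    )
    .await;

// After: arguments inlined, only the trailing closure stays multi-line.
index
    .search(json!({"filter": "_geoDistance = Glass"}), |response, code| {
        assert_eq!(code, 400);
    })
    .await;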

View file

@ -1,15 +1,14 @@
use serde_json::json;
use super::*;
use crate::common::Server;
use serde_json::json;
#[actix_rt::test]
async fn formatted_contain_wildcard() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({ "displayedAttributes": ["id", "cattos"] }))
.await;
index.update_settings(json!({ "displayedAttributes": ["id", "cattos"] })).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -34,19 +33,16 @@ async fn formatted_contain_wildcard() {
.await;
index
.search(
json!({ "q": "pesti", "attributesToRetrieve": ["*"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
})
);
},
)
.search(json!({ "q": "pesti", "attributesToRetrieve": ["*"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
})
);
})
.await;
index
@ -91,23 +87,20 @@ async fn formatted_contain_wildcard() {
.await;
index
.search(
json!({ "q": "pesti", "attributesToCrop": ["*"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
.search(json!({ "q": "pesti", "attributesToCrop": ["*"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
"_formatted": {
"id": "852",
"cattos": "pesti",
"_formatted": {
"id": "852",
"cattos": "pesti",
}
})
);
},
)
}
})
);
})
.await;
}
@ -121,27 +114,24 @@ async fn format_nested() {
index.wait_task(0).await;
index
.search(
json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"doggos": [
{
"name": "bobby",
"age": 2,
},
{
"name": "buddy",
"age": 4,
},
],
})
);
},
)
.search(json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"doggos": [
{
"name": "bobby",
"age": 2,
},
{
"name": "buddy",
"age": 4,
},
],
})
);
})
.await;
index
@ -297,9 +287,7 @@ async fn displayedattr_2_smol() {
let index = server.index("test");
// not enough displayed for the other settings
index
.update_settings(json!({ "displayedAttributes": ["id"] }))
.await;
index.update_settings(json!({ "displayedAttributes": ["id"] })).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -319,36 +307,30 @@ async fn displayedattr_2_smol() {
.await;
index
.search(
json!({ "attributesToRetrieve": ["id"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
},
)
.search(json!({ "attributesToRetrieve": ["id"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
})
.await;
index
.search(
json!({ "attributesToHighlight": ["id"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"_formatted": {
"id": "852",
}
})
);
},
)
.search(json!({ "attributesToHighlight": ["id"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"_formatted": {
"id": "852",
}
})
);
})
.await;
index
@ -385,43 +367,34 @@ async fn displayedattr_2_smol() {
.await;
index
.search(
json!({ "attributesToHighlight": ["cattos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
},
)
.search(json!({ "attributesToHighlight": ["cattos"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
})
.await;
index
.search(
json!({ "attributesToCrop": ["cattos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
},
)
.search(json!({ "attributesToCrop": ["cattos"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
})
.await;
index
.search(
json!({ "attributesToRetrieve": ["cattos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"][0], json!({}));
},
)
.search(json!({ "attributesToRetrieve": ["cattos"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"][0], json!({}));
})
.await;
index

View file

@ -4,10 +4,11 @@
mod errors;
mod formatted;
use crate::common::Server;
use once_cell::sync::Lazy;
use serde_json::{json, Value};
use crate::common::Server;
pub(self) static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
json!([
{
@ -198,9 +199,7 @@ async fn search_with_filter_string_notation() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -220,9 +219,7 @@ async fn search_with_filter_string_notation() {
let index = server.index("nested");
index
.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -261,9 +258,7 @@ async fn search_with_filter_array_notation() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -291,9 +286,7 @@ async fn search_with_sort_on_numbers() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -313,9 +306,7 @@ async fn search_with_sort_on_numbers() {
let index = server.index("nested");
index
.update_settings(json!({"sortableAttributes": ["doggos.age"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["doggos.age"]})).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -339,9 +330,7 @@ async fn search_with_sort_on_strings() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["title"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -361,9 +350,7 @@ async fn search_with_sort_on_strings() {
let index = server.index("nested");
index
.update_settings(json!({"sortableAttributes": ["doggos.name"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["doggos.name"]})).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -387,9 +374,7 @@ async fn search_with_multiple_sort() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"sortableAttributes": ["id", "title"]}))
.await;
index.update_settings(json!({"sortableAttributes": ["id", "title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -409,9 +394,7 @@ async fn search_facet_distribution() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({"filterableAttributes": ["title"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -433,9 +416,7 @@ async fn search_facet_distribution() {
let index = server.index("nested");
index
.update_settings(json!({"filterableAttributes": ["father", "doggos.name"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["father", "doggos.name"]})).await;
let documents = NESTED_DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -466,9 +447,7 @@ async fn search_facet_distribution() {
)
.await;
index
.update_settings(json!({"filterableAttributes": ["doggos"]}))
.await;
index.update_settings(json!({"filterableAttributes": ["doggos"]})).await;
index.wait_task(4).await;
index
@ -501,10 +480,7 @@ async fn search_facet_distribution() {
dist["doggos.name"],
json!({ "bobby": 1, "buddy": 1, "gros bill": 1, "turbo": 1, "fast": 1})
);
assert_eq!(
dist["doggos.age"],
json!({ "2": 1, "4": 1, "5": 1, "6": 1, "8": 1})
);
assert_eq!(dist["doggos.age"], json!({ "2": 1, "4": 1, "5": 1, "6": 1, "8": 1}));
},
)
.await;
@ -515,17 +491,14 @@ async fn displayed_attributes() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({ "displayedAttributes": ["title"] }))
.await;
index.update_settings(json!({ "displayedAttributes": ["title"] })).await;
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (response, code) = index
.search_post(json!({ "attributesToRetrieve": ["title", "id"] }))
.await;
let (response, code) =
index.search_post(json!({ "attributesToRetrieve": ["title", "id"] })).await;
assert_eq!(code, 200, "{}", response);
assert!(response["hits"][0].get("title").is_some());
}
@ -535,9 +508,7 @@ async fn placeholder_search_is_hard_limited() {
let server = Server::new().await;
let index = server.index("test");
let documents: Vec<_> = (0..1200)
.map(|i| json!({ "id": i, "text": "I am unique!" }))
.collect();
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
index.add_documents(documents.into(), None).await;
index.wait_task(0).await;
@ -566,9 +537,7 @@ async fn placeholder_search_is_hard_limited() {
)
.await;
index
.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
.await;
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(1).await;
index
@ -602,9 +571,7 @@ async fn search_is_hard_limited() {
let server = Server::new().await;
let index = server.index("test");
let documents: Vec<_> = (0..1200)
.map(|i| json!({ "id": i, "text": "I am unique!" }))
.collect();
let documents: Vec<_> = (0..1200).map(|i| json!({ "id": i, "text": "I am unique!" })).collect();
index.add_documents(documents.into(), None).await;
index.wait_task(0).await;
@ -635,9 +602,7 @@ async fn search_is_hard_limited() {
)
.await;
index
.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
.await;
index.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } })).await;
index.wait_task(1).await;
index
@ -673,13 +638,9 @@ async fn faceting_max_values_per_facet() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({ "filterableAttributes": ["number"] }))
.await;
index.update_settings(json!({ "filterableAttributes": ["number"] })).await;
let documents: Vec<_> = (0..10_000)
.map(|id| json!({ "id": id, "number": id * 10 }))
.collect();
let documents: Vec<_> = (0..10_000).map(|id| json!({ "id": id, "number": id * 10 })).collect();
index.add_documents(json!(documents), None).await;
index.wait_task(1).await;
@ -696,9 +657,7 @@ async fn faceting_max_values_per_facet() {
)
.await;
index
.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } }))
.await;
index.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } })).await;
index.wait_task(2).await;
index

View file
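
Every hunk in the file above makes the same change: a builder chain, json! call, or struct literal that was previously split one step per line is collapsed onto a single line. A minimal sketch of the rule at work, assuming the new rustfmt file sets use_small_heuristics = "Max" (the file itself never appears in this excerpt): an expression stays on one line whenever it fits within max_width, 100 columns by default, and breaks as before otherwise.

// Sketch only -- both forms compile and are equivalent; under the assumed
// use_small_heuristics = "Max", rustfmt emits the second form because the
// whole expression fits within max_width.
fn main() {
    // The layout the old code used: one builder step per line.
    let before: Vec<String> = (0..1200)
        .map(|i| format!("{}: I am unique!", i))
        .collect();
    // The layout this commit switches to: the chain on a single line.
    let after: Vec<String> = (0..1200).map(|i| format!("{}: I am unique!", i)).collect();
    assert_eq!(before, after);
}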

@ -1,23 +1,20 @@
use crate::common::Server;
use serde_json::json;

use crate::common::Server;

#[actix_rt::test]
async fn set_and_reset_distinct_attribute() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index
.update_settings(json!({ "distinctAttribute": "test"}))
.await;
let (_response, _code) = index.update_settings(json!({ "distinctAttribute": "test"})).await;
index.wait_task(0).await;
let (response, _) = index.settings().await;
assert_eq!(response["distinctAttribute"], "test");
index
.update_settings(json!({ "distinctAttribute": null }))
.await;
index.update_settings(json!({ "distinctAttribute": null })).await;
index.wait_task(1).await;

View file

@ -13,14 +13,7 @@ static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|
map.insert("distinct_attribute", json!(Value::Null));
map.insert(
"ranking_rules",
json!([
"words",
"typo",
"proximity",
"attribute",
"sort",
"exactness"
]),
json!(["words", "typo", "proximity", "attribute", "sort", "exactness"]),
);
map.insert("stop_words", json!([]));
map.insert("synonyms", json!({}));
@ -63,14 +56,7 @@ async fn get_settings() {
assert_eq!(settings["distinctAttribute"], json!(null));
assert_eq!(
settings["rankingRules"],
json!([
"words",
"typo",
"proximity",
"attribute",
"sort",
"exactness"
])
json!(["words", "typo", "proximity", "attribute", "sort", "exactness"])
);
assert_eq!(settings["stopWords"], json!([]));
assert_eq!(
@ -99,18 +85,14 @@ async fn error_update_settings_unknown_field() {
async fn test_partial_update() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index
.update_settings(json!({"displayedAttributes": ["foo"]}))
.await;
let (_response, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
index.wait_task(0).await;
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["*"]));
let (_response, _) = index
.update_settings(json!({"searchableAttributes": ["bar"]}))
.await;
let (_response, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
index.wait_task(1).await;
let (response, code) = index.settings().await;
@ -158,10 +140,7 @@ async fn reset_all_settings() {
assert_eq!(response["displayedAttributes"], json!(["name", "age"]));
assert_eq!(response["searchableAttributes"], json!(["name"]));
assert_eq!(response["stopWords"], json!(["the"]));
assert_eq!(
response["synonyms"],
json!({"puppy": ["dog", "doggo", "potat"] })
);
assert_eq!(response["synonyms"], json!({"puppy": ["dog", "doggo", "potat"] }));
assert_eq!(response["filterableAttributes"], json!(["age"]));
index.delete_settings().await;
@ -299,9 +278,8 @@ async fn error_set_invalid_ranking_rules() {
let index = server.index("test");
index.create(None).await;
let (_response, _code) = index
.update_settings(json!({ "rankingRules": [ "manyTheFish"]}))
.await;
let (_response, _code) =
index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
index.wait_task(1).await;
let (response, code) = index.get_task(1).await;

View file

@ -1,11 +1,10 @@
use std::time::Duration;
use crate::common::server::default_settings;
use crate::common::GetAllDocumentsOptions;
use crate::common::Server;
use meilisearch_http::Opt;
use tokio::time::sleep;
use meilisearch_http::Opt;
use crate::common::server::default_settings;
use crate::common::{GetAllDocumentsOptions, Server};

macro_rules! verify_snapshot {
(
@ -62,10 +61,7 @@ async fn perform_snapshot() {
let snapshot_path = snapshot_dir.path().to_owned().join("db.snapshot");
let options = Opt {
import_snapshot: Some(snapshot_path),
..default_settings(temp.path())
};
let options = Opt { import_snapshot: Some(snapshot_path), ..default_settings(temp.path()) };
let snapshot_server = Server::new_with_options(options).await.unwrap();

View file
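
Several of the hunks around here touch nothing but use statements: use time::{format_description::well_known::Rfc3339, OffsetDateTime}; becomes two module-level imports, and crate-local imports move into their own trailing block. That matches rustfmt's import options; a sketch of the resulting layout, assuming the new rustfmt file sets group_imports = "StdExternalCrate" and imports_granularity = "Module" (both are unstable options, so unstable_features = true and a nightly rustfmt would also be required):

// Sketch of the assumed import policy; these lines mirror the hunks nearby,
// shown with the blank-line grouping rustfmt enforces between the groups.
use std::time::Duration;                            // group 1: std

use serde_json::json;                               // group 2: external crates,
use time::format_description::well_known::Rfc3339;  // one `use` per module
use time::OffsetDateTime;

use crate::common::Server;                          // group 3: crate-local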

@ -1,5 +1,6 @@
use serde_json::json;
use time::{format_description::well_known::Rfc3339, OffsetDateTime};
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

use crate::common::Server;

View file

@ -1,8 +1,9 @@
use crate::common::Server;
use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;

use crate::common::Server;

#[actix_rt::test]
async fn error_get_unexisting_task_status() {
let server = Server::new().await;
@ -49,10 +50,7 @@ async fn list_tasks() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.list_tasks().await;
assert_eq!(code, 200);
@ -66,10 +64,7 @@ async fn list_tasks_with_star_filters() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.service.get("/tasks?indexUid=test").await;
assert_eq!(code, 200);
@ -87,10 +82,8 @@ async fn list_tasks_with_star_filters() {
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?type=*,documentAdditionOrUpdate&status=*")
.await;
let (response, code) =
index.service.get("/tasks?type=*,documentAdditionOrUpdate&status=*").await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
@ -116,10 +109,7 @@ async fn list_tasks_status_filtered() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
@ -145,19 +135,15 @@ async fn list_tasks_type_filtered() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index
.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[])
.await;
let (response, code) =
index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
@ -169,10 +155,7 @@ async fn list_tasks_status_and_type_filtered() {
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await;
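
Taken together, the hunks pin down what the new rustfmt file must contain even though the file itself never appears in this excerpt: maximal one-line collapsing, module-granularity imports, and std/external/crate grouping. A plausible .rustfmt.toml consistent with all of the above -- inferred from the diff, not confirmed against the commit:

# Inferred, not confirmed: a .rustfmt.toml that would reproduce this diff's style.
unstable_features = true            # required for the two import options below
use_small_heuristics = "Max"        # collapse chains/calls that fit in max_width
imports_granularity = "Module"      # merge imports down to one `use` per module
group_imports = "StdExternalCrate"  # order: std, external crates, crate-local

With that file at the repository root, cargo +nightly fmt should reproduce the formatting shown in every hunk above.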