Merge pull request #601 from meilisearch/tide-to-actix-web

Change tide to actix-web
Clément Renault 2020-04-28 18:43:06 +02:00 committed by GitHub
commit 899559a060
31 changed files with 2730 additions and 2216 deletions
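The change applied to every route below follows one pattern: a tide handler that pulled state, the ACL check and the URL parameters out of a Request<Data> becomes an actix-web handler that receives extractors and declares its ACL as middleware on the route macro. A minimal before/after sketch of that shape (the example handler, its route path and the json! body are illustrative, not taken from the commit; imports mirror the route modules further down):

// Before (tide 0.6): state, ACL check and parameters all come through the request object.
pub async fn example(ctx: Request<Data>) -> SResult<Response> {
    ctx.is_allowed(Private)?;
    let _index = ctx.index()?;
    Ok(tide::Response::new(200).body_json(&json!({ "ok": true }))?)
}

// After (actix-web 2): state and path are extractors, the ACL is a wrapper on the route,
// and errors are plain ResponseError values rendered by the framework.
#[get("/indexes/{index_uid}/example", wrap = "Authentication::Private")]
async fn example(
    data: web::Data<Data>,
    path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
    let _index = data
        .db
        .open_index(&path.index_uid)
        .ok_or(ResponseError::index_not_found(&path.index_uid))?;
    Ok(HttpResponse::Ok().json(json!({ "ok": true })))
}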

Cargo.lock (generated), 1281 changed lines

File diff suppressed because it is too large.

View File

@@ -14,7 +14,13 @@ name = "meilisearch"
path = "src/main.rs"
[dependencies]
async-std = { version = "1.5.0", features = ["attributes"] }
actix-cors = "0.2.0"
actix-files = "0.2.1"
actix-http = "1"
actix-rt = "1"
actix-service = "1.0.5"
actix-web = "2"
actix-web-macros = "0.1.0"
chrono = { version = "0.4.11", features = ["serde"] }
crossbeam-channel = "0.4.2"
env_logger = "0.7.1"
@@ -30,25 +36,24 @@ meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.10.1"}
mime = "0.3.16"
pretty-bytes = "0.2.2"
rand = "0.7.3"
rayon = "1.3.0"
serde = { version = "1.0.105", features = ["derive"] }
serde_json = { version = "1.0.50", features = ["preserve_order"] }
serde_qs = "0.5.2"
sha2 = "0.8.1"
siphasher = "0.3.2"
slice-group-by = "0.2.6"
structopt = "0.3.12"
sysinfo = "0.12.0"
tide = "0.6.0"
tokio = { version = "0.2.18", features = ["macros"] }
ureq = { version = "0.12.0", features = ["tls"], default-features = false }
walkdir = "2.3.1"
whoami = "0.8.1"
slice-group-by = "0.2.6"
[dev-dependencies]
http-service = "0.4.0"
http-service-mock = "0.4.0"
tempdir = "0.3.7"
once_cell = "1.3.1"
tokio = { version = "0.2.18", features = ["macros", "time"] }
[dev-dependencies.assert-json-diff]
git = "https://github.com/qdequele/assert-json-diff"

View File

@@ -9,8 +9,8 @@ use meilisearch_core::{Database, Error as MError, MResult, MainT, UpdateT};
use sha2::Digest;
use sysinfo::Pid;
use crate::index_update_callback;
use crate::option::Opt;
use crate::routes::index::index_update_callback;
const LAST_UPDATE_KEY: &str = "last-update";
@@ -37,7 +37,7 @@ pub struct DataInner {
pub server_pid: Pid,
}
#[derive(Default, Clone)]
#[derive(Clone)]
pub struct ApiKeys {
pub public: Option<String>,
pub private: Option<String>,
@@ -135,7 +135,7 @@ impl Data {
let db = Arc::new(Database::open_or_create(opt.db_path).unwrap());
let mut api_keys = ApiKeys {
master: opt.master_key.clone(),
master: opt.master_key,
private: None,
public: None,
};

View File

@@ -1,191 +1,182 @@
use std::fmt::Display;
use std::fmt;
use http::status::StatusCode;
use log::{error, warn};
use meilisearch_core::{FstError, HeedError};
use serde::{Deserialize, Serialize};
use tide::IntoResponse;
use tide::Response;
use crate::helpers::meilisearch::Error as SearchError;
pub type SResult<T> = Result<T, ResponseError>;
use actix_http::ResponseBuilder;
use actix_web as aweb;
use actix_web::http::StatusCode;
use serde_json::json;
#[derive(Debug)]
pub enum ResponseError {
Internal(String),
BadRequest(String),
InvalidToken(String),
NotFound(String),
IndexNotFound(String),
DocumentNotFound(String),
MissingHeader(String),
FilterParsing(String),
BadParameter(String, String),
OpenIndex(String),
BadRequest(String),
CreateIndex(String),
DocumentNotFound(String),
IndexNotFound(String),
Internal(String),
InvalidIndexUid,
InvalidToken(String),
Maintenance,
MissingAuthorizationHeader,
MissingHeader(String),
NotFound(String),
OpenIndex(String),
FilterParsing(String),
RetrieveDocument(u64, String),
SearchDocuments(String),
}
impl ResponseError {
pub fn internal(message: impl Display) -> ResponseError {
ResponseError::Internal(message.to_string())
pub fn internal(err: impl fmt::Display) -> ResponseError {
ResponseError::Internal(err.to_string())
}
pub fn bad_request(message: impl Display) -> ResponseError {
ResponseError::BadRequest(message.to_string())
pub fn bad_request(err: impl fmt::Display) -> ResponseError {
ResponseError::BadRequest(err.to_string())
}
pub fn invalid_token(message: impl Display) -> ResponseError {
ResponseError::InvalidToken(message.to_string())
pub fn missing_authorization_header() -> ResponseError {
ResponseError::MissingAuthorizationHeader
}
pub fn not_found(message: impl Display) -> ResponseError {
ResponseError::NotFound(message.to_string())
pub fn invalid_token(err: impl fmt::Display) -> ResponseError {
ResponseError::InvalidToken(err.to_string())
}
pub fn index_not_found(message: impl Display) -> ResponseError {
ResponseError::IndexNotFound(message.to_string())
pub fn not_found(err: impl fmt::Display) -> ResponseError {
ResponseError::NotFound(err.to_string())
}
pub fn document_not_found(message: impl Display) -> ResponseError {
ResponseError::DocumentNotFound(message.to_string())
pub fn index_not_found(err: impl fmt::Display) -> ResponseError {
ResponseError::IndexNotFound(err.to_string())
}
pub fn missing_header(message: impl Display) -> ResponseError {
ResponseError::MissingHeader(message.to_string())
pub fn document_not_found(err: impl fmt::Display) -> ResponseError {
ResponseError::DocumentNotFound(err.to_string())
}
pub fn bad_parameter(name: impl Display, message: impl Display) -> ResponseError {
ResponseError::BadParameter(name.to_string(), message.to_string())
pub fn missing_header(err: impl fmt::Display) -> ResponseError {
ResponseError::MissingHeader(err.to_string())
}
pub fn open_index(message: impl Display) -> ResponseError {
ResponseError::OpenIndex(message.to_string())
pub fn bad_parameter(param: impl fmt::Display, err: impl fmt::Display) -> ResponseError {
ResponseError::BadParameter(param.to_string(), err.to_string())
}
pub fn create_index(message: impl Display) -> ResponseError {
ResponseError::CreateIndex(message.to_string())
pub fn open_index(err: impl fmt::Display) -> ResponseError {
ResponseError::OpenIndex(err.to_string())
}
pub fn create_index(err: impl fmt::Display) -> ResponseError {
ResponseError::CreateIndex(err.to_string())
}
pub fn invalid_index_uid() -> ResponseError {
ResponseError::InvalidIndexUid
}
pub fn maintenance() -> ResponseError {
ResponseError::Maintenance
}
pub fn retrieve_document(doc_id: u64, err: impl fmt::Display) -> ResponseError {
ResponseError::RetrieveDocument(doc_id, err.to_string())
}
pub fn search_documents(err: impl fmt::Display) -> ResponseError {
ResponseError::SearchDocuments(err.to_string())
}
}
impl IntoResponse for ResponseError {
fn into_response(self) -> Response {
impl fmt::Display for ResponseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ResponseError::Internal(err) => {
error!("internal server error: {}", err);
error("Internal server error".to_string(),
StatusCode::INTERNAL_SERVER_ERROR,
)
}
ResponseError::FilterParsing(err) => {
warn!("error paring filter: {}", err);
error(format!("parsing error: {}", err),
StatusCode::BAD_REQUEST)
}
ResponseError::BadRequest(err) => {
warn!("bad request: {}", err);
error(err, StatusCode::BAD_REQUEST)
}
ResponseError::InvalidToken(err) => {
error(format!("Invalid API key: {}", err), StatusCode::FORBIDDEN)
}
ResponseError::NotFound(err) => error(err, StatusCode::NOT_FOUND),
ResponseError::IndexNotFound(index) => {
error(format!("Index {} not found", index), StatusCode::NOT_FOUND)
}
ResponseError::DocumentNotFound(id) => error(
format!("Document with id {} not found", id),
StatusCode::NOT_FOUND,
),
ResponseError::MissingHeader(header) => error(
format!("Header {} is missing", header),
StatusCode::UNAUTHORIZED,
),
ResponseError::BadParameter(param, e) => error(
format!("Url parameter {} error: {}", param, e),
StatusCode::BAD_REQUEST,
),
ResponseError::CreateIndex(err) => error(
format!("Impossible to create index; {}", err),
StatusCode::BAD_REQUEST,
),
ResponseError::OpenIndex(err) => error(
format!("Impossible to open index; {}", err),
StatusCode::BAD_REQUEST,
),
ResponseError::InvalidIndexUid => error(
"Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).".to_string(),
StatusCode::BAD_REQUEST,
),
ResponseError::Maintenance => error(
String::from("Server is in maintenance, please try again later"),
StatusCode::SERVICE_UNAVAILABLE,
),
Self::BadParameter(param, err) => write!(f, "Url parameter {} error: {}", param, err),
Self::BadRequest(err) => f.write_str(err),
Self::CreateIndex(err) => write!(f, "Impossible to create index; {}", err),
Self::DocumentNotFound(document_id) => write!(f, "Document with id {} not found", document_id),
Self::IndexNotFound(index_uid) => write!(f, "Index {} not found", index_uid),
Self::Internal(err) => f.write_str(err),
Self::InvalidIndexUid => f.write_str("Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_)."),
Self::InvalidToken(err) => write!(f, "Invalid API key: {}", err),
Self::Maintenance => f.write_str("Server is in maintenance, please try again later"),
Self::FilterParsing(err) => write!(f, "parsing error: {}", err),
Self::MissingAuthorizationHeader => f.write_str("You must have an authorization token"),
Self::MissingHeader(header) => write!(f, "Header {} is missing", header),
Self::NotFound(err) => write!(f, "{} not found", err),
Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err),
Self::RetrieveDocument(id, err) => write!(f, "impossible to retrieve the document with id: {}; {}", id, err),
Self::SearchDocuments(err) => write!(f, "impossible to search documents; {}", err),
}
}
}
#[derive(Serialize, Deserialize)]
struct ErrorMessage {
message: String,
impl aweb::error::ResponseError for ResponseError {
fn error_response(&self) -> aweb::HttpResponse {
ResponseBuilder::new(self.status_code()).json(json!({
"message": self.to_string(),
}))
}
fn error(message: String, status: StatusCode) -> Response {
let message = ErrorMessage { message };
tide::Response::new(status.as_u16())
.body_json(&message)
.unwrap()
fn status_code(&self) -> StatusCode {
match *self {
Self::BadParameter(_, _)
| Self::BadRequest(_)
| Self::CreateIndex(_)
| Self::InvalidIndexUid
| Self::OpenIndex(_)
| Self::RetrieveDocument(_, _)
| Self::SearchDocuments(_)
| Self::FilterParsing(_) => StatusCode::BAD_REQUEST,
Self::DocumentNotFound(_)
| Self::IndexNotFound(_)
| Self::NotFound(_) => StatusCode::NOT_FOUND,
Self::InvalidToken(_)
| Self::MissingHeader(_) => StatusCode::UNAUTHORIZED,
Self::MissingAuthorizationHeader => StatusCode::FORBIDDEN,
Self::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR,
Self::Maintenance => StatusCode::SERVICE_UNAVAILABLE,
}
}
}
impl From<serde_json::Error> for ResponseError {
fn from(err: serde_json::Error) -> ResponseError {
ResponseError::internal(err)
impl From<meilisearch_core::HeedError> for ResponseError {
fn from(err: meilisearch_core::HeedError) -> ResponseError {
ResponseError::Internal(err.to_string())
}
}
impl From<meilisearch_core::FstError> for ResponseError {
fn from(err: meilisearch_core::FstError) -> ResponseError {
ResponseError::Internal(err.to_string())
}
}
impl From<meilisearch_core::Error> for ResponseError {
fn from(err: meilisearch_core::Error) -> ResponseError {
ResponseError::internal(err)
}
}
impl From<HeedError> for ResponseError {
fn from(err: HeedError) -> ResponseError {
ResponseError::internal(err)
}
}
impl From<FstError> for ResponseError {
fn from(err: FstError) -> ResponseError {
ResponseError::internal(err)
}
}
impl From<SearchError> for ResponseError {
fn from(err: SearchError) -> ResponseError {
use meilisearch_core::pest_error::LineColLocation::*;
match err {
SearchError::FilterParsing(s) => ResponseError::FilterParsing(s),
_ => ResponseError::internal(err),
meilisearch_core::Error::FilterParseError(e) => {
let (line, column) = match e.line_col {
Span((line, _), (column, _)) => (line, column),
Pos((line, column)) => (line, column),
};
let message = format!("parsing error on line {} at column {}: {}", line, column, e.variant.message());
ResponseError::FilterParsing(message)
},
_ => ResponseError::Internal(err.to_string()),
}
}
}
impl From<meilisearch_core::settings::RankingRuleConversionError> for ResponseError {
fn from(err: meilisearch_core::settings::RankingRuleConversionError) -> ResponseError {
ResponseError::internal(err)
impl From<meilisearch_schema::Error> for ResponseError {
fn from(err: meilisearch_schema::Error) -> ResponseError {
ResponseError::Internal(err.to_string())
}
}
pub trait IntoInternalError<T> {
fn into_internal_error(self) -> SResult<T>;
}
impl<T> IntoInternalError<T> for Option<T> {
fn into_internal_error(self) -> SResult<T> {
match self {
Some(value) => Ok(value),
None => Err(ResponseError::internal("Heed cannot find requested value")),
}
impl From<actix_http::Error> for ResponseError {
fn from(err: actix_http::Error) -> ResponseError {
ResponseError::Internal(err.to_string())
}
}
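Taken together, the Display impl and status_code() above determine exactly what a client receives once actix-web calls error_response(): the status returned by status_code() and a JSON body of the form {"message": ...}. A small sanity check of that mapping, written as a sketch rather than as part of the commit ("movies" is a made-up index uid):

#[test]
fn index_not_found_renders_as_404() {
    // Bring the actix-web trait methods (status_code) into scope without clashing
    // with the crate's own ResponseError enum.
    use actix_web::error::ResponseError as _;
    use actix_web::http::StatusCode;

    let err = ResponseError::index_not_found("movies");
    assert_eq!(err.status_code(), StatusCode::NOT_FOUND);
    // error_response() wraps this Display output under the "message" key.
    assert_eq!(err.to_string(), "Index movies not found");
}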

View File

@@ -0,0 +1,103 @@
use std::cell::RefCell;
use std::pin::Pin;
use std::rc::Rc;
use std::task::{Context, Poll};
use actix_service::{Service, Transform};
use actix_web::{dev::ServiceRequest, dev::ServiceResponse, Error};
use futures::future::{err, ok, Future, Ready};
use crate::error::ResponseError;
use crate::Data;
#[derive(Clone)]
pub enum Authentication {
Public,
Private,
Admin,
}
impl<S: 'static, B> Transform<S> for Authentication
where
S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
S::Future: 'static,
B: 'static,
{
type Request = ServiceRequest;
type Response = ServiceResponse<B>;
type Error = Error;
type InitError = ();
type Transform = LoggingMiddleware<S>;
type Future = Ready<Result<Self::Transform, Self::InitError>>;
fn new_transform(&self, service: S) -> Self::Future {
ok(LoggingMiddleware {
acl: self.clone(),
service: Rc::new(RefCell::new(service)),
})
}
}
pub struct LoggingMiddleware<S> {
acl: Authentication,
service: Rc<RefCell<S>>,
}
impl<S, B> Service for LoggingMiddleware<S>
where
S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,
S::Future: 'static,
B: 'static,
{
type Request = ServiceRequest;
type Response = ServiceResponse<B>;
type Error = Error;
type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>>>>;
fn poll_ready(&mut self, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
self.service.poll_ready(cx)
}
fn call(&mut self, req: ServiceRequest) -> Self::Future {
let mut svc = self.service.clone();
// This unwrap is left because this error should never appear. If that's the case, then
// it means that actix-web has an issue or someone changes the type `Data`.
let data = req.app_data::<Data>().unwrap();
if data.api_keys.master.is_none() {
return Box::pin(svc.call(req));
}
let auth_header = match req.headers().get("X-Meili-API-Key") {
Some(auth) => match auth.to_str() {
Ok(auth) => auth,
Err(_) => return Box::pin(err(ResponseError::MissingAuthorizationHeader.into())),
},
None => {
return Box::pin(err(ResponseError::MissingAuthorizationHeader.into()));
}
};
let authenticated = match self.acl {
Authentication::Admin => data.api_keys.master.as_deref() == Some(auth_header),
Authentication::Private => {
data.api_keys.master.as_deref() == Some(auth_header)
|| data.api_keys.private.as_deref() == Some(auth_header)
}
Authentication::Public => {
data.api_keys.master.as_deref() == Some(auth_header)
|| data.api_keys.private.as_deref() == Some(auth_header)
|| data.api_keys.public.as_deref() == Some(auth_header)
}
};
if authenticated {
Box::pin(svc.call(req))
} else {
Box::pin(err(
ResponseError::InvalidToken(auth_header.to_string()).into()
))
}
}
}

View File

@@ -1,10 +1,7 @@
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::convert::From;
use std::error;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::time::{Duration, Instant};
use std::time::Instant;
use indexmap::IndexMap;
use log::error;
@@ -19,74 +16,7 @@ use serde_json::Value;
use siphasher::sip::SipHasher;
use slice_group_by::GroupBy;
#[derive(Debug)]
pub enum Error {
SearchDocuments(String),
RetrieveDocument(u64, String),
DocumentNotFound(u64),
CropFieldWrongType(String),
FilterParsing(String),
AttributeNotFoundOnDocument(String),
AttributeNotFoundOnSchema(String),
MissingFilterValue,
UnknownFilteredAttribute,
Internal(String),
}
impl error::Error for Error {}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use Error::*;
match self {
SearchDocuments(err) => write!(f, "impossible to search documents; {}", err),
RetrieveDocument(id, err) => write!(
f,
"impossible to retrieve the document with id: {}; {}",
id, err
),
DocumentNotFound(id) => write!(f, "document {} not found", id),
CropFieldWrongType(field) => {
write!(f, "the field {} cannot be cropped it's not a string", field)
}
AttributeNotFoundOnDocument(field) => {
write!(f, "field {} is not found on document", field)
}
AttributeNotFoundOnSchema(field) => write!(f, "field {} is not found on schema", field),
MissingFilterValue => f.write_str("a filter doesn't have a value to compare it with"),
UnknownFilteredAttribute => {
f.write_str("a filter is specifying an unknown schema attribute")
}
Internal(err) => write!(f, "internal error; {}", err),
FilterParsing(err) => write!(f, "filter parsing error: {}", err),
}
}
}
impl From<meilisearch_core::Error> for Error {
fn from(error: meilisearch_core::Error) -> Self {
use meilisearch_core::pest_error::LineColLocation::*;
match error {
meilisearch_core::Error::FilterParseError(e) => {
let (line, column) = match e.line_col {
Span((line, _), (column, _)) => (line, column),
Pos((line, column)) => (line, column),
};
let message = format!("parsing error on line {} at column {}: {}", line, column, e.variant.message());
Error::FilterParsing(message)
},
_ => Error::Internal(error.to_string()),
}
}
}
impl From<heed::Error> for Error {
fn from(error: heed::Error) -> Self {
Error::Internal(error.to_string())
}
}
use crate::error::ResponseError;
pub trait IndexSearchExt {
fn new_search(&self, query: String) -> SearchBuilder;
@@ -103,7 +33,6 @@ impl IndexSearchExt for Index {
attributes_to_retrieve: None,
attributes_to_highlight: None,
filters: None,
timeout: Duration::from_millis(30),
matches: false,
}
}
@@ -118,7 +47,6 @@ pub struct SearchBuilder<'a> {
attributes_to_retrieve: Option<HashSet<String>>,
attributes_to_highlight: Option<HashSet<String>>,
filters: Option<String>,
timeout: Duration,
matches: bool,
}
@@ -159,27 +87,19 @@ impl<'a> SearchBuilder<'a> {
self
}
pub fn timeout(&mut self, value: Duration) -> &SearchBuilder {
self.timeout = value;
self
}
pub fn get_matches(&mut self) -> &SearchBuilder {
self.matches = true;
self
}
pub fn search(&self, reader: &heed::RoTxn<MainT>) -> Result<SearchResult, Error> {
let schema = self.index.main.schema(reader);
let schema = schema.map_err(|e| Error::Internal(e.to_string()))?;
let schema = match schema {
Some(schema) => schema,
None => return Err(Error::Internal(String::from("missing schema"))),
};
pub fn search(&self, reader: &heed::RoTxn<MainT>) -> Result<SearchResult, ResponseError> {
let schema = self
.index
.main
.schema(reader)?
.ok_or(ResponseError::internal("missing schema"))?;
let ranked_map = self.index.main.ranked_map(reader);
let ranked_map = ranked_map.map_err(|e| Error::Internal(e.to_string()))?;
let ranked_map = ranked_map.unwrap_or_default();
let ranked_map = self.index.main.ranked_map(reader)?.unwrap_or_default();
// Change criteria
let mut query_builder = match self.get_criteria(reader, &ranked_map, &schema)? {
@@ -203,8 +123,6 @@ impl<'a> SearchBuilder<'a> {
});
}
query_builder.with_fetch_timeout(self.timeout);
if let Some(field) = self.index.main.distinct_attribute(reader)? {
if let Some(field_id) = schema.id(&field) {
query_builder.with_distinct(1, move |id| {
@@ -221,9 +139,8 @@ impl<'a> SearchBuilder<'a> {
}
let start = Instant::now();
let result =
query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
let (docs, nb_hits) = result.map_err(|e| Error::SearchDocuments(e.to_string()))?;
let result = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
let (docs, nb_hits) = result.map_err(ResponseError::search_documents)?;
let time_ms = start.elapsed().as_millis() as usize;
let mut all_attributes: HashSet<&str> = HashSet::new();
@@ -258,8 +175,10 @@ impl<'a> SearchBuilder<'a> {
let mut document: IndexMap<String, Value> = self
.index
.document(reader, Some(&all_attributes), doc.id)
.map_err(|e| Error::RetrieveDocument(doc.id.0, e.to_string()))?
.ok_or(Error::DocumentNotFound(doc.id.0))?;
.map_err(|e| ResponseError::retrieve_document(doc.id.0, e))?
.ok_or(ResponseError::internal(
"Impossible to retrieve the document; Corrupted data",
))?;
let mut formatted = document.iter()
.filter(|(key, _)| all_formatted.contains(key.as_str()))
@@ -320,7 +239,7 @@ impl<'a> SearchBuilder<'a> {
reader: &heed::RoTxn<MainT>,
ranked_map: &'a RankedMap,
schema: &Schema,
) -> Result<Option<Criteria<'a>>, Error> {
) -> Result<Option<Criteria<'a>>, ResponseError> {
let ranking_rules = self.index.main.ranking_rules(reader)?;
if let Some(ranking_rules) = ranking_rules {

View File

@@ -1,2 +1,4 @@
pub mod authentication;
pub mod meilisearch;
pub mod tide;
pub use authentication::Authentication;

View File

@@ -1,83 +0,0 @@
use crate::error::{ResponseError, SResult};
use crate::Data;
use meilisearch_core::Index;
use tide::Request;
pub enum ACL {
Admin,
Private,
Public,
}
pub trait RequestExt {
fn is_allowed(&self, acl: ACL) -> SResult<()>;
fn url_param(&self, name: &str) -> SResult<String>;
fn index(&self) -> SResult<Index>;
fn document_id(&self) -> SResult<String>;
}
impl RequestExt for Request<Data> {
fn is_allowed(&self, acl: ACL) -> SResult<()> {
let user_api_key = self.header("X-Meili-API-Key");
if self.state().api_keys.master.is_none() {
return Ok(())
}
match acl {
ACL::Admin => {
if user_api_key == self.state().api_keys.master.as_deref() {
return Ok(());
}
}
ACL::Private => {
if user_api_key == self.state().api_keys.master.as_deref() {
return Ok(());
}
if user_api_key == self.state().api_keys.private.as_deref() {
return Ok(());
}
}
ACL::Public => {
if user_api_key == self.state().api_keys.master.as_deref() {
return Ok(());
}
if user_api_key == self.state().api_keys.private.as_deref() {
return Ok(());
}
if user_api_key == self.state().api_keys.public.as_deref() {
return Ok(());
}
}
}
Err(ResponseError::InvalidToken(
user_api_key.unwrap_or("Need a token").to_owned(),
))
}
fn url_param(&self, name: &str) -> SResult<String> {
let param = self
.param::<String>(name)
.map_err(|e| ResponseError::bad_parameter(name, e))?;
Ok(param)
}
fn index(&self) -> SResult<Index> {
let index_uid = self.url_param("index")?;
let index = self
.state()
.db
.open_index(&index_uid)
.ok_or(ResponseError::index_not_found(index_uid))?;
Ok(index)
}
fn document_id(&self) -> SResult<String> {
let name = self
.param::<String>("document_id")
.map_err(|_| ResponseError::bad_parameter("documentId", "primaryKey"))?;
Ok(name)
}
}

View File

@@ -8,3 +8,69 @@ pub mod option;
pub mod routes;
pub use self::data::Data;
use actix_http::Error;
use actix_service::ServiceFactory;
use actix_web::{dev, web, App};
use log::error;
use meilisearch_core::ProcessedUpdateResult;
pub fn create_app(
data: &Data,
) -> App<
impl ServiceFactory<
Config = (),
Request = dev::ServiceRequest,
Response = dev::ServiceResponse<actix_http::body::Body>,
Error = Error,
InitError = (),
>,
actix_http::body::Body,
> {
App::new()
.app_data(web::Data::new(data.clone()))
.app_data(web::JsonConfig::default().limit(1024 * 1024 * 10)) // Json Limit of 10Mb
.service(routes::load_html)
.service(routes::load_css)
.configure(routes::document::services)
.configure(routes::index::services)
.configure(routes::search::services)
.configure(routes::setting::services)
.configure(routes::stop_words::services)
.configure(routes::synonym::services)
.configure(routes::health::services)
.configure(routes::stats::services)
.configure(routes::key::services)
}
pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpdateResult) {
if status.error.is_some() {
return;
}
if let Some(index) = data.db.open_index(&index_uid) {
let db = &data.db;
let mut writer = match db.main_write_txn() {
Ok(writer) => writer,
Err(e) => {
error!("Impossible to get write_txn; {}", e);
return;
}
};
if let Err(e) = data.compute_stats(&mut writer, &index_uid) {
error!("Impossible to compute stats; {}", e)
}
if let Err(e) = data.set_last_update(&mut writer) {
error!("Impossible to update last_update; {}", e)
}
if let Err(e) = index.main.put_updated_at(&mut writer) {
error!("Impossible to update updated_at; {}", e)
}
if let Err(e) = writer.commit() {
error!("Impossible to get write_txn; {}", e);
}
}
}
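Exposing create_app from the library, instead of wiring the routes inside main, is what lets HTTP tests drive the whole application in-process through actix-web's test helpers. A rough sketch of such a test, assuming a hypothetical test_data() helper that builds a Data over a temporary database with no master key set:

#[actix_rt::test]
async fn missing_index_returns_404() {
    // test_data() is hypothetical: it would build a Data over a tempdir-backed database,
    // leaving api_keys.master as None so the Authentication middleware lets requests through.
    let data = test_data();
    let mut app = actix_web::test::init_service(create_app(&data)).await;

    let req = actix_web::test::TestRequest::get()
        .uri("/indexes/does-not-exist")
        .to_request();
    let res = actix_web::test::call_service(&mut app, req).await;

    // A missing index becomes ResponseError::IndexNotFound, which error.rs maps to 404.
    assert_eq!(res.status(), actix_web::http::StatusCode::NOT_FOUND);
}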

View File

@@ -1,16 +1,13 @@
use std::{env, thread};
use async_std::task;
use actix_cors::Cors;
use actix_web::{middleware, HttpServer};
use log::info;
use main_error::MainError;
use structopt::StructOpt;
use tide::middleware::{Cors, RequestLogger, Origin};
use http::header::HeaderValue;
use meilisearch_http::data::Data;
use meilisearch_http::option::Opt;
use meilisearch_http::routes;
use meilisearch_http::routes::index::index_update_callback;
use meilisearch_http::{create_app, index_update_callback};
use structopt::StructOpt;
mod analytics;
@@ -18,7 +15,8 @@ mod analytics;
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
pub fn main() -> Result<(), MainError> {
#[actix_rt::main]
async fn main() -> Result<(), MainError> {
let opt = Opt::from_args();
match opt.env.as_ref() {
@@ -29,7 +27,6 @@ pub fn main() -> Result<(), MainError> {
.into(),
);
}
env_logger::init();
}
"development" => {
env_logger::from_env(env_logger::Env::default().default_filter_or("info")).init();
@@ -50,17 +47,21 @@ pub fn main() -> Result<(), MainError> {
print_launch_resume(&opt, &data);
let mut app = tide::with_state(data);
HttpServer::new(move || {
create_app(&data)
.wrap(
Cors::new()
.send_wildcard()
.allowed_header("x-meili-api-key")
.finish(),
)
.wrap(middleware::Logger::default())
.wrap(middleware::Compress::default())
})
.bind(opt.http_addr)?
.run()
.await?;
app.middleware(Cors::new()
.allow_methods(HeaderValue::from_static("GET, POST, PUT, DELETE, OPTIONS"))
.allow_headers(HeaderValue::from_static("X-Meili-API-Key"))
.allow_origin(Origin::from("*")));
app.middleware(RequestLogger::new());
routes::load_routes(&mut app);
task::block_on(app.listen(opt.http_addr))?;
Ok(())
}
@@ -76,7 +77,7 @@ pub fn print_launch_resume(opt: &Opt, data: &Data) {
888 888 "Y8888 888 888 888 "Y8888P" "Y8888 "Y888888 888 "Y8888P 888 888
"#;
println!("{}", ascii_name);
info!("{}", ascii_name);
info!("Database path: {:?}", opt.db_path);
info!("Start server on: {:?}", opt.http_addr);

View File

@@ -1,62 +1,85 @@
use std::collections::{BTreeSet, HashSet};
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post, put};
use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use serde_json::Value;
use tide::{Request, Response};
use crate::error::{ResponseError, SResult};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data;
pub async fn get_document(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Public)?;
type Document = IndexMap<String, Value>;
let index = ctx.index()?;
let original_document_id = ctx.document_id()?;
let document_id = meilisearch_core::serde::compute_document_id(original_document_id.clone());
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let response = index
.document::<IndexMap<String, Value>>(&reader, None, document_id)?
.ok_or(ResponseError::document_not_found(&original_document_id))?;
if response.is_empty() {
return Err(ResponseError::document_not_found(&original_document_id));
#[derive(Deserialize)]
struct DocumentParam {
index_uid: String,
document_id: String,
}
Ok(tide::Response::new(200).body_json(&response)?)
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_document)
.service(delete_document)
.service(get_all_documents)
.service(add_documents)
.service(update_documents)
.service(delete_documents)
.service(clear_all_documents);
}
#[derive(Default, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexUpdateResponse {
pub update_id: u64,
#[get(
"/indexes/{index_uid}/documents/{document_id}",
wrap = "Authentication::Public"
)]
async fn get_document(
data: web::Data<Data>,
path: web::Path<DocumentParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let document_id = meilisearch_core::serde::compute_document_id(&path.document_id);
let reader = data.db.main_read_txn()?;
let response: Document = index
.document(&reader, None, document_id)?
.ok_or(ResponseError::document_not_found(&path.document_id))?;
Ok(HttpResponse::Ok().json(response))
}
pub async fn delete_document(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[delete(
"/indexes/{index_uid}/documents/{document_id}",
wrap = "Authentication::Private"
)]
async fn delete_document(
data: web::Data<Data>,
path: web::Path<DocumentParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let document_id = meilisearch_core::serde::compute_document_id(&path.document_id);
let mut update_writer = data.db.update_write_txn()?;
let index = ctx.index()?;
let document_id = ctx.document_id()?;
let document_id = meilisearch_core::serde::compute_document_id(document_id);
let db = &ctx.state().db;
let mut update_writer = db.update_write_txn()?;
let mut documents_deletion = index.documents_deletion();
documents_deletion.delete_document_by_id(document_id);
let update_id = documents_deletion.finalize(&mut update_writer)?;
update_writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
#[derive(Default, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct BrowseQuery {
offset: Option<usize>,
@@ -64,17 +87,21 @@ struct BrowseQuery {
attributes_to_retrieve: Option<String>,
}
pub async fn get_all_documents(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[get("/indexes/{index_uid}/documents", wrap = "Authentication::Public")]
async fn get_all_documents(
data: web::Data<Data>,
path: web::Path<IndexParam>,
params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let index = ctx.index()?;
let query: BrowseQuery = ctx.query().unwrap_or_default();
let offset = params.offset.unwrap_or(0);
let limit = params.limit.unwrap_or(20);
let offset = query.offset.unwrap_or(0);
let limit = query.limit.unwrap_or(20);
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let reader = data.db.main_read_txn()?;
let documents_ids: Result<BTreeSet<_>, _> = index
.documents_fields_counts
@@ -83,29 +110,23 @@ pub async fn get_all_documents(ctx: Request<Data>) -> SResult<Response> {
.take(limit)
.collect();
let documents_ids = match documents_ids {
Ok(documents_ids) => documents_ids,
Err(e) => return Err(ResponseError::internal(e)),
};
let documents_ids = documents_ids?;
let mut response_body = Vec::<IndexMap<String, Value>>::new();
let attributes: Option<HashSet<&str>> = params
.attributes_to_retrieve
.as_ref()
.map(|a| a.split(',').collect());
if let Some(attributes) = query.attributes_to_retrieve {
let attributes = attributes.split(',').collect::<HashSet<&str>>();
let mut response = Vec::new();
for document_id in documents_ids {
if let Ok(Some(document)) = index.document(&reader, Some(&attributes), document_id) {
response_body.push(document);
}
}
} else {
for document_id in documents_ids {
if let Ok(Some(document)) = index.document(&reader, None, document_id) {
response_body.push(document);
}
if let Ok(Some(document)) =
index.document::<Document>(&reader, attributes.as_ref(), document_id)
{
response.push(document);
}
}
Ok(tide::Response::new(200).body_json(&response_body)?)
Ok(HttpResponse::Ok().json(response))
}
fn find_primary_key(document: &IndexMap<String, Value>) -> Option<String> {
@@ -117,40 +138,45 @@ fn find_primary_key(document: &IndexMap<String, Value>) -> Option<String> {
None
}
#[derive(Default, Deserialize)]
#[derive(Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct UpdateDocumentsQuery {
primary_key: Option<String>,
}
async fn update_multiple_documents(mut ctx: Request<Data>, is_partial: bool) -> SResult<Response> {
ctx.is_allowed(Private)?;
async fn update_multiple_documents(
data: web::Data<Data>,
path: web::Path<IndexParam>,
params: web::Query<UpdateDocumentsQuery>,
body: web::Json<Vec<Document>>,
is_partial: bool,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let index = ctx.index()?;
let reader = data.db.main_read_txn()?;
let data: Vec<IndexMap<String, Value>> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let query: UpdateDocumentsQuery = ctx.query().unwrap_or_default();
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let mut schema = index
.main
.schema(&reader)?
.ok_or(ResponseError::internal("schema not found"))?;
.ok_or(ResponseError::internal("Impossible to retrieve the schema"))?;
if schema.primary_key().is_none() {
let id = match query.primary_key {
Some(id) => id,
None => match data.first().and_then(|docs| find_primary_key(docs)) {
Some(id) => id,
None => return Err(ResponseError::bad_request("Could not infer a primary key")),
},
let id = match &params.primary_key {
Some(id) => id.to_string(),
None => body
.first()
.and_then(find_primary_key)
.ok_or(ResponseError::bad_request("Could not infer a primary key"))?,
};
let mut writer = db.main_write_txn()?;
schema.set_primary_key(&id).map_err(ResponseError::bad_request)?;
let mut writer = data.db.main_write_txn()?;
schema
.set_primary_key(&id)
.map_err(ResponseError::bad_request)?;
index.main.put_schema(&mut writer, &schema)?;
writer.commit()?;
}
@@ -161,38 +187,56 @@ async fn update_multiple_documents(mut ctx: Request<Data>, is_partial: bool) ->
index.documents_addition()
};
for document in data {
for document in body.into_inner() {
document_addition.update_document(document);
}
let mut update_writer = db.update_write_txn()?;
let mut update_writer = data.db.update_write_txn()?;
let update_id = document_addition.finalize(&mut update_writer)?;
update_writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn add_or_replace_multiple_documents(ctx: Request<Data>) -> SResult<Response> {
update_multiple_documents(ctx, false).await
#[post("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
async fn add_documents(
data: web::Data<Data>,
path: web::Path<IndexParam>,
params: web::Query<UpdateDocumentsQuery>,
body: web::Json<Vec<Document>>,
) -> Result<HttpResponse, ResponseError> {
update_multiple_documents(data, path, params, body, false).await
}
pub async fn add_or_update_multiple_documents(ctx: Request<Data>) -> SResult<Response> {
update_multiple_documents(ctx, true).await
#[put("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
async fn update_documents(
data: web::Data<Data>,
path: web::Path<IndexParam>,
params: web::Query<UpdateDocumentsQuery>,
body: web::Json<Vec<Document>>,
) -> Result<HttpResponse, ResponseError> {
update_multiple_documents(data, path, params, body, true).await
}
pub async fn delete_multiple_documents(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[post(
"/indexes/{index_uid}/documents/delete-batch",
wrap = "Authentication::Private"
)]
async fn delete_documents(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Vec<Value>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let data: Vec<Value> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
let mut writer = data.db.update_write_txn()?;
let mut documents_deletion = index.documents_deletion();
for document_id in data {
for document_id in body.into_inner() {
if let Some(document_id) = meilisearch_core::serde::value_to_string(&document_id) {
documents_deletion
.delete_document_by_id(meilisearch_core::serde::compute_document_id(document_id));
@@ -203,21 +247,24 @@ pub async fn delete_multiple_documents(mut ctx: Request<Data>) -> SResult<Respon
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn clear_all_documents(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[delete("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
async fn clear_all_documents(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
let mut writer = data.db.update_write_txn()?;
let update_id = index.clear_all(&mut writer)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}

View File

@@ -1,47 +1,47 @@
use crate::error::{ResponseError, SResult};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::Data;
use actix_web::{web, HttpResponse};
use actix_web_macros::{get, put};
use heed::types::{Str, Unit};
use serde::Deserialize;
use tide::{Request, Response};
use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::Data;
const UNHEALTHY_KEY: &str = "_is_unhealthy";
pub async fn get_health(ctx: Request<Data>) -> SResult<Response> {
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_health).service(change_healthyness);
}
let common_store = ctx.state().db.common_store();
#[get("/health", wrap = "Authentication::Private")]
async fn get_health(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?;
let common_store = data.db.common_store();
if let Ok(Some(_)) = common_store.get::<_, Str, Unit>(&reader, UNHEALTHY_KEY) {
return Err(ResponseError::Maintenance);
}
Ok(tide::Response::new(200))
Ok(HttpResponse::Ok().finish())
}
pub async fn set_healthy(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let db = &ctx.state().db;
let mut writer = db.main_write_txn()?;
let common_store = ctx.state().db.common_store();
async fn set_healthy(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let mut writer = data.db.main_write_txn()?;
let common_store = data.db.common_store();
common_store.delete::<_, Str>(&mut writer, UNHEALTHY_KEY)?;
writer.commit()?;
Ok(tide::Response::new(200))
Ok(HttpResponse::Ok().finish())
}
pub async fn set_unhealthy(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let db = &ctx.state().db;
let mut writer = db.main_write_txn()?;
let common_store = ctx.state().db.common_store();
async fn set_unhealthy(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let mut writer = data.db.main_write_txn()?;
let common_store = data.db.common_store();
common_store.put::<_, Str, Unit>(&mut writer, UNHEALTHY_KEY, &())?;
writer.commit()?;
Ok(tide::Response::new(200))
Ok(HttpResponse::Ok().finish())
}
#[derive(Deserialize, Clone)]
@@ -49,12 +49,14 @@ struct HealtBody {
health: bool,
}
pub async fn change_healthyness(mut ctx: Request<Data>) -> SResult<Response> {
let body: HealtBody = ctx.body_json().await.map_err(ResponseError::bad_request)?;
#[put("/health", wrap = "Authentication::Private")]
async fn change_healthyness(
data: web::Data<Data>,
body: web::Json<HealtBody>,
) -> Result<HttpResponse, ResponseError> {
if body.health {
set_healthy(ctx).await
set_healthy(data).await
} else {
set_unhealthy(ctx).await
set_unhealthy(data).await
}
}

View File

@@ -1,16 +1,25 @@
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post, put};
use chrono::{DateTime, Utc};
use log::error;
use meilisearch_core::ProcessedUpdateResult;
use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tide::{Request, Response};
use crate::error::{IntoInternalError, ResponseError, SResult};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(list_indexes)
.service(get_index)
.service(create_index)
.service(update_index)
.service(delete_index)
.service(get_update_status)
.service(get_all_updates_status);
}
fn generate_uid() -> String {
let mut rng = rand::thread_rng();
let sample = b"abcdefghijklmnopqrstuvwxyz0123456789";
@@ -20,24 +29,42 @@ fn generate_uid() -> String {
.collect()
}
pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
}
let indexes_uids = ctx.state().db.indexes_uids();
#[get("/indexes", wrap = "Authentication::Private")]
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let mut response = Vec::new();
let mut response_body = Vec::new();
for index_uid in indexes_uids {
let index = ctx.state().db.open_index(&index_uid);
for index_uid in data.db.indexes_uids() {
let index = data.db.open_index(&index_uid);
match index {
Some(index) => {
let name = index.main.name(&reader)?.into_internal_error()?;
let created_at = index.main.created_at(&reader)?.into_internal_error()?;
let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
let name = index.main.name(&reader)?.ok_or(ResponseError::internal(
"Impossible to get the name of an index",
))?;
let created_at = index
.main
.created_at(&reader)?
.ok_or(ResponseError::internal(
"Impossible to get the create date of an index",
))?;
let updated_at = index
.main
.updated_at(&reader)?
.ok_or(ResponseError::internal(
"Impossible to get the last update date of an index",
))?;
let primary_key = match index.main.schema(&reader) {
Ok(Some(schema)) => match schema.primary_key() {
@@ -54,7 +81,7 @@ pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
updated_at,
primary_key,
};
response_body.push(index_response);
response.push(index_response);
}
None => error!(
"Index {} is referenced in the indexes list but cannot be found",
@@ -63,31 +90,36 @@ pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
}
}
Ok(tide::Response::new(200).body_json(&response_body)?)
Ok(HttpResponse::Ok().json(response))
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
}
#[get("/indexes/{index_uid}", wrap = "Authentication::Private")]
async fn get_index(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let reader = data.db.main_read_txn()?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let uid = ctx.url_param("index")?;
let name = index.main.name(&reader)?.into_internal_error()?;
let created_at = index.main.created_at(&reader)?.into_internal_error()?;
let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
let name = index.main.name(&reader)?.ok_or(ResponseError::internal(
"Impossible to get the name of an index",
))?;
let created_at = index
.main
.created_at(&reader)?
.ok_or(ResponseError::internal(
"Impossible to get the create date of an index",
))?;
let updated_at = index
.main
.updated_at(&reader)?
.ok_or(ResponseError::internal(
"Impossible to get the last update date of an index",
))?;
let primary_key = match index.main.schema(&reader) {
Ok(Some(schema)) => match schema.primary_key() {
@@ -97,15 +129,13 @@ pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
_ => None,
};
let response_body = IndexResponse {
Ok(HttpResponse::Ok().json(IndexResponse {
name,
uid,
uid: path.index_uid.clone(),
created_at,
updated_at,
primary_key,
};
Ok(tide::Response::new(200).body_json(&response_body)?)
}))
}
#[derive(Debug, Deserialize)]
@@ -116,86 +146,74 @@ struct IndexCreateRequest {
primary_key: Option<String>,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexCreateResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
}
pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let body = ctx
.body_json::<IndexCreateRequest>()
.await
.map_err(ResponseError::bad_request)?;
#[post("/indexes", wrap = "Authentication::Private")]
async fn create_index(
data: web::Data<Data>,
body: web::Json<IndexCreateRequest>,
) -> Result<HttpResponse, ResponseError> {
if let (None, None) = (body.name.clone(), body.uid.clone()) {
return Err(ResponseError::bad_request(
"Index creation must have an uid",
));
}
let db = &ctx.state().db;
let uid = match body.uid {
let uid = match &body.uid {
Some(uid) => {
if uid
.chars()
.all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
{
uid
uid.to_owned()
} else {
return Err(ResponseError::InvalidIndexUid);
}
}
None => loop {
let uid = generate_uid();
if db.open_index(&uid).is_none() {
if data.db.open_index(&uid).is_none() {
break uid;
}
},
};
let created_index = match db.create_index(&uid) {
Ok(index) => index,
Err(e) => return Err(ResponseError::create_index(e)),
};
let created_index = data
.db
.create_index(&uid)
.map_err(ResponseError::create_index)?;
let mut writer = data.db.main_write_txn()?;
let name = body.name.as_ref().unwrap_or(&uid);
created_index.main.put_name(&mut writer, name)?;
let mut writer = db.main_write_txn()?;
let name = body.name.unwrap_or(uid.clone());
created_index.main.put_name(&mut writer, &name)?;
let created_at = created_index
.main
.created_at(&writer)?
.into_internal_error()?;
.ok_or(ResponseError::internal("Impossible to read created at"))?;
let updated_at = created_index
.main
.updated_at(&writer)?
.into_internal_error()?;
.ok_or(ResponseError::internal("Impossible to read updated at"))?;
if let Some(id) = body.primary_key.clone() {
if let Some(mut schema) = created_index.main.schema(&mut writer)? {
schema.set_primary_key(&id).map_err(ResponseError::bad_request)?;
if let Some(mut schema) = created_index.main.schema(&writer)? {
schema
.set_primary_key(&id)
.map_err(ResponseError::bad_request)?;
created_index.main.put_schema(&mut writer, &schema)?;
}
}
writer.commit()?;
let response_body = IndexCreateResponse {
name,
Ok(HttpResponse::Created().json(IndexResponse {
name: name.to_string(),
uid,
created_at,
updated_at,
primary_key: body.primary_key,
};
Ok(tide::Response::new(201).body_json(&response_body)?)
primary_key: body.primary_key.clone(),
}))
}
#[derive(Debug, Deserialize)]
@@ -215,26 +233,25 @@ struct UpdateIndexResponse {
primary_key: Option<String>,
}
pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[put("/indexes/{index_uid}", wrap = "Authentication::Private")]
async fn update_index(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<IndexCreateRequest>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let body = ctx
.body_json::<UpdateIndexRequest>()
.await
.map_err(ResponseError::bad_request)?;
let mut writer = data.db.main_write_txn()?;
let index_uid = ctx.url_param("index")?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.main_write_txn()?;
if let Some(name) = body.name {
index.main.put_name(&mut writer, &name)?;
if let Some(name) = &body.name {
index.main.put_name(&mut writer, name)?;
}
if let Some(id) = body.primary_key.clone() {
if let Some(mut schema) = index.main.schema(&mut writer)? {
if let Some(mut schema) = index.main.schema(&writer)? {
match schema.primary_key() {
Some(_) => {
return Err(ResponseError::bad_request(
@@ -242,9 +259,7 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
));
}
None => {
schema
.set_primary_key(&id)
.map_err(ResponseError::bad_request)?;
schema.set_primary_key(&id)?;
index.main.put_schema(&mut writer, &schema)?;
}
}
@@ -254,10 +269,23 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
index.main.put_updated_at(&mut writer)?;
writer.commit()?;
let reader = db.main_read_txn()?;
let name = index.main.name(&reader)?.into_internal_error()?;
let created_at = index.main.created_at(&reader)?.into_internal_error()?;
let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
let reader = data.db.main_read_txn()?;
let name = index.main.name(&reader)?.ok_or(ResponseError::internal(
"Impossible to get the name of an index",
))?;
let created_at = index
.main
.created_at(&reader)?
.ok_or(ResponseError::internal(
"Impossible to get the create date of an index",
))?;
let updated_at = index
.main
.updated_at(&reader)?
.ok_or(ResponseError::internal(
"Impossible to get the last update date of an index",
))?;
let primary_key = match index.main.schema(&reader) {
Ok(Some(schema)) => match schema.primary_key() {
@@ -267,86 +295,70 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
_ => None,
};
let response_body = UpdateIndexResponse {
Ok(HttpResponse::Ok().json(IndexResponse {
name,
uid: index_uid,
uid: path.index_uid.clone(),
created_at,
updated_at,
primary_key,
};
Ok(tide::Response::new(200).body_json(&response_body)?)
}))
}
pub async fn get_update_status(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[delete("/indexes/{index_uid}", wrap = "Authentication::Private")]
async fn delete_index(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
data.db.delete_index(&path.index_uid)?;
let db = &ctx.state().db;
let reader = db.update_read_txn()?;
let update_id = ctx
.param::<u64>("update_id")
.map_err(|e| ResponseError::bad_parameter("update_id", e))?;
let index = ctx.index()?;
let status = index.update_status(&reader, update_id)?;
let response = match status {
Some(status) => tide::Response::new(200).body_json(&status).unwrap(),
None => tide::Response::new(404)
.body_json(&json!({ "message": "unknown update id" }))
.unwrap(),
};
Ok(response)
Ok(HttpResponse::NoContent().finish())
}
pub async fn get_all_updates_status(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let db = &ctx.state().db;
let reader = db.update_read_txn()?;
let index = ctx.index()?;
#[derive(Deserialize)]
struct UpdateParam {
index_uid: String,
update_id: u64,
}
#[get(
"/indexes/{index_uid}/updates/{update_id}",
wrap = "Authentication::Private"
)]
async fn get_update_status(
data: web::Data<Data>,
path: web::Path<UpdateParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.update_read_txn()?;
let status = index.update_status(&reader, path.update_id)?;
match status {
Some(status) => Ok(HttpResponse::Ok().json(status)),
None => Err(ResponseError::NotFound(format!(
"Update {} not found",
path.update_id
))),
}
}
#[get("/indexes/{index_uid}/updates", wrap = "Authentication::Private")]
async fn get_all_updates_status(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.update_read_txn()?;
let response = index.all_updates_status(&reader)?;
Ok(tide::Response::new(200).body_json(&response).unwrap())
}
pub async fn delete_index(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let _ = ctx.index()?;
let index_uid = ctx.url_param("index")?;
ctx.state().db.delete_index(&index_uid)?;
Ok(tide::Response::new(204))
}
pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpdateResult) {
if status.error.is_some() {
return;
}
if let Some(index) = data.db.open_index(&index_uid) {
let db = &data.db;
let mut writer = match db.main_write_txn() {
Ok(writer) => writer,
Err(e) => {
error!("Impossible to get write_txn; {}", e);
return;
}
};
if let Err(e) = data.compute_stats(&mut writer, &index_uid) {
error!("Impossible to compute stats; {}", e)
}
if let Err(e) = data.set_last_update(&mut writer) {
error!("Impossible to update last_update; {}", e)
}
if let Err(e) = index.main.put_updated_at(&mut writer) {
error!("Impossible to update updated_at; {}", e)
}
if let Err(e) = writer.commit() {
error!("Impossible to get write_txn; {}", e);
}
}
Ok(HttpResponse::Ok().json(response))
}

View File

@@ -1,17 +1,26 @@
use crate::error::SResult;
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use actix_web::web;
use actix_web::HttpResponse;
use actix_web_macros::get;
use serde::Serialize;
use crate::helpers::Authentication;
use crate::Data;
use serde_json::json;
use tide::{Request, Response};
pub async fn list(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let keys = &ctx.state().api_keys;
Ok(tide::Response::new(200).body_json(&json!({
"private": keys.private,
"public": keys.public,
}))?)
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(list);
}
#[derive(Serialize)]
struct KeysResponse {
private: Option<String>,
public: Option<String>,
}
#[get("/keys", wrap = "Authentication::Admin")]
async fn list(data: web::Data<Data>) -> HttpResponse {
let api_keys = data.api_keys.clone();
HttpResponse::Ok().json(KeysResponse {
private: api_keys.private,
public: api_keys.public,
})
}

View File

@@ -1,7 +1,5 @@
use crate::data::Data;
use std::future::Future;
use tide::IntoResponse;
use tide::Response;
use actix_web::{get, HttpResponse};
use serde::{Deserialize, Serialize};
pub mod document;
pub mod health;
@@ -13,118 +11,33 @@ pub mod stats;
pub mod stop_words;
pub mod synonym;
async fn into_response<T: IntoResponse, U: IntoResponse>(
x: impl Future<Output = Result<T, U>>,
) -> Response {
match x.await {
Ok(resp) => resp.into_response(),
Err(resp) => resp.into_response(),
#[derive(Deserialize)]
pub struct IndexParam {
index_uid: String,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexUpdateResponse {
pub update_id: u64,
}
impl IndexUpdateResponse {
pub fn with_id(update_id: u64) -> Self {
Self { update_id }
}
}
pub fn load_routes(app: &mut tide::Server<Data>) {
app.at("/").get(|_| async {
tide::Response::new(200)
.body_string(include_str!("../../public/interface.html").to_string())
.set_mime(mime::TEXT_HTML_UTF_8)
});
app.at("/bulma.min.css").get(|_| async {
tide::Response::new(200)
.body_string(include_str!("../../public/bulma.min.css").to_string())
.set_mime(mime::TEXT_CSS_UTF_8)
});
app.at("/indexes")
.get(|ctx| into_response(index::list_indexes(ctx)))
.post(|ctx| into_response(index::create_index(ctx)));
app.at("/indexes/search")
.post(|ctx| into_response(search::search_multi_index(ctx)));
app.at("/indexes/:index")
.get(|ctx| into_response(index::get_index(ctx)))
.put(|ctx| into_response(index::update_index(ctx)))
.delete(|ctx| into_response(index::delete_index(ctx)));
app.at("/indexes/:index/search")
.get(|ctx| into_response(search::search_with_url_query(ctx)));
app.at("/indexes/:index/updates")
.get(|ctx| into_response(index::get_all_updates_status(ctx)));
app.at("/indexes/:index/updates/:update_id")
.get(|ctx| into_response(index::get_update_status(ctx)));
app.at("/indexes/:index/documents")
.get(|ctx| into_response(document::get_all_documents(ctx)))
.post(|ctx| into_response(document::add_or_replace_multiple_documents(ctx)))
.put(|ctx| into_response(document::add_or_update_multiple_documents(ctx)))
.delete(|ctx| into_response(document::clear_all_documents(ctx)));
app.at("/indexes/:index/documents/:document_id")
.get(|ctx| into_response(document::get_document(ctx)))
.delete(|ctx| into_response(document::delete_document(ctx)));
app.at("/indexes/:index/documents/delete-batch")
.post(|ctx| into_response(document::delete_multiple_documents(ctx)));
app.at("/indexes/:index/settings")
.get(|ctx| into_response(setting::get_all(ctx)))
.post(|ctx| into_response(setting::update_all(ctx)))
.delete(|ctx| into_response(setting::delete_all(ctx)));
app.at("/indexes/:index/settings/ranking-rules")
.get(|ctx| into_response(setting::get_rules(ctx)))
.post(|ctx| into_response(setting::update_rules(ctx)))
.delete(|ctx| into_response(setting::delete_rules(ctx)));
app.at("/indexes/:index/settings/distinct-attribute")
.get(|ctx| into_response(setting::get_distinct(ctx)))
.post(|ctx| into_response(setting::update_distinct(ctx)))
.delete(|ctx| into_response(setting::delete_distinct(ctx)));
app.at("/indexes/:index/settings/searchable-attributes")
.get(|ctx| into_response(setting::get_searchable(ctx)))
.post(|ctx| into_response(setting::update_searchable(ctx)))
.delete(|ctx| into_response(setting::delete_searchable(ctx)));
app.at("/indexes/:index/settings/displayed-attributes")
.get(|ctx| into_response(setting::displayed(ctx)))
.post(|ctx| into_response(setting::update_displayed(ctx)))
.delete(|ctx| into_response(setting::delete_displayed(ctx)));
app.at("/indexes/:index/settings/accept-new-fields")
.get(|ctx| into_response(setting::get_accept_new_fields(ctx)))
.post(|ctx| into_response(setting::update_accept_new_fields(ctx)));
app.at("/indexes/:index/settings/synonyms")
.get(|ctx| into_response(synonym::get(ctx)))
.post(|ctx| into_response(synonym::update(ctx)))
.delete(|ctx| into_response(synonym::delete(ctx)));
app.at("/indexes/:index/settings/stop-words")
.get(|ctx| into_response(stop_words::get(ctx)))
.post(|ctx| into_response(stop_words::update(ctx)))
.delete(|ctx| into_response(stop_words::delete(ctx)));
app.at("/indexes/:index/stats")
.get(|ctx| into_response(stats::index_stats(ctx)));
app.at("/keys").get(|ctx| into_response(key::list(ctx)));
app.at("/health")
.get(|ctx| into_response(health::get_health(ctx)))
.put(|ctx| into_response(health::change_healthyness(ctx)));
app.at("/stats")
.get(|ctx| into_response(stats::get_stats(ctx)));
app.at("/version")
.get(|ctx| into_response(stats::get_version(ctx)));
app.at("/sys-info")
.get(|ctx| into_response(stats::get_sys_info(ctx)));
app.at("/sys-info/pretty")
.get(|ctx| into_response(stats::get_sys_info_pretty(ctx)));
#[get("/")]
pub async fn load_html() -> HttpResponse {
HttpResponse::Ok()
.content_type("text/html; charset=utf-8")
.body(include_str!("../../public/interface.html").to_string())
}
#[get("/bulma.min.css")]
pub async fn load_css() -> HttpResponse {
HttpResponse::Ok()
.content_type("text/css; charset=utf-8")
.body(include_str!("../../public/bulma.min.css").to_string())
}
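The removed load_routes function is replaced by one services(cfg: &mut web::ServiceConfig) registrar per route module plus the two static handlers above. A hedged sketch of that wiring, assuming the real composition lives in create_app (referenced by the tests below) and that the other route modules expose the same services pattern as the ones shown in this diff; the bind address is illustrative:
use actix_web::{App, HttpServer};
use meilisearch_http::data::Data;
use meilisearch_http::routes;
// Sketch only: Data is assumed to be cheaply cloneable (it wraps an Arc).
async fn run(data: Data) -> std::io::Result<()> {
    HttpServer::new(move || {
        App::new()
            .data(data.clone())
            .service(routes::load_html)
            .service(routes::load_css)
            .configure(routes::search::services)
            .configure(routes::setting::services)
            .configure(routes::stop_words::services)
            .configure(routes::synonym::services)
            .configure(routes::stats::services)
            .configure(routes::key::services)
    })
    .bind("127.0.0.1:7700")?
    .run()
    .await
}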

View File

@ -1,19 +1,21 @@
use std::collections::HashMap;
use std::collections::HashSet;
use std::time::Duration;
use std::collections::{HashSet, HashMap};
use log::warn;
use meilisearch_core::Index;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::{Deserialize, Serialize};
use tide::{Request, Response};
use actix_web::web;
use actix_web::HttpResponse;
use actix_web_macros::get;
use serde::Deserialize;
use crate::error::{ResponseError, SResult};
use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::error::ResponseError;
use crate::helpers::meilisearch::IndexSearchExt;
use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(search_with_url_query);
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SearchQuery {
@ -25,38 +27,39 @@ struct SearchQuery {
crop_length: Option<usize>,
attributes_to_highlight: Option<String>,
filters: Option<String>,
timeout_ms: Option<u64>,
matches: Option<bool>,
}
pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Public)?;
#[get("/indexes/{index_uid}/search", wrap = "Authentication::Public")]
async fn search_with_url_query(
data: web::Data<Data>,
path: web::Path<IndexParam>,
params: web::Query<SearchQuery>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let reader = data.db.main_read_txn()?;
let schema = index
.main
.schema(&reader)?
.ok_or(ResponseError::open_index("No Schema found"))?;
.ok_or(ResponseError::internal("Impossible to retrieve the schema"))?;
let query: SearchQuery = ctx
.query()
.map_err(|_| ResponseError::bad_request("invalid query parameter"))?;
let mut search_builder = index.new_search(params.q.clone());
let mut search_builder = index.new_search(query.q.clone());
if let Some(offset) = query.offset {
if let Some(offset) = params.offset {
search_builder.offset(offset);
}
if let Some(limit) = query.limit {
if let Some(limit) = params.limit {
search_builder.limit(limit);
}
let available_attributes = schema.displayed_name();
let mut restricted_attributes: HashSet<&str>;
match &query.attributes_to_retrieve {
match &params.attributes_to_retrieve {
Some(attributes_to_retrieve) => {
let attributes_to_retrieve: HashSet<&str> = attributes_to_retrieve.split(',').collect();
if attributes_to_retrieve.contains("*") {
@ -78,8 +81,8 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
}
}
if let Some(attributes_to_crop) = query.attributes_to_crop {
let default_length = query.crop_length.unwrap_or(200);
if let Some(attributes_to_crop) = &params.attributes_to_crop {
let default_length = params.crop_length.unwrap_or(200);
let mut final_attributes: HashMap<String, usize> = HashMap::new();
for attribute in attributes_to_crop.split(',') {
@ -106,7 +109,7 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
search_builder.attributes_to_crop(final_attributes);
}
if let Some(attributes_to_highlight) = query.attributes_to_highlight {
if let Some(attributes_to_highlight) = &params.attributes_to_highlight {
let mut final_attributes: HashSet<String> = HashSet::new();
for attribute in attributes_to_highlight.split(',') {
if attribute == "*" {
@ -125,144 +128,15 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
search_builder.attributes_to_highlight(final_attributes);
}
if let Some(filters) = query.filters {
search_builder.filters(filters);
if let Some(filters) = &params.filters {
search_builder.filters(filters.to_string());
}
if let Some(timeout_ms) = query.timeout_ms {
search_builder.timeout(Duration::from_millis(timeout_ms));
}
if let Some(matches) = query.matches {
if let Some(matches) = params.matches {
if matches {
search_builder.get_matches();
}
}
let response = match search_builder.search(&reader) {
Ok(response) => response,
Err(Error::Internal(message)) => return Err(ResponseError::Internal(message)),
Err(others) => return Err(ResponseError::bad_request(others)),
};
Ok(tide::Response::new(200).body_json(&response).unwrap())
}
#[derive(Clone, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SearchMultiBody {
indexes: HashSet<String>,
query: String,
offset: Option<usize>,
limit: Option<usize>,
attributes_to_retrieve: Option<HashSet<String>>,
searchable_attributes: Option<HashSet<String>>,
attributes_to_crop: Option<HashMap<String, usize>>,
attributes_to_highlight: Option<HashSet<String>>,
filters: Option<String>,
timeout_ms: Option<u64>,
matches: Option<bool>,
}
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
struct SearchMultiBodyResponse {
hits: HashMap<String, Vec<SearchHit>>,
offset: usize,
hits_per_page: usize,
processing_time_ms: usize,
query: String,
}
pub async fn search_multi_index(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Public)?;
let body = ctx
.body_json::<SearchMultiBody>()
.await
.map_err(ResponseError::bad_request)?;
let mut index_list = body.clone().indexes;
for index in index_list.clone() {
if index == "*" {
index_list = ctx.state().db.indexes_uids().into_iter().collect();
break;
}
}
let mut offset = 0;
let mut count = 20;
if let Some(body_offset) = body.offset {
if let Some(limit) = body.limit {
offset = body_offset;
count = limit;
}
}
let offset = offset;
let count = count;
let db = &ctx.state().db;
let par_body = body.clone();
let responses_per_index: Vec<SResult<_>> = index_list
.into_par_iter()
.map(move |index_uid| {
let index: Index = db
.open_index(&index_uid)
.ok_or(ResponseError::index_not_found(&index_uid))?;
let mut search_builder = index.new_search(par_body.query.clone());
search_builder.offset(offset);
search_builder.limit(count);
if let Some(attributes_to_retrieve) = par_body.attributes_to_retrieve.clone() {
search_builder.attributes_to_retrieve(attributes_to_retrieve);
}
if let Some(attributes_to_crop) = par_body.attributes_to_crop.clone() {
search_builder.attributes_to_crop(attributes_to_crop);
}
if let Some(attributes_to_highlight) = par_body.attributes_to_highlight.clone() {
search_builder.attributes_to_highlight(attributes_to_highlight);
}
if let Some(filters) = par_body.filters.clone() {
search_builder.filters(filters);
}
if let Some(timeout_ms) = par_body.timeout_ms {
search_builder.timeout(Duration::from_millis(timeout_ms));
}
if let Some(matches) = par_body.matches {
if matches {
search_builder.get_matches();
}
}
let reader = db.main_read_txn()?;
let response = search_builder.search(&reader)?;
Ok((index_uid, response))
})
.collect();
let mut hits_map = HashMap::new();
let mut max_query_time = 0;
for response in responses_per_index {
if let Ok((index_uid, response)) = response {
if response.processing_time_ms > max_query_time {
max_query_time = response.processing_time_ms;
}
hits_map.insert(index_uid, response.hits);
}
}
let response = SearchMultiBodyResponse {
hits: hits_map,
offset,
hits_per_page: count,
processing_time_ms: max_query_time,
query: body.query,
};
Ok(tide::Response::new(200).body_json(&response).unwrap())
Ok(HttpResponse::Ok().json(search_builder.search(&reader)?))
}
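Because SearchQuery keeps #[serde(rename_all = "camelCase")], the GET parameters parsed by web::Query stay camelCase. A hedged sketch of such a request built with the same TestRequest machinery as the helpers below; the "movies" uid and the parameter values are illustrative:
use actix_web::test;
fn main() {
    // Sketch only: the camelCase query parameters SearchQuery expects.
    let uri = "/indexes/movies/search?q=batman&limit=5&attributesToRetrieve=title,overview&attributesToHighlight=title";
    let req = test::TestRequest::get().uri(uri).to_request();
    assert_eq!(req.uri().path(), "/indexes/movies/search");
}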

View File

@ -1,18 +1,66 @@
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post};
use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES};
use std::collections::{BTreeMap, BTreeSet, HashSet};
use tide::{Request, Response};
use crate::error::{ResponseError, SResult};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::routes::document::IndexUpdateResponse;
use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data;
pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(update_all)
.service(get_all)
.service(delete_all)
.service(get_rules)
.service(update_rules)
.service(delete_rules)
.service(get_distinct)
.service(update_distinct)
.service(delete_distinct)
.service(get_searchable)
.service(update_searchable)
.service(delete_searchable)
.service(get_displayed)
.service(update_displayed)
.service(delete_displayed)
.service(get_accept_new_fields)
.service(update_accept_new_fields);
}
#[post("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
async fn update_all(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Settings>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?;
let settings = body
.into_inner()
.into_update()
.map_err(ResponseError::bad_request)?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
#[get("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
async fn get_all(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let stop_words_fst = index.main.stop_words_fst(&reader)?;
let stop_words = stop_words_fst.unwrap_or_default().stream().into_strs()?;
@ -46,14 +94,14 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
let searchable_attributes = schema.clone().map(|s| {
s.indexed_name()
.iter()
.map(|s| (*s).to_string())
.map(|s| s.to_string())
.collect::<Vec<String>>()
});
let displayed_attributes = schema.clone().map(|s| {
s.displayed_name()
.iter()
.map(|s| (*s).to_string())
.map(|s| s.to_string())
.collect::<HashSet<String>>()
});
@ -69,30 +117,19 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
accept_new_fields: Some(accept_new_fields),
};
Ok(tide::Response::new(200).body_json(&settings).unwrap())
Ok(HttpResponse::Ok().json(settings))
}
pub async fn update_all(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let settings: Settings =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
let settings = settings.into_update().map_err(ResponseError::bad_request)?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn delete_all(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
#[delete("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
async fn delete_all(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?;
let settings = SettingsUpdate {
ranking_rules: UpdateState::Clear,
@ -106,18 +143,24 @@ pub async fn delete_all(ctx: Request<Data>) -> SResult<Response> {
};
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn get_rules(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
#[get(
"/indexes/{index_uid}/settings/ranking-rules",
wrap = "Authentication::Private"
)]
async fn get_rules(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let ranking_rules = index
.main
@ -127,35 +170,49 @@ pub async fn get_rules(ctx: Request<Data>) -> SResult<Response> {
.map(|r| r.to_string())
.collect::<Vec<String>>();
Ok(tide::Response::new(200).body_json(&ranking_rules).unwrap())
Ok(HttpResponse::Ok().json(ranking_rules))
}
pub async fn update_rules(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let ranking_rules: Option<Vec<String>> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
#[post(
"/indexes/{index_uid}/settings/ranking-rules",
wrap = "Authentication::Private"
)]
async fn update_rules(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Option<Vec<String>>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = Settings {
ranking_rules: Some(ranking_rules),
ranking_rules: Some(body.into_inner()),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(ResponseError::bad_request)?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn delete_rules(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
#[delete(
"/indexes/{index_uid}/settings/ranking-rules",
wrap = "Authentication::Private"
)]
async fn delete_rules(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?;
let settings = SettingsUpdate {
ranking_rules: UpdateState::Clear,
@ -166,49 +223,67 @@ pub async fn delete_rules(ctx: Request<Data>) -> SResult<Response> {
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn get_distinct(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
#[get(
"/indexes/{index_uid}/settings/distinct-attribute",
wrap = "Authentication::Private"
)]
async fn get_distinct(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let distinct_attribute = index.main.distinct_attribute(&reader)?;
Ok(tide::Response::new(200)
.body_json(&distinct_attribute)
.unwrap())
Ok(HttpResponse::Ok().json(distinct_attribute))
}
pub async fn update_distinct(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let distinct_attribute: Option<String> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
#[post(
"/indexes/{index_uid}/settings/distinct-attribute",
wrap = "Authentication::Private"
)]
async fn update_distinct(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Option<String>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = Settings {
distinct_attribute: Some(distinct_attribute),
distinct_attribute: Some(body.into_inner()),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(ResponseError::bad_request)?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn delete_distinct(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
#[delete(
"/indexes/{index_uid}/settings/distinct-attribute",
wrap = "Authentication::Private"
)]
async fn delete_distinct(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let mut writer = data.db.update_write_txn()?;
let settings = SettingsUpdate {
distinct_attribute: UpdateState::Clear,
@ -219,156 +294,199 @@ pub async fn delete_distinct(ctx: Request<Data>) -> SResult<Response> {
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn get_searchable(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
#[get(
"/indexes/{index_uid}/settings/searchable-attributes",
wrap = "Authentication::Private"
)]
async fn get_searchable(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let schema = index.main.schema(&reader)?;
let searchable_attributes: Option<Vec<String>> =
schema.map(|s| s.indexed_name().iter().map(|i| (*i).to_string()).collect());
schema.map(|s| s.indexed_name().iter().map(|i| i.to_string()).collect());
Ok(tide::Response::new(200)
.body_json(&searchable_attributes)
.unwrap())
Ok(HttpResponse::Ok().json(searchable_attributes))
}
pub async fn update_searchable(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let searchable_attributes: Option<Vec<String>> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
#[post(
"/indexes/{index_uid}/settings/searchable-attributes",
wrap = "Authentication::Private"
)]
async fn update_searchable(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Option<Vec<String>>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = Settings {
searchable_attributes: Some(searchable_attributes),
searchable_attributes: Some(body.into_inner()),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(ResponseError::bad_request)?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn delete_searchable(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
#[delete(
"/indexes/{index_uid}/settings/searchable-attributes",
wrap = "Authentication::Private"
)]
async fn delete_searchable(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = SettingsUpdate {
searchable_attributes: UpdateState::Clear,
..SettingsUpdate::default()
};
let mut writer = db.update_write_txn()?;
let mut writer = data.db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn displayed(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
#[get(
"/indexes/{index_uid}/settings/displayed-attributes",
wrap = "Authentication::Private"
)]
async fn get_displayed(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let schema = index.main.schema(&reader)?;
let displayed_attributes: Option<HashSet<String>> = schema.map(|s| {
s.displayed_name()
.iter()
.map(|i| (*i).to_string())
.collect()
});
let displayed_attributes: Option<HashSet<String>> =
schema.map(|s| s.displayed_name().iter().map(|i| i.to_string()).collect());
Ok(tide::Response::new(200)
.body_json(&displayed_attributes)
.unwrap())
Ok(HttpResponse::Ok().json(displayed_attributes))
}
pub async fn update_displayed(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let displayed_attributes: Option<HashSet<String>> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
#[post(
"/indexes/{index_uid}/settings/displayed-attributes",
wrap = "Authentication::Private"
)]
async fn update_displayed(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Option<HashSet<String>>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = Settings {
displayed_attributes: Some(displayed_attributes),
displayed_attributes: Some(body.into_inner()),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(ResponseError::bad_request)?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn delete_displayed(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
#[delete(
"/indexes/{index_uid}/settings/displayed-attributes",
wrap = "Authentication::Private"
)]
async fn delete_displayed(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = SettingsUpdate {
displayed_attributes: UpdateState::Clear,
..SettingsUpdate::default()
};
let mut writer = db.update_write_txn()?;
let mut writer = data.db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn get_accept_new_fields(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
#[get(
"/indexes/{index_uid}/settings/accept-new-fields",
wrap = "Authentication::Private"
)]
async fn get_accept_new_fields(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let schema = index.main.schema(&reader)?;
let accept_new_fields = schema.map(|s| s.accept_new_fields());
Ok(tide::Response::new(200)
.body_json(&accept_new_fields)
.unwrap())
Ok(HttpResponse::Ok().json(accept_new_fields))
}
pub async fn update_accept_new_fields(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let accept_new_fields: Option<bool> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
#[post(
"/indexes/{index_uid}/settings/accept-new-fields",
wrap = "Authentication::Private"
)]
async fn update_accept_new_fields(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Option<bool>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = Settings {
accept_new_fields: Some(accept_new_fields),
accept_new_fields: Some(body.into_inner()),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let mut writer = data.db.update_write_txn()?;
let settings = settings.into_update().map_err(ResponseError::bad_request)?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
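A hedged sketch of a body the new POST /indexes/{index_uid}/settings route would accept through web::Json<Settings>; the field casing follows the camelCase convention used elsewhere in this API, and the values (including the ranking-rule names) are illustrative. Omitted fields stay untouched, since every Settings field is an Option folded into a partial update:
use serde_json::json;
fn main() {
    // Sketch only: a partial settings payload.
    let body = json!({
        "rankingRules": ["typo", "words", "proximity", "attribute", "wordsPosition", "exactness"],
        "distinctAttribute": null,
        "searchableAttributes": ["title", "overview"],
        "displayedAttributes": ["title", "overview"],
        "acceptNewFields": true
    });
    println!("{}", body);
}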

View File

@ -1,18 +1,28 @@
use std::collections::HashMap;
use actix_web::web;
use actix_web::HttpResponse;
use actix_web_macros::get;
use chrono::{DateTime, Utc};
use log::error;
use pretty_bytes::converter::convert;
use serde::Serialize;
use sysinfo::{NetworkExt, Pid, ProcessExt, ProcessorExt, System, SystemExt};
use tide::{Request, Response};
use sysinfo::{NetworkExt, ProcessExt, ProcessorExt, System, SystemExt};
use walkdir::WalkDir;
use crate::error::{IntoInternalError, SResult};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(index_stats)
.service(get_stats)
.service(get_version)
.service(get_sys_info)
.service(get_sys_info_pretty);
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexStatsResponse {
@ -21,26 +31,35 @@ struct IndexStatsResponse {
fields_frequency: HashMap<String, usize>,
}
pub async fn index_stats(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let index_uid = ctx.url_param("index")?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let update_reader = db.update_read_txn()?;
let number_of_documents = index.main.number_of_documents(&reader)?;
let fields_frequency = index.main.fields_frequency(&reader)?.unwrap_or_default();
let is_indexing = ctx
.state()
.is_indexing(&update_reader, &index_uid)?
.into_internal_error()?;
#[get("/indexes/{index_uid}/stats", wrap = "Authentication::Private")]
async fn index_stats(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let response = IndexStatsResponse {
let reader = data.db.main_read_txn()?;
let number_of_documents = index.main.number_of_documents(&reader)?;
let fields_frequency = index.main.fields_frequency(&reader)?.unwrap_or_default();
let update_reader = data.db.update_read_txn()?;
let is_indexing =
data.is_indexing(&update_reader, &path.index_uid)?
.ok_or(ResponseError::internal(
"Impossible to know if the database is indexing",
))?;
Ok(HttpResponse::Ok().json(IndexStatsResponse {
number_of_documents,
is_indexing,
fields_frequency,
};
Ok(tide::Response::new(200).body_json(&response).unwrap())
}))
}
#[derive(Serialize)]
@ -51,29 +70,25 @@ struct StatsResult {
indexes: HashMap<String, IndexStatsResponse>,
}
pub async fn get_stats(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
#[get("/stats", wrap = "Authentication::Private")]
async fn get_stats(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let mut index_list = HashMap::new();
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let update_reader = db.update_read_txn()?;
let reader = data.db.main_read_txn()?;
let update_reader = data.db.update_read_txn()?;
let indexes_set = ctx.state().db.indexes_uids();
let indexes_set = data.db.indexes_uids();
for index_uid in indexes_set {
let index = ctx.state().db.open_index(&index_uid);
let index = data.db.open_index(&index_uid);
match index {
Some(index) => {
let number_of_documents = index.main.number_of_documents(&reader)?;
let fields_frequency = index.main.fields_frequency(&reader)?.unwrap_or_default();
let is_indexing = ctx
.state()
.is_indexing(&update_reader, &index_uid)?
.into_internal_error()?;
let is_indexing = data.is_indexing(&update_reader, &index_uid)?.ok_or(
ResponseError::internal("Impossible to know if the database is indexing"),
)?;
let response = IndexStatsResponse {
number_of_documents,
@ -89,22 +104,20 @@ pub async fn get_stats(ctx: Request<Data>) -> SResult<Response> {
}
}
let database_size = WalkDir::new(ctx.state().db_path.clone())
let database_size = WalkDir::new(&data.db_path)
.into_iter()
.filter_map(|entry| entry.ok())
.filter_map(|entry| entry.metadata().ok())
.filter(|metadata| metadata.is_file())
.fold(0, |acc, m| acc + m.len());
let last_update = ctx.state().last_update(&reader)?;
let last_update = data.last_update(&reader)?;
let response = StatsResult {
Ok(HttpResponse::Ok().json(StatsResult {
database_size,
last_update,
indexes: index_list,
};
Ok(tide::Response::new(200).body_json(&response).unwrap())
}))
}
#[derive(Serialize)]
@ -115,20 +128,18 @@ struct VersionResponse {
pkg_version: String,
}
pub async fn get_version(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let response = VersionResponse {
#[get("/version", wrap = "Authentication::Private")]
async fn get_version() -> HttpResponse {
HttpResponse::Ok().json(VersionResponse {
commit_sha: env!("VERGEN_SHA").to_string(),
build_date: env!("VERGEN_BUILD_TIMESTAMP").to_string(),
pkg_version: env!("CARGO_PKG_VERSION").to_string(),
};
Ok(tide::Response::new(200).body_json(&response).unwrap())
})
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysGlobal {
struct SysGlobal {
total_memory: u64,
used_memory: u64,
total_swap: u64,
@ -152,7 +163,7 @@ impl SysGlobal {
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysProcess {
struct SysProcess {
memory: u64,
cpu: f32,
}
@ -168,7 +179,7 @@ impl SysProcess {
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysInfo {
struct SysInfo {
memory_usage: f64,
processor_usage: Vec<f32>,
global: SysGlobal,
@ -186,7 +197,8 @@ impl SysInfo {
}
}
pub(crate) fn report(pid: Pid) -> SysInfo {
#[get("/sys-info", wrap = "Authentication::Private")]
async fn get_sys_info(data: web::Data<Data>) -> HttpResponse {
let mut sys = System::new();
let mut info = SysInfo::new();
@ -200,28 +212,29 @@ pub(crate) fn report(pid: Pid) -> SysInfo {
info.global.used_memory = sys.get_used_memory();
info.global.total_swap = sys.get_total_swap();
info.global.used_swap = sys.get_used_swap();
info.global.input_data = sys.get_networks().into_iter().map(|(_, n)| n.get_received()).sum::<u64>();
info.global.output_data = sys.get_networks().into_iter().map(|(_, n)| n.get_transmitted()).sum::<u64>();
info.global.input_data = sys
.get_networks()
.into_iter()
.map(|(_, n)| n.get_received())
.sum::<u64>();
info.global.output_data = sys
.get_networks()
.into_iter()
.map(|(_, n)| n.get_transmitted())
.sum::<u64>();
if let Some(process) = sys.get_process(pid) {
if let Some(process) = sys.get_process(data.server_pid) {
info.process.memory = process.memory();
info.process.cpu = process.cpu_usage() * 100.0;
}
sys.refresh_all();
info
}
pub async fn get_sys_info(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let response = report(ctx.state().server_pid);
Ok(tide::Response::new(200).body_json(&response).unwrap())
HttpResponse::Ok().json(info)
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysGlobalPretty {
struct SysGlobalPretty {
total_memory: String,
used_memory: String,
total_swap: String,
@ -245,7 +258,7 @@ impl SysGlobalPretty {
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysProcessPretty {
struct SysProcessPretty {
memory: String,
cpu: String,
}
@ -261,7 +274,7 @@ impl SysProcessPretty {
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysInfoPretty {
struct SysInfoPretty {
memory_usage: String,
processor_usage: Vec<String>,
global: SysGlobalPretty,
@ -279,7 +292,8 @@ impl SysInfoPretty {
}
}
pub(crate) fn report_pretty(pid: Pid) -> SysInfoPretty {
#[get("/sys-info/pretty", wrap = "Authentication::Private")]
async fn get_sys_info_pretty(data: web::Data<Data>) -> HttpResponse {
let mut sys = System::new();
let mut info = SysInfoPretty::new();
@ -297,21 +311,25 @@ pub(crate) fn report_pretty(pid: Pid) -> SysInfoPretty {
info.global.used_memory = convert(sys.get_used_memory() as f64 * 1024.0);
info.global.total_swap = convert(sys.get_total_swap() as f64 * 1024.0);
info.global.used_swap = convert(sys.get_used_swap() as f64 * 1024.0);
info.global.input_data = convert(sys.get_networks().into_iter().map(|(_, n)| n.get_received()).sum::<u64>() as f64);
info.global.output_data = convert(sys.get_networks().into_iter().map(|(_, n)| n.get_transmitted()).sum::<u64>() as f64);
info.global.input_data = convert(
sys.get_networks()
.into_iter()
.map(|(_, n)| n.get_received())
.sum::<u64>() as f64,
);
info.global.output_data = convert(
sys.get_networks()
.into_iter()
.map(|(_, n)| n.get_transmitted())
.sum::<u64>() as f64,
);
if let Some(process) = sys.get_process(pid) {
if let Some(process) = sys.get_process(data.server_pid) {
info.process.memory = convert(process.memory() as f64 * 1024.0);
info.process.cpu = format!("{:.1} %", process.cpu_usage() * 100.0);
}
sys.refresh_all();
info
}
pub async fn get_sys_info_pretty(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let response = report_pretty(ctx.state().server_pid);
Ok(tide::Response::new(200).body_json(&response).unwrap())
HttpResponse::Ok().json(info)
}
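For reference, a hedged sketch of the shape GET /stats now returns, following the camelCase renames on StatsResult and IndexStatsResponse; the index uid, the timestamp and every number are invented for illustration:
use serde_json::json;
fn main() {
    // Sketch only: databaseSize is in bytes, lastUpdate a chrono DateTime<Utc>.
    let example = json!({
        "databaseSize": 447819776,
        "lastUpdate": "2020-04-28T18:43:06Z",
        "indexes": {
            "movies": {
                "numberOfDocuments": 19654,
                "isIndexing": false,
                "fieldsFrequency": { "title": 19654, "overview": 19654 }
            }
        }
    });
    println!("{}", example);
}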

View File

@ -1,63 +1,83 @@
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post};
use meilisearch_core::settings::{SettingsUpdate, UpdateState};
use std::collections::BTreeSet;
use meilisearch_core::settings::{SettingsUpdate, UpdateState};
use tide::{Request, Response};
use crate::error::{ResponseError, SResult};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::routes::document::IndexUpdateResponse;
use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data;
pub async fn get(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get).service(update).service(delete);
}
#[get(
"/indexes/{index_uid}/settings/stop-words",
wrap = "Authentication::Private"
)]
async fn get(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let stop_words_fst = index.main.stop_words_fst(&reader)?;
let stop_words = stop_words_fst.unwrap_or_default().stream().into_strs()?;
Ok(tide::Response::new(200).body_json(&stop_words).unwrap())
Ok(HttpResponse::Ok().json(stop_words))
}
pub async fn update(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let data: BTreeSet<String> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
#[post(
"/indexes/{index_uid}/settings/stop-words",
wrap = "Authentication::Private"
)]
async fn update(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<BTreeSet<String>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = SettingsUpdate {
stop_words: UpdateState::Update(data),
stop_words: UpdateState::Update(body.into_inner()),
..SettingsUpdate::default()
};
let mut writer = data.db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn delete(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
#[delete(
"/indexes/{index_uid}/settings/stop-words",
wrap = "Authentication::Private"
)]
async fn delete(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = SettingsUpdate {
stop_words: UpdateState::Clear,
..SettingsUpdate::default()
};
let mut writer = data.db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}

View File

@ -1,29 +1,39 @@
use std::collections::BTreeMap;
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post};
use indexmap::IndexMap;
use meilisearch_core::settings::{SettingsUpdate, UpdateState};
use tide::{Request, Response};
use crate::error::{ResponseError, SResult};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::routes::document::IndexUpdateResponse;
use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data;
pub async fn get(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get).service(update).service(delete);
}
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
#[get(
"/indexes/{index_uid}/settings/synonyms",
wrap = "Authentication::Private"
)]
async fn get(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
let synonyms_fst = index.main.synonyms_fst(&reader)?.unwrap_or_default();
let synonyms_list = synonyms_fst.stream().into_strs()?;
let mut synonyms = IndexMap::new();
let index_synonyms = &index.synonyms;
for synonym in synonyms_list {
let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?;
@ -33,50 +43,57 @@ pub async fn get(ctx: Request<Data>) -> SResult<Response> {
}
}
Ok(tide::Response::new(200).body_json(&synonyms).unwrap())
Ok(HttpResponse::Ok().json(synonyms))
}
pub async fn update(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let data: BTreeMap<String, Vec<String>> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
#[post(
"/indexes/{index_uid}/settings/synonyms",
wrap = "Authentication::Private"
)]
async fn update(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<BTreeMap<String, Vec<String>>>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = SettingsUpdate {
synonyms: UpdateState::Update(data),
synonyms: UpdateState::Update(body.into_inner()),
..SettingsUpdate::default()
};
let mut writer = data.db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
pub async fn delete(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
#[delete(
"/indexes/{index_uid}/settings/synonyms",
wrap = "Authentication::Private"
)]
async fn delete(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(ResponseError::index_not_found(&path.index_uid))?;
let settings = SettingsUpdate {
synonyms: UpdateState::Clear,
..SettingsUpdate::default()
};
let mut writer = data.db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
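The synonyms body is a plain map from a word to its alternatives, matching the web::Json<BTreeMap<String, Vec<String>>> extractor above. A hedged sketch, with example words only:
use std::collections::BTreeMap;
fn main() {
    // Sketch only: serializes to {"wolverine":["logan","xmen"]}.
    let mut synonyms: BTreeMap<String, Vec<String>> = BTreeMap::new();
    synonyms.insert("wolverine".into(), vec!["logan".into(), "xmen".into()]);
    println!("{}", serde_json::to_string(&synonyms).unwrap());
}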

View File

@ -1,23 +1,17 @@
#![allow(dead_code)]
use http::StatusCode;
use serde_json::Value;
use serde_json::{json, Value};
use std::time::Duration;
use async_std::io::prelude::*;
use async_std::task::{block_on, sleep};
use http_service::Body;
use http_service_mock::{make_server, TestBackend};
use actix_web::{http::StatusCode, test};
use meilisearch_http::data::Data;
use meilisearch_http::option::Opt;
use meilisearch_http::routes;
use serde_json::json;
use tempdir::TempDir;
use tide::server::Service;
use tokio::time::delay_for;
pub struct Server {
uid: String,
mock: TestBackend<Service<Data>>,
data: Data,
}
impl Server {
@ -33,20 +27,16 @@ impl Server {
};
let data = Data::new(opt.clone());
let mut app = tide::with_state(data);
routes::load_routes(&mut app);
let http_server = app.into_http_service();
let mock = make_server(http_server).unwrap();
Server {
uid: uid.to_string(),
mock,
data,
}
}
pub fn wait_update_id(&mut self, update_id: u64) {
pub async fn wait_update_id(&mut self, update_id: u64) {
loop {
let (response, status_code) = self.get_update_status(update_id);
let (response, status_code) = self.get_update_status(update_id).await;
assert_eq!(status_code, 200);
if response["status"] == "processed" || response["status"] == "error" {
@ -54,350 +44,365 @@ impl Server {
return;
}
block_on(sleep(Duration::from_secs(1)));
delay_for(Duration::from_secs(1)).await;
}
}
// Global Http request GET/POST/DELETE async or sync
pub fn get_request(&mut self, url: &str) -> (Value, StatusCode) {
pub async fn get_request(&mut self, url: &str) -> (Value, StatusCode) {
eprintln!("get_request: {}", url);
let req = http::Request::get(url).body(Body::empty()).unwrap();
let res = self.mock.simulate(req).unwrap();
let mut app = test::init_service(meilisearch_http::create_app(&self.data)).await;
let req = test::TestRequest::get().uri(url).to_request();
let res = test::call_service(&mut app, req).await;
let status_code = res.status().clone();
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let response = serde_json::from_slice(&buf).unwrap_or_default();
let body = test::read_body(res).await;
let response = serde_json::from_slice(&body).unwrap_or_default();
(response, status_code)
}
pub fn post_request(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
pub async fn post_request(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
eprintln!("post_request: {}", url);
let body_bytes = body.to_string().into_bytes();
let req = http::Request::post(url)
.body(Body::from(body_bytes))
.unwrap();
let res = self.mock.simulate(req).unwrap();
let mut app = test::init_service(meilisearch_http::create_app(&self.data)).await;
let req = test::TestRequest::post()
.uri(url)
.set_json(&body)
.to_request();
let res = test::call_service(&mut app, req).await;
let status_code = res.status().clone();
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let response = serde_json::from_slice(&buf).unwrap_or_default();
let body = test::read_body(res).await;
let response = serde_json::from_slice(&body).unwrap_or_default();
(response, status_code)
}
pub fn post_request_async(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
pub async fn post_request_async(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
eprintln!("post_request_async: {}", url);
let (response, status_code) = self.post_request(url, body);
let (response, status_code) = self.post_request(url, body).await;
assert_eq!(status_code, 202);
assert!(response["updateId"].as_u64().is_some());
self.wait_update_id(response["updateId"].as_u64().unwrap());
self.wait_update_id(response["updateId"].as_u64().unwrap())
.await;
(response, status_code)
}
pub fn put_request(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
pub async fn put_request(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
eprintln!("put_request: {}", url);
let body_bytes = body.to_string().into_bytes();
let req = http::Request::put(url)
.body(Body::from(body_bytes))
.unwrap();
let res = self.mock.simulate(req).unwrap();
let mut app = test::init_service(meilisearch_http::create_app(&self.data)).await;
let req = test::TestRequest::put()
.uri(url)
.set_json(&body)
.to_request();
let res = test::call_service(&mut app, req).await;
let status_code = res.status().clone();
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let response = serde_json::from_slice(&buf).unwrap_or_default();
let body = test::read_body(res).await;
let response = serde_json::from_slice(&body).unwrap_or_default();
(response, status_code)
}
pub fn put_request_async(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
pub async fn put_request_async(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
eprintln!("put_request_async: {}", url);
let (response, status_code) = self.put_request(url, body);
let (response, status_code) = self.put_request(url, body).await;
assert!(response["updateId"].as_u64().is_some());
assert_eq!(status_code, 202);
self.wait_update_id(response["updateId"].as_u64().unwrap());
self.wait_update_id(response["updateId"].as_u64().unwrap())
.await;
(response, status_code)
}
pub fn delete_request(&mut self, url: &str) -> (Value, StatusCode) {
pub async fn delete_request(&mut self, url: &str) -> (Value, StatusCode) {
eprintln!("delete_request: {}", url);
let req = http::Request::delete(url).body(Body::empty()).unwrap();
let res = self.mock.simulate(req).unwrap();
let mut app = test::init_service(meilisearch_http::create_app(&self.data)).await;
let req = test::TestRequest::delete().uri(url).to_request();
let res = test::call_service(&mut app, req).await;
let status_code = res.status().clone();
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let response = serde_json::from_slice(&buf).unwrap_or_default();
let body = test::read_body(res).await;
let response = serde_json::from_slice(&body).unwrap_or_default();
(response, status_code)
}
pub fn delete_request_async(&mut self, url: &str) -> (Value, StatusCode) {
pub async fn delete_request_async(&mut self, url: &str) -> (Value, StatusCode) {
eprintln!("delete_request_async: {}", url);
let (response, status_code) = self.delete_request(url);
let (response, status_code) = self.delete_request(url).await;
assert!(response["updateId"].as_u64().is_some());
assert_eq!(status_code, 202);
self.wait_update_id(response["updateId"].as_u64().unwrap());
self.wait_update_id(response["updateId"].as_u64().unwrap())
.await;
(response, status_code)
}
// All Routes
pub fn list_indexes(&mut self) -> (Value, StatusCode) {
self.get_request("/indexes")
pub async fn list_indexes(&mut self) -> (Value, StatusCode) {
self.get_request("/indexes").await
}
pub fn create_index(&mut self, body: Value) -> (Value, StatusCode) {
self.post_request("/indexes", body)
pub async fn create_index(&mut self, body: Value) -> (Value, StatusCode) {
self.post_request("/indexes", body).await
}
pub fn search_multi_index(&mut self, query: &str) -> (Value, StatusCode) {
pub async fn search_multi_index(&mut self, query: &str) -> (Value, StatusCode) {
let url = format!("/indexes/search?{}", query);
self.get_request(&url)
self.get_request(&url).await
}
pub fn get_index(&mut self) -> (Value, StatusCode) {
pub async fn get_index(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn update_index(&mut self, body: Value) -> (Value, StatusCode) {
pub async fn update_index(&mut self, body: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}", self.uid);
self.put_request(&url, body)
self.put_request(&url, body).await
}
pub fn delete_index(&mut self) -> (Value, StatusCode) {
pub async fn delete_index(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}", self.uid);
self.delete_request(&url)
self.delete_request(&url).await
}
pub fn search(&mut self, query: &str) -> (Value, StatusCode) {
pub async fn search(&mut self, query: &str) -> (Value, StatusCode) {
let url = format!("/indexes/{}/search?{}", self.uid, query);
self.get_request(&url)
self.get_request(&url).await
}
pub fn get_all_updates_status(&mut self) -> (Value, StatusCode) {
pub async fn get_all_updates_status(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/updates", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn get_update_status(&mut self, update_id: u64) -> (Value, StatusCode) {
pub async fn get_update_status(&mut self, update_id: u64) -> (Value, StatusCode) {
let url = format!("/indexes/{}/updates/{}", self.uid, update_id);
self.get_request(&url)
self.get_request(&url).await
}
pub fn get_all_documents(&mut self) -> (Value, StatusCode) {
pub async fn get_all_documents(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/documents", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn add_or_replace_multiple_documents(&mut self, body: Value) {
pub async fn add_or_replace_multiple_documents(&mut self, body: Value) {
let url = format!("/indexes/{}/documents", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn add_or_replace_multiple_documents_sync(&mut self, body: Value) -> (Value, StatusCode) {
pub async fn add_or_replace_multiple_documents_sync(
&mut self,
body: Value,
) -> (Value, StatusCode) {
let url = format!("/indexes/{}/documents", self.uid);
self.post_request(&url, body)
self.post_request(&url, body).await
}
pub fn add_or_update_multiple_documents(&mut self, body: Value) {
pub async fn add_or_update_multiple_documents(&mut self, body: Value) {
let url = format!("/indexes/{}/documents", self.uid);
self.put_request_async(&url, body);
self.put_request_async(&url, body).await;
}
pub fn clear_all_documents(&mut self) {
pub async fn clear_all_documents(&mut self) {
let url = format!("/indexes/{}/documents", self.uid);
self.delete_request_async(&url);
self.delete_request_async(&url).await;
}
pub fn get_document(&mut self, document_id: impl ToString) -> (Value, StatusCode) {
pub async fn get_document(&mut self, document_id: impl ToString) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/documents/{}",
self.uid,
document_id.to_string()
);
self.get_request(&url)
self.get_request(&url).await
}
pub fn delete_document(&mut self, document_id: impl ToString) -> (Value, StatusCode) {
pub async fn delete_document(&mut self, document_id: impl ToString) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/documents/{}",
self.uid,
document_id.to_string()
);
self.delete_request_async(&url)
self.delete_request_async(&url).await
}
pub fn delete_multiple_documents(&mut self, body: Value) {
pub async fn delete_multiple_documents(&mut self, body: Value) {
let url = format!("/indexes/{}/documents/delete-batch", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn get_all_settings(&mut self) -> (Value, StatusCode) {
pub async fn get_all_settings(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn update_all_settings(&mut self, body: Value) {
pub async fn update_all_settings(&mut self, body: Value) {
let url = format!("/indexes/{}/settings", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn delete_all_settings(&mut self) -> (Value, StatusCode) {
pub async fn delete_all_settings(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", self.uid);
self.delete_request_async(&url)
self.delete_request_async(&url).await
}
pub fn get_ranking_rules(&mut self) -> (Value, StatusCode) {
pub async fn get_ranking_rules(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/ranking-rules", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn update_ranking_rules(&mut self, body: Value) {
pub async fn update_ranking_rules(&mut self, body: Value) {
let url = format!("/indexes/{}/settings/ranking-rules", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn update_ranking_rules_sync(&mut self, body: Value) -> (Value, StatusCode) {
pub async fn update_ranking_rules_sync(&mut self, body: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/ranking-rules", self.uid);
self.post_request(&url, body)
self.post_request(&url, body).await
}
pub fn delete_ranking_rules(&mut self) -> (Value, StatusCode) {
pub async fn delete_ranking_rules(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/ranking-rules", self.uid);
self.delete_request_async(&url)
self.delete_request_async(&url).await
}
pub fn get_distinct_attribute(&mut self) -> (Value, StatusCode) {
pub async fn get_distinct_attribute(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/distinct-attribute", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn update_distinct_attribute(&mut self, body: Value) {
pub async fn update_distinct_attribute(&mut self, body: Value) {
let url = format!("/indexes/{}/settings/distinct-attribute", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn delete_distinct_attribute(&mut self) -> (Value, StatusCode) {
pub async fn delete_distinct_attribute(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/distinct-attribute", self.uid);
self.delete_request_async(&url)
self.delete_request_async(&url).await
}
pub fn get_primary_key(&mut self) -> (Value, StatusCode) {
pub async fn get_primary_key(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/primary_key", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn get_searchable_attributes(&mut self) -> (Value, StatusCode) {
pub async fn get_searchable_attributes(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/searchable-attributes", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn update_searchable_attributes(&mut self, body: Value) {
pub async fn update_searchable_attributes(&mut self, body: Value) {
let url = format!("/indexes/{}/settings/searchable-attributes", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn delete_searchable_attributes(&mut self) -> (Value, StatusCode) {
pub async fn delete_searchable_attributes(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/searchable-attributes", self.uid);
self.delete_request_async(&url)
self.delete_request_async(&url).await
}
pub fn get_displayed_attributes(&mut self) -> (Value, StatusCode) {
pub async fn get_displayed_attributes(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/displayed-attributes", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn update_displayed_attributes(&mut self, body: Value) {
pub async fn update_displayed_attributes(&mut self, body: Value) {
let url = format!("/indexes/{}/settings/displayed-attributes", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn delete_displayed_attributes(&mut self) -> (Value, StatusCode) {
pub async fn delete_displayed_attributes(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/displayed-attributes", self.uid);
self.delete_request_async(&url)
self.delete_request_async(&url).await
}
pub fn get_accept_new_fields(&mut self) -> (Value, StatusCode) {
pub async fn get_accept_new_fields(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/accept-new-fields", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn update_accept_new_fields(&mut self, body: Value) {
pub async fn update_accept_new_fields(&mut self, body: Value) {
let url = format!("/indexes/{}/settings/accept-new-fields", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn get_synonyms(&mut self) -> (Value, StatusCode) {
pub async fn get_synonyms(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/synonyms", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn update_synonyms(&mut self, body: Value) {
pub async fn update_synonyms(&mut self, body: Value) {
let url = format!("/indexes/{}/settings/synonyms", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn delete_synonyms(&mut self) -> (Value, StatusCode) {
pub async fn delete_synonyms(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/synonyms", self.uid);
self.delete_request_async(&url)
self.delete_request_async(&url).await
}
pub fn get_stop_words(&mut self) -> (Value, StatusCode) {
pub async fn get_stop_words(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/stop-words", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn update_stop_words(&mut self, body: Value) {
pub async fn update_stop_words(&mut self, body: Value) {
let url = format!("/indexes/{}/settings/stop-words", self.uid);
self.post_request_async(&url, body);
self.post_request_async(&url, body).await;
}
pub fn delete_stop_words(&mut self) -> (Value, StatusCode) {
pub async fn delete_stop_words(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/stop-words", self.uid);
self.delete_request_async(&url)
self.delete_request_async(&url).await
}
pub fn get_index_stats(&mut self) -> (Value, StatusCode) {
pub async fn get_index_stats(&mut self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/stats", self.uid);
self.get_request(&url)
self.get_request(&url).await
}
pub fn list_keys(&mut self) -> (Value, StatusCode) {
self.get_request("/keys")
pub async fn list_keys(&mut self) -> (Value, StatusCode) {
self.get_request("/keys").await
}
pub fn get_health(&mut self) -> (Value, StatusCode) {
self.get_request("/health")
pub async fn get_health(&mut self) -> (Value, StatusCode) {
self.get_request("/health").await
}
pub fn update_health(&mut self, body: Value) -> (Value, StatusCode) {
self.put_request("/health", body)
pub async fn update_health(&mut self, body: Value) -> (Value, StatusCode) {
self.put_request("/health", body).await
}
pub fn get_version(&mut self) -> (Value, StatusCode) {
self.get_request("/version")
pub async fn get_version(&mut self) -> (Value, StatusCode) {
self.get_request("/version").await
}
pub fn get_sys_info(&mut self) -> (Value, StatusCode) {
self.get_request("/sys-info")
pub async fn get_sys_info(&mut self) -> (Value, StatusCode) {
self.get_request("/sys-info").await
}
pub fn get_sys_info_pretty(&mut self) -> (Value, StatusCode) {
self.get_request("/sys-info/pretty")
pub async fn get_sys_info_pretty(&mut self) -> (Value, StatusCode) {
self.get_request("/sys-info/pretty").await
}
// Populate routes
pub fn populate_movies(&mut self) {
pub async fn populate_movies(&mut self) {
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
self.create_index(body);
self.create_index(body).await;
let body = json!({
"rankingRules": [
@ -436,12 +441,12 @@ impl Server {
"acceptNewFields": false,
});
self.update_all_settings(body);
self.update_all_settings(body).await;
let dataset = include_bytes!("assets/movies.json");
let body: Value = serde_json::from_slice(dataset).unwrap();
self.add_or_replace_multiple_documents(body);
self.add_or_replace_multiple_documents(body).await;
}
}
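
Note: the low-level helpers awaited above (get_request, post_request, put_request, delete_request_async, ...) are not shown in this excerpt. As a rough, hedged sketch only, an async GET helper could be written with actix-web 2's test utilities along these lines; build_app() is a hypothetical stand-in for the real application factory used by these tests:

use actix_web::test;
use serde_json::Value;

// Illustrative async GET helper; it shows the shape of the helpers awaited
// above, not the actual implementation from this PR. It also rebuilds the
// service on every call, which a real helper would avoid.
async fn get_request_sketch(url: &str) -> (Value, u16) {
    // build_app() is assumed to return the actix-web App under test.
    let mut app = test::init_service(build_app()).await;
    let req = test::TestRequest::get().uri(url).to_request();
    let res = test::call_service(&mut app, req).await;
    let status = res.status().as_u16();
    let body = test::read_body(res).await;
    (serde_json::from_slice(&body).unwrap_or(Value::Null), status)
}
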

View File

@ -3,8 +3,8 @@ use serde_json::json;
mod common;
// Test issue https://github.com/meilisearch/MeiliSearch/issues/519
#[test]
fn check_add_documents_with_primary_key_param() {
#[actix_rt::test]
async fn check_add_documents_with_primary_key_param() {
let mut server = common::Server::with_uid("movies");
// 1 - Create the index with no primary_key
@ -12,7 +12,7 @@ fn check_add_documents_with_primary_key_param() {
let body = json!({
"uid": "movies",
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -24,28 +24,28 @@ fn check_add_documents_with_primary_key_param() {
}]);
let url = "/indexes/movies/documents?primaryKey=title";
let (response, status_code) = server.post_request(&url, body);
let (response, status_code) = server.post_request(&url, body).await;
eprintln!("{:#?}", response);
assert_eq!(status_code, 202);
let update_id = response["updateId"].as_u64().unwrap();
server.wait_update_id(update_id);
server.wait_update_id(update_id).await;
// 3 - Check update success
let (response, status_code) = server.get_update_status(update_id);
let (response, status_code) = server.get_update_status(update_id).await;
assert_eq!(status_code, 200);
assert_eq!(response["status"], "processed");
}
// Test issue https://github.com/meilisearch/MeiliSearch/issues/568
#[test]
fn check_add_documents_with_nested_boolean() {
#[actix_rt::test]
async fn check_add_documents_with_nested_boolean() {
let mut server = common::Server::with_uid("tasks");
// 1 - Create the index with no primary_key
let body = json!({ "uid": "tasks" });
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -64,28 +64,28 @@ fn check_add_documents_with_nested_boolean() {
}]);
let url = "/indexes/tasks/documents";
let (response, status_code) = server.post_request(&url, body);
let (response, status_code) = server.post_request(&url, body).await;
eprintln!("{:#?}", response);
assert_eq!(status_code, 202);
let update_id = response["updateId"].as_u64().unwrap();
server.wait_update_id(update_id);
server.wait_update_id(update_id).await;
// 3 - Check update success
let (response, status_code) = server.get_update_status(update_id);
let (response, status_code) = server.get_update_status(update_id).await;
assert_eq!(status_code, 200);
assert_eq!(response["status"], "processed");
}
// Test issue https://github.com/meilisearch/MeiliSearch/issues/571
#[test]
fn check_add_documents_with_nested_null() {
#[actix_rt::test]
async fn check_add_documents_with_nested_null() {
let mut server = common::Server::with_uid("tasks");
// 1 - Create the index with no primary_key
let body = json!({ "uid": "tasks" });
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -99,28 +99,28 @@ fn check_add_documents_with_nested_null() {
}]);
let url = "/indexes/tasks/documents";
let (response, status_code) = server.post_request(&url, body);
let (response, status_code) = server.post_request(&url, body).await;
eprintln!("{:#?}", response);
assert_eq!(status_code, 202);
let update_id = response["updateId"].as_u64().unwrap();
server.wait_update_id(update_id);
server.wait_update_id(update_id).await;
// 3 - Check update success
let (response, status_code) = server.get_update_status(update_id);
let (response, status_code) = server.get_update_status(update_id).await;
assert_eq!(status_code, 200);
assert_eq!(response["status"], "processed");
}
// Test issue https://github.com/meilisearch/MeiliSearch/issues/574
#[test]
fn check_add_documents_with_nested_sequence() {
#[actix_rt::test]
async fn check_add_documents_with_nested_sequence() {
let mut server = common::Server::with_uid("tasks");
// 1 - Create the index with no primary_key
let body = json!({ "uid": "tasks" });
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -158,20 +158,20 @@ fn check_add_documents_with_nested_sequence() {
}]);
let url = "/indexes/tasks/documents";
let (response, status_code) = server.post_request(&url, body.clone());
let (response, status_code) = server.post_request(&url, body.clone()).await;
eprintln!("{:#?}", response);
assert_eq!(status_code, 202);
let update_id = response["updateId"].as_u64().unwrap();
server.wait_update_id(update_id);
server.wait_update_id(update_id).await;
// 3 - Check update success
let (response, status_code) = server.get_update_status(update_id);
let (response, status_code) = server.get_update_status(update_id).await;
assert_eq!(status_code, 200);
assert_eq!(response["status"], "processed");
let url = "/indexes/tasks/search?q=leesz";
let (response, status_code) = server.get_request(&url);
let (response, status_code) = server.get_request(&url).await;
assert_eq!(status_code, 200);
assert_eq!(response["hits"], body);
}
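
wait_update_id is called after every document addition above, but its body is not part of this excerpt. A hedged sketch of such a polling helper, built only from the get_update_status method shown earlier and assuming tokio 0.2's time utilities are available (actix-rt 1 runs on a tokio 0.2 runtime), might look like this; it is illustrative, not the helper from this PR:

use std::time::Duration;

// Illustrative polling loop in the spirit of wait_update_id.
async fn wait_until_processed(server: &mut common::Server, update_id: u64) {
    for _ in 0..100 {
        let (response, status_code) = server.get_update_status(update_id).await;
        if status_code == 200 && response["status"] == "processed" {
            return;
        }
        // tokio 0.2 spelling; later tokio versions renamed this to sleep().
        tokio::time::delay_for(Duration::from_millis(50)).await;
    }
    panic!("update {} was never processed", update_id);
}
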

View File

@ -1,31 +1,31 @@
mod common;
#[test]
fn delete() {
#[actix_rt::test]
async fn delete() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
let (_response, status_code) = server.get_document(419704);
let (_response, status_code) = server.get_document(419704).await;
assert_eq!(status_code, 200);
server.delete_document(419704);
server.delete_document(419704).await;
let (_response, status_code) = server.get_document(419704);
let (_response, status_code) = server.get_document(419704).await;
assert_eq!(status_code, 404);
}
// Resolve the issue https://github.com/meilisearch/MeiliSearch/issues/493
#[test]
fn delete_batch() {
#[actix_rt::test]
async fn delete_batch() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
let (_response, status_code) = server.get_document(419704);
let (_response, status_code) = server.get_document(419704).await;
assert_eq!(status_code, 200);
let body = serde_json::json!([419704, 512200, 181812]);
server.delete_multiple_documents(body);
server.delete_multiple_documents(body).await;
let (_response, status_code) = server.get_document(419704);
let (_response, status_code) = server.get_document(419704).await;
assert_eq!(status_code, 404);
}

View File

@ -3,36 +3,36 @@ use std::convert::Into;
mod common;
#[test]
fn test_healthyness() {
#[actix_rt::test]
async fn test_healthyness() {
let mut server = common::Server::with_uid("movies");
// Check that the server is healthy
let (_response, status_code) = server.get_health();
let (_response, status_code) = server.get_health().await;
assert_eq!(status_code, 200);
// Set the server unhealthy
let body = json!({
"health": false,
});
let (_response, status_code) = server.update_health(body);
let (_response, status_code) = server.update_health(body).await;
assert_eq!(status_code, 200);
// Check that the server is unhealthy
let (_response, status_code) = server.get_health();
let (_response, status_code) = server.get_health().await;
assert_eq!(status_code, 503);
// Set the server healthy
let body = json!({
"health": true,
});
let (_response, status_code) = server.update_health(body);
let (_response, status_code) = server.update_health(body).await;
assert_eq!(status_code, 200);
// Check if the server is healthy
let (_response, status_code) = server.get_health();
let (_response, status_code) = server.get_health().await;
assert_eq!(status_code, 200);
}
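
For context on the 200/503 statuses asserted above, here is a minimal hedged sketch of a health handler in actix-web 2; the handler name and the AtomicBool flag are illustrative assumptions, not the routes changed by this PR:

use std::sync::atomic::{AtomicBool, Ordering};
use actix_web::{web, HttpResponse};

// Hypothetical GET /health handler: 200 when the shared flag is true,
// 503 Service Unavailable otherwise, matching the statuses checked above.
async fn get_health_sketch(health: web::Data<AtomicBool>) -> HttpResponse {
    if health.load(Ordering::Relaxed) {
        HttpResponse::Ok().finish()
    } else {
        HttpResponse::ServiceUnavailable().finish()
    }
}
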

View File

@ -4,8 +4,8 @@ use serde_json::Value;
mod common;
#[test]
fn create_index_with_name() {
#[actix_rt::test]
async fn create_index_with_name() {
let mut server = common::Server::with_uid("movies");
// 1 - Create a new index
@ -14,7 +14,7 @@ fn create_index_with_name() {
"name": "movies",
});
let (res1_value, status_code) = server.create_index(body);
let (res1_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(res1_value.as_object().unwrap().len(), 5);
@ -29,7 +29,7 @@ fn create_index_with_name() {
// 2 - Check the list of indexes
let (res2_value, status_code) = server.list_indexes();
let (res2_value, status_code) = server.list_indexes().await;
assert_eq!(status_code, 200);
assert_eq!(res2_value.as_array().unwrap().len(), 1);
@ -44,8 +44,8 @@ fn create_index_with_name() {
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
}
#[test]
fn create_index_with_uid() {
#[actix_rt::test]
async fn create_index_with_uid() {
let mut server = common::Server::with_uid("movies");
// 1 - Create a new index
@ -54,7 +54,7 @@ fn create_index_with_uid() {
"uid": "movies",
});
let (res1_value, status_code) = server.create_index(body);
let (res1_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(res1_value.as_object().unwrap().len(), 5);
@ -69,7 +69,7 @@ fn create_index_with_uid() {
// 2 - Check the list of indexes
let (res2_value, status_code) = server.list_indexes();
let (res2_value, status_code) = server.list_indexes().await;
assert_eq!(status_code, 200);
assert_eq!(res2_value.as_array().unwrap().len(), 1);
@ -84,8 +84,8 @@ fn create_index_with_uid() {
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
}
#[test]
fn create_index_with_name_and_uid() {
#[actix_rt::test]
async fn create_index_with_name_and_uid() {
let mut server = common::Server::with_uid("movies");
// 1 - Create a new index
@ -94,7 +94,7 @@ fn create_index_with_name_and_uid() {
"name": "Films",
"uid": "fr_movies",
});
let (res1_value, status_code) = server.create_index(body);
let (res1_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(res1_value.as_object().unwrap().len(), 5);
@ -109,7 +109,7 @@ fn create_index_with_name_and_uid() {
// 2 - Check the list of indexes
let (res2_value, status_code) = server.list_indexes();
let (res2_value, status_code) = server.list_indexes().await;
assert_eq!(status_code, 200);
assert_eq!(res2_value.as_array().unwrap().len(), 1);
@ -124,8 +124,8 @@ fn create_index_with_name_and_uid() {
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
}
#[test]
fn rename_index() {
#[actix_rt::test]
async fn rename_index() {
let mut server = common::Server::with_uid("movies");
// 1 - Create a new index
@ -135,7 +135,7 @@ fn rename_index() {
"uid": "movies",
});
let (res1_value, status_code) = server.create_index(body);
let (res1_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(res1_value.as_object().unwrap().len(), 5);
@ -154,7 +154,7 @@ fn rename_index() {
"name": "TV Shows",
});
let (res2_value, status_code) = server.update_index(body);
let (res2_value, status_code) = server.update_index(body).await;
assert_eq!(status_code, 200);
assert_eq!(res2_value.as_object().unwrap().len(), 5);
@ -169,7 +169,7 @@ fn rename_index() {
// 3 - Check the list of indexes
let (res3_value, status_code) = server.list_indexes();
let (res3_value, status_code) = server.list_indexes().await;
assert_eq!(status_code, 200);
assert_eq!(res3_value.as_array().unwrap().len(), 1);
@ -184,8 +184,8 @@ fn rename_index() {
assert_eq!(r3_updated_at.len(), r2_updated_at.len());
}
#[test]
fn delete_index_and_recreate_it() {
#[actix_rt::test]
async fn delete_index_and_recreate_it() {
let mut server = common::Server::with_uid("movies");
// 1 - Create a new index
@ -195,7 +195,7 @@ fn delete_index_and_recreate_it() {
"uid": "movies",
});
let (res1_value, status_code) = server.create_index(body);
let (res1_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(res1_value.as_object().unwrap().len(), 5);
@ -210,7 +210,7 @@ fn delete_index_and_recreate_it() {
// 2 - Check the list of indexes
let (res2_value, status_code) = server.list_indexes();
let (res2_value, status_code) = server.list_indexes().await;
assert_eq!(status_code, 200);
assert_eq!(res2_value.as_array().unwrap().len(), 1);
@ -226,13 +226,13 @@ fn delete_index_and_recreate_it() {
// 3- Delete an index
let (_res2_value, status_code) = server.delete_index();
let (_res2_value, status_code) = server.delete_index().await;
assert_eq!(status_code, 204);
// 4 - Check the list of indexes
let (res2_value, status_code) = server.list_indexes();
let (res2_value, status_code) = server.list_indexes().await;
assert_eq!(status_code, 200);
assert_eq!(res2_value.as_array().unwrap().len(), 0);
@ -243,7 +243,7 @@ fn delete_index_and_recreate_it() {
"name": "movies",
});
let (res1_value, status_code) = server.create_index(body);
let (res1_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(res1_value.as_object().unwrap().len(), 5);
@ -258,7 +258,7 @@ fn delete_index_and_recreate_it() {
// 6 - Check the list of indexes
let (res2_value, status_code) = server.list_indexes();
let (res2_value, status_code) = server.list_indexes().await;
assert_eq!(status_code, 200);
assert_eq!(res2_value.as_array().unwrap().len(), 1);
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
@ -272,8 +272,8 @@ fn delete_index_and_recreate_it() {
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
}
#[test]
fn check_multiples_indexes() {
#[actix_rt::test]
async fn check_multiples_indexes() {
let mut server = common::Server::with_uid("movies");
// 1 - Create a new index
@ -282,7 +282,7 @@ fn check_multiples_indexes() {
"name": "movies",
});
let (res1_value, status_code) = server.create_index(body);
let (res1_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(res1_value.as_object().unwrap().len(), 5);
@ -297,7 +297,7 @@ fn check_multiples_indexes() {
// 2 - Check the list of indexes
let (res2_value, status_code) = server.list_indexes();
let (res2_value, status_code) = server.list_indexes().await;
assert_eq!(status_code, 200);
assert_eq!(res2_value.as_array().unwrap().len(), 1);
@ -317,7 +317,7 @@ fn check_multiples_indexes() {
"name": "films",
});
let (res3_value, status_code) = server.create_index(body);
let (res3_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(res3_value.as_object().unwrap().len(), 5);
@ -332,7 +332,7 @@ fn check_multiples_indexes() {
// 4 - Check the list of indexes
let (res4_value, status_code) = server.list_indexes();
let (res4_value, status_code) = server.list_indexes().await;
assert_eq!(status_code, 200);
assert_eq!(res4_value.as_array().unwrap().len(), 2);
@ -370,15 +370,15 @@ fn check_multiples_indexes() {
}
}
#[test]
fn create_index_failed() {
#[actix_rt::test]
async fn create_index_failed() {
let mut server = common::Server::with_uid("movies");
// 2 - Push index creation with empty json body
let body = json!({});
let (res_value, status_code) = server.create_index(body);
let (res_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 400);
let message = res_value["message"].as_str().unwrap();
@ -392,12 +392,9 @@ fn create_index_failed() {
"active": true
});
let (res_value, status_code) = server.create_index(body);
let (_res_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 400);
let message = res_value["message"].as_str().unwrap();
assert_eq!(res_value.as_object().unwrap().len(), 1);
assert_eq!(message, "invalid data");
// 3 - Create an index with a wrong data type
@ -406,17 +403,14 @@ fn create_index_failed() {
"uid": 0
});
let (res_value, status_code) = server.create_index(body);
let (_res_value, status_code) = server.create_index(body).await;
assert_eq!(status_code, 400);
let message = res_value["message"].as_str().unwrap();
assert_eq!(res_value.as_object().unwrap().len(), 1);
assert_eq!(message, "invalid data");
}
// Resolve issue https://github.com/meilisearch/MeiliSearch/issues/492
#[test]
fn create_index_with_primary_key_and_index() {
#[actix_rt::test]
async fn create_index_with_primary_key_and_index() {
let mut server = common::Server::with_uid("movies");
// 1 - Create the index
@ -426,7 +420,7 @@ fn create_index_with_primary_key_and_index() {
"primaryKey": "id",
});
let (_response, status_code) = server.create_index(body);
let (_response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
// 2 - Add content
@ -436,11 +430,11 @@ fn create_index_with_primary_key_and_index() {
"text": "The mask"
}]);
server.add_or_replace_multiple_documents(body.clone());
server.add_or_replace_multiple_documents(body.clone()).await;
// 3 - Retrieve the document
let (response, _status_code) = server.get_document(123);
let (response, _status_code) = server.get_document(123).await;
let expect = json!({
"id": 123,
@ -454,8 +448,8 @@ fn create_index_with_primary_key_and_index() {
// Test when the given index uid is not valid
// Should have a 400 status code
// Should have the right error message
#[test]
fn create_index_with_invalid_uid() {
#[actix_rt::test]
async fn create_index_with_invalid_uid() {
let mut server = common::Server::with_uid("");
// 1 - Create the index with invalid uid
@ -464,7 +458,7 @@ fn create_index_with_invalid_uid() {
"uid": "the movies"
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 400);
let message = response["message"].as_str().unwrap();
@ -477,7 +471,7 @@ fn create_index_with_invalid_uid() {
"uid": "%$#"
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 400);
let message = response["message"].as_str().unwrap();
@ -490,7 +484,7 @@ fn create_index_with_invalid_uid() {
"uid": "the~movies"
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 400);
let message = response["message"].as_str().unwrap();
@ -503,7 +497,7 @@ fn create_index_with_invalid_uid() {
"uid": "🎉"
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 400);
let message = response["message"].as_str().unwrap();
@ -512,8 +506,8 @@ fn create_index_with_invalid_uid() {
}
// Test that it's possible to add a primary_key if it's not already set on index creation
#[test]
fn create_index_and_add_indentifier_after() {
#[actix_rt::test]
async fn create_index_and_add_indentifier_after() {
let mut server = common::Server::with_uid("movies");
// 1 - Create the index with no primary_key
@ -521,7 +515,7 @@ fn create_index_and_add_indentifier_after() {
let body = json!({
"uid": "movies",
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -531,21 +525,21 @@ fn create_index_and_add_indentifier_after() {
"primaryKey": "id",
});
let (response, status_code) = server.update_index(body);
let (response, status_code) = server.update_index(body).await;
assert_eq!(status_code, 200);
eprintln!("response: {:#?}", response);
assert_eq!(response["primaryKey"].as_str().unwrap(), "id");
// 3 - Get the index to verify that the primary_key is correct
let (response, status_code) = server.get_index();
let (response, status_code) = server.get_index().await;
assert_eq!(status_code, 200);
assert_eq!(response["primaryKey"].as_str().unwrap(), "id");
}
// Test that it's impossible to change the primary_key
#[test]
fn create_index_and_update_indentifier_after() {
#[actix_rt::test]
async fn create_index_and_update_indentifier_after() {
let mut server = common::Server::with_uid("movies");
// 1 - Create the index with no primary_key
@ -554,7 +548,7 @@ fn create_index_and_update_indentifier_after() {
"uid": "movies",
"primaryKey": "id",
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"].as_str().unwrap(), "id");
@ -564,19 +558,19 @@ fn create_index_and_update_indentifier_after() {
"primaryKey": "skuid",
});
let (_response, status_code) = server.update_index(body);
let (_response, status_code) = server.update_index(body).await;
assert_eq!(status_code, 400);
// 3 - Get the index to verify that the primary_key is still the first one
let (response, status_code) = server.get_index();
let (response, status_code) = server.get_index().await;
assert_eq!(status_code, 200);
assert_eq!(response["primaryKey"].as_str().unwrap(), "id");
}
// Test that schema inference works well
#[test]
fn create_index_without_primary_key_and_add_document() {
#[actix_rt::test]
async fn create_index_without_primary_key_and_add_document() {
let mut server = common::Server::with_uid("movies");
// 1 - Create the index with no primary_key
@ -584,7 +578,7 @@ fn create_index_without_primary_key_and_add_document() {
let body = json!({
"uid": "movies",
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -595,18 +589,18 @@ fn create_index_without_primary_key_and_add_document() {
"title": "I'm a legend",
}]);
server.add_or_update_multiple_documents(body);
server.add_or_update_multiple_documents(body).await;
// 3 - Get the index to verify that the primary_key is correct
let (response, status_code) = server.get_index();
let (response, status_code) = server.get_index().await;
assert_eq!(status_code, 200);
assert_eq!(response["primaryKey"].as_str().unwrap(), "id");
}
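
The inference behaviour exercised here and in the next tests (a field whose name contains "id" is picked from the first document, otherwise the update fails with "Could not infer a primary key") can be summarised by the following hedged sketch; it mirrors the behaviour observed in these tests only and is not MeiliSearch's actual implementation:

// Illustrative only: pick the first attribute whose name contains "id",
// case-insensitively; None corresponds to the "Could not infer a primary
// key" error tested below.
fn infer_primary_key_sketch(first_document: &serde_json::Value) -> Option<String> {
    first_document
        .as_object()?
        .keys()
        .find(|key| key.to_lowercase().contains("id"))
        .cloned()
}
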
// Test search with no primary_key
#[test]
fn create_index_without_primary_key_and_search() {
#[actix_rt::test]
async fn create_index_without_primary_key_and_search() {
let mut server = common::Server::with_uid("movies");
// 1 - Create the index with no primary_key
@ -614,7 +608,7 @@ fn create_index_without_primary_key_and_search() {
let body = json!({
"uid": "movies",
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -622,15 +616,15 @@ fn create_index_without_primary_key_and_search() {
let query = "q=captain&limit=3";
let (response, status_code) = server.search(&query);
let (response, status_code) = server.search(&query).await;
assert_eq!(status_code, 200);
assert_eq!(response["hits"].as_array().unwrap().len(), 0);
}
// Test the error message when we push a document update and no primary key can be inferred
// Test issue https://github.com/meilisearch/MeiliSearch/issues/517
#[test]
fn check_add_documents_without_primary_key() {
#[actix_rt::test]
async fn check_add_documents_without_primary_key() {
let mut server = common::Server::with_uid("movies");
// 1 - Create the index with no primary_key
@ -638,7 +632,7 @@ fn check_add_documents_without_primary_key() {
let body = json!({
"uid": "movies",
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -649,7 +643,7 @@ fn check_add_documents_without_primary_key() {
"comment": "comment test"
}]);
let (response, status_code) = server.add_or_replace_multiple_documents_sync(body);
let (response, status_code) = server.add_or_replace_multiple_documents_sync(body).await;
let expected = json!({
"message": "Could not infer a primary key"
@ -659,8 +653,8 @@ fn check_add_documents_without_primary_key() {
assert_json_eq!(response, expected, ordered: false);
}
#[test]
fn check_first_update_should_bring_up_processed_status_after_first_docs_addition(){
#[actix_rt::test]
async fn check_first_update_should_bring_up_processed_status_after_first_docs_addition() {
let mut server = common::Server::with_uid("movies");
let body = json!({
@ -668,7 +662,7 @@ fn check_first_update_should_bring_up_processed_status_after_first_docs_addition
});
// 1. Create Index
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -677,10 +671,10 @@ fn check_first_update_should_bring_up_processed_status_after_first_docs_addition
let body: Value = serde_json::from_slice(dataset).unwrap();
// 2. Index the documents from movies.json, present inside the assets directory
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 3. Fetch the status of the indexing done above.
let (response, status_code) = server.get_all_updates_status();
let (response, status_code) = server.get_all_updates_status().await;
// 4. Verify the fetch is successful and indexing status is 'processed'
assert_eq!(status_code, 200);

View File

@ -1,23 +1,18 @@
use std::convert::Into;
use std::sync::Mutex;
use assert_json_diff::assert_json_eq;
use once_cell::sync::Lazy;
use serde_json::json;
mod common;
static GLOBAL_SERVER: Lazy<Mutex<common::Server>> = Lazy::new(|| {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
Mutex::new(server)
});
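
With the shared GLOBAL_SERVER static removed, every search test below now builds and populates its own server. A tiny hedged helper capturing that repeated setup (the name is illustrative, not something this PR adds) would be:

// Hypothetical per-test setup helper, equivalent to the two lines that now
// open each search test.
async fn movies_server() -> common::Server {
    let mut server = common::Server::with_uid("movies");
    server.populate_movies().await;
    server
}
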
// Search
// q: Captain
// limit: 3
#[test]
fn search_with_limit() {
#[actix_rt::test]
async fn search_with_limit() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=3";
let expected = json!([
@ -74,7 +69,7 @@ fn search_with_limit() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -82,8 +77,11 @@ fn search_with_limit() {
// q: Captain
// limit: 3
// offset: 1
#[test]
fn search_with_offset() {
#[actix_rt::test]
async fn search_with_offset() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=3&offset=1";
let expected = json!([
@ -141,7 +139,7 @@ fn search_with_offset() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -149,8 +147,11 @@ fn search_with_offset() {
// q: Captain
// limit: 1
// attributeToHighlight: *
#[test]
fn search_with_attribute_to_highlight_wildcard() {
#[actix_rt::test]
async fn search_with_attribute_to_highlight_wildcard() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToHighlight=*";
let expected = json!([
@ -190,7 +191,7 @@ fn search_with_attribute_to_highlight_wildcard() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -198,8 +199,11 @@ fn search_with_attribute_to_highlight_wildcard() {
// q: Captain
// limit: 1
// attributeToHighlight: title
#[test]
fn search_with_attribute_to_highlight_1() {
#[actix_rt::test]
async fn search_with_attribute_to_highlight_1() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToHighlight=title";
let expected = json!([
@ -239,7 +243,7 @@ fn search_with_attribute_to_highlight_1() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -247,8 +251,11 @@ fn search_with_attribute_to_highlight_1() {
// q: Captain
// limit: 1
// attributeToHighlight: title,tagline
#[test]
fn search_with_attribute_to_highlight_title_tagline() {
#[actix_rt::test]
async fn search_with_attribute_to_highlight_title_tagline() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToHighlight=title,tagline";
let expected = json!([
@ -288,7 +295,7 @@ fn search_with_attribute_to_highlight_title_tagline() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -296,8 +303,11 @@ fn search_with_attribute_to_highlight_title_tagline() {
// q: Captain
// limit: 1
// attributeToHighlight: title,overview
#[test]
fn search_with_attribute_to_highlight_title_overview() {
#[actix_rt::test]
async fn search_with_attribute_to_highlight_title_overview() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToHighlight=title,overview";
let expected = json!([
@ -337,7 +347,7 @@ fn search_with_attribute_to_highlight_title_overview() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -345,8 +355,11 @@ fn search_with_attribute_to_highlight_title_overview() {
// q: Captain
// limit: 1
// matches: true
#[test]
fn search_with_matches() {
#[actix_rt::test]
async fn search_with_matches() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&matches=true";
let expected = json!([
@ -383,7 +396,7 @@ fn search_with_matches() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -392,8 +405,11 @@ fn search_with_matches() {
// limit: 1
// attributesToCrop: overview
// cropLength: 20
#[test]
fn search_witch_crop() {
#[actix_rt::test]
async fn search_witch_crop() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToCrop=overview&cropLength=20";
let expected = json!([
@ -433,7 +449,7 @@ fn search_witch_crop() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -441,8 +457,11 @@ fn search_witch_crop() {
// q: Captain
// limit: 1
// attributesToRetrieve: [title,tagline,overview,poster_path]
#[test]
fn search_with_attributes_to_retrieve() {
#[actix_rt::test]
async fn search_with_attributes_to_retrieve() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=title,tagline,overview,poster_path";
let expected = json!([
@ -454,7 +473,7 @@ fn search_with_attributes_to_retrieve() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -462,8 +481,11 @@ fn search_with_attributes_to_retrieve() {
// q: Captain
// limit: 1
// attributesToRetrieve: *
#[test]
fn search_with_attributes_to_retrieve_wildcard() {
#[actix_rt::test]
async fn search_with_attributes_to_retrieve_wildcard() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=*";
let expected = json!([
@ -486,7 +508,7 @@ fn search_with_attributes_to_retrieve_wildcard() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -494,8 +516,11 @@ fn search_with_attributes_to_retrieve_wildcard() {
// q: Captain
// limit: 3
// filters: director:Anthony%20Russo
#[test]
fn search_with_filter() {
#[actix_rt::test]
async fn search_with_filter() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&filters=director%20%3D%20%22Anthony%20Russo%22&limit=3";
let expected = json!([
{
@ -550,7 +575,7 @@ fn search_with_filter() {
"vote_count": 10497
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
let expected = json!([
@ -574,7 +599,7 @@ fn search_with_filter() {
// filters: title = "american pie 2"
let query = "q=american&filters=title%20%3D%20%22american%20pie%202%22";
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
let expected = json!([
@ -615,7 +640,7 @@ fn search_with_filter() {
]);
// limit: 3, director = "anthony russo" AND (title = "captain america: civil war" OR title = "Captain America: The Winter Soldier")
let query = "q=a&limit=3&filters=director%20%3D%20%22anthony%20russo%22%20AND%20%20(title%20%3D%20%22captain%20america%3A%20civil%20war%22%20OR%20title%20%3D%20%22Captain%20America%3A%20The%20Winter%20Soldier%22)";
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
let expected = json!([
@ -673,7 +698,7 @@ fn search_with_filter() {
]);
// director = "anthony russo" AND (title = "captain america: civil war" OR vote_average > 8.0)
let query = "q=a&limit=3&filters=director%20%3D%20%22anthony%20russo%22%20AND%20%20(title%20%3D%20%22captain%20america%3A%20civil%20war%22%20OR%20vote_average%20%3E%208.0)";
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
let expected = json!([
@ -730,12 +755,12 @@ fn search_with_filter() {
]);
// NOT director = "anthony russo" AND vote_average > 7.5
let query = "q=a&limit=3&filters=NOT%20director%20%3D%20%22anthony%20russo%22%20AND%20vote_average%20%3E%207.5";
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
let expected = json!([]);
let query = "q=a&filters=NOT%20director%20%3D%20%22anthony%20russo%22%20AND%20title%20%20%3D%20%22Avengers%3A%20Endgame%22";
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -744,8 +769,11 @@ fn search_with_filter() {
// limit: 1
// attributesToHighlight: [title,overview]
// matches: true
#[test]
fn search_with_attributes_to_highlight_and_matches() {
#[actix_rt::test]
async fn search_with_attributes_to_highlight_and_matches() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToHighlight=title,overview&matches=true";
let expected = json!( [
@ -799,7 +827,7 @@ fn search_with_attributes_to_highlight_and_matches() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -810,8 +838,11 @@ fn search_with_attributes_to_highlight_and_matches() {
// matches: true
// cropLength: 20
// attributesToCrop: overview
#[test]
fn search_with_attributes_to_highlight_and_matches_and_crop() {
#[actix_rt::test]
async fn search_with_attributes_to_highlight_and_matches_and_crop() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToCrop=overview&cropLength=20&attributesToHighlight=title,overview&matches=true";
let expected = json!([
@ -865,7 +896,7 @@ fn search_with_attributes_to_highlight_and_matches_and_crop() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -874,8 +905,11 @@ fn search_with_attributes_to_highlight_and_matches_and_crop() {
// limit: 1
// attributesToRetrieve: [title,producer,director]
// attributesToHighlight: [title]
#[test]
fn search_with_differents_attributes() {
#[actix_rt::test]
async fn search_with_differents_attributes() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=title,producer,director&attributesToHighlight=title";
let expected = json!([
@ -889,7 +923,7 @@ fn search_with_differents_attributes() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -899,8 +933,11 @@ fn search_with_differents_attributes() {
// attributesToRetrieve: [title,producer,director]
// attributesToCrop: [overview]
// cropLength: 10
#[test]
fn search_with_differents_attributes_2() {
#[actix_rt::test]
async fn search_with_differents_attributes_2() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=title,producer,director&attributesToCrop=overview&cropLength=10";
let expected = json!([
@ -914,7 +951,7 @@ fn search_with_differents_attributes_2() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -923,8 +960,11 @@ fn search_with_differents_attributes_2() {
// limit: 1
// attributesToRetrieve: [title,producer,director]
// attributesToCrop: [overview:10]
#[test]
fn search_with_differents_attributes_3() {
#[actix_rt::test]
async fn search_with_differents_attributes_3() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=title,producer,director&attributesToCrop=overview:10";
let expected = json!([
@ -938,7 +978,7 @@ fn search_with_differents_attributes_3() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -947,8 +987,11 @@ fn search_with_differents_attributes_3() {
// limit: 1
// attributesToRetrieve: [title,producer,director]
// attributesToCrop: [overview:10,title:0]
#[test]
fn search_with_differents_attributes_4() {
#[actix_rt::test]
async fn search_with_differents_attributes_4() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=title,producer,director&attributesToCrop=overview:10,title:0";
let expected = json!([
@ -963,7 +1006,7 @@ fn search_with_differents_attributes_4() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -972,8 +1015,11 @@ fn search_with_differents_attributes_4() {
// limit: 1
// attributesToRetrieve: [title,producer,director]
// attributesToCrop: [*,overview:10]
#[test]
fn search_with_differents_attributes_5() {
#[actix_rt::test]
async fn search_with_differents_attributes_5() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=title,producer,director&attributesToCrop=*,overview:10";
let expected = json!([
@ -990,7 +1036,7 @@ fn search_with_differents_attributes_5() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -1000,8 +1046,11 @@ fn search_with_differents_attributes_5() {
// attributesToRetrieve: [title,producer,director]
// attributesToCrop: [*,overview:10]
// attributesToHighlight: [title]
#[test]
fn search_with_differents_attributes_6() {
#[actix_rt::test]
async fn search_with_differents_attributes_6() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=title,producer,director&attributesToCrop=*,overview:10&attributesToHighlight=title";
let expected = json!([
@ -1018,7 +1067,7 @@ fn search_with_differents_attributes_6() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -1028,8 +1077,11 @@ fn search_with_differents_attributes_6() {
// attributesToRetrieve: [title,producer,director]
// attributesToCrop: [*,overview:10]
// attributesToHighlight: [*]
#[test]
fn search_with_differents_attributes_7() {
#[actix_rt::test]
async fn search_with_differents_attributes_7() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=title,producer,director&attributesToCrop=*,overview:10&attributesToHighlight=*";
let expected = json!([
@ -1046,7 +1098,7 @@ fn search_with_differents_attributes_7() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
@ -1056,8 +1108,11 @@ fn search_with_differents_attributes_7() {
// attributesToRetrieve: [title,producer,director]
// attributesToCrop: [*,overview:10]
// attributesToHighlight: [*,tagline]
#[test]
fn search_with_differents_attributes_8() {
#[actix_rt::test]
async fn search_with_differents_attributes_8() {
let mut server = common::Server::with_uid("movies");
server.populate_movies().await;
let query = "q=captain&limit=1&attributesToRetrieve=title,producer,director&attributesToCrop=*,overview:10&attributesToHighlight=*,tagline";
let expected = json!([
@ -1075,6 +1130,6 @@ fn search_with_differents_attributes_8() {
}
]);
let (response, _status_code) = GLOBAL_SERVER.lock().unwrap().search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expected, response["hits"].clone(), ordered: false);
}
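
The search_with_filter queries above hard-code percent-encoded filter expressions. As a hedged, std-only illustration (not code used by this PR), the readable filters map onto those encoded strings as follows; only the characters these queries need are escaped:

// Minimal percent-encoder covering just the characters used by the filter
// expressions in this file; a real client would use a proper URL encoder.
fn encode_filter(filter: &str) -> String {
    filter
        .replace('%', "%25")
        .replace(' ', "%20")
        .replace('"', "%22")
        .replace('=', "%3D")
        .replace('>', "%3E")
        .replace(':', "%3A")
}

// encode_filter(r#"director = "Anthony Russo""#)
// => "director%20%3D%20%22Anthony%20Russo%22"
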

View File

@ -4,10 +4,10 @@ use std::convert::Into;
mod common;
#[test]
fn search_with_settings_basic() {
#[actix_rt::test]
async fn search_with_settings_basic() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
let config = json!({
"rankingRules": [
@ -49,7 +49,7 @@ fn search_with_settings_basic() {
"acceptNewFields": false,
});
server.update_all_settings(config);
server.update_all_settings(config).await;
let query = "q=the%20avangers&limit=3";
let expect = json!([
@ -106,14 +106,14 @@ fn search_with_settings_basic() {
}
]);
let (response, _status_code) = server.search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false);
}
#[test]
fn search_with_settings_stop_words() {
#[actix_rt::test]
async fn search_with_settings_stop_words() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
let config = json!({
"rankingRules": [
@ -155,7 +155,7 @@ fn search_with_settings_stop_words() {
"acceptNewFields": false,
});
server.update_all_settings(config);
server.update_all_settings(config).await;
let query = "q=the%20avangers&limit=3";
let expect = json!([
@ -212,14 +212,14 @@ fn search_with_settings_stop_words() {
}
]);
let (response, _status_code) = server.search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false);
}
#[test]
fn search_with_settings_synonyms() {
#[actix_rt::test]
async fn search_with_settings_synonyms() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
let config = json!({
"rankingRules": [
@ -266,7 +266,7 @@ fn search_with_settings_synonyms() {
"acceptNewFields": false,
});
server.update_all_settings(config);
server.update_all_settings(config).await;
let query = "q=avangers&limit=3";
let expect = json!([
@ -323,14 +323,14 @@ fn search_with_settings_synonyms() {
}
]);
let (response, _status_code) = server.search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false);
}
#[test]
fn search_with_settings_ranking_rules() {
#[actix_rt::test]
async fn search_with_settings_ranking_rules() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
let config = json!({
"rankingRules": [
@ -372,7 +372,7 @@ fn search_with_settings_ranking_rules() {
"acceptNewFields": false,
});
server.update_all_settings(config);
server.update_all_settings(config).await;
let query = "q=avangers&limit=3";
let expect = json!([
@ -429,14 +429,14 @@ fn search_with_settings_ranking_rules() {
}
]);
let (response, _status_code) = server.search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false);
}
#[test]
fn search_with_settings_searchable_attributes() {
#[actix_rt::test]
async fn search_with_settings_searchable_attributes() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
let config = json!({
"rankingRules": [
@ -477,7 +477,7 @@ fn search_with_settings_searchable_attributes() {
"acceptNewFields": false,
});
server.update_all_settings(config);
server.update_all_settings(config).await;
let query = "q=avangers&limit=3";
let expect = json!([
@ -534,14 +534,14 @@ fn search_with_settings_searchable_attributes() {
}
]);
let (response, _status_code) = server.search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false);
}
#[test]
fn search_with_settings_displayed_attributes() {
#[actix_rt::test]
async fn search_with_settings_displayed_attributes() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
let config = json!({
"rankingRules": [
@ -577,7 +577,7 @@ fn search_with_settings_displayed_attributes() {
"acceptNewFields": false,
});
server.update_all_settings(config);
server.update_all_settings(config).await;
let query = "q=avangers&limit=3";
let expect = json!([
@ -604,14 +604,14 @@ fn search_with_settings_displayed_attributes() {
}
]);
let (response, _status_code) = server.search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false);
}
#[test]
fn search_with_settings_searchable_attributes_2() {
#[actix_rt::test]
async fn search_with_settings_searchable_attributes_2() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
let config = json!({
"rankingRules": [
@ -647,7 +647,7 @@ fn search_with_settings_searchable_attributes_2() {
"acceptNewFields": false,
});
server.update_all_settings(config);
server.update_all_settings(config).await;
let query = "q=avangers&limit=3";
let expect = json!([
@ -674,6 +674,6 @@ fn search_with_settings_searchable_attributes_2() {
}
]);
let (response, _status_code) = server.search(query);
let (response, _status_code) = server.search(query).await;
assert_json_eq!(expect, response["hits"].clone(), ordered: false);
}

View File

@ -4,10 +4,10 @@ use std::convert::Into;
mod common;
#[test]
fn write_all_and_delete() {
#[actix_rt::test]
async fn write_all_and_delete() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
// 2 - Send the settings
@ -51,21 +51,21 @@ fn write_all_and_delete() {
"acceptNewFields": false,
});
server.update_all_settings(body.clone());
server.update_all_settings(body.clone()).await;
// 3 - Get all settings and compare to the previous one
let (response, _status_code) = server.get_all_settings();
let (response, _status_code) = server.get_all_settings().await;
assert_json_eq!(body, response, ordered: false);
// 4 - Delete all settings
server.delete_all_settings();
server.delete_all_settings().await;
// 5 - Get all settings and check if they are set to default values
let (response, _status_code) = server.get_all_settings();
let (response, _status_code) = server.get_all_settings().await;
let expect = json!({
"rankingRules": [
@ -125,10 +125,10 @@ fn write_all_and_delete() {
assert_json_eq!(expect, response, ordered: false);
}
#[test]
fn write_all_and_update() {
#[actix_rt::test]
async fn write_all_and_update() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
// 2 - Send the settings
@ -172,11 +172,11 @@ fn write_all_and_update() {
"acceptNewFields": false,
});
server.update_all_settings(body.clone());
server.update_all_settings(body.clone()).await;
// 3 - Get all settings and compare to the previous one
let (response, _status_code) = server.get_all_settings();
let (response, _status_code) = server.get_all_settings().await;
assert_json_eq!(body, response, ordered: false);
@ -213,11 +213,11 @@ fn write_all_and_update() {
"acceptNewFields": false,
});
server.update_all_settings(body);
server.update_all_settings(body).await;
// 5 - Get all settings and check if the content is the same of (4)
let (response, _status_code) = server.get_all_settings();
let (response, _status_code) = server.get_all_settings().await;
let expected = json!({
"rankingRules": [
@ -253,13 +253,13 @@ fn write_all_and_update() {
assert_json_eq!(expected, response, ordered: false);
}
#[test]
fn test_default_settings() {
#[actix_rt::test]
async fn test_default_settings() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
});
server.create_index(body);
server.create_index(body).await;
// 1 - Get all settings and compare to the previous one
@ -280,19 +280,19 @@ fn test_default_settings() {
"acceptNewFields": true,
});
let (response, _status_code) = server.get_all_settings();
let (response, _status_code) = server.get_all_settings().await;
assert_json_eq!(body, response, ordered: false);
}
#[test]
fn test_default_settings_2() {
#[actix_rt::test]
async fn test_default_settings_2() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body);
server.create_index(body).await;
// 1 - Get all settings and compare to the previous one
@ -317,19 +317,19 @@ fn test_default_settings_2() {
"acceptNewFields": true,
});
let (response, _status_code) = server.get_all_settings();
let (response, _status_code) = server.get_all_settings().await;
assert_json_eq!(body, response, ordered: false);
}
// Test issue https://github.com/meilisearch/MeiliSearch/issues/516
#[test]
fn write_setting_and_update_partial() {
#[actix_rt::test]
async fn write_setting_and_update_partial() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
});
server.create_index(body);
server.create_index(body).await;
// 2 - Send the settings
@ -352,7 +352,7 @@ fn write_setting_and_update_partial() {
]
});
server.update_all_settings(body.clone());
server.update_all_settings(body.clone()).await;
// 2 - Send the settings
@ -380,7 +380,7 @@ fn write_setting_and_update_partial() {
"acceptNewFields": false,
});
server.update_all_settings(body.clone());
server.update_all_settings(body.clone()).await;
// 2 - Send the settings
@ -424,7 +424,7 @@ fn write_setting_and_update_partial() {
"acceptNewFields": false,
});
let (response, _status_code) = server.get_all_settings();
let (response, _status_code) = server.get_all_settings().await;
assert_json_eq!(expected, response, ordered: false);
}
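
Every test in this file follows the same write / read / delete cycle through `update_all_settings`, `get_all_settings` and `delete_all_settings`. A condensed, hypothetical variant of that cycle in its async form (placeholder `_sketch` name, a deliberately small settings body, and the file's existing imports for `json!` and `assert_json_eq!` assumed):

#[actix_rt::test]
async fn settings_write_read_delete_sketch() {
    let mut server = common::Server::with_uid("movies");
    server.create_index(json!({ "uid": "movies" })).await;

    // Write a deliberately small settings body and read it back.
    let body = json!({ "stopWords": ["the", "a"] });
    server.update_all_settings(body.clone()).await;
    let (response, _status_code) = server.get_all_settings().await;
    assert_json_eq!(body["stopWords"].clone(), response["stopWords"].clone(), ordered: false);

    // Deleting every setting restores the defaults, e.g. acceptNewFields goes back to true.
    server.delete_all_settings().await;
    let (response, _status_code) = server.get_all_settings().await;
    assert_eq!(response["acceptNewFields"], json!(true));
}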

View File

@ -3,14 +3,14 @@ use serde_json::json;
mod common;
#[test]
fn index_new_fields_default() {
#[actix_rt::test]
async fn index_new_fields_default() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body);
server.create_index(body).await;
// 1 - Add a document
@ -19,7 +19,7 @@ fn index_new_fields_default() {
"title": "I'm a legend",
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 2 - Get the complete document
@ -28,7 +28,7 @@ fn index_new_fields_default() {
"title": "I'm a legend",
});
let (response, status_code) = server.get_document(1);
let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
@ -40,7 +40,7 @@ fn index_new_fields_default() {
"description": "A bad copy of the original movie I'm a lengend"
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 4 - Get the complete document
@ -50,23 +50,23 @@ fn index_new_fields_default() {
"description": "A bad copy of the original movie I'm a lengend"
});
let (response, status_code) = server.get_document(2);
let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}
#[test]
fn index_new_fields_true() {
#[actix_rt::test]
async fn index_new_fields_true() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body);
server.create_index(body).await;
// 1 - Set indexNewFields = true
server.update_accept_new_fields(json!(true));
server.update_accept_new_fields(json!(true)).await;
// 2 - Add a document
@ -75,7 +75,7 @@ fn index_new_fields_true() {
"title": "I'm a legend",
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 3 - Get the complete document
@ -84,7 +84,7 @@ fn index_new_fields_true() {
"title": "I'm a legend",
});
let (response, status_code) = server.get_document(1);
let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
@ -96,7 +96,7 @@ fn index_new_fields_true() {
"description": "A bad copy of the original movie I'm a lengend"
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 5 - Get the complete document
@ -106,23 +106,23 @@ fn index_new_fields_true() {
"description": "A bad copy of the original movie I'm a lengend"
});
let (response, status_code) = server.get_document(2);
let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}
#[test]
fn index_new_fields_false() {
#[actix_rt::test]
async fn index_new_fields_false() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body);
server.create_index(body).await;
// 1 - Set indexNewFields = false
server.update_accept_new_fields(json!(false));
server.update_accept_new_fields(json!(false)).await;
// 2 - Add a document
@ -131,7 +131,7 @@ fn index_new_fields_false() {
"title": "I'm a legend",
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 3 - Get the complete document
@ -139,7 +139,7 @@ fn index_new_fields_false() {
"id": 1,
});
let (response, status_code) = server.get_document(1);
let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
@ -151,7 +151,7 @@ fn index_new_fields_false() {
"description": "A bad copy of the original movie I'm a lengend"
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 5 - Get the complete document
@ -159,23 +159,23 @@ fn index_new_fields_false() {
"id": 2,
});
let (response, status_code) = server.get_document(2);
let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}
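
For reference, this is the first half of `index_new_fields_false` stitched back together from the hunks above. The document id is inferred from the expected body shown in the context lines, and the function is renamed with a `_sketch` suffix to mark it as a condensed reconstruction rather than the literal source:

#[actix_rt::test]
async fn index_new_fields_false_sketch() {
    let mut server = common::Server::with_uid("movies");
    server
        .create_index(json!({ "uid": "movies", "primaryKey": "id" }))
        .await;

    // 1 - Disable acceptance of new fields.
    server.update_accept_new_fields(json!(false)).await;

    // 2 - Add a document carrying a field that is not yet known ("title").
    server
        .add_or_replace_multiple_documents(json!([{ "id": 1, "title": "I'm a legend" }]))
        .await;

    // 3 - Only the primary key survives, because "title" was never accepted.
    let (response, status_code) = server.get_document(1).await;
    assert_eq!(status_code, 200);
    assert_json_eq!(response, json!({ "id": 1 }));
}
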
#[test]
fn index_new_fields_true_then_false() {
#[actix_rt::test]
async fn index_new_fields_true_then_false() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body);
server.create_index(body).await;
// 1 - Set indexNewFields = true
server.update_accept_new_fields(json!(true));
server.update_accept_new_fields(json!(true)).await;
// 2 - Add a document
@ -184,7 +184,7 @@ fn index_new_fields_true_then_false() {
"title": "I'm a legend",
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 3 - Get the complete document
@ -193,13 +193,13 @@ fn index_new_fields_true_then_false() {
"title": "I'm a legend",
});
let (response, status_code) = server.get_document(1);
let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
// 4 - Set indexNewFields = false
server.update_accept_new_fields(json!(false));
server.update_accept_new_fields(json!(false)).await;
// 5 - Add a document with more fields
@ -209,7 +209,7 @@ fn index_new_fields_true_then_false() {
"description": "A bad copy of the original movie I'm a lengend"
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 6 - Get the complete document
@ -218,23 +218,23 @@ fn index_new_fields_true_then_false() {
"title": "I'm not a legend",
});
let (response, status_code) = server.get_document(2);
let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}
#[test]
fn index_new_fields_false_then_true() {
#[actix_rt::test]
async fn index_new_fields_false_then_true() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body);
server.create_index(body).await;
// 1 - Set indexNewFields = false
server.update_accept_new_fields(json!(false));
server.update_accept_new_fields(json!(false)).await;
// 2 - Add a document
@ -243,7 +243,7 @@ fn index_new_fields_false_then_true() {
"title": "I'm a legend",
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 3 - Get the complete document
@ -251,13 +251,13 @@ fn index_new_fields_false_then_true() {
"id": 1,
});
let (response, status_code) = server.get_document(1);
let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
// 4 - Set indexNewFields = true
server.update_accept_new_fields(json!(true));
server.update_accept_new_fields(json!(true)).await;
// 5 - Add a document with more fields
@ -267,7 +267,7 @@ fn index_new_fields_false_then_true() {
"description": "A bad copy of the original movie I'm a lengend"
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 6 - Get the complete document
@ -275,7 +275,7 @@ fn index_new_fields_false_then_true() {
"id": 1,
});
let (response, status_code) = server.get_document(1);
let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
@ -284,15 +284,14 @@ fn index_new_fields_false_then_true() {
"description": "A bad copy of the original movie I'm a lengend"
});
let (response, status_code) = server.get_document(2);
let (response, status_code) = server.get_document(2).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected);
}
// Fix issue https://github.com/meilisearch/MeiliSearch/issues/518
#[test]
fn accept_new_fields_does_not_take_into_account_the_primary_key () {
#[actix_rt::test]
async fn accept_new_fields_does_not_take_into_account_the_primary_key() {
let mut server = common::Server::with_uid("movies");
// 1 - Create an index with no primary-key
@ -300,7 +299,7 @@ fn accept_new_fields_does_not_take_into_account_the_primary_key () {
let body = json!({
"uid": "movies",
});
let (response, status_code) = server.create_index(body);
let (response, status_code) = server.create_index(body).await;
assert_eq!(status_code, 201);
assert_eq!(response["primaryKey"], json!(null));
@ -312,7 +311,7 @@ fn accept_new_fields_does_not_take_into_account_the_primary_key () {
"acceptNewFields": false,
});
server.update_all_settings(body);
server.update_all_settings(body).await;
// 4 - Add a document
@ -322,11 +321,11 @@ fn accept_new_fields_does_not_take_into_account_the_primary_key () {
"comment": "comment test"
}]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 5 - Get settings, they should not have changed
let (response, _status_code) = server.get_all_settings();
let (response, _status_code) = server.get_all_settings().await;
let expected = json!({
"rankingRules": [

View File

@ -3,10 +3,10 @@ use serde_json::json;
mod common;
#[test]
fn write_all_and_delete() {
#[actix_rt::test]
async fn write_all_and_delete() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
// 2 - Send the settings
@ -21,21 +21,21 @@ fn write_all_and_delete() {
"desc(rank)",
]);
server.update_ranking_rules(body.clone());
server.update_ranking_rules(body.clone()).await;
// 3 - Get all settings and compare to the previous one
let (response, _status_code) = server.get_ranking_rules();
let (response, _status_code) = server.get_ranking_rules().await;
assert_json_eq!(body, response, ordered: false);
// 4 - Delete all settings
server.delete_ranking_rules();
server.delete_ranking_rules().await;
// 5 - Get all settings and check if they are empty
let (response, _status_code) = server.get_ranking_rules();
let (response, _status_code) = server.get_ranking_rules().await;
let expected = json!([
"typo",
@ -49,10 +49,10 @@ fn write_all_and_delete() {
assert_json_eq!(expected, response, ordered: false);
}
#[test]
fn write_all_and_update() {
#[actix_rt::test]
async fn write_all_and_update() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
// 2 - Send the settings
@ -67,11 +67,11 @@ fn write_all_and_update() {
"desc(rank)",
]);
server.update_ranking_rules(body.clone());
server.update_ranking_rules(body.clone()).await;
// 3 - Get all settings and compare to the previous one
let (response, _status_code) = server.get_ranking_rules();
let (response, _status_code) = server.get_ranking_rules().await;
assert_json_eq!(body, response, ordered: false);
@ -87,11 +87,11 @@ fn write_all_and_update() {
"desc(release_date)",
]);
server.update_ranking_rules(body);
server.update_ranking_rules(body).await;
// 5 - Get all settings and check if the content is the same of (4)
let (response, _status_code) = server.get_ranking_rules();
let (response, _status_code) = server.get_ranking_rules().await;
let expected = json!([
"typo",
@ -106,54 +106,51 @@ fn write_all_and_update() {
assert_json_eq!(expected, response, ordered: false);
}
#[test]
fn send_undefined_rule() {
#[actix_rt::test]
async fn send_undefined_rule() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body);
server.create_index(body).await;
let body = json!(["typos",]);
let (_response, status_code) = server.update_ranking_rules_sync(body);
let (_response, status_code) = server.update_ranking_rules_sync(body).await;
assert_eq!(status_code, 400);
}
#[test]
fn send_malformed_custom_rule() {
#[actix_rt::test]
async fn send_malformed_custom_rule() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body);
server.create_index(body).await;
let body = json!(["dsc(truc)",]);
let (_response, status_code) = server.update_ranking_rules_sync(body);
let (_response, status_code) = server.update_ranking_rules_sync(body).await;
assert_eq!(status_code, 400);
}
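
Both rejection tests above rely on `update_ranking_rules_sync`, which, judging by these hunks, returns the raw response and status code so the 400 can be asserted directly. Combined into one hypothetical sketch (placeholder `_sketch` name, imports as in the rest of the file):

#[actix_rt::test]
async fn invalid_ranking_rules_are_rejected_sketch() {
    let mut server = common::Server::with_uid("movies");
    server
        .create_index(json!({ "uid": "movies", "primaryKey": "id" }))
        .await;

    // An unknown built-in rule name ("typos" instead of "typo") is refused...
    let (_response, status_code) = server
        .update_ranking_rules_sync(json!(["typos"]))
        .await;
    assert_eq!(status_code, 400);

    // ...and so is a malformed custom rule ("dsc(...)" instead of "desc(...)").
    let (_response, status_code) = server
        .update_ranking_rules_sync(json!(["dsc(rank)"]))
        .await;
    assert_eq!(status_code, 400);
}
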
// Test issue https://github.com/meilisearch/MeiliSearch/issues/521
#[test]
fn write_custom_ranking_and_index_documents() {
#[actix_rt::test]
async fn write_custom_ranking_and_index_documents() {
let mut server = common::Server::with_uid("movies");
let body = json!({
"uid": "movies",
"primaryKey": "id",
});
server.create_index(body);
server.create_index(body).await;
// 1 - Add ranking rules with one custom ranking on a string
let body = json!([
"asc(title)",
"typo"
]);
let body = json!(["asc(title)", "typo"]);
server.update_ranking_rules(body);
server.update_ranking_rules(body).await;
// 2 - Add documents
@ -170,7 +167,7 @@ fn write_custom_ranking_and_index_documents() {
}
]);
server.add_or_replace_multiple_documents(body);
server.add_or_replace_multiple_documents(body).await;
// 3 - Get the first document and compare
@ -180,9 +177,8 @@ fn write_custom_ranking_and_index_documents() {
"author": "Exupéry"
});
let (response, status_code) = server.get_document(1);
let (response, status_code) = server.get_document(1).await;
assert_eq!(status_code, 200);
assert_json_eq!(response, expected, ordered: false);
}
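
The regression test for issue #521 is split across several elided hunks; assembled, it sets a custom string ranking rule before indexing and then reads a document back. In the sketch below the document bodies are placeholders (only the "author": "Exupéry" value comes from the expected document in the hunk above), and the function carries a `_sketch` suffix to mark it as hypothetical:

#[actix_rt::test]
async fn custom_string_ranking_then_index_sketch() {
    let mut server = common::Server::with_uid("movies");
    server
        .create_index(json!({ "uid": "movies", "primaryKey": "id" }))
        .await;

    // 1 - A custom ranking rule on a string attribute, plus the built-in "typo" rule.
    server.update_ranking_rules(json!(["asc(title)", "typo"])).await;

    // 2 - Index documents (placeholder titles).
    server
        .add_or_replace_multiple_documents(json!([
            { "id": 1, "title": "Le Petit Prince", "author": "Exupéry" },
            { "id": 2, "title": "Le Comte de Monte-Cristo", "author": "Dumas" }
        ]))
        .await;

    // 3 - The first document can still be fetched and is intact.
    let (response, status_code) = server.get_document(1).await;
    assert_eq!(status_code, 200);
    assert_json_eq!(
        response,
        json!({ "id": 1, "title": "Le Petit Prince", "author": "Exupéry" }),
        ordered: false
    );
}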

View File

@ -3,54 +3,54 @@ use serde_json::json;
mod common;
#[test]
fn update_stop_words() {
#[actix_rt::test]
async fn update_stop_words() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
// 1 - Get stop words
let (response, _status_code) = server.get_stop_words();
let (response, _status_code) = server.get_stop_words().await;
assert_eq!(response.as_array().unwrap().is_empty(), true);
// 2 - Update stop words
let body = json!(["the", "a"]);
server.update_stop_words(body.clone());
server.update_stop_words(body.clone()).await;
// 3 - Get all stop words and compare to the previous one
let (response, _status_code) = server.get_stop_words();
let (response, _status_code) = server.get_stop_words().await;
assert_json_eq!(body, response, ordered: false);
// 4 - Delete all stop words
server.delete_stop_words();
server.delete_stop_words().await;
// 5 - Get all stop words and check if they are empty
let (response, _status_code) = server.get_stop_words();
let (response, _status_code) = server.get_stop_words().await;
assert_eq!(response.as_array().unwrap().is_empty(), true);
}
#[test]
fn add_documents_and_stop_words() {
#[actix_rt::test]
async fn add_documents_and_stop_words() {
let mut server = common::Server::with_uid("movies");
server.populate_movies();
server.populate_movies().await;
// 2 - Update stop words
let body = json!(["the", "of"]);
server.update_stop_words(body.clone());
server.update_stop_words(body.clone()).await;
// 3 - Search for a document with stop words
let (response, _status_code) = server.search("q=the%20mask");
let (response, _status_code) = server.search("q=the%20mask").await;
assert!(!response["hits"].as_array().unwrap().is_empty());
// 4 - Search for documents with *only* stop words
let (response, _status_code) = server.search("q=the%20of");
let (response, _status_code) = server.search("q=the%20of").await;
assert!(response["hits"].as_array().unwrap().is_empty());
// 5 - Delete all stop words