diff --git a/Cargo.lock b/Cargo.lock
index 9b2f212af..bc4b3c6a5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1527,7 +1527,6 @@ dependencies = [
  "once_cell",
  "pretty-bytes",
  "rand 0.7.3",
- "rayon",
  "serde",
  "serde_json",
  "serde_qs",
diff --git a/meilisearch-http/Cargo.toml b/meilisearch-http/Cargo.toml
index bd6a9aa5e..ca7f3b256 100644
--- a/meilisearch-http/Cargo.toml
+++ b/meilisearch-http/Cargo.toml
@@ -29,7 +29,6 @@ meilisearch-tokenizer = {path = "../meilisearch-tokenizer", version = "0.10.1"}
 mime = "0.3.16"
 pretty-bytes = "0.2.2"
 rand = "0.7.3"
-rayon = "1.3.0"
 serde = { version = "1.0.105", features = ["derive"] }
 serde_json = { version = "1.0.50", features = ["preserve_order"] }
 serde_qs = "0.5.2"
@@ -47,13 +46,14 @@ actix-http = "1"
 actix-files = "0.2.1"
 actix-cors = "0.2.0"
 actix-service = "1.0.5"
-tokio = { version = "0.2.18", features = ["macros", "time"] }
+tokio = { version = "0.2.18", features = ["macros"] }

 [dev-dependencies]
 http-service = "0.4.0"
 http-service-mock = "0.4.0"
 tempdir = "0.3.7"
 once_cell = "1.3.1"
+tokio = { version = "0.2.18", features = ["macros", "time"] }

 [dev-dependencies.assert-json-diff]
 git = "https://github.com/qdequele/assert-json-diff"
diff --git a/meilisearch-http/src/lib.rs b/meilisearch-http/src/lib.rs
index 13a239a6c..7ce65c5d9 100644
--- a/meilisearch-http/src/lib.rs
+++ b/meilisearch-http/src/lib.rs
@@ -33,7 +33,6 @@ pub fn create_app(
         .service(routes::load_html)
         .service(routes::load_css)
         .service(routes::search::search_with_url_query)
-        .service(routes::search::search_multi_index)
         .service(routes::document::get_document)
         .service(routes::document::get_all_documents)
         .wrap(helpers::Authentication::Private)
diff --git a/meilisearch-http/src/routes/search.rs b/meilisearch-http/src/routes/search.rs
index cd88694ff..92112c4a4 100644
--- a/meilisearch-http/src/routes/search.rs
+++ b/meilisearch-http/src/routes/search.rs
@@ -1,16 +1,14 @@
-use std::collections::HashMap;
-use std::collections::HashSet;
+use std::collections::{HashSet, HashMap};
 use std::time::Duration;

 use log::warn;
 use meilisearch_core::Index;
 use actix_web as aweb;
-use actix_web::{get, post, web};
-use rayon::iter::{IntoParallelIterator, ParallelIterator};
-use serde::{Deserialize, Serialize};
+use actix_web::{get, web};
+use serde::{Deserialize};

 use crate::error::ResponseError;
-use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit, SearchResult};
+use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchResult};
 use crate::routes::IndexParam;
 use crate::Data;

@@ -155,115 +153,3 @@ pub async fn search_with_url_query(

     Ok(web::Json(response))
 }
-
-#[derive(Clone, Deserialize)]
-#[serde(rename_all = "camelCase", deny_unknown_fields)]
-pub struct SearchMultiBody {
-    indexes: HashSet<String>,
-    query: String,
-    offset: Option<usize>,
-    limit: Option<usize>,
-    attributes_to_retrieve: Option<HashSet<String>>,
-    searchable_attributes: Option<HashSet<String>>,
-    attributes_to_crop: Option<HashSet<String>>,
-    attributes_to_highlight: Option<HashSet<String>>,
-    filters: Option<String>,
-    timeout_ms: Option<u64>,
-    matches: Option<bool>,
-}
-
-#[derive(Debug, Clone, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct SearchMultiBodyResponse {
-    hits: HashMap<String, Vec<SearchHit>>,
-    offset: usize,
-    hits_per_page: usize,
-    processing_time_ms: usize,
-    query: String,
-}
-
-#[post("/indexes/search")]
-pub async fn search_multi_index(
-    data: web::Data<Data>,
-    body: web::Json<SearchMultiBody>,
-) -> aweb::Result<web::Json<SearchMultiBodyResponse>> {
-    let mut index_list = body.clone().indexes;
-
-    for index in index_list.clone() {
-        if index == "*" {
-            index_list = data.db.indexes_uids().into_iter().collect();
-            break;
-        }
-    }
-
-    let mut offset = 0;
-    let mut count = 20;
-    let query = body.query.clone();
-
-    if let Some(body_offset) = body.offset {
-        if let Some(limit) = body.limit {
-            offset = body_offset;
-            count = limit;
-        }
-    }
-
-    let par_body = body.clone();
-    let responses_per_index: Vec<(String, SearchResult)> = index_list
-        .into_par_iter()
-        .map(move |index_uid| {
-            let index = data.db.open_index(&index_uid).unwrap();
-
-            let mut search_builder = index.new_search(par_body.query.clone());
-
-            search_builder.offset(offset);
-            search_builder.limit(count);
-
-            if let Some(attributes_to_retrieve) = par_body.attributes_to_retrieve.clone() {
-                search_builder.attributes_to_retrieve(attributes_to_retrieve);
-            }
-            if let Some(attributes_to_crop) = par_body.attributes_to_crop.clone() {
-                search_builder.attributes_to_crop(attributes_to_crop);
-            }
-            if let Some(attributes_to_highlight) = par_body.attributes_to_highlight.clone() {
-                search_builder.attributes_to_highlight(attributes_to_highlight);
-            }
-            if let Some(filters) = par_body.filters.clone() {
-                search_builder.filters(filters);
-            }
-            if let Some(timeout_ms) = par_body.timeout_ms {
-                search_builder.timeout(Duration::from_millis(timeout_ms));
-            }
-            if let Some(matches) = par_body.matches {
-                if matches {
-                    search_builder.get_matches();
-                }
-            }
-
-            let reader = data.db.main_read_txn().unwrap();
-            let response = search_builder.search(&reader).unwrap();
-
-            (index_uid, response)
-        })
-        .collect();
-
-    let mut hits_map = HashMap::new();
-
-    let mut max_query_time = 0;
-
-    for (index_uid, response) in responses_per_index {
-        if response.processing_time_ms > max_query_time {
-            max_query_time = response.processing_time_ms;
-        }
-        hits_map.insert(index_uid, response.hits);
-    }
-
-    let response = SearchMultiBodyResponse {
-        hits: hits_map,
-        offset,
-        hits_per_page: count,
-        processing_time_ms: max_query_time,
-        query,
-    };
-
-    Ok(web::Json(response))
-}