// MeiliSearch/meilisearch-http/src/routes/search.rs
use std::collections::HashMap;
use std::collections::HashSet;
use std::time::Duration;

use log::warn;
use meilisearch_core::Index;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::{Deserialize, Serialize};
use tide::{Request, Response};

use crate::error::{ResponseError, SResult};
use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::Data;
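
/// Query-string parameters accepted by the search route. Field names arrive
/// in camelCase (e.g. `attributesToRetrieve`, `cropLength`) per the serde
/// attribute below, and unknown parameters are rejected.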
#[derive(Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SearchQuery {
    q: String,
    offset: Option<usize>,
    limit: Option<usize>,
    attributes_to_retrieve: Option<String>,
    attributes_to_crop: Option<String>,
    crop_length: Option<usize>,
    attributes_to_highlight: Option<String>,
    filters: Option<String>,
    timeout_ms: Option<u64>,
    matches: Option<bool>,
}
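
/// Single-index search driven by URL query parameters. Assuming this handler
/// is mounted at `GET /indexes/:index/search` (the route table lives outside
/// this file), an example request looks like:
///
/// `GET /indexes/movies/search?q=batman&limit=5&attributesToHighlight=*`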
pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
    ctx.is_allowed(Public)?;

    let index = ctx.index()?;
    let db = &ctx.state().db;
    let reader = db.main_read_txn()?;

    let schema = index
        .main
        .schema(&reader)?
        .ok_or(ResponseError::open_index("No Schema found"))?;

    let query: SearchQuery = ctx
        .query()
        .map_err(|_| ResponseError::bad_request("invalid query parameter"))?;

    let mut search_builder = index.new_search(query.q.clone());

    if let Some(offset) = query.offset {
        search_builder.offset(offset);
    }
    if let Some(limit) = query.limit {
        search_builder.limit(limit);
    }

    // `restricted_attributes` is the set a `*` wildcard expands to below:
    // the explicitly requested attributes (filtered against the schema's
    // displayed attributes), or every displayed attribute by default.
    let available_attributes = schema.displayed_name();
    let mut restricted_attributes: HashSet<&str>;
    match &query.attributes_to_retrieve {
        Some(attributes_to_retrieve) => {
            restricted_attributes = attributes_to_retrieve.split(',').collect();
            restricted_attributes.retain(|attr| available_attributes.contains(attr));
        }
        None => {
            restricted_attributes = available_attributes.clone();
        }
    }
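
    // Hedged fix: the original parsed `attributesToRetrieve` but never handed
    // it to the builder; forwarding it here is the assumed intent.
    if query.attributes_to_retrieve.is_some() {
        search_builder.attributes_to_retrieve(
            restricted_attributes.iter().map(|attr| attr.to_string()).collect(),
        );
    }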

    if let Some(attributes_to_crop) = query.attributes_to_crop {
        // `attributesToCrop` is a comma-separated list of `attribute:length`
        // pairs; a bare attribute falls back to `cropLength` (default 200),
        // and `*` expands to every restricted attribute.
        let default_length = query.crop_length.unwrap_or(200);
        let mut final_attributes: HashMap<String, usize> = HashMap::new();

        for attribute in attributes_to_crop.split(',') {
            let mut attribute = attribute.split(':');
            let attr = attribute.next();
            let length = attribute
                .next()
                .and_then(|s| s.parse().ok())
                .unwrap_or(default_length);
            match attr {
                Some("*") => {
                    for attr in &restricted_attributes {
                        final_attributes.insert(attr.to_string(), length);
                    }
                }
                Some(attr) => {
                    if available_attributes.contains(attr) {
                        final_attributes.insert(attr.to_string(), length);
                    } else {
                        warn!("The attribute {:?} given in the attributesToCrop parameter doesn't exist", attr);
                    }
                }
                None => (),
            }
        }

        search_builder.attributes_to_crop(final_attributes);
    }

    if let Some(attributes_to_highlight) = query.attributes_to_highlight {
        // Same expansion rules: `*` means every restricted attribute, and
        // unknown attributes are skipped with a warning.
        let mut final_attributes: HashSet<String> = HashSet::new();
        for attribute in attributes_to_highlight.split(',') {
            if attribute == "*" {
                for attr in &restricted_attributes {
                    final_attributes.insert(attr.to_string());
                }
            } else if available_attributes.contains(attribute) {
                final_attributes.insert(attribute.to_string());
            } else {
                warn!("The attribute {:?} given in the attributesToHighlight parameter doesn't exist", attribute);
            }
        }

        search_builder.attributes_to_highlight(final_attributes);
    }

    if let Some(filters) = query.filters {
        search_builder.filters(filters);
    }
    if let Some(timeout_ms) = query.timeout_ms {
        search_builder.timeout(Duration::from_millis(timeout_ms));
    }
    if let Some(matches) = query.matches {
        if matches {
            search_builder.get_matches();
        }
    }

    let response = match search_builder.search(&reader) {
        Ok(response) => response,
        Err(Error::Internal(message)) => return Err(ResponseError::Internal(message)),
        Err(others) => return Err(ResponseError::bad_request(others)),
    };

    Ok(tide::Response::new(200).body_json(&response).unwrap())
}
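
/// JSON body accepted by the multi-index search route. `indexes` may contain
/// `"*"` to target every index in the database.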
#[derive(Clone, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SearchMultiBody {
    indexes: HashSet<String>,
    query: String,
    offset: Option<usize>,
    limit: Option<usize>,
    attributes_to_retrieve: Option<HashSet<String>>,
    searchable_attributes: Option<HashSet<String>>,
    attributes_to_crop: Option<HashMap<String, usize>>,
    attributes_to_highlight: Option<HashSet<String>>,
    filters: Option<String>,
    timeout_ms: Option<u64>,
    matches: Option<bool>,
}
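
/// Response of a multi-index search: hits are grouped per index uid, and
/// `processingTimeMs` reports the slowest index (see the merge loop below).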
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
struct SearchMultiBodyResponse {
    hits: HashMap<String, Vec<SearchHit>>,
    offset: usize,
    hits_per_page: usize,
    processing_time_ms: usize,
    query: String,
}
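
/// Runs one query against several indexes in parallel and merges the results
/// into a single response.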
pub async fn search_multi_index(mut ctx: Request<Data>) -> SResult<Response> {
    ctx.is_allowed(Public)?;

    let body = ctx
        .body_json::<SearchMultiBody>()
        .await
        .map_err(ResponseError::bad_request)?;

    // A literal "*" in `indexes` means "search every index".
    let mut index_list = body.clone().indexes;
    if index_list.contains("*") {
        index_list = ctx.state().db.indexes_uids().into_iter().collect();
    }

    // `offset` and `limit` are only honored when both are provided; otherwise
    // the defaults (offset 0, 20 hits per page) apply.
    let (offset, count) = match (body.offset, body.limit) {
        (Some(offset), Some(limit)) => (offset, limit),
        _ => (0, 20),
    };

    let db = &ctx.state().db;
    let par_body = body.clone();
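
    // Fan out one search per index with rayon; each worker opens the index,
    // configures its own builder, and runs in its own read transaction.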
    let responses_per_index: Vec<SResult<_>> = index_list
        .into_par_iter()
        .map(move |index_uid| {
            let index: Index = db
                .open_index(&index_uid)
                .ok_or(ResponseError::index_not_found(&index_uid))?;

            let mut search_builder = index.new_search(par_body.query.clone());
            search_builder.offset(offset);
            search_builder.limit(count);

            if let Some(attributes_to_retrieve) = par_body.attributes_to_retrieve.clone() {
                search_builder.attributes_to_retrieve(attributes_to_retrieve);
            }
            if let Some(attributes_to_crop) = par_body.attributes_to_crop.clone() {
                search_builder.attributes_to_crop(attributes_to_crop);
            }
            if let Some(attributes_to_highlight) = par_body.attributes_to_highlight.clone() {
                search_builder.attributes_to_highlight(attributes_to_highlight);
            }
            if let Some(filters) = par_body.filters.clone() {
                search_builder.filters(filters);
            }
            if let Some(timeout_ms) = par_body.timeout_ms {
                search_builder.timeout(Duration::from_millis(timeout_ms));
            }
            if let Some(matches) = par_body.matches {
                if matches {
                    search_builder.get_matches();
                }
            }

            let reader = db.main_read_txn()?;
            let response = search_builder.search(&reader)?;

            Ok((index_uid, response))
        })
        .collect();
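
    // Merge per-index results: indexes whose search failed are silently
    // skipped, and the reported processing time is the maximum across indexes.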
    let mut hits_map = HashMap::new();
    let mut max_query_time = 0;

    for response in responses_per_index {
        if let Ok((index_uid, response)) = response {
            if response.processing_time_ms > max_query_time {
                max_query_time = response.processing_time_ms;
            }
            hits_map.insert(index_uid, response.hits);
        }
    }

    let response = SearchMultiBodyResponse {
        hits: hits_map,
        offset,
        hits_per_page: count,
        processing_time_ms: max_query_time,
        query: body.query,
    };

    Ok(tide::Response::new(200).body_json(&response).unwrap())
}