add search endpoint; warn on remaining unwrap usage

This commit is contained in:
Quentin de Quelen 2020-04-07 19:34:57 +02:00 committed by qdequele
parent 0aa16dd3b1
commit 73b5c87cbb
No known key found for this signature in database
GPG Key ID: B3F0A000EBF11745
5 changed files with 58 additions and 60 deletions

View File

@ -257,7 +257,7 @@ impl<'a> SearchBuilder<'a> {
for doc in docs { for doc in docs {
let mut document: IndexMap<String, Value> = self let mut document: IndexMap<String, Value> = self
.index .index
.document(reader, Some(&all_attributes), doc.id) .document(reader, Some(all_attributes.clone()), doc.id)
.map_err(|e| Error::RetrieveDocument(doc.id.0, e.to_string()))? .map_err(|e| Error::RetrieveDocument(doc.id.0, e.to_string()))?
.ok_or(Error::DocumentNotFound(doc.id.0))?; .ok_or(Error::DocumentNotFound(doc.id.0))?;

View File

@ -1,2 +1,2 @@
// pub mod meilisearch; pub mod meilisearch;
// pub mod tide; // pub mod tide;

View File

@ -45,7 +45,7 @@ pub async fn delete_document(
documents_deletion.delete_document_by_id(document_id); documents_deletion.delete_document_by_id(document_id);
let update_id = documents_deletion.finalize(&mut update_writer) let update_id = documents_deletion.finalize(&mut update_writer)
.map_err(|_| ResponseError::Internal(path.1.clone()))?; .map_err(|err| ResponseError::Internal(err.to_string()))?;
update_writer.commit() update_writer.commit()
.map_err(|_| ResponseError::CommitTransaction)?; .map_err(|_| ResponseError::CommitTransaction)?;

View File

@ -6,7 +6,7 @@ pub mod document;
pub mod health; pub mod health;
// pub mod index; // pub mod index;
pub mod key; pub mod key;
// pub mod search; pub mod search;
// pub mod setting; // pub mod setting;
// pub mod stats; // pub mod stats;
// pub mod stop_words; // pub mod stop_words;

View File

@ -6,12 +6,12 @@ use log::warn;
use meilisearch_core::Index; use meilisearch_core::Index;
use rayon::iter::{IntoParallelIterator, ParallelIterator}; use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tide::{Request, Response}; use actix_web::*;
use crate::error::{ResponseError, SResult}; use crate::error::ResponseError;
use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit}; use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit, SearchResult};
use crate::helpers::tide::RequestExt; // use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*; // use crate::helpers::tide::ACL::*;
use crate::Data; use crate::Data;
#[derive(Deserialize)] #[derive(Deserialize)]
@ -29,34 +29,37 @@ struct SearchQuery {
matches: Option<bool>, matches: Option<bool>,
} }
pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> { #[get("/indexes/{index_uid}/search")]
ctx.is_allowed(Public)?; pub async fn search_with_url_query(
data: web::Data<Data>,
path: web::Path<String>,
params: web::Query<SearchQuery>,
) -> Result<web::Json<SearchResult>> {
let index = ctx.index()?; let index = data.db.open_index(path.clone())
let db = &ctx.state().db; .ok_or(ResponseError::IndexNotFound(path.clone()))?;
let reader = db.main_read_txn()?;
let reader = data.db.main_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let schema = index let schema = index
.main .main
.schema(&reader)? .schema(&reader)
.ok_or(ResponseError::open_index("No Schema found"))?; .map_err(|_| ResponseError::Schema)?
.ok_or(ResponseError::Schema)?;
let query: SearchQuery = ctx let mut search_builder = index.new_search(params.q.clone());
.query()
.map_err(|_| ResponseError::bad_request("invalid query parameter"))?;
let mut search_builder = index.new_search(query.q.clone()); if let Some(offset) = params.offset {
if let Some(offset) = query.offset {
search_builder.offset(offset); search_builder.offset(offset);
} }
if let Some(limit) = query.limit { if let Some(limit) = params.limit {
search_builder.limit(limit); search_builder.limit(limit);
} }
let available_attributes = schema.displayed_name(); let available_attributes = schema.displayed_name();
let mut restricted_attributes: HashSet<&str>; let mut restricted_attributes: HashSet<&str>;
match &query.attributes_to_retrieve { match &params.attributes_to_retrieve {
Some(attributes_to_retrieve) => { Some(attributes_to_retrieve) => {
let attributes_to_retrieve: HashSet<&str> = attributes_to_retrieve.split(',').collect(); let attributes_to_retrieve: HashSet<&str> = attributes_to_retrieve.split(',').collect();
if attributes_to_retrieve.contains("*") { if attributes_to_retrieve.contains("*") {
@ -78,8 +81,8 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
} }
} }
if let Some(attributes_to_crop) = query.attributes_to_crop { if let Some(attributes_to_crop) = &params.attributes_to_crop {
let default_length = query.crop_length.unwrap_or(200); let default_length = params.crop_length.unwrap_or(200);
let mut final_attributes: HashMap<String, usize> = HashMap::new(); let mut final_attributes: HashMap<String, usize> = HashMap::new();
for attribute in attributes_to_crop.split(',') { for attribute in attributes_to_crop.split(',') {
@ -106,7 +109,7 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
search_builder.attributes_to_crop(final_attributes); search_builder.attributes_to_crop(final_attributes);
} }
if let Some(attributes_to_highlight) = query.attributes_to_highlight { if let Some(attributes_to_highlight) = &params.attributes_to_highlight {
let mut final_attributes: HashSet<String> = HashSet::new(); let mut final_attributes: HashSet<String> = HashSet::new();
for attribute in attributes_to_highlight.split(',') { for attribute in attributes_to_highlight.split(',') {
if attribute == "*" { if attribute == "*" {
@ -125,15 +128,15 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
search_builder.attributes_to_highlight(final_attributes); search_builder.attributes_to_highlight(final_attributes);
} }
if let Some(filters) = query.filters { if let Some(filters) = &params.filters {
search_builder.filters(filters); search_builder.filters(filters.to_string());
} }
if let Some(timeout_ms) = query.timeout_ms { if let Some(timeout_ms) = params.timeout_ms {
search_builder.timeout(Duration::from_millis(timeout_ms)); search_builder.timeout(Duration::from_millis(timeout_ms));
} }
if let Some(matches) = query.matches { if let Some(matches) = params.matches {
if matches { if matches {
search_builder.get_matches(); search_builder.get_matches();
} }
@ -141,11 +144,11 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
let response = match search_builder.search(&reader) { let response = match search_builder.search(&reader) {
Ok(response) => response, Ok(response) => response,
Err(Error::Internal(message)) => return Err(ResponseError::Internal(message)), Err(Error::Internal(message)) => return Err(ResponseError::Internal(message))?,
Err(others) => return Err(ResponseError::bad_request(others)), Err(others) => return Err(ResponseError::BadRequest(others.to_string()))?,
}; };
Ok(tide::Response::new(200).body_json(&response).unwrap()) Ok(web::Json(response))
} }
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
@ -174,24 +177,24 @@ struct SearchMultiBodyResponse {
query: String, query: String,
} }
pub async fn search_multi_index(mut ctx: Request<Data>) -> SResult<Response> { #[post("/indexes/search")]
ctx.is_allowed(Public)?; pub async fn search_multi_index(
let body = ctx data: web::Data<Data>,
.body_json::<SearchMultiBody>() body: web::Json<SearchMultiBody>,
.await ) -> Result<web::Json<SearchMultiBodyResponse>> {
.map_err(ResponseError::bad_request)?;
let mut index_list = body.clone().indexes; let mut index_list = body.clone().indexes;
for index in index_list.clone() { for index in index_list.clone() {
if index == "*" { if index == "*" {
index_list = ctx.state().db.indexes_uids().into_iter().collect(); index_list = data.db.indexes_uids().into_iter().collect();
break; break;
} }
} }
let mut offset = 0; let mut offset = 0;
let mut count = 20; let mut count = 20;
let query = body.query.clone();
if let Some(body_offset) = body.offset { if let Some(body_offset) = body.offset {
if let Some(limit) = body.limit { if let Some(limit) = body.limit {
@ -200,16 +203,12 @@ pub async fn search_multi_index(mut ctx: Request<Data>) -> SResult<Response> {
} }
} }
let offset = offset;
let count = count;
let db = &ctx.state().db;
let par_body = body.clone(); let par_body = body.clone();
let responses_per_index: Vec<SResult<_>> = index_list let responses_per_index: Vec<(String, SearchResult)> = index_list
.into_par_iter() .into_par_iter()
.map(move |index_uid| { .map(move |index_uid| {
let index: Index = db let index = data.db.open_index(&index_uid).unwrap();
.open_index(&index_uid)
.ok_or(ResponseError::index_not_found(&index_uid))?;
let mut search_builder = index.new_search(par_body.query.clone()); let mut search_builder = index.new_search(par_body.query.clone());
@ -237,9 +236,10 @@ pub async fn search_multi_index(mut ctx: Request<Data>) -> SResult<Response> {
} }
} }
let reader = db.main_read_txn()?; let reader = data.db.main_read_txn().unwrap();
let response = search_builder.search(&reader)?; let response = search_builder.search(&reader).unwrap();
Ok((index_uid, response))
(index_uid, response)
}) })
.collect(); .collect();
@ -247,13 +247,11 @@ pub async fn search_multi_index(mut ctx: Request<Data>) -> SResult<Response> {
let mut max_query_time = 0; let mut max_query_time = 0;
for response in responses_per_index { for (index_uid, response) in responses_per_index {
if let Ok((index_uid, response)) = response { if response.processing_time_ms > max_query_time {
if response.processing_time_ms > max_query_time { max_query_time = response.processing_time_ms;
max_query_time = response.processing_time_ms;
}
hits_map.insert(index_uid, response.hits);
} }
hits_map.insert(index_uid, response.hits);
} }
let response = SearchMultiBodyResponse { let response = SearchMultiBodyResponse {
@ -261,8 +259,8 @@ pub async fn search_multi_index(mut ctx: Request<Data>) -> SResult<Response> {
offset, offset,
hits_per_page: count, hits_per_page: count,
processing_time_ms: max_query_time, processing_time_ms: max_query_time,
query: body.query, query,
}; };
Ok(tide::Response::new(200).body_json(&response).unwrap()) Ok(web::Json(response))
} }