Merge branch 'master' into issue943

commit f4d918d22a
Author: gorogoroumaru
Date:   2020-10-02 21:01:31 +09:00 (committed by GitHub)
45 changed files with 2490 additions and 777 deletions

@@ -0,0 +1,64 @@
use std::fs::File;
use std::path::Path;
use actix_web::{get, post};
use actix_web::{HttpResponse, web};
use serde::{Deserialize, Serialize};
use crate::backup::{BackupInfo, BackupStatus, compressed_backup_folder, init_backup_process};
use crate::Data;
use crate::error::{Error, ResponseError};
use crate::helpers::Authentication;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(trigger_backup)
.service(get_backup_status);
}
#[post("/backups", wrap = "Authentication::Private")]
async fn trigger_backup(
data: web::Data<Data>,
) -> Result<HttpResponse, ResponseError> {
let backup_folder = Path::new(&data.backup_folder);
match init_backup_process(&data, &backup_folder) {
Ok(resume) => Ok(HttpResponse::Accepted().json(resume)),
Err(e) => Err(e.into())
}
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct BackupStatusResponse {
status: String,
}
#[derive(Deserialize)]
struct BackupParam {
backup_uid: String,
}
#[get("/backups/{backup_uid}/status", wrap = "Authentication::Private")]
async fn get_backup_status(
data: web::Data<Data>,
path: web::Path<BackupParam>,
) -> Result<HttpResponse, ResponseError> {
let backup_folder = Path::new(&data.backup_folder);
let backup_uid = &path.backup_uid;
if let Some(resume) = BackupInfo::get_current() {
if &resume.uid == backup_uid {
return Ok(HttpResponse::Ok().json(resume));
}
}
if File::open(compressed_backup_folder(Path::new(backup_folder), backup_uid)).is_ok() {
let resume = BackupInfo::new(
backup_uid.into(),
BackupStatus::Done
);
Ok(HttpResponse::Ok().json(resume))
} else {
Err(Error::not_found("backup does not exist").into())
}
}
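The handlers in this new file lean on two types from crate::backup that the diff does not show. A minimal sketch of what they plausibly look like, inferred from usage above (variant names and field layout are assumptions, not the committed definitions):

use serde::Serialize;

// Status of a backup job, serialized into the JSON payloads returned above.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub enum BackupStatus {
    InProgress, // assumed variant; only Done appears in this diff
    Failed,     // assumed variant
    Done,
}

// The "resume" value the routes return; `uid` is what the URL parameter in
// get_backup_status is matched against.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct BackupInfo {
    pub uid: String,
    pub status: BackupStatus,
}

impl BackupInfo {
    pub fn new(uid: String, status: BackupStatus) -> Self {
        Self { uid, status }
    }
    // The real type also exposes get_current() for the in-flight backup,
    // presumably backed by shared state; omitted here.
}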

@@ -1,11 +1,11 @@
use std::collections::{BTreeSet, HashSet};
use actix_web::{delete, get, post, put};
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post, put};
use indexmap::IndexMap;
use meilisearch_core::update;
use serde::Deserialize;
use meilisearch_core::{update, MainReader};
use serde_json::Value;
use serde::Deserialize;
use crate::Data;
use crate::error::{Error, ResponseError};
@@ -85,41 +85,61 @@ struct BrowseQuery {
attributes_to_retrieve: Option<String>,
}
pub fn get_all_documents_sync(
data: &web::Data<Data>,
reader: &MainReader,
index_uid: &str,
offset: usize,
limit: usize,
attributes_to_retrieve: Option<&String>
) -> Result<Vec<Document>, Error> {
let index = data
.db
.open_index(index_uid)
.ok_or(Error::index_not_found(index_uid))?;
let documents_ids: Result<BTreeSet<_>, _> = index
.documents_fields_counts
.documents_ids(reader)?
.skip(offset)
.take(limit)
.collect();
let attributes: Option<HashSet<&str>> = attributes_to_retrieve
.map(|a| a.split(',').collect());
let mut documents = Vec::new();
for document_id in documents_ids? {
if let Ok(Some(document)) =
index.document::<Document>(reader, attributes.as_ref(), document_id)
{
documents.push(document);
}
}
Ok(documents)
}
#[get("/indexes/{index_uid}/documents", wrap = "Authentication::Public")]
async fn get_all_documents(
data: web::Data<Data>,
path: web::Path<IndexParam>,
params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;
let offset = params.offset.unwrap_or(0);
let limit = params.limit.unwrap_or(20);
let index_uid = &path.index_uid;
let reader = data.db.main_read_txn()?;
let documents_ids: Result<BTreeSet<_>, _> = index
.documents_fields_counts
.documents_ids(&reader)?
.skip(offset)
.take(limit)
.collect();
let attributes: Option<HashSet<&str>> = params
.attributes_to_retrieve
.as_ref()
.map(|a| a.split(',').collect());
let mut documents = Vec::new();
for document_id in documents_ids? {
if let Ok(Some(document)) =
index.document::<Document>(&reader, attributes.as_ref(), document_id)
{
documents.push(document);
}
}
let documents = get_all_documents_sync(
&data,
&reader,
index_uid,
offset,
limit,
params.attributes_to_retrieve.as_ref()
)?;
Ok(HttpResponse::Ok().json(documents))
}
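The point of extracting get_all_documents_sync from the handler is that the backup process can reuse it under a read transaction it already holds, instead of issuing an internal HTTP request. A sketch of an assumed call site (the module path, function name, and pagination loop are illustrative, not part of this diff):

use actix_web::web;

use crate::error::Error;
use crate::routes::document::get_all_documents_sync; // module path is an assumption
use crate::Data;

// Illustrative only: page through one index's documents inside a single
// read transaction, the way a dump/backup task can now do.
fn dump_documents(data: &web::Data<Data>, index_uid: &str) -> Result<(), Error> {
    let reader = data.db.main_read_txn()?;
    let mut offset = 0;
    loop {
        let batch = get_all_documents_sync(data, &reader, index_uid, offset, 1000, None)?;
        if batch.is_empty() {
            break; // every document has been read
        }
        offset += batch.len();
        // ...serialize `batch` into the backup archive here...
    }
    Ok(())
}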

@@ -1,5 +1,5 @@
use actix_web::{web, HttpResponse};
use actix_web_macros::{get, put};
use actix_web::{get, put};
use serde::Deserialize;
use crate::error::{Error, ResponseError};

@@ -1,14 +1,16 @@
use actix_web::{delete, get, post, put};
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post, put};
use chrono::{DateTime, Utc};
use log::error;
use meilisearch_core::{Database, MainReader, UpdateReader};
use meilisearch_core::update::UpdateStatus;
use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize};
use crate::Data;
use crate::error::{Error, ResponseError};
use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(list_indexes)
@@ -29,19 +31,17 @@ fn generate_uid() -> String {
.collect()
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
struct IndexResponse {
name: String,
uid: String,
pub struct IndexResponse {
pub name: String,
pub uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
pub primary_key: Option<String>,
}
#[get("/indexes", wrap = "Authentication::Private")]
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?;
pub fn list_indexes_sync(data: &web::Data<Data>, reader: &MainReader) -> Result<Vec<IndexResponse>, ResponseError> {
let mut indexes = Vec::new();
for index_uid in data.db.indexes_uids() {
@@ -49,23 +49,23 @@ async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
match index {
Some(index) => {
let name = index.main.name(&reader)?.ok_or(Error::internal(
let name = index.main.name(reader)?.ok_or(Error::internal(
"Impossible to get the name of an index",
))?;
let created_at = index
.main
.created_at(&reader)?
.created_at(reader)?
.ok_or(Error::internal(
"Impossible to get the create date of an index",
))?;
let updated_at = index
.main
.updated_at(&reader)?
.updated_at(reader)?
.ok_or(Error::internal(
"Impossible to get the last update date of an index",
))?;
let primary_key = match index.main.schema(&reader) {
let primary_key = match index.main.schema(reader) {
Ok(Some(schema)) => match schema.primary_key() {
Some(primary_key) => Some(primary_key.to_owned()),
None => None,
@@ -89,6 +89,14 @@ async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
}
}
Ok(indexes)
}
#[get("/indexes", wrap = "Authentication::Private")]
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?;
let indexes = list_indexes_sync(&data, &reader)?;
Ok(HttpResponse::Ok().json(indexes))
}
@@ -145,6 +153,55 @@ struct IndexCreateRequest {
primary_key: Option<String>,
}
pub fn create_index_sync(
database: &std::sync::Arc<Database>,
uid: String,
name: String,
primary_key: Option<String>,
) -> Result<IndexResponse, Error> {
let created_index = database
.create_index(&uid)
.map_err(|e| match e {
meilisearch_core::Error::IndexAlreadyExists => Error::IndexAlreadyExists(uid.clone()),
_ => Error::create_index(e)
})?;
let index_response = database.main_write::<_, _, Error>(|mut write_txn| {
created_index.main.put_name(&mut write_txn, &name)?;
let created_at = created_index
.main
.created_at(&write_txn)?
.ok_or(Error::internal("Impossible to read created at"))?;
let updated_at = created_index
.main
.updated_at(&write_txn)?
.ok_or(Error::internal("Impossible to read updated at"))?;
if let Some(id) = primary_key.clone() {
if let Some(mut schema) = created_index.main.schema(&write_txn)? {
schema
.set_primary_key(&id)
.map_err(Error::bad_request)?;
created_index.main.put_schema(&mut write_txn, &schema)?;
}
}
let index_response = IndexResponse {
name,
uid,
created_at,
updated_at,
primary_key,
};
Ok(index_response)
})?;
Ok(index_response)
}
#[post("/indexes", wrap = "Authentication::Private")]
async fn create_index(
data: web::Data<Data>,
@@ -175,45 +232,9 @@ async fn create_index(
},
};
let created_index = data
.db
.create_index(&uid)
.map_err(|e| match e {
meilisearch_core::Error::IndexAlreadyExists => e.into(),
_ => ResponseError::from(Error::create_index(e))
})?;
let name = body.name.as_ref().unwrap_or(&uid).to_string();
let index_response = data.db.main_write::<_, _, ResponseError>(|mut writer| {
let name = body.name.as_ref().unwrap_or(&uid);
created_index.main.put_name(&mut writer, name)?;
let created_at = created_index
.main
.created_at(&writer)?
.ok_or(Error::internal("Impossible to read created at"))?;
let updated_at = created_index
.main
.updated_at(&writer)?
.ok_or(Error::internal("Impossible to read updated at"))?;
if let Some(id) = body.primary_key.clone() {
if let Some(mut schema) = created_index.main.schema(&writer)? {
schema
.set_primary_key(&id)
.map_err(Error::bad_request)?;
created_index.main.put_schema(&mut writer, &schema)?;
}
}
let index_response = IndexResponse {
name: name.to_string(),
uid,
created_at,
updated_at,
primary_key: body.primary_key.clone(),
};
Ok(index_response)
})?;
let index_response = create_index_sync(&data.db, uid, name, body.primary_key.clone())?;
Ok(HttpResponse::Created().json(index_response))
}
@@ -340,20 +361,28 @@ async fn get_update_status(
)).into()),
}
}
pub fn get_all_updates_status_sync(
data: &web::Data<Data>,
reader: &UpdateReader,
index_uid: &str,
) -> Result<Vec<UpdateStatus>, Error> {
let index = data
.db
.open_index(index_uid)
.ok_or(Error::index_not_found(index_uid))?;
Ok(index.all_updates_status(reader)?)
}
#[get("/indexes/{index_uid}/updates", wrap = "Authentication::Private")]
async fn get_all_updates_status(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;
let reader = data.db.update_read_txn()?;
let response = index.all_updates_status(&reader)?;
let response = get_all_updates_status_sync(&data, &reader, &path.index_uid)?;
Ok(HttpResponse::Ok().json(response))
}
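IndexResponse now derives Deserialize and Clone and exposes public fields, and create_index_sync makes index creation callable outside a request handler; together they let a dump file round-trip index metadata. An assumed restore-side call (everything outside this diff is illustrative):

use std::sync::Arc;

use meilisearch_core::Database;

use crate::error::Error;
use crate::routes::index::{create_index_sync, IndexResponse}; // module path is an assumption

// Illustrative only: recreate an index from metadata deserialized out of a
// dump, reusing the exact logic the POST /indexes handler goes through.
fn restore_index(db: &Arc<Database>, meta: IndexResponse) -> Result<IndexResponse, Error> {
    create_index_sync(db, meta.uid, meta.name, meta.primary_key)
}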

@@ -1,6 +1,6 @@
use actix_web::web;
use actix_web::HttpResponse;
use actix_web_macros::get;
use actix_web::get;
use serde::Serialize;
use crate::helpers::Authentication;

@@ -10,6 +10,7 @@ pub mod setting;
pub mod stats;
pub mod stop_words;
pub mod synonym;
pub mod backup;
#[derive(Deserialize)]
pub struct IndexParam {

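Declaring the module here is only half the wiring: the new scope still has to be mounted when the actix-web App is configured, which happens outside this hunk. Roughly (a sketch; the real registration lives in the app factory, and any module name not shown above is an assumption):

use actix_web::web;

// Assumed shape of the route registration: each module contributes its
// services to the shared ServiceConfig.
pub fn configure(cfg: &mut web::ServiceConfig) {
    crate::routes::backup::services(cfg);
    crate::routes::search::services(cfg);
    // ...and the other modules listed above...
}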

@@ -1,9 +1,7 @@
use std::collections::{HashSet, HashMap};
use std::collections::{HashMap, HashSet};
use actix_web::{get, post, web, HttpResponse};
use log::warn;
use actix_web::web;
use actix_web::HttpResponse;
use actix_web_macros::{get, post};
use serde::{Deserialize, Serialize};
use serde_json::Value;
@@ -14,11 +12,10 @@ use crate::routes::IndexParam;
use crate::Data;
use meilisearch_core::facets::FacetFilter;
use meilisearch_schema::{Schema, FieldId};
use meilisearch_schema::{FieldId, Schema};
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(search_with_post)
.service(search_with_url_query);
cfg.service(search_with_post).service(search_with_url_query);
}
#[derive(Serialize, Deserialize)]
@@ -93,7 +90,11 @@ async fn search_with_post(
}
impl SearchQuery {
fn search(&self, index_uid: &str, data: web::Data<Data>) -> Result<SearchResult, ResponseError> {
fn search(
&self,
index_uid: &str,
data: web::Data<Data>,
) -> Result<SearchResult, ResponseError> {
let index = data
.db
.open_index(index_uid)
@@ -105,7 +106,12 @@ impl SearchQuery {
.schema(&reader)?
.ok_or(Error::internal("Impossible to retrieve the schema"))?;
let mut search_builder = index.new_search(self.q.clone());
let query = self
.q
.clone()
.and_then(|q| if q.is_empty() { None } else { Some(q) });
let mut search_builder = index.new_search(query);
if let Some(offset) = self.offset {
search_builder.offset(offset);
@@ -118,7 +124,8 @@ impl SearchQuery {
let mut restricted_attributes: HashSet<&str>;
match &self.attributes_to_retrieve {
Some(attributes_to_retrieve) => {
let attributes_to_retrieve: HashSet<&str> = attributes_to_retrieve.split(',').collect();
let attributes_to_retrieve: HashSet<&str> =
attributes_to_retrieve.split(',').collect();
if attributes_to_retrieve.contains("*") {
restricted_attributes = available_attributes.clone();
} else {
@@ -132,15 +139,22 @@ impl SearchQuery {
}
}
}
},
}
None => {
restricted_attributes = available_attributes.clone();
}
}
if let Some(ref facet_filters) = self.facet_filters {
let attrs = index.main.attributes_for_faceting(&reader)?.unwrap_or_default();
search_builder.add_facet_filters(FacetFilter::from_str(facet_filters, &schema, &attrs)?);
let attrs = index
.main
.attributes_for_faceting(&reader)?
.unwrap_or_default();
search_builder.add_facet_filters(FacetFilter::from_str(
facet_filters,
&schema,
&attrs,
)?);
}
if let Some(facets) = &self.facets_distribution {
@@ -148,7 +162,7 @@ impl SearchQuery {
Some(ref attrs) => {
let field_ids = prepare_facet_list(&facets, &schema, attrs)?;
search_builder.add_facets(field_ids);
},
}
None => return Err(FacetCountError::NoFacetSet.into()),
}
}
@@ -160,20 +174,23 @@ impl SearchQuery {
for attribute in attributes_to_crop.split(',') {
let mut attribute = attribute.split(':');
let attr = attribute.next();
let length = attribute.next().and_then(|s| s.parse().ok()).unwrap_or(default_length);
let length = attribute
.next()
.and_then(|s| s.parse().ok())
.unwrap_or(default_length);
match attr {
Some("*") => {
for attr in &restricted_attributes {
final_attributes.insert(attr.to_string(), length);
}
},
}
Some(attr) => {
if available_attributes.contains(attr) {
final_attributes.insert(attr.to_string(), length);
} else {
warn!("The attributes {:?} present in attributesToCrop parameter doesn't exist", attr);
}
},
}
None => (),
}
}
@@ -215,7 +232,11 @@ impl SearchQuery {
///
/// An error is returned if the array is malformed, or if it contains attributes that are
/// unexisting, or not set as facets.
fn prepare_facet_list(facets: &str, schema: &Schema, facet_attrs: &[FieldId]) -> Result<Vec<(FieldId, String)>, FacetCountError> {
fn prepare_facet_list(
facets: &str,
schema: &Schema,
facet_attrs: &[FieldId],
) -> Result<Vec<(FieldId, String)>, FacetCountError> {
let json_array = serde_json::from_str(facets)?;
match json_array {
Value::Array(vals) => {
@@ -243,6 +264,6 @@ fn prepare_facet_list(facets: &str, schema: &Schema, facet_attrs: &[FieldId]) -> Result<Vec<(FieldId, String)>, FacetCountError> {
}
Ok(field_ids)
}
bad_val => Err(FacetCountError::unexpected_token(bad_val, &["[String]"]))
bad_val => Err(FacetCountError::unexpected_token(bad_val, &["[String]"])),
}
}
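Most of this file's hunks are rustfmt-style reflowing, but one carries a behavior change: an empty q is now normalized to None before the search builder is created, so "?q=" triggers the same placeholder search as omitting the parameter entirely. The normalization in isolation (standalone illustration, not committed code):

// Mirrors the new `and_then` chain in SearchQuery::search.
fn normalize_query(q: Option<String>) -> Option<String> {
    q.and_then(|q| if q.is_empty() { None } else { Some(q) })
}

fn main() {
    assert_eq!(normalize_query(Some(String::new())), None); // "?q=" acts like no query
    assert_eq!(normalize_query(Some("shoe".into())), Some("shoe".to_string()));
    assert_eq!(normalize_query(None), None);
}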

@@ -1,13 +1,15 @@
use std::collections::{BTreeMap, BTreeSet, HashSet};
use actix_web::{delete, get, post};
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post};
use meilisearch_core::{MainReader, UpdateWriter};
use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState, DEFAULT_RANKING_RULES};
use meilisearch_schema::Schema;
use std::collections::{BTreeMap, BTreeSet};
use crate::Data;
use crate::error::{Error, ResponseError};
use crate::helpers::Authentication;
use crate::routes::{IndexParam, IndexUpdateResponse};
use crate::Data;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(update_all)
@@ -30,73 +32,77 @@ pub fn services(cfg: &mut web::ServiceConfig) {
.service(update_attributes_for_faceting);
}
pub fn update_all_settings_txn(
data: &web::Data<Data>,
settings: SettingsUpdate,
index_uid: &str,
write_txn: &mut UpdateWriter,
) -> Result<u64, Error> {
let index = data
.db
.open_index(index_uid)
.ok_or(Error::index_not_found(index_uid))?;
let update_id = index.settings_update(write_txn, settings)?;
Ok(update_id)
}
#[post("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
async fn update_all(
data: web::Data<Data>,
path: web::Path<IndexParam>,
body: web::Json<Settings>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;
let settings = body
.into_inner()
.to_update()
.map_err(Error::bad_request)?;
let update_id = data.db.update_write::<_, _, ResponseError>(|writer| {
let settings = body
.into_inner()
.to_update()
.map_err(Error::bad_request)?;
let update_id = index.settings_update(writer, settings)?;
Ok(update_id)
let update_id = data.db.update_write::<_, _, Error>(|writer| {
update_all_settings_txn(&data, settings, &path.index_uid, writer)
})?;
Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
}
#[get("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
async fn get_all(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
pub fn get_all_sync(data: &web::Data<Data>, reader: &MainReader, index_uid: &str) -> Result<Settings, Error> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;
let reader = data.db.main_read_txn()?;
.open_index(index_uid)
.ok_or(Error::index_not_found(index_uid))?;
let stop_words: BTreeSet<String> = index
.main
.stop_words(&reader)?
.stop_words(reader)?
.into_iter()
.collect();
let synonyms_list = index.main.synonyms(&reader)?;
let synonyms_list = index.main.synonyms(reader)?;
let mut synonyms = BTreeMap::new();
let index_synonyms = &index.synonyms;
for synonym in synonyms_list {
let list = index_synonyms.synonyms(&reader, synonym.as_bytes())?;
let list = index_synonyms.synonyms(reader, synonym.as_bytes())?;
synonyms.insert(synonym, list);
}
let ranking_rules = index
.main
.ranking_rules(&reader)?
.ranking_rules(reader)?
.unwrap_or(DEFAULT_RANKING_RULES.to_vec())
.into_iter()
.map(|r| r.to_string())
.collect();
let schema = index.main.schema(&reader)?;
let schema = index.main.schema(reader)?;
let distinct_attribute = match (index.main.distinct_attribute(&reader)?, &schema) {
let distinct_attribute = match (index.main.distinct_attribute(reader)?, &schema) {
(Some(id), Some(schema)) => schema.name(id).map(str::to_string),
_ => None,
};
let attributes_for_faceting = match (&schema, &index.main.attributes_for_faceting(&reader)?) {
let attributes_for_faceting = match (&schema, &index.main.attributes_for_faceting(reader)?) {
(Some(schema), Some(attrs)) => {
attrs
.iter()
@@ -110,7 +116,7 @@ async fn get_all(
let searchable_attributes = schema.as_ref().map(get_indexed_attributes);
let displayed_attributes = schema.as_ref().map(get_displayed_attributes);
let settings = Settings {
Ok(Settings {
ranking_rules: Some(Some(ranking_rules)),
distinct_attribute: Some(distinct_attribute),
searchable_attributes: Some(searchable_attributes),
@ -118,7 +124,16 @@ async fn get_all(
stop_words: Some(Some(stop_words)),
synonyms: Some(Some(synonyms)),
attributes_for_faceting: Some(Some(attributes_for_faceting)),
};
})
}
#[get("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
async fn get_all(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?;
let settings = get_all_sync(&data, &reader, &path.index_uid)?;
Ok(HttpResponse::Ok().json(settings))
}
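get_all_sync gives the backup process a way to snapshot every index's settings under one consistent read transaction, and update_all_settings_txn is the matching write path for a restore. An assumed export loop (illustrative; only the _sync/_txn functions come from this diff):

use actix_web::web;

use meilisearch_core::settings::Settings;

use crate::error::Error;
use crate::routes::setting::get_all_sync; // module path is an assumption
use crate::Data;

// Illustrative only: collect (index uid, settings) pairs for a dump, all
// read through a single transaction.
fn dump_settings(data: &web::Data<Data>) -> Result<Vec<(String, Settings)>, Error> {
    let reader = data.db.main_read_txn()?;
    data.db
        .indexes_uids()
        .into_iter()
        .map(|uid| {
            let settings = get_all_sync(data, &reader, &uid)?;
            Ok((uid, settings))
        })
        .collect()
}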

@@ -2,7 +2,7 @@ use std::collections::{HashMap, BTreeMap};
use actix_web::web;
use actix_web::HttpResponse;
use actix_web_macros::get;
use actix_web::get;
use chrono::{DateTime, Utc};
use log::error;
use serde::Serialize;

@@ -1,5 +1,5 @@
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post};
use actix_web::{delete, get, post};
use meilisearch_core::settings::{SettingsUpdate, UpdateState};
use std::collections::BTreeSet;

@@ -1,7 +1,7 @@
use std::collections::BTreeMap;
use actix_web::{web, HttpResponse};
use actix_web_macros::{delete, get, post};
use actix_web::{delete, get, post};
use indexmap::IndexMap;
use meilisearch_core::settings::{SettingsUpdate, UpdateState};