Fix for review

Quentin de Quelen 2019-11-20 17:28:46 +01:00
parent 7f2e5d091a
commit 055368acd8
9 changed files with 140 additions and 111 deletions

View File

@@ -6,16 +6,16 @@ use meilidb_schema::Schema;
use std::collections::HashMap;
use std::sync::Arc;
const CREATED_AT: &str = "created-at";
const CREATED_AT_KEY: &str = "created-at";
const CUSTOMS_KEY: &str = "customs-key";
const FIELDS_FREQUENCY: &str = "fields-frequency";
const NAME: &str = "name";
const FIELDS_FREQUENCY_KEY: &str = "fields-frequency";
const NAME_KEY: &str = "name";
const NUMBER_OF_DOCUMENTS_KEY: &str = "number-of-documents";
const RANKED_MAP_KEY: &str = "ranked-map";
const SCHEMA_KEY: &str = "schema";
const STOP_WORDS_KEY: &str = "stop-words";
const SYNONYMS_KEY: &str = "synonyms";
const UPDATED_AT: &str = "updated-at";
const UPDATED_AT_KEY: &str = "updated-at";
const WORDS_KEY: &str = "words";
pub type FreqsMap = HashMap<String, usize>;
@@ -33,32 +33,32 @@ impl Main {
}
pub fn put_name(self, writer: &mut heed::RwTxn, name: &str) -> ZResult<()> {
self.main.put::<Str, Str>(writer, NAME, name)
self.main.put::<Str, Str>(writer, NAME_KEY, name)
}
pub fn name(self, reader: &heed::RoTxn) -> ZResult<Option<String>> {
Ok(self
.main
.get::<Str, Str>(reader, NAME)?
.get::<Str, Str>(reader, NAME_KEY)?
.map(|name| name.to_owned()))
}
pub fn put_created_at(self, writer: &mut heed::RwTxn) -> ZResult<()> {
self.main
.put::<Str, SerdeDatetime>(writer, CREATED_AT, &Utc::now())
.put::<Str, SerdeDatetime>(writer, CREATED_AT_KEY, &Utc::now())
}
pub fn created_at(self, reader: &heed::RoTxn) -> ZResult<Option<DateTime<Utc>>> {
self.main.get::<Str, SerdeDatetime>(reader, CREATED_AT)
self.main.get::<Str, SerdeDatetime>(reader, CREATED_AT_KEY)
}
pub fn put_updated_at(self, writer: &mut heed::RwTxn) -> ZResult<()> {
self.main
.put::<Str, SerdeDatetime>(writer, UPDATED_AT, &Utc::now())
.put::<Str, SerdeDatetime>(writer, UPDATED_AT_KEY, &Utc::now())
}
pub fn updated_at(self, reader: &heed::RoTxn) -> ZResult<Option<DateTime<Utc>>> {
self.main.get::<Str, SerdeDatetime>(reader, UPDATED_AT)
self.main.get::<Str, SerdeDatetime>(reader, UPDATED_AT_KEY)
}
pub fn put_words_fst(self, writer: &mut heed::RwTxn, fst: &fst::Set) -> ZResult<()> {
@@ -159,13 +159,13 @@ impl Main {
fields_frequency: &FreqsMap,
) -> ZResult<()> {
self.main
.put::<Str, SerdeFreqsMap>(writer, FIELDS_FREQUENCY, fields_frequency)
.put::<Str, SerdeFreqsMap>(writer, FIELDS_FREQUENCY_KEY, fields_frequency)
}
pub fn fields_frequency(&self, reader: &heed::RoTxn) -> ZResult<Option<FreqsMap>> {
match self
.main
.get::<Str, SerdeFreqsMap>(&reader, FIELDS_FREQUENCY)?
.get::<Str, SerdeFreqsMap>(reader, FIELDS_FREQUENCY_KEY)?
{
Some(freqs) => Ok(Some(freqs)),
None => Ok(None),

View File

@@ -4,13 +4,15 @@ use std::sync::Arc;
use chrono::{DateTime, Utc};
use heed::types::{SerdeBincode, Str};
use log::*;
use log::error;
use meilidb_core::{Database, Error as MError, MResult};
use sysinfo::Pid;
use crate::option::Opt;
use crate::routes::index::index_update_callback;
const LAST_UPDATE_KEY: &str = "last-update";
type SerdeDatetime = SerdeBincode<DateTime<Utc>>;
#[derive(Clone)]
@@ -46,7 +48,7 @@ impl DataInner {
match self
.db
.common_store()
.get::<Str, SerdeDatetime>(&reader, "last-update")?
.get::<Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)?
{
Some(datetime) => Ok(Some(datetime)),
None => Ok(None),
@@ -56,11 +58,11 @@ impl DataInner {
pub fn set_last_update(&self, writer: &mut heed::RwTxn) -> MResult<()> {
self.db
.common_store()
.put::<Str, SerdeDatetime>(writer, "last-update", &Utc::now())
.put::<Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, &Utc::now())
.map_err(Into::into)
}
pub fn compute_stats(&self, mut writer: &mut heed::RwTxn, index_uid: &str) -> MResult<()> {
pub fn compute_stats(&self, writer: &mut heed::RwTxn, index_uid: &str) -> MResult<()> {
let index = match self.db.open_index(&index_uid) {
Some(index) => index,
None => {
@@ -93,7 +95,7 @@ impl DataInner {
index
.main
.put_fields_frequency(&mut writer, &frequency)
.put_fields_frequency(writer, &frequency)
.map_err(MError::Zlmdb)
}
}

View File

@@ -1,6 +1,6 @@
use crate::routes::setting::{RankingOrdering, SettingBody};
use indexmap::IndexMap;
use log::*;
use log::error;
use meilidb_core::criterion::*;
use meilidb_core::Highlight;
use meilidb_core::{Index, RankedMap};

View File

@@ -39,7 +39,7 @@ impl ContextExt for Context<Data> {
.get::<Str, SerdeBincode<Token>>(&reader, &token_key)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::invalid_token(format!(
"token key does not exist: {}",
"Api key does not exist: {}",
user_api_key
)))?;

View File

@@ -114,9 +114,7 @@ pub async fn get_all_documents(ctx: Context<Data>) -> SResult<Response> {
}
}
Ok(tide::response::json(response_body)
.with_status(StatusCode::OK)
.into_response())
Ok(tide::response::json(response_body))
}
fn infered_schema(document: &IndexMap<String, Value>) -> Option<meilidb_schema::Schema> {

View File

@@ -1,6 +1,6 @@
use chrono::{DateTime, Utc};
use http::StatusCode;
use log::*;
use log::error;
use meilidb_core::ProcessedUpdateResult;
use meilidb_schema::{Schema, SchemaBuilder};
use rand::seq::SliceRandom;
@@ -32,46 +32,51 @@ pub async fn list_indexes(ctx: Context<Data>) -> SResult<Response> {
let indexes_uids = ctx.state().db.indexes_uids();
let env = &ctx.state().db.env;
let mut reader = env.read_txn().map_err(ResponseError::internal)?;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let mut response_body = Vec::new();
for index_uid in indexes_uids {
let index = ctx
.state()
.db
.open_index(&index_uid)
.ok_or(ResponseError::internal(&index_uid))?;
let name = index
.main
.name(&mut reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("Name not found"))?;
let created_at = index
.main
.created_at(&mut reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("Created date not found"))?;
let updated_at = index
.main
.updated_at(&mut reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("Updated date not found"))?;
let index = ctx.state().db.open_index(&index_uid);
let index_response = IndexResponse {
name,
uid: index_uid,
created_at,
updated_at,
};
response_body.push(index_response);
match index {
Some(index) => {
let name = index
.main
.name(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'name' not found"))?;
let created_at = index
.main
.created_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'created_at' date not found"))?;
let updated_at = index
.main
.updated_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'updated_at' date not found"))?;
let index_response = IndexResponse {
name,
uid: index_uid,
created_at,
updated_at,
};
response_body.push(index_response);
}
None => error!(
"Index {} is referenced in the indexes list but cannot be found",
index_uid
),
}
}
Ok(tide::response::json(response_body))
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexResponse {
name: String,
uid: String,
@@ -85,24 +90,24 @@ pub async fn get_index(ctx: Context<Data>) -> SResult<Response> {
let index = ctx.index()?;
let env = &ctx.state().db.env;
let mut reader = env.read_txn().map_err(ResponseError::internal)?;
let reader = env.read_txn().map_err(ResponseError::internal)?;
let uid = ctx.url_param("index")?.to_string();
let uid = ctx.url_param("index")?;
let name = index
.main
.name(&mut reader)
.name(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("Name not found"))?;
.ok_or(ResponseError::internal("'name' not found"))?;
let created_at = index
.main
.created_at(&mut reader)
.created_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("Created date not found"))?;
.ok_or(ResponseError::internal("'created_at' date not found"))?;
let updated_at = index
.main
.updated_at(&mut reader)
.updated_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("Updated date not found"))?;
.ok_or(ResponseError::internal("'updated_at' date not found"))?;
let response_body = IndexResponse {
name,
@@ -114,15 +119,15 @@ pub async fn get_index(ctx: Context<Data>) -> SResult<Response> {
Ok(tide::response::json(response_body))
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct IndexCreateRequest {
name: String,
schema: Option<SchemaBody>,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexCreateResponse {
name: String,
uid: String,
@@ -166,11 +171,11 @@ pub async fn create_index(mut ctx: Context<Data>) -> SResult<Response> {
.put_updated_at(&mut writer)
.map_err(ResponseError::internal)?;
let schema: Option<Schema> = body.schema.clone().map(|s| s.into());
let schema: Option<Schema> = body.schema.clone().map(Into::into);
let mut response_update_id = None;
if let Some(schema) = schema {
let update_id = created_index
.schema_update(&mut writer, schema.clone())
.schema_update(&mut writer, schema)
.map_err(ResponseError::internal)?;
response_update_id = Some(update_id)
}
@@ -191,14 +196,14 @@ pub async fn create_index(mut ctx: Context<Data>) -> SResult<Response> {
.into_response())
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct UpdateIndexRequest {
name: String,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct UpdateIndexResponse {
name: String,
uid: String,
@@ -239,12 +244,12 @@ pub async fn update_index(mut ctx: Context<Data>) -> SResult<Response> {
.main
.created_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("Created date not found"))?;
.ok_or(ResponseError::internal("'created_at' date not found"))?;
let updated_at = index
.main
.updated_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("Updated date not found"))?;
.ok_or(ResponseError::internal("'updated_at' date not found"))?;
let response_body = UpdateIndexResponse {
name: body.name,
@@ -259,7 +264,7 @@ pub async fn update_index(mut ctx: Context<Data>) -> SResult<Response> {
}
#[derive(Default, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SchemaParams {
raw: bool,
}
@@ -283,16 +288,12 @@ pub async fn get_index_schema(ctx: Context<Data>) -> SResult<Response> {
match schema {
Some(schema) => {
if params.raw {
Ok(tide::response::json(schema.to_builder()))
Ok(tide::response::json(schema))
} else {
Ok(tide::response::json(SchemaBody::from(schema)))
}
}
None => Ok(
tide::response::json(json!({ "message": "missing index schema" }))
.with_status(StatusCode::NOT_FOUND)
.into_response(),
),
None => Err(ResponseError::not_found("missing index schema")),
}
}
@@ -303,7 +304,7 @@ pub async fn update_schema(mut ctx: Context<Data>) -> SResult<Response> {
let params: SchemaParams = ctx.url_query().unwrap_or_default();
let schema: Schema = if params.raw {
let schema = if params.raw {
ctx.body_json::<SchemaBuilder>()
.await
.map_err(ResponseError::bad_request)?
@@ -397,18 +398,35 @@ pub async fn delete_index(ctx: Context<Data>) -> SResult<StatusCode> {
}
}
pub fn index_update_callback(index_uid: &str, data: &Data, _status: ProcessedUpdateResult) {
let env = &data.db.env;
let mut writer = env.write_txn().unwrap();
data.compute_stats(&mut writer, &index_uid).unwrap();
data.set_last_update(&mut writer).unwrap();
pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpdateResult) {
if status.error.is_some() {
return;
}
if let Some(index) = data.db.open_index(&index_uid) {
let env = &data.db.env;
let mut writer = match env.write_txn() {
Ok(writer) => writer,
Err(e) => {
error!("Impossible to get write_txn; {}", e);
return;
}
};
if let Err(e) = data.compute_stats(&mut writer, &index_uid) {
error!("Impossible to compute stats; {}", e)
}
if let Err(e) = data.set_last_update(&mut writer) {
error!("Impossible to update last_update; {}", e)
}
if let Err(e) = index.main.put_updated_at(&mut writer) {
error!("Impossible to update updated_at; {}", e)
}
}
writer.commit().unwrap();
if let Err(e) = writer.commit() {
error!("Impossible to get write_txn; {}", e);
}
}
}

View File

@@ -156,6 +156,7 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
for index in index_list.clone() {
if index == "*" {
index_list = ctx.state().db.indexes_uids().into_iter().collect();
break;
}
}

View File

@@ -1,7 +1,9 @@
use std::collections::HashMap;
use chrono::{DateTime, Utc};
use log::error;
use pretty_bytes::converter::convert;
use serde::Serialize;
use std::collections::HashMap;
use sysinfo::{NetworkExt, Pid, ProcessExt, ProcessorExt, System, SystemExt};
use tide::{Context, Response};
use walkdir::WalkDir;
@@ -42,7 +44,7 @@ pub async fn index_stat(ctx: Context<Data>) -> SResult<Response> {
.state()
.is_indexing(&reader, &index_uid)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::not_found("Index not found"))?;
.ok_or(ResponseError::internal("'is_indexing' date not found"))?;
let response = IndexStatsResponse {
number_of_documents,
@@ -71,31 +73,39 @@ pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> {
let indexes_set = ctx.state().db.indexes_uids();
for index_uid in indexes_set {
let index = db.open_index(&index_uid).unwrap();
let index = ctx.state().db.open_index(&index_uid);
let number_of_documents = index
.main
.number_of_documents(&reader)
.map_err(ResponseError::internal)?;
match index {
Some(index) => {
let number_of_documents = index
.main
.number_of_documents(&reader)
.map_err(ResponseError::internal)?;
let fields_frequency = index
.main
.fields_frequency(&reader)
.map_err(ResponseError::internal)?
.unwrap_or_default();
let fields_frequency = index
.main
.fields_frequency(&reader)
.map_err(ResponseError::internal)?
.unwrap_or_default();
let is_indexing = ctx
.state()
.is_indexing(&reader, &index_uid)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::not_found("Index not found"))?;
let is_indexing = ctx
.state()
.is_indexing(&reader, &index_uid)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'is_indexing' date not found"))?;
let response = IndexStatsResponse {
number_of_documents,
is_indexing,
fields_frequency,
};
index_list.insert(index_uid, response);
let response = IndexStatsResponse {
number_of_documents,
is_indexing,
fields_frequency,
};
index_list.insert(index_uid, response);
}
None => error!(
"Index {:?} is referenced in the indexes list but cannot be found",
index_uid
),
}
}
let database_size = WalkDir::new(ctx.state().db_path.clone())

View File

@@ -145,7 +145,7 @@ struct InnerSchema {
}
impl Schema {
pub fn to_builder(&self) -> SchemaBuilder {
fn to_builder(&self) -> SchemaBuilder {
let identifier = self.inner.identifier.clone();
let attributes = self.attributes_ordered();
SchemaBuilder {