add index endpoint & key endpoint & stats endpoint

Quentin de Quelen 2020-04-08 14:13:45 +02:00 committed by qdequele
parent 73b5c87cbb
commit 6c581fb3bd
11 changed files with 358 additions and 348 deletions
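The diff migrates these routes from tide to actix-web handlers registered as services. As a rough orientation only (not part of the commit), the sketch below shows the handler shape the new index, key, and stats routes follow, assuming actix-web 2.x with actix-rt and serde; the /example route and Health type are hypothetical placeholders.

use actix_web::{get, web, App, HttpServer};
use serde::Serialize;

// Hypothetical payload, for illustration only.
#[derive(Serialize)]
struct Health {
    status: &'static str,
}

// The new routes follow this shape: an attribute macro declares the path,
// extractors such as web::Data, web::Path and web::Json pull state and input,
// and the handler returns web::Json instead of building a tide::Response.
#[get("/example")]
async fn example() -> web::Json<Health> {
    web::Json(Health { status: "ok" })
}

#[actix_rt::main]
async fn main() -> std::io::Result<()> {
    HttpServer::new(|| App::new().service(example))
        .bind("127.0.0.1:7700")?
        .run()
        .await
}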


@@ -381,7 +381,7 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<
.sort_unstable_by_key(|m| (m.char_index, m.char_length));
let start_retrieve = Instant::now();
let result = index.document::<Document>(&reader, Some(&fields), doc.id);
let result = index.document::<Document, _>(&reader, Some(fields.clone()), doc.id);
retrieve_duration += start_retrieve.elapsed();
match result {


@@ -10,7 +10,7 @@ use sha2::Digest;
use sysinfo::Pid;
use crate::option::Opt;
// use crate::routes::index::index_update_callback;
use crate::routes::index_update_callback;
const LAST_UPDATE_KEY: &str = "last-update";
@@ -155,7 +155,7 @@ impl Data {
let callback_context = data.clone();
db.set_update_callback(Box::new(move |index_uid, status| {
// index_update_callback(&index_uid, &callback_context, status);
index_update_callback(&index_uid, &callback_context, status);
}));
data


@@ -1,12 +1,8 @@
use std::fmt;
use meilisearch_core::{FstError, HeedError};
use serde_json::json;
use actix_http::{ResponseBuilder, Response};
use actix_http::ResponseBuilder;
use actix_web::http::StatusCode;
use actix_web::*;
use futures::future::{ok, Ready};
// use crate::helpers::meilisearch::Error as SearchError;
#[derive(Debug)]
pub enum ResponseError {
@@ -16,6 +12,7 @@ pub enum ResponseError {
NotFound(String),
IndexNotFound(String),
DocumentNotFound(String),
UpdateNotFound(u64),
MissingHeader(String),
FilterParsing(String),
BadParameter(String, String),
@@ -38,6 +35,7 @@ impl fmt::Display for ResponseError {
Self::NotFound(err) => write!(f, "{} not found", err),
Self::IndexNotFound(index_uid) => write!(f, "Index {} not found", index_uid),
Self::DocumentNotFound(document_id) => write!(f, "Document with id {} not found", document_id),
Self::UpdateNotFound(update_id) => write!(f, "Update with id {} not found", update_id),
Self::MissingHeader(header) => write!(f, "Header {} is missing", header),
Self::BadParameter(param, err) => write!(f, "Url parameter {} error: {}", param, err),
Self::OpenIndex(err) => write!(f, "Impossible to open index; {}", err),
@@ -68,6 +66,7 @@ impl error::ResponseError for ResponseError {
Self::NotFound(_) => StatusCode::NOT_FOUND,
Self::IndexNotFound(_) => StatusCode::NOT_FOUND,
Self::DocumentNotFound(_) => StatusCode::NOT_FOUND,
Self::UpdateNotFound(_) => StatusCode::NOT_FOUND,
Self::MissingHeader(_) => StatusCode::UNAUTHORIZED,
Self::BadParameter(_, _) => StatusCode::BAD_REQUEST,
Self::OpenIndex(_) => StatusCode::BAD_REQUEST,
@@ -82,52 +81,3 @@ impl error::ResponseError for ResponseError {
}
}
}
// impl Responder for ResponseError {
// type Error = Error;
// type Future = Ready<Result<Response, Error>>;
// #[inline]
// fn respond_to(self, req: &HttpRequest) -> Self::Future {
// ok(self.error_response())
// }
// }
impl From<serde_json::Error> for ResponseError {
fn from(err: serde_json::Error) -> ResponseError {
ResponseError::Internal(err.to_string())
}
}
impl From<meilisearch_core::Error> for ResponseError {
fn from(err: meilisearch_core::Error) -> ResponseError {
ResponseError::Internal(err.to_string())
}
}
impl From<HeedError> for ResponseError {
fn from(err: HeedError) -> ResponseError {
ResponseError::Internal(err.to_string())
}
}
impl From<FstError> for ResponseError {
fn from(err: FstError) -> ResponseError {
ResponseError::Internal(err.to_string())
}
}
// impl From<SearchError> for ResponseError {
// fn from(err: SearchError) -> ResponseError {
// match err {
// SearchError::FilterParsing(s) => ResponseError::FilterParsing(s),
// _ => ResponseError::Internal(err),
// }
// }
// }
impl From<meilisearch_core::settings::RankingRuleConversionError> for ResponseError {
fn from(err: meilisearch_core::settings::RankingRuleConversionError) -> ResponseError {
ResponseError::Internal(err.to_string())
}
}


@@ -1,14 +1,14 @@
use std::{env, thread, fs};
use std::{env, thread};
use log::info;
use main_error::MainError;
use structopt::StructOpt;
use actix_web::middleware::Logger;
use actix_web::{post, web, App, HttpServer, HttpResponse, Responder};
use actix_web::*;
use meilisearch_http::data::Data;
use meilisearch_http::option::Opt;
use meilisearch_http::routes;
// use meilisearch_http::routes::index::index_update_callback;
use meilisearch_http::routes::index_update_callback;
mod analytics;
@@ -45,7 +45,7 @@ async fn main() -> Result<(), MainError> {
let data_cloned = data.clone();
data.db.set_update_callback(Box::new(move |name, status| {
// index_update_callback(name, &data_cloned, status);
index_update_callback(name, &data_cloned, status);
}));
print_launch_resume(&opt, &data);
@@ -56,6 +56,13 @@ async fn main() -> Result<(), MainError> {
.app_data(web::Data::new(data.clone()))
.service(routes::load_html)
.service(routes::load_css)
.service(routes::index::list_indexes)
.service(routes::index::get_index)
.service(routes::index::create_index)
.service(routes::index::update_index)
.service(routes::index::delete_index)
.service(routes::search::search_with_url_query)
.service(routes::search::search_multi_index)
.service(routes::document::get_document)
.service(routes::document::delete_document)
.service(routes::document::get_all_documents)
@@ -63,7 +70,14 @@ async fn main() -> Result<(), MainError> {
.service(routes::document::update_documents)
.service(routes::document::delete_documents)
.service(routes::document::clear_all_documents)
.service(routes::update::get_update_status)
.service(routes::update::get_all_updates_status)
.service(routes::key::list)
.service(routes::stats::index_stats)
.service(routes::stats::get_stats)
.service(routes::stats::get_version)
.service(routes::stats::get_sys_info)
.service(routes::stats::get_sys_info_pretty)
.service(routes::health::get_health)
.service(routes::health::change_healthyness)
)


@@ -14,7 +14,7 @@ type Document = IndexMap<String, Value>;
pub async fn get_document(
data: web::Data<Data>,
path: web::Path<(String, String)>,
) -> Result<HttpResponse> {
) -> Result<web::Json<Document>> {
let index = data.db.open_index(&path.0)
.ok_or(ResponseError::IndexNotFound(path.0.clone()))?;
let document_id = meilisearch_core::serde::compute_document_id(path.1.clone());
@@ -26,7 +26,7 @@ pub async fn get_document(
.map_err(|_| ResponseError::DocumentNotFound(path.1.clone()))?
.ok_or(ResponseError::DocumentNotFound(path.1.clone()))?;
Ok(HttpResponse::Ok().json(response))
Ok(web::Json(response))
}
#[delete("/indexes/{index_uid}/documents/{document_id}")]
@@ -67,7 +67,7 @@ pub async fn get_all_documents(
data: web::Data<Data>,
path: web::Path<String>,
params: web::Query<BrowseQuery>,
) -> Result<HttpResponse> {
) -> Result<web::Json<Vec<Document>>> {
let index = data.db.open_index(path.clone())
.ok_or(ResponseError::IndexNotFound(path.clone()))?;
@@ -86,19 +86,20 @@ pub async fn get_all_documents(
.take(limit)
.collect();
let documents_ids = documents_ids.map_err(|err| ResponseError::Internal(err.to_string()))?;
let documents_ids = documents_ids
.map_err(|err| ResponseError::Internal(err.to_string()))?;
let attributes = params.attributes_to_retrieve.clone()
.map(|a| a.split(',').map(|a| a.to_string()).collect());
let mut response_body = Vec::<IndexMap<String, Value>>::new();
let mut response_body = Vec::<Document>::new();
for document_id in documents_ids {
if let Ok(Some(document)) = index.document(&reader, attributes.clone(), document_id) {
response_body.push(document);
}
}
Ok(HttpResponse::Ok().json(response_body))
Ok(web::Json(response_body))
}
fn find_primary_key(document: &IndexMap<String, Value>) -> Option<String> {


@@ -1,14 +1,10 @@
use chrono::{DateTime, Utc};
use log::error;
use meilisearch_core::ProcessedUpdateResult;
use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tide::{Request, Response};
use actix_web::*;
use crate::error::{IntoInternalError, ResponseError, SResult};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::error::ResponseError;
use crate::Data;
fn generate_uid() -> String {
@@ -20,24 +16,40 @@ fn generate_uid() -> String {
.collect()
}
pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
}
let indexes_uids = ctx.state().db.indexes_uids();
#[get("/indexes")]
pub async fn list_indexes(
data: web::Data<Data>,
) -> Result<web::Json<Vec<IndexResponse>>> {
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let reader = data.db.main_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let mut response_body = Vec::new();
for index_uid in indexes_uids {
let index = ctx.state().db.open_index(&index_uid);
for index_uid in data.db.indexes_uids() {
let index = data.db.open_index(&index_uid);
match index {
Some(index) => {
let name = index.main.name(&reader)?.into_internal_error()?;
let created_at = index.main.created_at(&reader)?.into_internal_error()?;
let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
let name = index.main.name(&reader)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("Impossible to get the name of an index".to_string()))?;
let created_at = index.main.created_at(&reader)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("Impossible to get the create date of an index".to_string()))?;
let updated_at = index.main.updated_at(&reader)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("Impossible to get the last update date of an index".to_string()))?;
let primary_key = match index.main.schema(&reader) {
Ok(Some(schema)) => match schema.primary_key() {
@@ -63,31 +75,30 @@ pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
}
}
Ok(tide::Response::new(200).body_json(&response_body)?)
Ok(web::Json(response_body))
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
}
#[get("/indexes/{index_uid}")]
pub async fn get_index(
data: web::Data<Data>,
path: web::Path<String>,
) -> Result<web::Json<IndexResponse>> {
pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let index = data.db.open_index(path.clone())
.ok_or(ResponseError::IndexNotFound(path.clone()))?;
let index = ctx.index()?;
let reader = data.db.main_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let uid = ctx.url_param("index")?;
let name = index.main.name(&reader)?.into_internal_error()?;
let created_at = index.main.created_at(&reader)?.into_internal_error()?;
let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
let name = index.main.name(&reader)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("Impossible to get the name of an index".to_string()))?;
let created_at = index.main.created_at(&reader)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("Impossible to get the create date of an index".to_string()))?;
let updated_at = index.main.updated_at(&reader)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("Impossible to get the last update date of an index".to_string()))?;
let primary_key = match index.main.schema(&reader) {
Ok(Some(schema)) => match schema.primary_key() {
@@ -97,52 +108,34 @@ pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
_ => None,
};
let response_body = IndexResponse {
Ok(web::Json(IndexResponse {
name,
uid,
uid: path.to_string(),
created_at,
updated_at,
primary_key,
};
Ok(tide::Response::new(200).body_json(&response_body)?)
}))
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct IndexCreateRequest {
pub struct IndexCreateRequest {
name: Option<String>,
uid: Option<String>,
primary_key: Option<String>,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexCreateResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
}
pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let body = ctx
.body_json::<IndexCreateRequest>()
.await
.map_err(ResponseError::bad_request)?;
#[post("/indexes")]
pub async fn create_index(
data: web::Data<Data>,
body: web::Json<IndexCreateRequest>
) -> Result<web::Json<IndexResponse>> {
if let (None, None) = (body.name.clone(), body.uid.clone()) {
return Err(ResponseError::bad_request(
"Index creation must have an uid",
));
return Err(ResponseError::BadRequest("Index creation must have an uid".to_string()))?;
}
let db = &ctx.state().db;
let uid = match body.uid {
let uid = match body.uid.clone() {
Some(uid) => {
if uid
.chars()
@@ -150,64 +143,71 @@ pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
{
uid
} else {
return Err(ResponseError::InvalidIndexUid);
return Err(ResponseError::InvalidIndexUid)?;
}
}
None => loop {
let uid = generate_uid();
if db.open_index(&uid).is_none() {
if data.db.open_index(&uid).is_none() {
break uid;
}
},
};
let created_index = match db.create_index(&uid) {
Ok(index) => index,
Err(e) => return Err(ResponseError::create_index(e)),
};
let created_index = data.db.create_index(&uid)
.map_err(|e| ResponseError::CreateIndex(e.to_string()))?;
let mut writer = data.db.main_write_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let name = body.name.clone().unwrap_or(uid.clone());
created_index.main.put_name(&mut writer, &name)
.map_err(|e| ResponseError::Internal(e.to_string()))?;
let mut writer = db.main_write_txn()?;
let name = body.name.unwrap_or(uid.clone());
created_index.main.put_name(&mut writer, &name)?;
let created_at = created_index
.main
.created_at(&writer)?
.into_internal_error()?;
.created_at(&writer)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("".to_string()))?;
let updated_at = created_index
.main
.updated_at(&writer)?
.into_internal_error()?;
.updated_at(&writer)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("".to_string()))?;
if let Some(id) = body.primary_key.clone() {
if let Some(mut schema) = created_index.main.schema(&mut writer)? {
schema.set_primary_key(&id).map_err(ResponseError::bad_request)?;
created_index.main.put_schema(&mut writer, &schema)?;
if let Some(mut schema) = created_index.main.schema(&mut writer)
.map_err(|e| ResponseError::Internal(e.to_string()))? {
schema.set_primary_key(&id)
.map_err(|e| ResponseError::BadRequest(e.to_string()))?;
created_index.main.put_schema(&mut writer, &schema)
.map_err(|e| ResponseError::Internal(e.to_string()))?;
}
}
writer.commit()?;
writer.commit()
.map_err(|_| ResponseError::CommitTransaction)?;
let response_body = IndexCreateResponse {
Ok(web::Json(IndexResponse {
name,
uid,
created_at,
updated_at,
primary_key: body.primary_key,
};
Ok(tide::Response::new(201).body_json(&response_body)?)
primary_key: body.primary_key.clone(),
}))
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct UpdateIndexRequest {
pub struct UpdateIndexRequest {
name: Option<String>,
primary_key: Option<String>,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct UpdateIndexResponse {
pub struct UpdateIndexResponse {
name: String,
uid: String,
created_at: DateTime<Utc>,
@@ -215,49 +215,59 @@ struct UpdateIndexResponse {
primary_key: Option<String>,
}
pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[post("/indexes/{index_uid}")]
pub async fn update_index(
data: web::Data<Data>,
path: web::Path<String>,
body: web::Json<IndexCreateRequest>
) -> Result<web::Json<IndexResponse>> {
let body = ctx
.body_json::<UpdateIndexRequest>()
.await
.map_err(ResponseError::bad_request)?;
let index = data.db.open_index(path.clone())
.ok_or(ResponseError::IndexNotFound(path.clone()))?;
let index_uid = ctx.url_param("index")?;
let index = ctx.index()?;
let mut writer = data.db.main_write_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let db = &ctx.state().db;
let mut writer = db.main_write_txn()?;
if let Some(name) = body.name {
index.main.put_name(&mut writer, &name)?;
if let Some(name) = body.name.clone() {
index.main.put_name(&mut writer, &name)
.map_err(|e| ResponseError::Internal(e.to_string()))?;
}
if let Some(id) = body.primary_key.clone() {
if let Some(mut schema) = index.main.schema(&mut writer)? {
if let Some(mut schema) = index.main.schema(&mut writer)
.map_err(|e| ResponseError::Internal(e.to_string()))? {
match schema.primary_key() {
Some(_) => {
return Err(ResponseError::bad_request(
"The primary key cannot be updated",
));
return Err(ResponseError::BadRequest("The primary key cannot be updated".to_string()))?;
}
None => {
schema
.set_primary_key(&id)
.map_err(ResponseError::bad_request)?;
index.main.put_schema(&mut writer, &schema)?;
.map_err(|e| ResponseError::Internal(e.to_string()))?;
index.main.put_schema(&mut writer, &schema)
.map_err(|e| ResponseError::Internal(e.to_string()))?;
}
}
}
}
index.main.put_updated_at(&mut writer)?;
writer.commit()?;
index.main.put_updated_at(&mut writer)
.map_err(|e| ResponseError::Internal(e.to_string()))?;
writer.commit()
.map_err(|_| ResponseError::CommitTransaction)?;
let reader = db.main_read_txn()?;
let name = index.main.name(&reader)?.into_internal_error()?;
let created_at = index.main.created_at(&reader)?.into_internal_error()?;
let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
let reader = data.db.main_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let name = index.main.name(&reader)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("Impossible to get the name of an index".to_string()))?;
let created_at = index.main.created_at(&reader)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("Impossible to get the create date of an index".to_string()))?;
let updated_at = index.main.updated_at(&reader)
.map_err(|e| ResponseError::Internal(e.to_string()))?
.ok_or(ResponseError::Internal("Impossible to get the last update date of an index".to_string()))?;
let primary_key = match index.main.schema(&reader) {
Ok(Some(schema)) => match schema.primary_key() {
@@ -267,86 +277,23 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
_ => None,
};
let response_body = UpdateIndexResponse {
Ok(web::Json(IndexResponse {
name,
uid: index_uid,
uid: path.clone(),
created_at,
updated_at,
primary_key,
};
Ok(tide::Response::new(200).body_json(&response_body)?)
}))
}
pub async fn get_update_status(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
#[delete("/indexes/{index_uid}")]
pub async fn delete_index(
data: web::Data<Data>,
path: web::Path<String>,
) -> Result<HttpResponse> {
let db = &ctx.state().db;
let reader = db.update_read_txn()?;
data.db.delete_index(&path.to_string())
.map_err(|e| ResponseError::Internal(e.to_string()))?;
let update_id = ctx
.param::<u64>("update_id")
.map_err(|e| ResponseError::bad_parameter("update_id", e))?;
let index = ctx.index()?;
let status = index.update_status(&reader, update_id)?;
let response = match status {
Some(status) => tide::Response::new(200).body_json(&status).unwrap(),
None => tide::Response::new(404)
.body_json(&json!({ "message": "unknown update id" }))
.unwrap(),
};
Ok(response)
}
pub async fn get_all_updates_status(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let db = &ctx.state().db;
let reader = db.update_read_txn()?;
let index = ctx.index()?;
let response = index.all_updates_status(&reader)?;
Ok(tide::Response::new(200).body_json(&response).unwrap())
}
pub async fn delete_index(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Private)?;
let _ = ctx.index()?;
let index_uid = ctx.url_param("index")?;
ctx.state().db.delete_index(&index_uid)?;
Ok(tide::Response::new(204))
}
pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpdateResult) {
if status.error.is_some() {
return;
}
if let Some(index) = data.db.open_index(&index_uid) {
let db = &data.db;
let mut writer = match db.main_write_txn() {
Ok(writer) => writer,
Err(e) => {
error!("Impossible to get write_txn; {}", e);
return;
}
};
if let Err(e) = data.compute_stats(&mut writer, &index_uid) {
error!("Impossible to compute stats; {}", e)
}
if let Err(e) = data.set_last_update(&mut writer) {
error!("Impossible to update last_update; {}", e)
}
if let Err(e) = index.main.put_updated_at(&mut writer) {
error!("Impossible to update updated_at; {}", e)
}
if let Err(e) = writer.commit() {
error!("Impossible to get write_txn; {}", e);
}
}
HttpResponse::NoContent().await
}


@@ -1,15 +1,21 @@
use crate::Data;
use serde_json::json;
use actix_web::*;
use serde::Serialize;
#[derive(Default, Serialize)]
pub struct KeysResponse {
private: Option<String>,
public: Option<String>,
}
#[get("/keys")]
pub async fn list(
data: web::Data<Data>,
) -> Result<HttpResponse> {
let keys = &data.api_keys;
HttpResponse::Ok().json(&json!({
"private": keys.private,
"public": keys.public,
})).await
) -> web::Json<KeysResponse> {
let api_keys = data.api_keys.clone();
web::Json(KeysResponse{
private: api_keys.private,
public: api_keys.public,
})
}


@@ -1,16 +1,20 @@
use actix_web::*;
use serde::Serialize;
use log::error;
use meilisearch_core::ProcessedUpdateResult;
use crate::Data;
pub mod document;
pub mod health;
// pub mod index;
pub mod index;
pub mod key;
pub mod search;
// pub mod setting;
// pub mod stats;
pub mod stats;
// pub mod stop_words;
// pub mod synonym;
pub mod update;
#[derive(Default, Serialize)]
#[serde(rename_all = "camelCase")]
@@ -42,6 +46,41 @@ pub async fn load_css() -> HttpResponse {
.body(include_str!("../../public/bulma.min.css").to_string())
}
pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpdateResult) {
if status.error.is_some() {
return;
}
if let Some(index) = data.db.open_index(&index_uid) {
let db = &data.db;
let mut writer = match db.main_write_txn() {
Ok(writer) => writer,
Err(e) => {
error!("Impossible to get write_txn; {}", e);
return;
}
};
if let Err(e) = data.compute_stats(&mut writer, &index_uid) {
error!("Impossible to compute stats; {}", e)
}
if let Err(e) = data.set_last_update(&mut writer) {
error!("Impossible to update last_update; {}", e)
}
if let Err(e) = index.main.put_updated_at(&mut writer) {
error!("Impossible to update updated_at; {}", e)
}
if let Err(e) = writer.commit() {
error!("Impossible to get write_txn; {}", e);
}
}
}
// pub fn load_routes(app: &mut tide::Server<Data>) {
// app.at("/").get(|_| async {
// tide::Response::new(200)


@@ -10,13 +10,11 @@ use actix_web::*;
use crate::error::ResponseError;
use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit, SearchResult};
// use crate::helpers::tide::RequestExt;
// use crate::helpers::tide::ACL::*;
use crate::Data;
#[derive(Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SearchQuery {
pub struct SearchQuery {
q: String,
offset: Option<usize>,
limit: Option<usize>,
@@ -153,7 +151,7 @@ pub async fn search_with_url_query(
#[derive(Clone, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SearchMultiBody {
pub struct SearchMultiBody {
indexes: HashSet<String>,
query: String,
offset: Option<usize>,
@@ -169,7 +167,7 @@ struct SearchMultiBody {
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
struct SearchMultiBodyResponse {
pub struct SearchMultiBodyResponse {
hits: HashMap<String, Vec<SearchHit>>,
offset: usize,
hits_per_page: usize,


@@ -1,79 +1,93 @@
use std::collections::HashMap;
use actix_web::*;
use chrono::{DateTime, Utc};
use log::error;
use pretty_bytes::converter::convert;
use serde::Serialize;
use sysinfo::{NetworkExt, Pid, ProcessExt, ProcessorExt, System, SystemExt};
use tide::{Request, Response};
use sysinfo::{NetworkExt, ProcessExt, ProcessorExt, System, SystemExt};
use walkdir::WalkDir;
use crate::error::{IntoInternalError, SResult};
use crate::helpers::tide::RequestExt;
use crate::helpers::tide::ACL::*;
use crate::Data;
use crate::error::ResponseError;
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct IndexStatsResponse {
pub struct IndexStatsResponse {
number_of_documents: u64,
is_indexing: bool,
fields_frequency: HashMap<String, usize>,
}
pub async fn index_stats(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let index_uid = ctx.url_param("index")?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let update_reader = db.update_read_txn()?;
let number_of_documents = index.main.number_of_documents(&reader)?;
let fields_frequency = index.main.fields_frequency(&reader)?.unwrap_or_default();
let is_indexing = ctx
.state()
.is_indexing(&update_reader, &index_uid)?
.into_internal_error()?;
#[get("/indexes/{index_uid}/stats")]
pub async fn index_stats(
data: web::Data<Data>,
path: web::Path<String>,
) -> Result<web::Json<IndexStatsResponse>> {
let index = data.db.open_index(path.clone())
.ok_or(ResponseError::IndexNotFound(path.clone()))?;
let response = IndexStatsResponse {
let reader = data.db.main_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let number_of_documents = index.main.number_of_documents(&reader)
.map_err(|err| ResponseError::Internal(err.to_string()))?;
let fields_frequency = index.main.fields_frequency(&reader)
.map_err(|err| ResponseError::Internal(err.to_string()))?
.unwrap_or_default();
let update_reader = data.db.update_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let is_indexing = data
.is_indexing(&update_reader, &path)
.map_err(|err| ResponseError::Internal(err.to_string()))?
.unwrap_or_default();
Ok(web::Json(IndexStatsResponse {
number_of_documents,
is_indexing,
fields_frequency,
};
Ok(tide::Response::new(200).body_json(&response).unwrap())
}))
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct StatsResult {
pub struct StatsResult {
database_size: u64,
last_update: Option<DateTime<Utc>>,
indexes: HashMap<String, IndexStatsResponse>,
}
pub async fn get_stats(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
#[get("/stats")]
pub async fn get_stats(
data: web::Data<Data>,
) -> Result<web::Json<StatsResult>> {
let mut index_list = HashMap::new();
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let update_reader = db.update_read_txn()?;
let reader = data.db.main_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let update_reader = data.db.update_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let indexes_set = ctx.state().db.indexes_uids();
let indexes_set = data.db.indexes_uids();
for index_uid in indexes_set {
let index = ctx.state().db.open_index(&index_uid);
let index = data.db.open_index(&index_uid);
match index {
Some(index) => {
let number_of_documents = index.main.number_of_documents(&reader)?;
let number_of_documents = index.main.number_of_documents(&reader)
.map_err(|err| ResponseError::Internal(err.to_string()))?;
let fields_frequency = index.main.fields_frequency(&reader)?.unwrap_or_default();
let fields_frequency = index.main.fields_frequency(&reader)
.map_err(|err| ResponseError::Internal(err.to_string()))?
.unwrap_or_default();
let is_indexing = ctx
.state()
.is_indexing(&update_reader, &index_uid)?
.into_internal_error()?;
let is_indexing = data
.is_indexing(&update_reader, &index_uid)
.map_err(|err| ResponseError::Internal(err.to_string()))?
.unwrap_or_default();
let response = IndexStatsResponse {
number_of_documents,
@@ -89,46 +103,43 @@ pub async fn get_stats(ctx: Request<Data>) -> SResult<Response> {
}
}
let database_size = WalkDir::new(ctx.state().db_path.clone())
let database_size = WalkDir::new(data.db_path.clone())
.into_iter()
.filter_map(|entry| entry.ok())
.filter_map(|entry| entry.metadata().ok())
.filter(|metadata| metadata.is_file())
.fold(0, |acc, m| acc + m.len());
let last_update = ctx.state().last_update(&reader)?;
let last_update = data.last_update(&reader)
.map_err(|err| ResponseError::Internal(err.to_string()))?;
let response = StatsResult {
Ok(web::Json(StatsResult {
database_size,
last_update,
indexes: index_list,
};
Ok(tide::Response::new(200).body_json(&response).unwrap())
}))
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct VersionResponse {
pub struct VersionResponse {
commit_sha: String,
build_date: String,
pkg_version: String,
}
pub async fn get_version(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let response = VersionResponse {
#[get("/version")]
pub async fn get_version() -> web::Json<VersionResponse> {
web::Json(VersionResponse {
commit_sha: env!("VERGEN_SHA").to_string(),
build_date: env!("VERGEN_BUILD_TIMESTAMP").to_string(),
pkg_version: env!("CARGO_PKG_VERSION").to_string(),
};
Ok(tide::Response::new(200).body_json(&response).unwrap())
})
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysGlobal {
pub struct SysGlobal {
total_memory: u64,
used_memory: u64,
total_swap: u64,
@@ -152,7 +163,7 @@ impl SysGlobal {
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysProcess {
pub struct SysProcess {
memory: u64,
cpu: f32,
}
@@ -168,7 +179,7 @@ impl SysProcess {
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysInfo {
pub struct SysInfo {
memory_usage: f64,
processor_usage: Vec<f32>,
global: SysGlobal,
@@ -186,7 +197,10 @@ impl SysInfo {
}
}
pub(crate) fn report(pid: Pid) -> SysInfo {
#[get("/sys-info")]
pub async fn get_sys_info(
data: web::Data<Data>,
) -> web::Json<SysInfo> {
let mut sys = System::new();
let mut info = SysInfo::new();
@@ -200,28 +214,27 @@ pub(crate) fn report(pid: Pid) -> SysInfo {
info.global.used_memory = sys.get_used_memory();
info.global.total_swap = sys.get_total_swap();
info.global.used_swap = sys.get_used_swap();
info.global.input_data = sys.get_networks().into_iter().map(|(_, n)| n.get_received()).sum::<u64>();
info.global.output_data = sys.get_networks().into_iter().map(|(_, n)| n.get_transmitted()).sum::<u64>();
info.global.input_data = sys.get_networks()
.into_iter()
.map(|(_, n)| n.get_received())
.sum::<u64>();
info.global.output_data = sys.get_networks()
.into_iter()
.map(|(_, n)| n.get_transmitted())
.sum::<u64>();
if let Some(process) = sys.get_process(pid) {
if let Some(process) = sys.get_process(data.server_pid) {
info.process.memory = process.memory();
info.process.cpu = process.cpu_usage() * 100.0;
}
sys.refresh_all();
info
}
pub async fn get_sys_info(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let response = report(ctx.state().server_pid);
Ok(tide::Response::new(200).body_json(&response).unwrap())
web::Json(info)
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysGlobalPretty {
pub struct SysGlobalPretty {
total_memory: String,
used_memory: String,
total_swap: String,
@@ -245,7 +258,7 @@ impl SysGlobalPretty {
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysProcessPretty {
pub struct SysProcessPretty {
memory: String,
cpu: String,
}
@@ -261,7 +274,7 @@ impl SysProcessPretty {
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub(crate) struct SysInfoPretty {
pub struct SysInfoPretty {
memory_usage: String,
processor_usage: Vec<String>,
global: SysGlobalPretty,
@@ -279,7 +292,11 @@ impl SysInfoPretty {
}
}
pub(crate) fn report_pretty(pid: Pid) -> SysInfoPretty {
#[get("/sys-info/pretty")]
pub async fn get_sys_info_pretty(
data: web::Data<Data>,
) -> web::Json<SysInfoPretty> {
let mut sys = System::new();
let mut info = SysInfoPretty::new();
@@ -300,18 +317,12 @@ pub(crate) fn report_pretty(pid: Pid) -> SysInfoPretty {
info.global.input_data = convert(sys.get_networks().into_iter().map(|(_, n)| n.get_received()).sum::<u64>() as f64);
info.global.output_data = convert(sys.get_networks().into_iter().map(|(_, n)| n.get_transmitted()).sum::<u64>() as f64);
if let Some(process) = sys.get_process(pid) {
if let Some(process) = sys.get_process(data.server_pid) {
info.process.memory = convert(process.memory() as f64 * 1024.0);
info.process.cpu = format!("{:.1} %", process.cpu_usage() * 100.0);
}
sys.refresh_all();
info
}
pub async fn get_sys_info_pretty(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?;
let response = report_pretty(ctx.state().server_pid);
Ok(tide::Response::new(200).body_json(&response).unwrap())
web::Json(info)
}


@@ -0,0 +1,44 @@
use actix_web::*;
use meilisearch_core::UpdateStatus;
use crate::error::ResponseError;
use crate::Data;
#[get("/indexes/{index_uid}/updates/{update_id}")]
pub async fn get_update_status(
data: web::Data<Data>,
path: web::Path<(String, u64)>,
) -> Result<web::Json<UpdateStatus>> {
let index = data.db.open_index(path.0.clone())
.ok_or(ResponseError::IndexNotFound(path.0.clone()))?;
let reader = data.db.update_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let status = index.update_status(&reader, path.1)
.map_err(|e| ResponseError::Internal(e.to_string()))?;
match status {
Some(status) => Ok(web::Json(status)),
None => Err(ResponseError::UpdateNotFound(path.1))?
}
}
#[get("/indexes/{index_uid}/updates")]
pub async fn get_all_updates_status(
data: web::Data<Data>,
path: web::Path<String>,
) -> Result<web::Json<Vec<UpdateStatus>>> {
let index = data.db.open_index(path.clone())
.ok_or(ResponseError::IndexNotFound(path.clone()))?;
let reader = data.db.update_read_txn()
.map_err(|_| ResponseError::CreateTransaction)?;
let response = index.all_updates_status(&reader)
.map_err(|err| ResponseError::Internal(err.to_string()))?;
Ok(web::Json(response))
}
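Once these services are registered, the new endpoints are plain GET routes. As a hedged usage sketch (not part of the commit), assuming a local MeiliSearch instance on the default 127.0.0.1:7700 without an API key, and the reqwest and tokio crates:

// Illustrative client for the endpoints added in this commit; the port and
// the absence of authentication are assumptions, not guaranteed by the diff.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    for path in &["/indexes", "/keys", "/stats", "/version"] {
        let url = format!("http://127.0.0.1:7700{}", path);
        let body = reqwest::get(url).await?.text().await?;
        println!("GET {} -> {}", path, body);
    }
    Ok(())
}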