use std::collections::BTreeMap;
use std::str::FromStr;

use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::{IndexScheduler, Query};
use log::debug;
use meilisearch_types::error::{ResponseError, TakeErrorMessage};
use meilisearch_types::settings::{Settings, Unchecked};
use meilisearch_types::star_or::StarOr;
use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;

use self::indexes::IndexStats;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;

mod api_key;
mod dump;
pub mod indexes;
mod swap_indexes;
pub mod tasks;

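/// Registers every top-level route of the HTTP API on the given actix-web
/// service config.
///
/// Illustrative sketch of how such a function is typically mounted in the
/// server setup (the real wiring lives elsewhere in the crate):
///
/// ```ignore
/// use actix_web::{App, HttpServer};
///
/// HttpServer::new(|| App::new().configure(configure))
///     .bind(("127.0.0.1", 7700))?
///     .run()
///     .await
/// ```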
pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::scope("/tasks").configure(tasks::configure))
        .service(web::resource("/health").route(web::get().to(get_health)))
        .service(web::scope("/keys").configure(api_key::configure))
        .service(web::scope("/dumps").configure(dump::configure))
        .service(web::resource("/stats").route(web::get().to(get_stats)))
        .service(web::resource("/version").route(web::get().to(get_version)))
        .service(web::scope("/indexes").configure(indexes::configure))
        .service(web::scope("/swap-indexes").configure(swap_indexes::configure));
}

/// Extracts the raw values from the `StarOr` types and
/// returns `None` if a `StarOr::Star` is encountered.
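///
/// Illustrative example (a sketch; `StarOr` comes from `meilisearch_types::star_or`):
///
/// ```ignore
/// // Only concrete values: they are collected as usual.
/// let values = vec![StarOr::Other("movies"), StarOr::Other("products")];
/// assert_eq!(fold_star_or::<_, Vec<_>>(values), Some(vec!["movies", "products"]));
///
/// // A single `*` collapses the whole result to `None`.
/// let values = vec![StarOr::Other("movies"), StarOr::Star];
/// assert_eq!(fold_star_or::<_, Vec<_>>(values), None);
/// ```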
pub fn fold_star_or<T, O>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<O>
where
    O: FromIterator<T>,
{
    content
        .into_iter()
        .map(|value| match value {
            StarOr::Star => None,
            StarOr::Other(val) => Some(val),
        })
        .collect()
}

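/// Parses `input` into a `T`, wrapping the parsed value in `Some` and leaving the
/// parse error untouched; the `take_error_message` variant below does the same but
/// wraps the error in `TakeErrorMessage`.
///
/// Illustrative example (a sketch):
///
/// ```ignore
/// assert_eq!(from_string_to_option::<u32, _>("42").unwrap(), Some(42));
/// assert!(from_string_to_option::<u32, _>("not a number").is_err());
/// ```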
pub fn from_string_to_option<T, E>(input: &str) -> Result<Option<T>, E>
where
    T: FromStr<Err = E>,
{
    Ok(Some(input.parse()?))
}

pub fn from_string_to_option_take_error_message<T, E>(
    input: &str,
) -> Result<Option<T>, TakeErrorMessage<E>>
where
    T: FromStr<Err = E>,
{
    Ok(Some(input.parse().map_err(TakeErrorMessage)?))
}

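/// Default `limit` for the paginated routes. It is a `fn()` rather than a plain
/// constant so that it can be used directly as a default-value provider (e.g. in a
/// `#[serde(default = "...")]`-style attribute) by the query structs of the sub-modules.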
const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;

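/// Summarized view of a task, returned by the routes that enqueue one.
///
/// Serializes to a body shaped roughly like this (values are illustrative):
///
/// ```json
/// {
///   "taskUid": 147,
///   "indexUid": "movies",
///   "status": "enqueued",
///   "type": "documentAdditionOrUpdate",
///   "enqueuedAt": "2023-01-11T12:00:00Z"
/// }
/// ```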
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
    task_uid: TaskId,
    index_uid: Option<String>,
    status: Status,
    #[serde(rename = "type")]
    kind: Kind,
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
}

impl From<Task> for SummarizedTaskView {
    fn from(task: Task) -> Self {
        SummarizedTaskView {
            task_uid: task.uid,
            index_uid: task.index_uid().map(|s| s.to_string()),
            status: task.status,
            kind: task.kind.as_kind(),
            enqueued_at: task.enqueued_at,
        }
    }
}

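/// The `offset`/`limit` pair used by the routes to paginate collections of results.
///
/// Illustrative example of how it slices a collection (a sketch):
///
/// ```ignore
/// let pagination = Pagination { offset: 2, limit: 2 };
/// let page = pagination.auto_paginate_sized(vec!["a", "b", "c", "d", "e"].into_iter());
/// assert_eq!(page.results, vec!["c", "d"]);
/// assert_eq!(page.total, 5);
/// ```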
pub struct Pagination {
    pub offset: usize,
    pub limit: usize,
}

#[derive(Debug, Clone, Serialize)]
pub struct PaginationView<T> {
    pub results: Vec<T>,
    pub offset: usize,
    pub limit: usize,
    pub total: usize,
}

impl Pagination {
    /// Given the full data to paginate, returns the selected section.
    pub fn auto_paginate_sized<T>(
        self,
        content: impl IntoIterator<Item = T> + ExactSizeIterator,
    ) -> PaginationView<T>
    where
        T: Serialize,
    {
        let total = content.len();
        let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
        self.format_with(total, content)
    }

    /// Given an iterator and the total number of elements, returns the selected section.
    pub fn auto_paginate_unsized<T>(
        self,
        total: usize,
        content: impl IntoIterator<Item = T>,
    ) -> PaginationView<T>
    where
        T: Serialize,
    {
        let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
        self.format_with(total, content)
    }

    /// Given the already paginated data and the total number of elements, stores
    /// everything in a [PaginationView].
    pub fn format_with<T>(self, total: usize, results: Vec<T>) -> PaginationView<T>
    where
        T: Serialize,
    {
        PaginationView { results, offset: self.offset, limit: self.limit, total }
    }
}

impl<T> PaginationView<T> {
    pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
        Self { offset, limit, results, total }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(clippy::large_enum_variant)]
#[serde(tag = "name")]
pub enum UpdateType {
    ClearAll,
    Customs,
    DocumentsAddition {
        #[serde(skip_serializing_if = "Option::is_none")]
        number: Option<usize>,
    },
    DocumentsPartial {
        #[serde(skip_serializing_if = "Option::is_none")]
        number: Option<usize>,
    },
    DocumentsDeletion {
        #[serde(skip_serializing_if = "Option::is_none")]
        number: Option<usize>,
    },
    Settings {
        settings: Settings<Unchecked>,
    },
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ProcessedUpdateResult {
    pub update_id: u64,
    #[serde(rename = "type")]
    pub update_type: UpdateType,
    pub duration: f64, // in seconds
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub processed_at: OffsetDateTime,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FailedUpdateResult {
    pub update_id: u64,
    #[serde(rename = "type")]
    pub update_type: UpdateType,
    pub error: ResponseError,
    pub duration: f64, // in seconds
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub processed_at: OffsetDateTime,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EnqueuedUpdateResult {
    pub update_id: u64,
    #[serde(rename = "type")]
    pub update_type: UpdateType,
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(skip_serializing_if = "Option::is_none", with = "time::serde::rfc3339::option")]
    pub started_processing_at: Option<OffsetDateTime>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", tag = "status")]
pub enum UpdateStatusResponse {
    Enqueued {
        #[serde(flatten)]
        content: EnqueuedUpdateResult,
    },
    Processing {
        #[serde(flatten)]
        content: EnqueuedUpdateResult,
    },
    Failed {
        #[serde(flatten)]
        content: FailedUpdateResult,
    },
    Processed {
        #[serde(flatten)]
        content: ProcessedUpdateResult,
    },
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexUpdateResponse {
    pub update_id: u64,
}

impl IndexUpdateResponse {
    pub fn with_id(update_id: u64) -> Self {
        Self { update_id }
    }
}

/// Always returns a 200 with:
/// ```json
/// {
///     "status": "Meilisearch is running"
/// }
/// ```
pub async fn running() -> HttpResponse {
    HttpResponse::Ok().json(serde_json::json!({ "status": "Meilisearch is running" }))
}

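/// Global stats aggregated across the authorized indexes, as returned by the
/// `GET /stats` route.
///
/// Serializes to a body shaped roughly like this (values are illustrative):
///
/// ```json
/// {
///   "databaseSize": 447819776,
///   "lastUpdate": "2023-01-11T12:00:00Z",
///   "indexes": {
///     "movies": {
///       "numberOfDocuments": 19654,
///       "isIndexing": false,
///       "fieldDistribution": { "title": 19654, "overview": 19654 }
///     }
///   }
/// }
/// ```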
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Stats {
    pub database_size: u64,
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    pub last_update: Option<OffsetDateTime>,
    pub indexes: BTreeMap<String, IndexStats>,
}

async fn get_stats(
    index_scheduler: GuardedData<ActionPolicy<{ actions::STATS_GET }>, Data<IndexScheduler>>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": false }), Some(&req));
    let search_rules = &index_scheduler.filters().search_rules;

    let stats = create_all_stats((*index_scheduler).clone(), search_rules)?;

    debug!("returns: {:?}", stats);
    Ok(HttpResponse::Ok().json(stats))
}

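/// Aggregates the global [Stats]: the summed on-disk size of every authorized index,
/// the most recent update time seen across them, and the per-index [IndexStats],
/// skipping any index that the given search rules do not authorize.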
pub fn create_all_stats(
    index_scheduler: Data<IndexScheduler>,
    search_rules: &meilisearch_auth::SearchRules,
) -> Result<Stats, ResponseError> {
    let mut last_task: Option<OffsetDateTime> = None;
    let mut indexes = BTreeMap::new();
    let mut database_size = 0;
    // Only the first processing task is needed to know which index is currently indexing.
    let processing_task = index_scheduler.get_tasks_from_authorized_indexes(
        Query { statuses: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() },
        search_rules.authorized_indexes(),
    )?;
    let processing_index = processing_task.first().and_then(|task| task.index_uid());
    for (name, index) in index_scheduler.indexes()? {
        if !search_rules.is_index_authorized(&name) {
            continue;
        }

        database_size += index.on_disk_size()?;

        let rtxn = index.read_txn()?;
        let stats = IndexStats {
            number_of_documents: index.number_of_documents(&rtxn)?,
            is_indexing: processing_index.map_or(false, |index_name| name == index_name),
            field_distribution: index.field_distribution(&rtxn)?,
        };

        // Keep the most recent update time seen across all the visited indexes.
        let updated_at = index.updated_at(&rtxn)?;
        last_task = last_task.map_or(Some(updated_at), |last| Some(last.max(updated_at)));

        indexes.insert(name, stats);
    }
    let stats = Stats { database_size, last_update: last_task, indexes };
    Ok(stats)
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct VersionResponse {
    commit_sha: String,
    commit_date: String,
    pkg_version: String,
}

async fn get_version(
    _index_scheduler: GuardedData<ActionPolicy<{ actions::VERSION }>, Data<IndexScheduler>>,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> HttpResponse {
    analytics.publish("Version Seen".to_string(), json!(null), Some(&req));

    // These are injected at compile time (e.g. by a vergen build script) and fall
    // back to "unknown" when they are not set.
    let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
    let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");

    HttpResponse::Ok().json(VersionResponse {
        commit_sha: commit_sha.to_string(),
        commit_date: commit_date.to_string(),
        pkg_version: env!("CARGO_PKG_VERSION").to_string(),
    })
}

#[derive(Serialize)]
struct KeysResponse {
    private: Option<String>,
    public: Option<String>,
}

pub async fn get_health(
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
    analytics.health_seen(&req);

    Ok(HttpResponse::Ok().json(serde_json::json!({ "status": "available" })))
}