use std::collections::BTreeMap;

use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::batch_view::BatchView;
use meilisearch_types::batches::BatchStats;
use meilisearch_types::error::{Code, ErrorType, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::CreateApiKey;
use meilisearch_types::settings::{
    Checked, FacetingSettings, MinWordSizeTyposSetting, PaginationSettings, Settings, TypoSettings,
    Unchecked,
};
use meilisearch_types::task_view::{DetailsView, TaskView};
use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};

use self::api_key::KeyView;
use self::indexes::documents::BrowseQuery;
use self::indexes::{IndexCreateRequest, IndexStats, UpdateIndexRequest};
use self::logs::{GetLogs, LogMode, UpdateStderrLogs};
use self::open_api_utils::OpenApiAuth;
use self::tasks::AllTasks;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::milli::progress::{ProgressStepView, ProgressView};
use crate::routes::batches::AllBatches;
use crate::routes::features::RuntimeTogglableFeatures;
use crate::routes::indexes::documents::{DocumentDeletionByFilter, DocumentEditionByFunction};
use crate::routes::indexes::IndexView;
use crate::routes::multi_search::SearchResults;
use crate::routes::network::{Network, Remote};
use crate::routes::swap_indexes::SwapIndexesPayload;
use crate::search::{
    FederatedSearch, FederatedSearchResult, Federation, FederationOptions, MergeFacets,
    SearchQueryWithIndex, SearchResultWithIndex, SimilarQuery, SimilarResult,
};
use crate::search_queue::SearchQueue;
use crate::Opt;

const PAGINATION_DEFAULT_LIMIT: usize = 20;
const PAGINATION_DEFAULT_LIMIT_FN: fn() -> usize = || 20;
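// Note added for clarity (not in the upstream file): the function form of the default
// limit above is presumably there so it can be referenced where an attribute expects a
// function path as its default value (for example a `default = PAGINATION_DEFAULT_LIMIT_FN`
// style attribute), while the plain constant covers every place a `usize` literal works.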

mod api_key;
pub mod batches;
mod dump;
pub mod features;
pub mod indexes;
mod logs;
mod metrics;
mod multi_search;
mod multi_search_analytics;
pub mod network;
mod open_api_utils;
mod snapshot;
mod swap_indexes;
pub mod tasks;

#[derive(OpenApi)]
#[openapi(
    nest(
        (path = "/tasks", api = tasks::TaskApi),
        (path = "/batches", api = batches::BatchesApi),
        (path = "/indexes", api = indexes::IndexesApi),
        // We must stop the search path here because the rest must be configured by each route individually
        (path = "/indexes", api = indexes::search::SearchApi),
        (path = "/snapshots", api = snapshot::SnapshotApi),
        (path = "/dumps", api = dump::DumpApi),
        (path = "/keys", api = api_key::ApiKeyApi),
        (path = "/metrics", api = metrics::MetricApi),
        (path = "/logs", api = logs::LogsApi),
        (path = "/multi-search", api = multi_search::MultiSearchApi),
        (path = "/swap-indexes", api = swap_indexes::SwapIndexesApi),
        (path = "/experimental-features", api = features::ExperimentalFeaturesApi),
        (path = "/network", api = network::NetworkApi),
    ),
    paths(get_health, get_version, get_stats),
    tags(
        (name = "Stats", description = "Stats gives extended information and metrics about indexes and the Meilisearch database."),
    ),
    modifiers(&OpenApiAuth),
    servers((
        url = "/",
        description = "Local server",
    )),
    components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote))
)]
pub struct MeilisearchApi;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(web::scope("/tasks").configure(tasks::configure))
        .service(web::scope("/batches").configure(batches::configure))
        .service(web::resource("/health").route(web::get().to(get_health)))
        .service(web::scope("/logs").configure(logs::configure))
        .service(web::scope("/keys").configure(api_key::configure))
        .service(web::scope("/dumps").configure(dump::configure))
        .service(web::scope("/snapshots").configure(snapshot::configure))
        .service(web::resource("/stats").route(web::get().to(get_stats)))
        .service(web::resource("/version").route(web::get().to(get_version)))
        .service(web::scope("/indexes").configure(indexes::configure))
        .service(web::scope("/multi-search").configure(multi_search::configure))
        .service(web::scope("/swap-indexes").configure(swap_indexes::configure))
        .service(web::scope("/metrics").configure(metrics::configure))
        .service(web::scope("/experimental-features").configure(features::configure))
        .service(web::scope("/network").configure(network::configure));

    #[cfg(feature = "swagger")]
    {
        use utoipa_scalar::{Scalar, Servable as ScalarServable};
        let openapi = MeilisearchApi::openapi();
        cfg.service(Scalar::with_url("/scalar", openapi.clone()));
    }
}
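
// Minimal wiring sketch, added for illustration (assumption, not part of the upstream
// file): `configure` above is meant to be handed to an actix-web `App`, roughly as
// `App::new().configure(configure)`, with the shared `Data<IndexScheduler>`,
// `Data<AuthController>` and `Data<SearchQueue>` registered on the same `App` so the
// extractors used by the handlers below can resolve them. The concrete construction
// lives in the crate's server setup, not in this module.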

pub fn get_task_id(req: &HttpRequest, opt: &Opt) -> Result<Option<TaskId>, ResponseError> {
    if !opt.experimental_replication_parameters {
        return Ok(None);
    }
    let task_id = req
        .headers()
        .get("TaskId")
        .map(|header| {
            header.to_str().map_err(|e| {
                ResponseError::from_msg(
                    format!("TaskId is not a valid utf-8 string: {e}"),
                    Code::BadRequest,
                )
            })
        })
        .transpose()?
        .map(|s| {
            s.parse::<TaskId>().map_err(|e| {
                ResponseError::from_msg(
                    format!(
                        "Could not parse the TaskId as a {}: {e}",
                        std::any::type_name::<TaskId>(),
                    ),
                    Code::BadRequest,
                )
            })
        })
        .transpose()?;
    Ok(task_id)
}
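
// Usage sketch, added for illustration (assumption, not in the upstream file): when the
// instance runs with the experimental replication parameters enabled, a client can pin
// the uid of the task an enqueuing route will create by sending a header such as
// `TaskId: 42` on the HTTP request; without that flag the header is ignored and `None`
// is returned.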

pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
    if !opt.experimental_replication_parameters {
        return Ok(false);
    }
    Ok(req
        .headers()
        .get("DryRun")
        .map(|header| {
            header.to_str().map_err(|e| {
                ResponseError::from_msg(
                    format!("DryRun is not a valid utf-8 string: {e}"),
                    Code::BadRequest,
                )
            })
        })
        .transpose()?
        .map_or(false, |s| s.to_lowercase() == "true"))
}
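
// Companion note to the above, added for illustration (assumption, not in the upstream
// file): with the same experimental flag enabled, sending `DryRun: true` asks the route
// to go through the motions without persisting the task; any other value, or a missing
// header, is treated as `false`.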

#[derive(Debug, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
    /// The task unique identifier.
    #[schema(value_type = u32)]
    task_uid: TaskId,
    /// The index affected by this task. May be `null` if the task is not linked to any index.
    index_uid: Option<String>,
    /// The status of the task.
    status: Status,
    /// The type of the task.
    #[serde(rename = "type")]
    kind: Kind,
    /// The date on which the task was enqueued.
    #[serde(serialize_with = "time::serde::rfc3339::serialize")]
    enqueued_at: OffsetDateTime,
}

impl From<Task> for SummarizedTaskView {
    fn from(task: Task) -> Self {
        SummarizedTaskView {
            task_uid: task.uid,
            index_uid: task.index_uid().map(|s| s.to_string()),
            status: task.status,
            kind: task.kind.as_kind(),
            enqueued_at: task.enqueued_at,
        }
    }
}
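
// Serialized to JSON, a `SummarizedTaskView` looks roughly like this (illustrative
// values, not taken from the upstream file):
// { "taskUid": 0, "indexUid": "movies", "status": "enqueued",
//   "type": "documentAdditionOrUpdate", "enqueuedAt": "2024-01-01T00:00:00Z" }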

pub struct Pagination {
    pub offset: usize,
    pub limit: usize,
}

#[derive(Debug, Clone, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct PaginationView<T> {
    pub results: Vec<T>,
    pub offset: usize,
    pub limit: usize,
    pub total: usize,
}

impl Pagination {
    /// Given the full data to paginate, returns the selected section.
    pub fn auto_paginate_sized<T>(
        self,
        content: impl IntoIterator<Item = T> + ExactSizeIterator,
    ) -> PaginationView<T>
    where
        T: Serialize,
    {
        let total = content.len();
        let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
        self.format_with(total, content)
    }

    /// Given an iterator and the total number of elements, returns the selected section.
    pub fn auto_paginate_unsized<T>(
        self,
        total: usize,
        content: impl IntoIterator<Item = T>,
    ) -> PaginationView<T>
    where
        T: Serialize,
    {
        let content: Vec<_> = content.into_iter().skip(self.offset).take(self.limit).collect();
        self.format_with(total, content)
    }

    /// Given the data already paginated plus the total number of elements, wraps
    /// everything in a [PaginationView].
    pub fn format_with<T>(self, total: usize, results: Vec<T>) -> PaginationView<T>
    where
        T: Serialize,
    {
        PaginationView { results, offset: self.offset, limit: self.limit, total }
    }
}

impl<T> PaginationView<T> {
    pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
        Self { offset, limit, results, total }
    }
}
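
// A minimal usage sketch, added for illustration (not in the upstream file): paginating an
// in-memory collection with `auto_paginate_sized` keeps `total` at the full length while
// `results` only holds the requested window.
#[cfg(test)]
mod pagination_example {
    use super::Pagination;

    #[test]
    fn auto_paginate_sized_returns_the_requested_window() {
        // Offset 2 with limit 3 over 0..10 selects elements 2, 3 and 4.
        let page = Pagination { offset: 2, limit: 3 }.auto_paginate_sized(0..10usize);
        assert_eq!(page.results, vec![2, 3, 4]);
        assert_eq!((page.offset, page.limit, page.total), (2, 3, 10));
    }
}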

#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(clippy::large_enum_variant)]
#[serde(tag = "name")]
pub enum UpdateType {
    ClearAll,
    Customs,
    DocumentsAddition {
        #[serde(skip_serializing_if = "Option::is_none")]
        number: Option<usize>,
    },
    DocumentsPartial {
        #[serde(skip_serializing_if = "Option::is_none")]
        number: Option<usize>,
    },
    DocumentsDeletion {
        #[serde(skip_serializing_if = "Option::is_none")]
        number: Option<usize>,
    },
    Settings {
        settings: Settings<Unchecked>,
    },
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ProcessedUpdateResult {
    pub update_id: u64,
    #[serde(rename = "type")]
    pub update_type: UpdateType,
    pub duration: f64, // in seconds
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub processed_at: OffsetDateTime,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct FailedUpdateResult {
    pub update_id: u64,
    #[serde(rename = "type")]
    pub update_type: UpdateType,
    pub error: ResponseError,
    pub duration: f64, // in seconds
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(with = "time::serde::rfc3339")]
    pub processed_at: OffsetDateTime,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EnqueuedUpdateResult {
    pub update_id: u64,
    #[serde(rename = "type")]
    pub update_type: UpdateType,
    #[serde(with = "time::serde::rfc3339")]
    pub enqueued_at: OffsetDateTime,
    #[serde(skip_serializing_if = "Option::is_none", with = "time::serde::rfc3339::option")]
    pub started_processing_at: Option<OffsetDateTime>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", tag = "status")]
pub enum UpdateStatusResponse {
    Enqueued {
        #[serde(flatten)]
        content: EnqueuedUpdateResult,
    },
    Processing {
        #[serde(flatten)]
        content: EnqueuedUpdateResult,
    },
    Failed {
        #[serde(flatten)]
        content: FailedUpdateResult,
    },
    Processed {
        #[serde(flatten)]
        content: ProcessedUpdateResult,
    },
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct IndexUpdateResponse {
    pub update_id: u64,
}

impl IndexUpdateResponse {
    pub fn with_id(update_id: u64) -> Self {
        Self { update_id }
    }
}

/// Always returns a 200 with:
/// ```json
/// {
///     "status": "Meilisearch is running"
/// }
/// ```
pub async fn running() -> HttpResponse {
    HttpResponse::Ok().json(serde_json::json!({ "status": "Meilisearch is running" }))
}

#[derive(Serialize, Debug, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct Stats {
    /// The disk space used by the database, in bytes.
    pub database_size: u64,
    /// The portion of the database that is actually in use, in bytes.
    pub used_database_size: u64,
    /// The date of the last update, in RFC 3339 format. Can be `null` if no update has ever been processed.
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    pub last_update: Option<OffsetDateTime>,
    /// The stats of every individual index your API key lets you access.
    #[schema(value_type = HashMap<String, indexes::IndexStats>)]
    pub indexes: BTreeMap<String, indexes::IndexStats>,
}

/// Get stats of all indexes.
///
/// Returns the global database stats along with the stats of every index the API key is allowed to access.
#[utoipa::path(
    get,
    path = "/stats",
    tag = "Stats",
    security(("Bearer" = ["stats.get", "stats.*", "*"])),
    responses(
        (status = 200, description = "The stats of the instance", body = Stats, content_type = "application/json", example = json!(
            {
                "databaseSize": 567,
                "usedDatabaseSize": 456,
                "lastUpdate": "2019-11-20T09:40:33.711324Z",
                "indexes": {
                    "movies": {
                        "numberOfDocuments": 10,
                        "rawDocumentDbSize": 100,
                        "maxDocumentSize": 16,
                        "avgDocumentSize": 10,
                        "isIndexing": true,
                        "fieldDistribution": {
                            "genre": 10,
                            "author": 9
                        }
                    }
                }
            }
        )),
        (status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
            {
                "message": "The Authorization header is missing. It must use the bearer authorization method.",
                "code": "missing_authorization_header",
                "type": "auth",
                "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
            }
        )),
    )
)]
async fn get_stats(
    index_scheduler: GuardedData<ActionPolicy<{ actions::STATS_GET }>, Data<IndexScheduler>>,
    auth_controller: GuardedData<ActionPolicy<{ actions::STATS_GET }>, Data<AuthController>>,
) -> Result<HttpResponse, ResponseError> {
    let filters = index_scheduler.filters();

    let stats = create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), filters)?;

    debug!(returns = ?stats, "Get stats");
    Ok(HttpResponse::Ok().json(stats))
}

pub fn create_all_stats(
    index_scheduler: Data<IndexScheduler>,
    auth_controller: Data<AuthController>,
    filters: &meilisearch_auth::AuthFilter,
) -> Result<Stats, ResponseError> {
    let mut last_task: Option<OffsetDateTime> = None;
    let mut indexes = BTreeMap::new();
    let mut database_size = 0;
    let mut used_database_size = 0;

    for index_uid in index_scheduler.index_names()? {
        // Accumulate the size of all indexes, even unauthorized ones, so
        // as to return a database_size representative of the correct database size on disk.
        // See <https://github.com/meilisearch/meilisearch/pull/3541#discussion_r1126747643> for context.
        let stats = index_scheduler.index_stats(&index_uid)?;
        database_size += stats.inner_stats.database_size;
        used_database_size += stats.inner_stats.used_database_size;

        if !filters.is_index_authorized(&index_uid) {
            continue;
        }

        last_task = last_task.map_or(Some(stats.inner_stats.updated_at), |last| {
            Some(last.max(stats.inner_stats.updated_at))
        });
        indexes.insert(index_uid.to_string(), stats.into());
    }

    database_size += index_scheduler.size()?;
    used_database_size += index_scheduler.used_size()?;
    database_size += auth_controller.size()?;
    used_database_size += auth_controller.used_size()?;

    let stats = Stats { database_size, used_database_size, last_update: last_task, indexes };
    Ok(stats)
}

#[derive(Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
struct VersionResponse {
    /// The commit used to compile this build of Meilisearch.
    commit_sha: String,
    /// The date of this build.
    commit_date: String,
    /// The version of Meilisearch.
    pkg_version: String,
}

/// Get version
///
/// Current version of Meilisearch.
#[utoipa::path(
    get,
    path = "/version",
    tag = "Version",
    security(("Bearer" = ["version", "*"])),
    responses(
        (status = 200, description = "The version of the instance", body = VersionResponse, content_type = "application/json", example = json!(
            {
                "commitSha": "b46889b5f0f2f8b91438a08a358ba8f05fc09fc1",
                "commitDate": "2021-07-08",
                "pkgVersion": "0.23.0"
            }
        )),
        (status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
            {
                "message": "The Authorization header is missing. It must use the bearer authorization method.",
                "code": "missing_authorization_header",
                "type": "auth",
                "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
            }
        )),
    )
)]
async fn get_version(
    _index_scheduler: GuardedData<ActionPolicy<{ actions::VERSION }>, Data<IndexScheduler>>,
) -> HttpResponse {
    let build_info = build_info::BuildInfo::from_build();

    HttpResponse::Ok().json(VersionResponse {
        commit_sha: build_info.commit_sha1.unwrap_or("unknown").to_string(),
        commit_date: build_info
            .commit_timestamp
            .and_then(|commit_timestamp| {
                commit_timestamp
                    .format(&time::format_description::well_known::Iso8601::DEFAULT)
                    .ok()
            })
            .unwrap_or("unknown".into()),
        pkg_version: env!("CARGO_PKG_VERSION").to_string(),
    })
}

#[derive(Default, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
struct HealthResponse {
    /// The status of the instance.
    status: HealthStatus,
}

#[derive(Default, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
enum HealthStatus {
    #[default]
    Available,
}

/// Get Health
///
/// The health check endpoint enables you to periodically test the health of your Meilisearch instance.
#[utoipa::path(
    get,
    path = "/health",
    tag = "Health",
    responses(
        (status = 200, description = "Instance is healthy", body = HealthResponse, content_type = "application/json", example = json!(
            {
                "status": "available"
            }
        )),
    )
)]
pub async fn get_health(
    index_scheduler: Data<IndexScheduler>,
    auth_controller: Data<AuthController>,
    search_queue: Data<SearchQueue>,
) -> Result<HttpResponse, ResponseError> {
    search_queue.health().unwrap();
    index_scheduler.health().unwrap();
    auth_controller.health().unwrap();

    Ok(HttpResponse::Ok().json(HealthResponse::default()))
}