Add reports route
commit 146908f062
parent 84f701679d
@@ -55,6 +55,7 @@ use meilisearch_types::milli::update::IndexerConfig;
 use meilisearch_types::milli::{self, CboRoaringBitmapCodec, Index, RoaringBitmapCodec, BEU32};
 use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
 use panic_hook::PanicReader;
+pub use panic_hook::{Panic, Report, ReportRegistry};
 use puffin::FrameView;
 use roaring::RoaringBitmap;
 use synchronoise::SignalEvent;
@@ -1326,6 +1327,10 @@ impl IndexScheduler {
         }
     }

+    pub fn reports(&self) -> Arc<RwLock<ReportRegistry>> {
+        self.panic_reader.registry()
+    }
+
     /// Blocks the thread until the test handle asks to progress to/through this breakpoint.
     ///
     /// Two messages are sent through the channel for each breakpoint.
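The new reports() accessor hands out the shared registry as an Arc<RwLock<ReportRegistry>>. Below is a minimal sketch, not part of this commit, of consuming it from Rust code; it mirrors what list_reports in the new route file does. The iter() method and the serializable Report type are taken from those handlers, while the serde_json call is an assumption for illustration.

use index_scheduler::{IndexScheduler, Report};

// Sketch: clone the Arc, take a read lock, and serialize the current reports.
fn reports_as_json(scheduler: &IndexScheduler) -> serde_json::Result<String> {
    let registry = scheduler.reports();   // Arc<RwLock<ReportRegistry>>
    let guard = registry.read().unwrap(); // read lock; unwrap matches the route handlers
    let reports: Vec<&Report> = guard.iter().collect();
    serde_json::to_string(&reports)       // Report is Serialize, since the route returns it as JSON
}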
@@ -88,7 +88,6 @@ pub trait ErrorCode {
     }
 }

-#[allow(clippy::enum_variant_names)]
 enum ErrorType {
     Internal,
     InvalidRequest,
@@ -298,6 +297,7 @@ MissingSwapIndexes , InvalidRequest , BAD_REQUEST ;
 MissingTaskFilters , InvalidRequest , BAD_REQUEST ;
 NoSpaceLeftOnDevice , System , UNPROCESSABLE_ENTITY;
 PayloadTooLarge , InvalidRequest , PAYLOAD_TOO_LARGE ;
+ReportNotFound , InvalidRequest , NOT_FOUND ;
 TaskNotFound , InvalidRequest , NOT_FOUND ;
 TooManyOpenFiles , System , UNPROCESSABLE_ENTITY ;
 UnretrievableDocument , Internal , BAD_REQUEST ;
@@ -51,6 +51,8 @@ pub enum MeilisearchHttpError {
     DocumentFormat(#[from] DocumentFormatError),
     #[error(transparent)]
     Join(#[from] JoinError),
+    #[error("Report `{0}` not found. Either its id is incorrect, or it was deleted. To save on memory, only a limited amount of reports are kept.")]
+    ReportNotFound(uuid::Uuid),
 }

 impl ErrorCode for MeilisearchHttpError {
@@ -74,6 +76,7 @@ impl ErrorCode for MeilisearchHttpError {
             MeilisearchHttpError::FileStore(_) => Code::Internal,
             MeilisearchHttpError::DocumentFormat(e) => e.error_code(),
             MeilisearchHttpError::Join(_) => Code::Internal,
+            MeilisearchHttpError::ReportNotFound(_) => Code::ReportNotFound,
         }
     }
 }
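Taken together, the new Code::ReportNotFound row (NOT_FOUND) and the MeilisearchHttpError::ReportNotFound message mean a lookup with an unknown id should answer 404. The snippet below is a hedged illustration, not taken from the commit, of the body a client would presumably see, following Meilisearch's usual error envelope; the snake_case code string and the docs link are inferred from the error-code macro, not shown in this diff.

// Illustrative only; field values inferred from the error conventions above.
const EXAMPLE_REPORT_NOT_FOUND_BODY: &str = r#"{
  "message": "Report `00000000-0000-0000-0000-000000000000` not found. Either its id is incorrect, or it was deleted. To save on memory, only a limited amount of reports are kept.",
  "code": "report_not_found",
  "type": "invalid_request",
  "link": "https://docs.meilisearch.com/errors#report_not_found"
}"#;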
@@ -24,6 +24,7 @@ pub mod features;
 pub mod indexes;
 mod metrics;
 mod multi_search;
+mod reports;
 mod snapshot;
 mod swap_indexes;
 pub mod tasks;
@@ -40,7 +41,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::scope("/multi-search").configure(multi_search::configure))
         .service(web::scope("/swap-indexes").configure(swap_indexes::configure))
         .service(web::scope("/metrics").configure(metrics::configure))
-        .service(web::scope("/experimental-features").configure(features::configure));
+        .service(web::scope("/experimental-features").configure(features::configure))
+        .service(web::scope("/reports").configure(reports::configure));
 }

 #[derive(Debug, Serialize)]
meilisearch/src/routes/reports.rs (new file, 39 lines added)
@@ -0,0 +1,39 @@
+use actix_web::web::{self, Data};
+use actix_web::HttpResponse;
+use index_scheduler::{IndexScheduler, Report};
+use meilisearch_types::error::ResponseError;
+use meilisearch_types::keys::actions;
+
+use crate::extractors::authentication::policies::ActionPolicy;
+use crate::extractors::authentication::GuardedData;
+use crate::extractors::sequential_extractor::SeqHandler;
+
+pub fn configure(cfg: &mut web::ServiceConfig) {
+    cfg.service(web::resource("").route(web::get().to(list_reports))).service(
+        web::scope("/{report_uid}")
+            .service(web::resource("").route(web::get().to(SeqHandler(get_report)))),
+    );
+}
+
+pub async fn list_reports(
+    index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_ALL }>, Data<IndexScheduler>>,
+) -> Result<HttpResponse, ResponseError> {
+    let reports = &index_scheduler.reports();
+    let reports = &reports.read().unwrap();
+    let reports: Vec<&Report> = reports.iter().collect();
+
+    Ok(HttpResponse::Ok().json(reports))
+}
+
+pub async fn get_report(
+    index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_ALL }>, Data<IndexScheduler>>,
+    report_id: web::Path<uuid::Uuid>,
+) -> Result<HttpResponse, ResponseError> {
+    let reports = &index_scheduler.reports();
+    let reports = &reports.read().unwrap();
+    let report = reports
+        .find(*report_id)
+        .ok_or(crate::error::MeilisearchHttpError::ReportNotFound(*report_id))?;
+
+    Ok(HttpResponse::Ok().json(report))
+}
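With the /reports scope registered in the router above, the commit exposes two read-only endpoints: GET /reports (the list built by list_reports) and GET /reports/{report_uid} (a single report, or the 404 described earlier). Below is a hedged usage sketch with reqwest; the base address, port, and API key are assumptions, and any key carrying the settings action should pass the ActionPolicy guard.

use reqwest::blocking::Client;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = Client::new();
    let base = "http://localhost:7700"; // assumed default Meilisearch address
    let key = "MASTER_KEY";             // placeholder API key

    // List every report currently kept in memory.
    let all = client.get(format!("{base}/reports")).bearer_auth(key).send()?.text()?;
    println!("{all}");

    // Fetch one report by uuid; an unknown id should yield the 404 shown earlier.
    let one = client
        .get(format!("{base}/reports/00000000-0000-0000-0000-000000000000"))
        .bearer_auth(key)
        .send()?;
    println!("status: {}", one.status());
    Ok(())
}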