4126: Make the experimental route /metrics activatable via HTTP r=dureuill a=braddotcoffee

# Pull Request

## Related issue
Closes #4086

## What does this PR do?
- [x] Make `/metrics` available via HTTP as described in #4086 
- [x] Users can still launch Meilisearch with the `--experimental-enable-metrics` flag.
- [x] If the `--experimental-enable-metrics` flag is set, a call to the `GET /experimental-features` route right after launch shows `"metrics": true`, even if the user has not yet called the `PATCH /experimental-features` route.
- [x] Even if the `--experimental-enable-metrics` flag is present at launch, calling the `PATCH /experimental-features` route with `"metrics": false` disables the experimental feature (see the sketch after this list).
- [x] Update the spec
    - I was unable to find docs in this repository to update about the `/experimental-features` endpoint. I'll happily update if you point me in the right direction!
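
For illustration, the end-to-end behaviour can be exercised with the test harness this PR touches. A minimal sketch (assuming the `default_settings` helper, `Server::new_with_options`, `set_features`, and the new `get_metrics` wrapper from `tests/common`; status codes are compared with `as_u16()` here rather than snapshots, purely to keep the sketch short — this is not the exact test added below):

```rust
use meilisearch::Opt;
use tempfile::TempDir;

use crate::common::{default_settings, Server};
use crate::json;

#[actix_rt::test]
async fn metrics_can_be_toggled_over_http() {
    // Launching with the instance flag: /metrics answers right away.
    let dir = TempDir::new().unwrap();
    let opts = Opt { experimental_enable_metrics: true, ..default_settings(dir.path()) };
    let server = Server::new_with_options(opts).await.unwrap();
    let (_, code) = server.get_metrics().await;
    assert_eq!(code.as_u16(), 200);

    // Disabling the feature over HTTP turns /metrics off without a restart.
    let (_, code) = server.set_features(json!({ "metrics": false })).await;
    assert_eq!(code.as_u16(), 200);
    let (_, code) = server.get_metrics().await;
    assert_eq!(code.as_u16(), 400);

    // Re-enabling over HTTP brings it back.
    let (_, code) = server.set_features(json!({ "metrics": true })).await;
    assert_eq!(code.as_u16(), 200);
    let (_, code) = server.get_metrics().await;
    assert_eq!(code.as_u16(), 200);
}
```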

## PR checklist
Please check if your PR fulfills the following requirements:
- [x] Does this PR fix an existing issue, or have you listed the changes applied in the PR description (and why they are needed)?
- [x] Have you read the contributing guidelines?
- [x] Have you made sure that the title is accurate and descriptive of the changes?

Co-authored-by: bwbonanno <bradfordbonanno@gmail.com>
Co-authored-by: Louis Dureuil <louis@meilisearch.com>
Commit eae9eab181 by meili-bors[bot], 2023-10-23 08:51:37 +00:00, committed by GitHub.
14 changed files with 156 additions and 56 deletions.


@@ -896,7 +896,7 @@ impl IndexScheduler {
         })?;

         // 4. Dump experimental feature settings
-        let features = self.features()?.runtime_features();
+        let features = self.features().runtime_features();
         dump.create_experimental_features(features)?;

         let dump_uid = started_at.format(format_description!(


@@ -1,6 +1,8 @@
+use std::sync::{Arc, RwLock};
+
 use meilisearch_types::features::{InstanceTogglableFeatures, RuntimeTogglableFeatures};
 use meilisearch_types::heed::types::{SerdeJson, Str};
-use meilisearch_types::heed::{Database, Env, RoTxn, RwTxn};
+use meilisearch_types::heed::{Database, Env, RwTxn};

 use crate::error::FeatureNotEnabledError;
 use crate::Result;
@@ -9,20 +11,19 @@ const EXPERIMENTAL_FEATURES: &str = "experimental-features";

 #[derive(Clone)]
 pub(crate) struct FeatureData {
-    runtime: Database<Str, SerdeJson<RuntimeTogglableFeatures>>,
-    instance: InstanceTogglableFeatures,
+    persisted: Database<Str, SerdeJson<RuntimeTogglableFeatures>>,
+    runtime: Arc<RwLock<RuntimeTogglableFeatures>>,
 }

 #[derive(Debug, Clone, Copy)]
 pub struct RoFeatures {
     runtime: RuntimeTogglableFeatures,
-    instance: InstanceTogglableFeatures,
 }

 impl RoFeatures {
-    fn new(txn: RoTxn<'_>, data: &FeatureData) -> Result<Self> {
-        let runtime = data.runtime_features(txn)?;
-        Ok(Self { runtime, instance: data.instance })
+    fn new(data: &FeatureData) -> Self {
+        let runtime = data.runtime_features();
+        Self { runtime }
     }

     pub fn runtime_features(&self) -> RuntimeTogglableFeatures {
@@ -43,7 +44,7 @@ impl RoFeatures {
     }

     pub fn check_metrics(&self) -> Result<()> {
-        if self.instance.metrics {
+        if self.runtime.metrics {
             Ok(())
         } else {
             Err(FeatureNotEnabledError {
@@ -85,10 +86,18 @@ impl RoFeatures {
 impl FeatureData {
     pub fn new(env: &Env, instance_features: InstanceTogglableFeatures) -> Result<Self> {
         let mut wtxn = env.write_txn()?;
-        let runtime_features = env.create_database(&mut wtxn, Some(EXPERIMENTAL_FEATURES))?;
+        let runtime_features_db = env.create_database(&mut wtxn, Some(EXPERIMENTAL_FEATURES))?;
         wtxn.commit()?;

-        Ok(Self { runtime: runtime_features, instance: instance_features })
+        let txn = env.read_txn()?;
+        let persisted_features: RuntimeTogglableFeatures =
+            runtime_features_db.get(&txn, EXPERIMENTAL_FEATURES)?.unwrap_or_default();
+        let runtime = Arc::new(RwLock::new(RuntimeTogglableFeatures {
+            metrics: instance_features.metrics || persisted_features.metrics,
+            ..persisted_features
+        }));
+
+        Ok(Self { persisted: runtime_features_db, runtime })
     }

     pub fn put_runtime_features(
@@ -96,16 +105,25 @@ impl FeatureData {
         mut wtxn: RwTxn,
         features: RuntimeTogglableFeatures,
     ) -> Result<()> {
-        self.runtime.put(&mut wtxn, EXPERIMENTAL_FEATURES, &features)?;
+        self.persisted.put(&mut wtxn, EXPERIMENTAL_FEATURES, &features)?;
         wtxn.commit()?;
+
+        // safe to unwrap, the lock will only fail if:
+        // 1. requested by the same thread concurrently -> it is called and released in methods that don't call each other
+        // 2. there's a panic while the thread is held -> it is only used for an assignment here.
+        let mut toggled_features = self.runtime.write().unwrap();
+        *toggled_features = features;
+
         Ok(())
     }

-    fn runtime_features(&self, txn: RoTxn) -> Result<RuntimeTogglableFeatures> {
-        Ok(self.runtime.get(&txn, EXPERIMENTAL_FEATURES)?.unwrap_or_default())
+    fn runtime_features(&self) -> RuntimeTogglableFeatures {
+        // sound to unwrap, the lock will only fail if:
+        // 1. requested by the same thread concurrently -> it is called and released in methods that don't call each other
+        // 2. there's a panic while the thread is held -> it is only used for copying the data here
+        *self.runtime.read().unwrap()
     }

-    pub fn features(&self, txn: RoTxn) -> Result<RoFeatures> {
-        RoFeatures::new(txn, self)
+    pub fn features(&self) -> RoFeatures {
+        RoFeatures::new(self)
     }
 }
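
The heart of this file's change is the split between the persisted flags (the LMDB database) and an in-memory copy behind an `Arc<RwLock<_>>`, seeded at startup by OR-ing the `--experimental-enable-metrics` instance flag into whatever was persisted, so the CLI flag can only force metrics on, never off. A distilled, hypothetical sketch of that pattern using plain standard-library types (not the actual `FeatureData`/heed code):

```rust
use std::sync::{Arc, Mutex, RwLock};

#[derive(Debug, Default, Clone, Copy)]
struct Features {
    metrics: bool,
}

struct FeatureStore {
    // stand-in for the heed database that survives restarts
    persisted: Mutex<Features>,
    // hot copy, read on every request without opening a transaction
    runtime: Arc<RwLock<Features>>,
}

impl FeatureStore {
    fn open(persisted: Features, instance_metrics_flag: bool) -> Self {
        // the CLI flag can only turn metrics on, never off
        let runtime = Features { metrics: instance_metrics_flag || persisted.metrics };
        Self { persisted: Mutex::new(persisted), runtime: Arc::new(RwLock::new(runtime)) }
    }

    // PATCH /experimental-features: write through to storage, then refresh the cache
    fn put(&self, features: Features) {
        *self.persisted.lock().unwrap() = features;
        *self.runtime.write().unwrap() = features;
    }

    // GET /experimental-features and every feature check read only the cache
    fn get(&self) -> Features {
        *self.runtime.read().unwrap()
    }
}

fn main() {
    let store = FeatureStore::open(Features::default(), /* --experimental-enable-metrics */ true);
    assert!(store.get().metrics);
    store.put(Features { metrics: false }); // PATCH {"metrics": false} wins over the flag
    assert!(!store.get().metrics);
}
```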


@@ -579,13 +579,7 @@ impl IndexScheduler {
                 run.wake_up.wait();

                 loop {
-                    let puffin_enabled = match run.features() {
-                        Ok(features) => features.check_puffin().is_ok(),
-                        Err(e) => {
-                            log::error!("{e}");
-                            continue;
-                        }
-                    };
+                    let puffin_enabled = run.features().check_puffin().is_ok();
                     puffin::set_scopes_on(puffin_enabled);
                     puffin::GlobalProfiler::lock().new_frame();
@@ -1299,9 +1293,8 @@ impl IndexScheduler {
         Ok(IndexStats { is_indexing, inner_stats: index_stats })
     }

-    pub fn features(&self) -> Result<RoFeatures> {
-        let rtxn = self.read_txn()?;
-        self.features.features(rtxn)
+    pub fn features(&self) -> RoFeatures {
+        self.features.features()
     }

     pub fn put_runtime_features(&self, features: RuntimeTogglableFeatures) -> Result<()> {


@@ -5,6 +5,7 @@ use serde::{Deserialize, Serialize};
 pub struct RuntimeTogglableFeatures {
     pub score_details: bool,
     pub vector_store: bool,
+    pub metrics: bool,
     pub export_puffin_reports: bool,
 }


@@ -114,10 +114,7 @@ pub fn create_app(
         .configure(routes::configure)
         .configure(|s| dashboard(s, enable_dashboard));

-    let app = app.wrap(actix_web::middleware::Condition::new(
-        opt.experimental_enable_metrics,
-        middleware::RouteMetrics,
-    ));
+    let app = app.wrap(middleware::RouteMetrics);
     app.wrap(
         Cors::default()
             .send_wildcard()


@@ -3,8 +3,10 @@
 use std::future::{ready, Ready};

 use actix_web::dev::{self, Service, ServiceRequest, ServiceResponse, Transform};
+use actix_web::web::Data;
 use actix_web::Error;
 use futures_util::future::LocalBoxFuture;
+use index_scheduler::IndexScheduler;
 use prometheus::HistogramTimer;

 pub struct RouteMetrics;
@@ -47,19 +49,27 @@ where
     fn call(&self, req: ServiceRequest) -> Self::Future {
         let mut histogram_timer: Option<HistogramTimer> = None;
-        let request_path = req.path();
-        let is_registered_resource = req.resource_map().has_resource(request_path);
-        if is_registered_resource {
-            let request_method = req.method().to_string();
-            histogram_timer = Some(
-                crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
-                    .with_label_values(&[&request_method, request_path])
-                    .start_timer(),
-            );
-            crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
-                .with_label_values(&[&request_method, request_path])
-                .inc();
-        }
+
+        // calling unwrap here is safe because index scheduler is added to app data while creating actix app.
+        // also, the tests will fail if this is not present.
+        let index_scheduler = req.app_data::<Data<IndexScheduler>>().unwrap();
+        let features = index_scheduler.features();
+
+        if features.check_metrics().is_ok() {
+            let request_path = req.path();
+            let is_registered_resource = req.resource_map().has_resource(request_path);
+            if is_registered_resource {
+                let request_method = req.method().to_string();
+                histogram_timer = Some(
+                    crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
+                        .with_label_values(&[&request_method, request_path])
+                        .start_timer(),
+                );
+                crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
+                    .with_label_values(&[&request_method, request_path])
+                    .inc();
+            }
+        };

         let fut = self.service.call(req);


@@ -29,12 +29,12 @@ async fn get_features(
     >,
     req: HttpRequest,
     analytics: Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    let features = index_scheduler.features()?;
+) -> HttpResponse {
+    let features = index_scheduler.features();

     analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
     debug!("returns: {:?}", features.runtime_features());
-    Ok(HttpResponse::Ok().json(features.runtime_features()))
+    HttpResponse::Ok().json(features.runtime_features())
 }

 #[derive(Debug, Deserr)]
@@ -45,6 +45,8 @@ pub struct RuntimeTogglableFeatures {
     #[deserr(default)]
     pub vector_store: Option<bool>,
     #[deserr(default)]
+    pub metrics: Option<bool>,
+    #[deserr(default)]
     pub export_puffin_reports: Option<bool>,
 }
@@ -57,12 +59,13 @@ async fn patch_features(
     req: HttpRequest,
     analytics: Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();

     let old_features = features.runtime_features();
     let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
         score_details: new_features.0.score_details.unwrap_or(old_features.score_details),
         vector_store: new_features.0.vector_store.unwrap_or(old_features.vector_store),
+        metrics: new_features.0.metrics.unwrap_or(old_features.metrics),
         export_puffin_reports: new_features
             .0
             .export_puffin_reports
@@ -75,6 +78,7 @@ async fn patch_features(
     let meilisearch_types::features::RuntimeTogglableFeatures {
         score_details,
         vector_store,
+        metrics,
         export_puffin_reports,
     } = new_features;
@@ -83,6 +87,7 @@ async fn patch_features(
         json!({
             "score_details": score_details,
             "vector_store": vector_store,
+            "metrics": metrics,
             "export_puffin_reports": export_puffin_reports,
         }),
         Some(&req),


@@ -68,7 +68,7 @@ pub async fn search(
     }

     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();

     let search_result = tokio::task::spawn_blocking(move || {
         perform_facet_search(&index, search_query, facet_query, facet_name, features)
     })


@@ -157,7 +157,7 @@ pub async fn search_with_url_query(
     let mut aggregate = SearchAggregator::from_query(&query, &req);

     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();
     let search_result =
         tokio::task::spawn_blocking(move || perform_search(&index, query, features)).await?;
     if let Ok(ref search_result) = search_result {
@@ -192,7 +192,7 @@ pub async fn search_with_post(
     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();

     let search_result =
         tokio::task::spawn_blocking(move || perform_search(&index, query, features)).await?;
     if let Ok(ref search_result) = search_result {


@@ -19,7 +19,7 @@ pub async fn get_metrics(
     index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
     auth_controller: Data<AuthController>,
 ) -> Result<HttpResponse, ResponseError> {
-    index_scheduler.features()?.check_metrics()?;
+    index_scheduler.features().check_metrics()?;
     let auth_filters = index_scheduler.filters();
     if !auth_filters.all_indexes_authorized() {
         let mut error = ResponseError::from(AuthenticationError::InvalidToken);


@@ -41,7 +41,7 @@ pub async fn multi_search_with_post(
     let queries = params.into_inner().queries;

     let mut multi_aggregate = MultiSearchAggregator::from_queries(&queries, &req);
-    let features = index_scheduler.features()?;
+    let features = index_scheduler.features();

     // Explicitly expect a `(ResponseError, usize)` for the error type rather than `ResponseError` only,
     // so that `?` doesn't work if it doesn't use `with_index`, ensuring that it is not forgotten in case of code


@@ -2,10 +2,12 @@ use std::collections::{HashMap, HashSet};
 use ::time::format_description::well_known::Rfc3339;
 use maplit::{hashmap, hashset};
+use meilisearch::Opt;
 use once_cell::sync::Lazy;
+use tempfile::TempDir;
 use time::{Duration, OffsetDateTime};

-use crate::common::{Server, Value};
+use crate::common::{default_settings, Server, Value};
 use crate::json;

 pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
@@ -195,7 +197,9 @@ async fn access_authorized_master_key() {
 #[actix_rt::test]
 async fn access_authorized_restricted_index() {
-    let mut server = Server::new_auth().await;
+    let dir = TempDir::new().unwrap();
+    let enable_metrics = Opt { experimental_enable_metrics: true, ..default_settings(dir.path()) };
+    let mut server = Server::new_auth_with_options(enable_metrics, dir).await;
     for ((method, route), actions) in AUTHORIZATIONS.iter() {
         for action in actions {
             // create a new API key letting only the needed action.


@@ -202,6 +202,10 @@ impl Server {
     pub async fn set_features(&self, value: Value) -> (Value, StatusCode) {
         self.service.patch("/experimental-features", value).await
     }
+
+    pub async fn get_metrics(&self) -> (Value, StatusCode) {
+        self.service.get("/metrics").await
+    }
 }

 pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
@@ -221,7 +225,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
             skip_index_budget: true,
             ..Parser::parse_from(None as Option<&str>)
         },
-        experimental_enable_metrics: true,
+        experimental_enable_metrics: false,
         ..Parser::parse_from(None as Option<&str>)
     }
 }


@@ -1,4 +1,7 @@
-use crate::common::Server;
+use meilisearch::Opt;
+use tempfile::TempDir;
+
+use crate::common::{default_settings, Server};
 use crate::json;

 /// Feature name to test against.
@@ -17,6 +20,7 @@ async fn experimental_features() {
     {
       "scoreDetails": false,
       "vectorStore": false,
+      "metrics": false,
       "exportPuffinReports": false
     }
     "###);
@@ -28,6 +32,7 @@ async fn experimental_features() {
     {
       "scoreDetails": false,
       "vectorStore": true,
+      "metrics": false,
       "exportPuffinReports": false
     }
     "###);
@@ -39,6 +44,7 @@ async fn experimental_features() {
     {
       "scoreDetails": false,
       "vectorStore": true,
+      "metrics": false,
       "exportPuffinReports": false
     }
     "###);
@@ -51,6 +57,7 @@ async fn experimental_features() {
     {
       "scoreDetails": false,
       "vectorStore": true,
+      "metrics": false,
       "exportPuffinReports": false
     }
     "###);
@@ -63,11 +70,72 @@ async fn experimental_features() {
     {
       "scoreDetails": false,
       "vectorStore": true,
+      "metrics": false,
       "exportPuffinReports": false
     }
     "###);
 }

+#[actix_rt::test]
+async fn experimental_feature_metrics() {
+    // instance flag for metrics enables metrics at startup
+    let dir = TempDir::new().unwrap();
+    let enable_metrics = Opt { experimental_enable_metrics: true, ..default_settings(dir.path()) };
+    let server = Server::new_with_options(enable_metrics).await.unwrap();
+
+    let (response, code) = server.get_features().await;
+
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "scoreDetails": false,
+      "vectorStore": false,
+      "metrics": true,
+      "exportPuffinReports": false
+    }
+    "###);
+
+    let (response, code) = server.get_metrics().await;
+    meili_snap::snapshot!(code, @"200 OK");
+
+    // metrics are not returned in json format
+    // so the test server will return null
+    meili_snap::snapshot!(response, @"null");
+
+    // disabling metrics results in invalid request
+    let (response, code) = server.set_features(json!({"metrics": false})).await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(response["metrics"], @"false");
+
+    let (response, code) = server.get_metrics().await;
+    meili_snap::snapshot!(code, @"400 Bad Request");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "message": "Getting metrics requires enabling the `metrics` experimental feature. See https://github.com/meilisearch/product/discussions/625",
+      "code": "feature_not_enabled",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
+    }
+    "###);
+
+    // enabling metrics via HTTP results in valid request
+    let (response, code) = server.set_features(json!({"metrics": true})).await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(response["metrics"], @"true");
+
+    let (response, code) = server.get_metrics().await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(response, @"null");
+
+    // startup without flag respects persisted metrics value
+    let disable_metrics =
+        Opt { experimental_enable_metrics: false, ..default_settings(dir.path()) };
+    let server_no_flag = Server::new_with_options(disable_metrics).await.unwrap();
+    let (response, code) = server_no_flag.get_metrics().await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(response, @"null");
+}
+
 #[actix_rt::test]
 async fn errors() {
     let server = Server::new().await;
@@ -78,7 +146,7 @@ async fn errors() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Unknown field `NotAFeature`: expected one of `scoreDetails`, `vectorStore`, `exportPuffinReports`",
+      "message": "Unknown field `NotAFeature`: expected one of `scoreDetails`, `vectorStore`, `metrics`, `exportPuffinReports`",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad_request"