Mirror of https://github.com/meilisearch/MeiliSearch, synced 2025-02-06 02:23:27 +01:00
Merge #5314

5314: Activate used database size r=irevoire a=ManyTheFish

# Pull Request

Make the `/stats` route return a `usedDatabaseSize` field corresponding to the size used to store the "real" data in the database, as opposed to the disk size used by LMDB.

Co-authored-by: ManyTheFish <many@meilisearch.com>
This commit is contained in: 00e764b0d3
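For context, the two figures differ because an LMDB file does not shrink: pages that LMDB frees internally stay inside the memory-mapped file, so the on-disk size can be well above the size of the live data. Below is a minimal sketch (not the PR's code) of how the two numbers can be read from a single environment through heed, the LMDB wrapper Meilisearch uses; it assumes `Env::real_disk_size()` and `Env::non_free_pages_size()` are available, and exact names or signatures may differ between heed versions.

```rust
use heed::Env;

/// Returns `(database_size, used_database_size)` in bytes for one LMDB environment.
/// Hedged sketch only; assumed heed helpers, not Meilisearch's implementation.
fn env_sizes(env: &Env) -> heed::Result<(u64, u64)> {
    // Size of the memory-mapped file on disk, including pages LMDB has freed
    // internally but not returned to the filesystem.
    let database_size = env.real_disk_size()?;
    // Size of the pages that still hold live data.
    let used_database_size = env.non_free_pages_size()?;
    Ok((database_size, used_database_size))
}
```

Whatever the exact aggregation across indexes, `usedDatabaseSize` should never exceed `databaseSize`, which the test snapshots below illustrate (196608 vs 438272).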
@@ -359,9 +359,9 @@ pub async fn running() -> HttpResponse {
 #[derive(Serialize, Debug, ToSchema)]
 #[serde(rename_all = "camelCase")]
 pub struct Stats {
-    /// The size of the database, in bytes.
+    /// The disk space used by the database, in bytes.
     pub database_size: u64,
-    #[serde(skip)]
+    /// The size of the database, in bytes.
     pub used_database_size: u64,
     /// The date of the last update in the RFC 3339 formats. Can be `null` if no update has ever been processed.
     #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
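The functional change in this hunk is the removal of `#[serde(skip)]`: once the attribute is gone, the field is serialized like any other, and `rename_all = "camelCase"` turns `used_database_size` into the `usedDatabaseSize` key seen in the route's payload. A self-contained sketch (not Meilisearch's code, struct reduced to the two fields discussed here) of that serde behaviour:

```rust
use serde::Serialize;

// Stand-in for the real `Stats` struct.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct StatsSketch {
    database_size: u64,
    used_database_size: u64,
}

fn main() {
    let stats = StatsSketch { database_size: 567, used_database_size: 456 };
    // Prints: {"databaseSize":567,"usedDatabaseSize":456}
    println!("{}", serde_json::to_string(&stats).unwrap());
}
```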
@@ -383,6 +383,7 @@ pub struct Stats {
     (status = 200, description = "The stats of the instance", body = Stats, content_type = "application/json", example = json!(
         {
             "databaseSize": 567,
+            "usedDatabaseSize": 456,
             "lastUpdate": "2019-11-20T09:40:33.711324Z",
             "indexes": {
                 "movies": {
@@ -126,9 +126,10 @@ async fn check_the_index_scheduler(server: &Server) {
     "#);
     // And their metadata are still right
     let (stats, _) = server.stats().await;
-    snapshot!(stats, @r#"
+    snapshot!(stats, @r###"
     {
       "databaseSize": 438272,
+      "usedDatabaseSize": 196608,
       "lastUpdate": "2025-01-23T11:36:22.634859166Z",
       "indexes": {
         "kefir": {
@@ -144,7 +145,7 @@ async fn check_the_index_scheduler(server: &Server) {
         }
       }
     }
-    "#);
+    "###);

     // Wait until the upgrade has been applied to all indexes to avoid flakyness
     let (tasks, _) = server.tasks_filter("types=upgradeDatabase&limit=1").await;
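A side note on the `@r#"…"#` → `@r###"…"###` change in these snapshots: in Rust, the number of `#` marks around a raw string only determines which closing sequence terminates it, so adding marks lets the literal safely contain shorter sequences such as `"#`. Tools that rewrite inline snapshots (such as cargo-insta) commonly pick a delimiter that cannot collide with the snapshot body. A quick, generic illustration:

```rust
fn main() {
    // One `#`: the literal ends at the first `"#`.
    let short = r#"contains "quotes" but no hash-after-quote"#;
    // Three `#`s: the literal may contain `"#` without terminating early.
    let long = r###"may even contain "# in the middle"###;
    println!("{short}\n{long}");
}
```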
@@ -205,9 +206,10 @@ async fn check_the_index_scheduler(server: &Server) {
     snapshot!(json_string!(batches, { ".results[0].duration" => "[duration]", ".results[0].enqueuedAt" => "[date]", ".results[0].startedAt" => "[date]", ".results[0].finishedAt" => "[date]" }), name: "batches_filter_afterFinishedAt_equal_2025-01-16T16_47_41");

     let (stats, _) = server.stats().await;
-    snapshot!(stats, @r#"
+    snapshot!(stats, @r###"
     {
       "databaseSize": 438272,
+      "usedDatabaseSize": 196608,
       "lastUpdate": "2025-01-23T11:36:22.634859166Z",
       "indexes": {
         "kefir": {
@@ -223,7 +225,7 @@ async fn check_the_index_scheduler(server: &Server) {
         }
      }
    }
-    "#);
+    "###);
     let index = server.index("kefir");
     let (stats, _) = index.stats().await;
     snapshot!(stats, @r#"
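Beyond snapshot equality, the relationship between the two fields can also be checked directly. A hedged sketch of such an assertion, written against the JSON body that `GET /stats` returns; the helper and its name are illustrative, not part of the test suite:

```rust
use serde_json::Value;

/// Given the JSON body returned by `GET /stats`, assert the basic invariant:
/// the space holding live data can never exceed the space the LMDB files
/// occupy on disk.
fn assert_stats_invariant(stats: &Value) {
    let database_size = stats["databaseSize"].as_u64().expect("databaseSize");
    let used_database_size = stats["usedDatabaseSize"].as_u64().expect("usedDatabaseSize");
    assert!(used_database_size <= database_size);
}
```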