Make the batched tasks size limit effectively work
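
The `experimental_limit_batched_tasks_total_size` option was parsed but its value was never handed to the index scheduler, so the cap had no effect. Change the option's type from `usize` to `u64` and forward it into the scheduler options as `batched_tasks_size_limit`.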

Author: Clément Renault
Date:   2025-01-09 11:59:35 +01:00
Commit: d0bdff7b7b
Parent: 8650ee66c1
7 changed files with 34 additions and 15 deletions


@@ -194,7 +194,7 @@ struct Infos {
     experimental_enable_logs_route: bool,
     experimental_reduce_indexing_memory_usage: bool,
     experimental_max_number_of_batched_tasks: usize,
-    experimental_limit_batched_tasks_total_size: usize,
+    experimental_limit_batched_tasks_total_size: u64,
     gpu_enabled: bool,
     db_path: bool,
     import_dump: bool,


@@ -312,6 +312,7 @@ fn open_or_create_database_unchecked(
         cleanup_enabled: !opt.experimental_replication_parameters,
         max_number_of_tasks: 1_000_000,
         max_number_of_batched_tasks: opt.experimental_max_number_of_batched_tasks,
+        batched_tasks_size_limit: opt.experimental_limit_batched_tasks_total_size,
         index_growth_amount: byte_unit::Byte::from_str("10GiB").unwrap().as_u64() as usize,
         index_count: DEFAULT_INDEX_COUNT,
         instance_features,
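
The new `batched_tasks_size_limit` field is what actually delivers the configured cap to the index scheduler. For background, here is a minimal sketch of how a batch builder can enforce such a cap while draining the task queue; the `Task` type and `select_batch` helper are illustrative stand-ins, not code from this commit:

// Illustrative sketch only -- `Task`, `size`, and `select_batch` are
// hypothetical stand-ins, not taken from the Meilisearch scheduler.
struct Task {
    uid: u32,
    /// Payload size of the task, in bytes.
    size: u64,
}

/// Pick tasks for the next batch, stopping before the accumulated payload
/// size would exceed `size_limit`. At least one task is always taken so a
/// single oversized task cannot stall the queue forever.
fn select_batch(queue: &[Task], max_tasks: usize, size_limit: u64) -> Vec<u32> {
    let mut selected = Vec::new();
    let mut total: u64 = 0;
    for task in queue.iter().take(max_tasks) {
        let new_total = total.saturating_add(task.size);
        if !selected.is_empty() && new_total > size_limit {
            break;
        }
        total = new_total;
        selected.push(task.uid);
    }
    selected
}

fn main() {
    let queue = [
        Task { uid: 0, size: 400 },
        Task { uid: 1, size: 500 },
        Task { uid: 2, size: 300 },
    ];
    // With a 1_000-byte cap, only the first two tasks fit into the batch.
    assert_eq!(select_batch(&queue, usize::MAX, 1_000), vec![0, 1]);
}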


@@ -436,7 +436,7 @@ pub struct Opt {
     /// see: <https://github.com/orgs/meilisearch/discussions/801>
     #[clap(long, env = MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE, default_value_t = default_limit_batched_tasks_total_size())]
     #[serde(default = "default_limit_batched_tasks_total_size")]
-    pub experimental_limit_batched_tasks_total_size: usize,
+    pub experimental_limit_batched_tasks_total_size: u64,
 
     #[serde(flatten)]
     #[clap(flatten)]
@@ -931,8 +931,8 @@ fn default_limit_batched_tasks() -> usize {
     usize::MAX
 }
 
-fn default_limit_batched_tasks_total_size() -> usize {
-    usize::MAX
+fn default_limit_batched_tasks_total_size() -> u64 {
+    u64::MAX
 }
 
 fn default_snapshot_dir() -> PathBuf {
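
With clap's derive conventions this field should surface as the `--experimental-limit-batched-tasks-total-size` flag, and per the attribute above it can also be set through the `MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE` environment variable. The default likewise moves from `usize::MAX` to `u64::MAX`: the cap stays effectively disabled unless an operator opts in, and a `u64` keeps the byte limit independent of the target's pointer width, whereas `usize::MAX` is only about 4 GiB on a 32-bit build.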