Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-11-22 12:54:26 +01:00)
Remove --disable-auto-batching CLI option
parent 336c77aa45
commit b821c72459
@@ -47,11 +47,6 @@ max_task_db_size = "100 GiB"
 # Sets the maximum number of threads Meilisearch can use during indexing.
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#max-indexing-threads
 
-disable_auto_batching = false
-# Deactivates auto-batching when provided.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#disable-auto-batching
-
-
 #############
 ### DUMPS ###
 #############
@@ -25,7 +25,7 @@ use uuid::Uuid;
 
 use super::{config_user_id_path, DocumentDeletionKind, MEILISEARCH_CONFIG_PATH};
 use crate::analytics::Analytics;
-use crate::option::{default_http_addr, IndexerOpts, MaxMemory, MaxThreads, SchedulerConfig};
+use crate::option::{default_http_addr, IndexerOpts, MaxMemory, MaxThreads};
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
 use crate::routes::tasks::TasksFilterQueryRaw;
 use crate::routes::{create_all_stats, Stats};
@@ -229,7 +229,6 @@ struct Infos {
     max_index_size: Byte,
     max_task_db_size: Byte,
     http_payload_size_limit: Byte,
-    disable_auto_batching: bool,
     log_level: String,
     max_indexing_memory: MaxMemory,
     max_indexing_threads: MaxThreads,
@@ -275,13 +274,11 @@ impl From<Opt> for Infos {
             dump_dir,
             log_level,
             indexer_options,
-            scheduler_options,
             config_file_path,
             #[cfg(all(not(debug_assertions), feature = "analytics"))]
             no_analytics: _,
         } = options;
 
-        let SchedulerConfig { disable_auto_batching } = scheduler_options;
         let IndexerOpts {
             log_every_n: _,
             max_nb_chunks: _,
@@ -308,7 +305,6 @@ impl From<Opt> for Infos {
             max_index_size,
             max_task_db_size,
             http_payload_size_limit,
-            disable_auto_batching,
             log_level,
             max_indexing_memory,
             max_indexing_threads,
@@ -16,7 +16,6 @@ pub mod route_metrics;
 use std::fs::File;
 use std::io::{BufReader, BufWriter};
 use std::path::Path;
-use std::sync::atomic::AtomicBool;
 use std::sync::Arc;
 use std::thread;
 use std::time::Duration;
@@ -45,8 +44,6 @@ pub use option::Opt;
 
 use crate::error::MeilisearchHttpError;
 
-pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);
-
 /// Check if a db is empty. It does not provide any information on the
 /// validity of the data in it.
 /// We consider a database as non empty when it's a non empty directory.
@@ -209,7 +206,7 @@ fn open_or_create_database_unchecked(
             task_db_size: opt.max_task_db_size.get_bytes() as usize,
             index_size: opt.max_index_size.get_bytes() as usize,
             indexer_config: (&opt.indexer_options).try_into()?,
-            autobatching_enabled: !opt.scheduler_options.disable_auto_batching,
+            autobatching_enabled: true,
         })?)
     };
 
@@ -66,7 +66,6 @@ const DEFAULT_LOG_LEVEL: &str = "INFO";
 
 const MEILI_MAX_INDEXING_MEMORY: &str = "MEILI_MAX_INDEXING_MEMORY";
 const MEILI_MAX_INDEXING_THREADS: &str = "MEILI_MAX_INDEXING_THREADS";
-const DISABLE_AUTO_BATCHING: &str = "DISABLE_AUTO_BATCHING";
 const DEFAULT_LOG_EVERY_N: usize = 100000;
 
 #[derive(Debug, Clone, Parser, Deserialize)]
@@ -240,10 +239,6 @@ pub struct Opt {
     #[clap(flatten)]
     pub indexer_options: IndexerOpts,
 
-    #[serde(flatten)]
-    #[clap(flatten)]
-    pub scheduler_options: SchedulerConfig,
-
     /// Set the path to a configuration file that should be used to setup the engine.
     /// Format must be TOML.
     #[clap(long)]
@@ -323,7 +318,6 @@ impl Opt {
             dump_dir,
             log_level,
             indexer_options,
-            scheduler_options,
             import_snapshot: _,
             ignore_missing_snapshot: _,
             ignore_snapshot_if_db_exists: _,
@@ -383,7 +377,6 @@ impl Opt {
             );
         }
         indexer_options.export_to_env();
-        scheduler_options.export_to_env();
     }
 
     pub fn get_ssl_config(&self) -> anyhow::Result<Option<rustls::ServerConfig>> {
@@ -481,22 +474,6 @@ impl IndexerOpts {
     }
 }
 
-#[derive(Debug, Clone, Parser, Default, Deserialize)]
-#[serde(rename_all = "snake_case", deny_unknown_fields)]
-pub struct SchedulerConfig {
-    /// Deactivates auto-batching when provided.
-    #[clap(long, env = DISABLE_AUTO_BATCHING)]
-    #[serde(default)]
-    pub disable_auto_batching: bool,
-}
-
-impl SchedulerConfig {
-    pub fn export_to_env(self) {
-        let SchedulerConfig { disable_auto_batching } = self;
-        export_to_env_if_not_present(DISABLE_AUTO_BATCHING, disable_auto_batching.to_string());
-    }
-}
-
 impl TryFrom<&IndexerOpts> for IndexerConfig {
     type Error = anyhow::Error;
 
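Not part of the commit itself, but a quick way to sanity-check the removal: `Opt` derives `clap::Parser` (see the derive above `pub struct Opt` in the hunks), so once `SchedulerConfig` is no longer flattened into it, clap should reject the old flag as an unknown argument. A minimal sketch, assuming clap's default behaviour of erroring on unknown flags; the module, test name, and import path are illustrative only.

    // Illustrative sanity check; not part of this commit.
    #[cfg(test)]
    mod disable_auto_batching_removed {
        use clap::Parser;

        use crate::option::Opt;

        #[test]
        fn old_flag_is_rejected() {
            // With SchedulerConfig gone from Opt, clap no longer knows
            // `--disable-auto-batching` and should return a parse error.
            assert!(Opt::try_parse_from(["meilisearch", "--disable-auto-batching"]).is_err());
        }
    }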