Fully remove vector store feature

Louis Dureuil 2025-01-14 13:48:08 +01:00
parent 6d62fa061b
commit 87ea080c10
15 changed files with 24 additions and 110 deletions

View File

@@ -458,7 +458,7 @@ pub(crate) mod test {
     }

     fn create_test_features() -> RuntimeTogglableFeatures {
-        RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
+        RuntimeTogglableFeatures::default()
     }

     #[test]

View File

@@ -327,10 +327,7 @@ pub(crate) mod test {
             }
         }

-        assert_eq!(
-            dump.features().unwrap().unwrap(),
-            RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
-        );
+        assert_eq!(dump.features().unwrap().unwrap(), RuntimeTogglableFeatures::default());
     }

     #[test]
@@ -373,10 +370,7 @@ pub(crate) mod test {
         assert_eq!(test.documents().unwrap().count(), 1);

-        assert_eq!(
-            dump.features().unwrap().unwrap(),
-            RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
-        );
+        assert_eq!(dump.features().unwrap().unwrap(), RuntimeTogglableFeatures::default());
     }

     #[test]

View File

@@ -56,19 +56,6 @@ impl RoFeatures {
         }
     }

-    pub fn check_vector(&self, disabled_action: &'static str) -> Result<()> {
-        if self.runtime.vector_store {
-            Ok(())
-        } else {
-            Err(FeatureNotEnabledError {
-                disabled_action,
-                feature: "vector store",
-                issue_link: "https://github.com/meilisearch/product/discussions/677",
-            }
-            .into())
-        }
-    }
-
     pub fn check_edit_documents_by_function(&self, disabled_action: &'static str) -> Result<()> {
         if self.runtime.edit_documents_by_function {
             Ok(())
@@ -105,17 +92,11 @@ impl FeatureData {
         let txn = env.read_txn()?;
         let persisted_features: RuntimeTogglableFeatures =
             runtime_features_db.get(&txn, EXPERIMENTAL_FEATURES)?.unwrap_or_default();
-        let InstanceTogglableFeatures {
-            metrics,
-            logs_route,
-            contains_filter,
-            disable_vector_store,
-        } = instance_features;
+        let InstanceTogglableFeatures { metrics, logs_route, contains_filter } = instance_features;

         let runtime = Arc::new(RwLock::new(RuntimeTogglableFeatures {
             metrics: metrics || persisted_features.metrics,
             logs_route: logs_route || persisted_features.logs_route,
             contains_filter: contains_filter || persisted_features.contains_filter,
-            vector_store: !disable_vector_store,
             ..persisted_features
         }));

View File

@@ -3,7 +3,6 @@ use serde::{Deserialize, Serialize};
 #[derive(Serialize, Deserialize, Debug, Clone, Copy, Default, PartialEq, Eq)]
 #[serde(rename_all = "camelCase", default)]
 pub struct RuntimeTogglableFeatures {
-    pub vector_store: bool,
     pub metrics: bool,
     pub logs_route: bool,
     pub edit_documents_by_function: bool,
@@ -15,5 +14,4 @@ pub struct InstanceTogglableFeatures {
     pub metrics: bool,
     pub logs_route: bool,
     pub contains_filter: bool,
-    pub disable_vector_store: bool,
 }
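
For orientation, a minimal sketch of the two feature structs as they stand after this commit. The `contains_filter` field on `RuntimeTogglableFeatures` and the derives on `InstanceTogglableFeatures` are inferred from other hunks in this diff rather than shown in the hunk above, so treat them as assumptions:

use serde::{Deserialize, Serialize};

// Post-commit shape of the runtime feature toggles: `vector_store` is gone.
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Default, PartialEq, Eq)]
#[serde(rename_all = "camelCase", default)]
pub struct RuntimeTogglableFeatures {
    pub metrics: bool,
    pub logs_route: bool,
    pub edit_documents_by_function: bool,
    pub contains_filter: bool, // inferred from the FeatureData hunk above
}

// Post-commit shape of the instance-level toggles: `disable_vector_store` is gone.
// (The derives here are an assumption for the sketch.)
#[derive(Debug, Clone, Copy, Default)]
pub struct InstanceTogglableFeatures {
    pub metrics: bool,
    pub logs_route: bool,
    pub contains_filter: bool,
}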

View File

@@ -177,13 +177,12 @@ impl SegmentAnalytics {
 /// This structure represent the `infos` field we send in the analytics.
 /// It's quite close to the `Opt` structure except all sensitive informations
 /// have been simplified to a boolean.
-/// It's send as-is in amplitude thus you should never update a name of the
+/// It's sent as-is in amplitude thus you should never update a name of the
 /// struct without the approval of the PM.
 #[derive(Debug, Clone, Serialize)]
 struct Infos {
     env: String,
     experimental_contains_filter: bool,
-    experimental_vector_store: bool,
     experimental_enable_metrics: bool,
     experimental_edit_documents_by_function: bool,
     experimental_search_queue_size: usize,
@@ -269,7 +268,6 @@ impl Infos {
             indexer_options,
             config_file_path,
             no_analytics: _,
-            experimental_disable_vector_store,
         } = options;

         let schedule_snapshot = match schedule_snapshot {
@@ -281,7 +279,6 @@
             indexer_options;

         let RuntimeTogglableFeatures {
-            vector_store: _,
             metrics,
             logs_route,
             edit_documents_by_function,
@@ -293,7 +290,6 @@ impl Infos {
         Self {
             env,
             experimental_contains_filter: experimental_contains_filter | contains_filter,
-            experimental_vector_store: !experimental_disable_vector_store,
             experimental_edit_documents_by_function: edit_documents_by_function,
             experimental_enable_metrics: experimental_enable_metrics | metrics,
             experimental_search_queue_size,

View File

@@ -52,7 +52,6 @@ const MEILI_EXPERIMENTAL_LOGS_MODE: &str = "MEILI_EXPERIMENTAL_LOGS_MODE";
 const MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS: &str = "MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS";
 const MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE: &str = "MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE";
 const MEILI_EXPERIMENTAL_CONTAINS_FILTER: &str = "MEILI_EXPERIMENTAL_CONTAINS_FILTER";
-const MEILI_EXPERIMENTAL_DISABLE_VECTOR_STORE: &str = "MEILI_EXPERIMENTAL_DISABLE_VECTOR_STORE";
 const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS";
 const MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE: &str = "MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE";
 const MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER: &str = "MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER";
@@ -361,14 +360,6 @@ pub struct Opt {
     #[serde(default)]
     pub experimental_enable_metrics: bool,

-    /// Experimental disabling of the vector store feature. For more information,
-    /// see: <https://www.notion.so/meilisearch/v1-13-AI-search-changes-17a4b06b651f80538b65d31724545def#17a4b06b651f80319796d3e7dbaa57c5>
-    ///
-    /// If set, disables embedder configuration, hybrid search, semantic search and similar requests.
-    #[clap(long, env = MEILI_EXPERIMENTAL_DISABLE_VECTOR_STORE)]
-    #[serde(default)]
-    pub experimental_disable_vector_store: bool,
-
     /// Experimental search queue size. For more information,
     /// see: <https://github.com/orgs/meilisearch/discussions/729>
     ///
@@ -549,7 +540,6 @@ impl Opt {
             experimental_reduce_indexing_memory_usage,
             experimental_max_number_of_batched_tasks,
             experimental_limit_batched_tasks_total_size,
-            experimental_disable_vector_store,
         } = self;
         export_to_env_if_not_present(MEILI_DB_PATH, db_path);
         export_to_env_if_not_present(MEILI_HTTP_ADDR, http_addr);
@@ -598,10 +588,6 @@ impl Opt {
             MEILI_EXPERIMENTAL_CONTAINS_FILTER,
             experimental_contains_filter.to_string(),
         );
-        export_to_env_if_not_present(
-            MEILI_EXPERIMENTAL_DISABLE_VECTOR_STORE,
-            experimental_disable_vector_store.to_string(),
-        );
         export_to_env_if_not_present(
             MEILI_EXPERIMENTAL_ENABLE_METRICS,
             experimental_enable_metrics.to_string(),
@@ -694,7 +680,6 @@ impl Opt {
             metrics: self.experimental_enable_metrics,
             logs_route: self.experimental_enable_logs_route,
             contains_filter: self.experimental_contains_filter,
-            disable_vector_store: self.experimental_disable_vector_store,
         }
     }
} }

View File

@@ -94,7 +94,6 @@ pub struct RuntimeTogglableFeatures {
 impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogglableFeatures {
     fn from(value: meilisearch_types::features::RuntimeTogglableFeatures) -> Self {
         let meilisearch_types::features::RuntimeTogglableFeatures {
-            vector_store: _,
             metrics,
             logs_route,
             edit_documents_by_function,
@@ -112,7 +111,6 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg

 #[derive(Serialize)]
 pub struct PatchExperimentalFeatureAnalytics {
-    vector_store: bool,
     metrics: bool,
     logs_route: bool,
     edit_documents_by_function: bool,
@@ -126,7 +124,6 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
     fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
         Box::new(Self {
-            vector_store: new.vector_store,
             metrics: new.metrics,
             logs_route: new.logs_route,
             edit_documents_by_function: new.edit_documents_by_function,
@@ -179,7 +176,6 @@ async fn patch_features(
     let old_features = features.runtime_features();
     let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
-        vector_store: true,
         metrics: new_features.0.metrics.unwrap_or(old_features.metrics),
         logs_route: new_features.0.logs_route.unwrap_or(old_features.logs_route),
         edit_documents_by_function: new_features
@@ -193,7 +189,6 @@ async fn patch_features(
     // the it renames to camelCase, which we don't want for analytics.
     // **Do not** ignore fields with `..` or `_` here, because we want to add them in the future.
     let meilisearch_types::features::RuntimeTogglableFeatures {
-        vector_store,
         metrics,
         logs_route,
         edit_documents_by_function,
@@ -202,7 +197,6 @@ async fn patch_features(
     analytics.publish(
         PatchExperimentalFeatureAnalytics {
-            vector_store,
             metrics,
             logs_route,
             edit_documents_by_function,

View File

@@ -257,8 +257,7 @@ pub async fn get_document(
     let GetDocument { fields, retrieve_vectors: param_retrieve_vectors } = params.into_inner();
     let attributes_to_retrieve = fields.merge_star_and_none();

-    let features = index_scheduler.features();
-    let retrieve_vectors = RetrieveVectors::new(param_retrieve_vectors.0, features)?;
+    let retrieve_vectors = RetrieveVectors::new(param_retrieve_vectors.0);

     analytics.publish(
         DocumentsFetchAggregator::<DocumentsGET> {
@@ -593,8 +592,7 @@ fn documents_by_query(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let BrowseQuery { offset, limit, fields, retrieve_vectors, filter } = query;

-    let features = index_scheduler.features();
-    let retrieve_vectors = RetrieveVectors::new(retrieve_vectors, features)?;
+    let retrieve_vectors = RetrieveVectors::new(retrieve_vectors);

     let index = index_scheduler.index(&index_uid)?;
     let (total, documents) = retrieve_documents(
@@ -1420,7 +1418,6 @@ fn some_documents<'a, 't: 'a>(
         ret.map_err(ResponseError::from).and_then(|(key, document)| -> Result<_, ResponseError> {
             let mut document = milli::obkv_to_json(&all_fields, &fields_ids_map, document)?;
             match retrieve_vectors {
-                RetrieveVectors::Ignore => {}
                 RetrieveVectors::Hide => {
                     document.remove("_vectors");
                 }

View File

@@ -252,9 +252,7 @@ pub async fn search(
     }
     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features();
-    let search_kind =
-        search_kind(&search_query, &index_scheduler, index_uid.to_string(), &index, features)?;
+    let search_kind = search_kind(&search_query, &index_scheduler, index_uid.to_string(), &index)?;
     let permit = search_queue.try_get_search_permit().await?;

     let search_result = tokio::task::spawn_blocking(move || {
         perform_facet_search(

View File

@@ -1,7 +1,7 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::{AwebJson, AwebQueryParameter};
-use index_scheduler::{IndexScheduler, RoFeatures};
+use index_scheduler::IndexScheduler;
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
@@ -336,11 +336,10 @@ pub async fn search_with_url_query(
     let mut aggregate = SearchAggregator::<SearchGET>::from_query(&query);

     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features();
     let search_kind =
-        search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?;
-    let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)?;
+        search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
+    let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors);

     let permit = search_queue.try_get_search_permit().await?;
     let search_result = tokio::task::spawn_blocking(move || {
         perform_search(
@@ -444,11 +443,9 @@ pub async fn search_with_post(
     let index = index_scheduler.index(&index_uid)?;

-    let features = index_scheduler.features();
-
     let search_kind =
-        search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?;
-    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?;
+        search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
+    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors);

     let permit = search_queue.try_get_search_permit().await?;

     let search_result = tokio::task::spawn_blocking(move || {
@@ -483,15 +480,7 @@ pub fn search_kind(
     index_scheduler: &IndexScheduler,
     index_uid: String,
     index: &milli::Index,
-    features: RoFeatures,
 ) -> Result<SearchKind, ResponseError> {
-    if query.vector.is_some() {
-        features.check_vector("Passing `vector` as a parameter")?;
-    }
-    if query.hybrid.is_some() {
-        features.check_vector("Passing `hybrid` as a parameter")?;
-    }
-
     // handle with care, the order of cases matters, the semantics is subtle
     match (query.q.as_deref(), &query.hybrid, query.vector.as_deref()) {
         // empty query, no vector => placeholder search

View File

@@ -5,7 +5,6 @@ use index_scheduler::IndexScheduler;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
-use meilisearch_types::milli::update::Setting;
 use meilisearch_types::settings::{
     settings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
 };
@@ -711,10 +710,7 @@ pub async fn delete_all(
 fn validate_settings(
     settings: Settings<Unchecked>,
-    index_scheduler: &IndexScheduler,
+    _index_scheduler: &IndexScheduler,
 ) -> Result<Settings<Unchecked>, ResponseError> {
-    if matches!(settings.embedders, Setting::Set(_)) {
-        index_scheduler.features().check_vector("Passing `embedders` in settings")?
-    }
     Ok(settings.validate()?)
 }

View File

@@ -216,11 +216,7 @@ async fn similar(
     index_uid: IndexUid,
     mut query: SimilarQuery,
 ) -> Result<SimilarResult, ResponseError> {
-    let features = index_scheduler.features();
-
-    features.check_vector("Using the similar API")?;
-
-    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?;
+    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors);

     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {

View File

@@ -240,11 +240,9 @@ pub async fn multi_search_with_post(
                     index_scheduler.get_ref(),
                     index_uid_str.clone(),
                     &index,
-                    features,
                 )
                 .with_index(query_index)?;
-                let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)
-                    .with_index(query_index)?;
+                let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors);

                 let search_result = tokio::task::spawn_blocking(move || {
                     perform_search(

View File

@@ -569,7 +569,7 @@ pub fn perform_federated_search(
         let res: Result<(), ResponseError> = (|| {
             let search_kind =
-                search_kind(&query, index_scheduler, index_uid.to_string(), &index, features)?;
+                search_kind(&query, index_scheduler, index_uid.to_string(), &index)?;

             let canonicalization_kind = match (&search_kind, &query.q) {
                 (SearchKind::SemanticOnly { .. }, _) => {
@@ -631,7 +631,7 @@ pub fn perform_federated_search(
                 _ => semantic_hit_count = Some(0),
             }

-            let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?;
+            let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors);

             let time_budget = match cutoff {
                 Some(cutoff) => TimeBudget::new(Duration::from_millis(cutoff)),

View File

@@ -1162,10 +1162,6 @@ struct AttributesFormat {

 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum RetrieveVectors {
-    /// Do not touch the `_vectors` field
-    ///
-    /// this is the behavior when the vectorStore feature is disabled
-    Ignore,
     /// Remove the `_vectors` field
     ///
     /// this is the behavior when the vectorStore feature is enabled, and `retrieveVectors` is `false`
@@ -1177,15 +1173,11 @@ pub enum RetrieveVectors {
 }

 impl RetrieveVectors {
-    pub fn new(
-        retrieve_vector: bool,
-        features: index_scheduler::RoFeatures,
-    ) -> Result<Self, index_scheduler::Error> {
-        match (retrieve_vector, features.check_vector("Passing `retrieveVectors` as a parameter")) {
-            (true, Ok(())) => Ok(Self::Retrieve),
-            (true, Err(error)) => Err(error),
-            (false, Ok(())) => Ok(Self::Hide),
-            (false, Err(_)) => Ok(Self::Ignore),
+    pub fn new(retrieve_vector: bool) -> Self {
+        if retrieve_vector {
+            Self::Retrieve
+        } else {
+            Self::Hide
         }
     }
 }
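
Taken together, the last two hunks leave `RetrieveVectors` with only two variants and an infallible constructor. A minimal, self-contained sketch of the resulting behavior (the doc comments and `main` harness are only for illustration, not part of the upstream file):

// Post-commit `RetrieveVectors`: no feature gate, no `Ignore` variant,
// and `new` can no longer return an error.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RetrieveVectors {
    /// Remove the `_vectors` field: `retrieveVectors` was `false`.
    Hide,
    /// Keep the `_vectors` field: `retrieveVectors` was `true`.
    Retrieve,
}

impl RetrieveVectors {
    pub fn new(retrieve_vector: bool) -> Self {
        if retrieve_vector {
            Self::Retrieve
        } else {
            Self::Hide
        }
    }
}

fn main() {
    // Callers such as the documents, search, similar, and multi-search routes
    // now build this directly from the `retrieveVectors` request parameter.
    assert_eq!(RetrieveVectors::new(true), RetrieveVectors::Retrieve);
    assert_eq!(RetrieveVectors::new(false), RetrieveVectors::Hide);
}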