diff --git a/meilisearch-core/src/store/mod.rs b/meilisearch-core/src/store/mod.rs
index 6bc12231e..488e6d6a4 100644
--- a/meilisearch-core/src/store/mod.rs
+++ b/meilisearch-core/src/store/mod.rs
@@ -242,7 +242,7 @@ impl Index {
 
     pub fn schema_update(&self, writer: &mut heed::RwTxn<UpdateT>, schema: Schema) -> MResult<u64> {
         let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
-        update::push_schema_update(writer, self.updates, self.updates_results, schema)
+        update::push_schema_update(writer, self, schema)
     }
 
     pub fn customs_update(&self, writer: &mut heed::RwTxn<UpdateT>, customs: Vec<u8>) -> ZResult<u64> {
diff --git a/meilisearch-core/src/update/clear_all.rs b/meilisearch-core/src/update/clear_all.rs
index d142715ed..0c52f5190 100644
--- a/meilisearch-core/src/update/clear_all.rs
+++ b/meilisearch-core/src/update/clear_all.rs
@@ -4,23 +4,17 @@ use crate::{store, MResult, RankedMap};
 
 pub fn apply_clear_all(
     writer: &mut heed::RwTxn<MainT>,
-    main_store: store::Main,
-    documents_fields_store: store::DocumentsFields,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    postings_lists_store: store::PostingsLists,
-    docs_words_store: store::DocsWords,
-    prefix_documents_cache: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache: store::PrefixPostingsListsCache,
+    index: &store::Index,
 ) -> MResult<()> {
-    main_store.put_words_fst(writer, &fst::Set::default())?;
-    main_store.put_ranked_map(writer, &RankedMap::default())?;
-    main_store.put_number_of_documents(writer, |_| 0)?;
-    documents_fields_store.clear(writer)?;
-    documents_fields_counts_store.clear(writer)?;
-    postings_lists_store.clear(writer)?;
-    docs_words_store.clear(writer)?;
-    prefix_documents_cache.clear(writer)?;
-    prefix_postings_lists_cache.clear(writer)?;
+    index.main.put_words_fst(writer, &fst::Set::default())?;
+    index.main.put_ranked_map(writer, &RankedMap::default())?;
+    index.main.put_number_of_documents(writer, |_| 0)?;
+    index.documents_fields.clear(writer)?;
+    index.documents_fields_counts.clear(writer)?;
+    index.postings_lists.clear(writer)?;
+    index.docs_words.clear(writer)?;
+    index.prefix_documents_cache.clear(writer)?;
+    index.prefix_postings_lists_cache.clear(writer)?;
 
     Ok(())
 }
diff --git a/meilisearch-core/src/update/documents_addition.rs b/meilisearch-core/src/update/documents_addition.rs
index 5c60af2a3..ec45b40ad 100644
--- a/meilisearch-core/src/update/documents_addition.rs
+++ b/meilisearch-core/src/update/documents_addition.rs
@@ -104,18 +104,12 @@ pub fn push_documents_addition(
 
 pub fn apply_documents_addition<'a, 'b>(
     writer: &'a mut heed::RwTxn<'b, MainT>,
-    main_store: store::Main,
-    documents_fields_store: store::DocumentsFields,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    postings_lists_store: store::PostingsLists,
-    docs_words_store: store::DocsWords,
-    prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &store::Index,
     addition: Vec<HashMap<String, serde_json::Value>>,
 ) -> MResult<()> {
     let mut documents_additions = HashMap::new();
 
-    let schema = match main_store.schema(writer)? {
+    let schema = match index.main.schema(writer)? {
         Some(schema) => schema,
         None => return Err(Error::SchemaMissing),
     };
@@ -135,23 +129,14 @@ pub fn apply_documents_addition<'a, 'b>(
     // 2. remove the documents posting lists
     let number_of_inserted_documents = documents_additions.len();
     let documents_ids = documents_additions.iter().map(|(id, _)| *id).collect();
-    apply_documents_deletion(
-        writer,
-        main_store,
-        documents_fields_store,
-        documents_fields_counts_store,
-        postings_lists_store,
-        docs_words_store,
-        prefix_postings_lists_cache_store,
-        documents_ids,
-    )?;
+    apply_documents_deletion(writer, index, documents_ids)?;
 
-    let mut ranked_map = match main_store.ranked_map(writer)? {
+    let mut ranked_map = match index.main.ranked_map(writer)? {
         Some(ranked_map) => ranked_map,
         None => RankedMap::default(),
     };
 
-    let stop_words = match main_store.stop_words_fst(writer)? {
+    let stop_words = match index.main.stop_words_fst(writer)? {
         Some(stop_words) => stop_words,
         None => fst::Set::default(),
     };
@@ -163,8 +148,8 @@ pub fn apply_documents_addition<'a, 'b>(
         let serializer = Serializer {
             txn: writer,
             schema: &schema,
-            document_store: documents_fields_store,
-            document_fields_counts: documents_fields_counts_store,
+            document_store: index.documents_fields,
+            document_fields_counts: index.documents_fields_counts,
             indexer: &mut indexer,
             ranked_map: &mut ranked_map,
             document_id,
@@ -175,40 +160,25 @@ pub fn apply_documents_addition<'a, 'b>(
 
     write_documents_addition_index(
         writer,
-        main_store,
-        postings_lists_store,
-        docs_words_store,
-        prefix_documents_cache_store,
-        prefix_postings_lists_cache_store,
+        index,
         &ranked_map,
         number_of_inserted_documents,
         indexer,
     )?;
 
-    compute_short_prefixes(
-        writer,
-        main_store,
-        postings_lists_store,
-        prefix_postings_lists_cache_store,
-    )?;
+    compute_short_prefixes(writer, index)?;
 
     Ok(())
 }
 
 pub fn apply_documents_partial_addition<'a, 'b>(
     writer: &'a mut heed::RwTxn<'b, MainT>,
-    main_store: store::Main,
-    documents_fields_store: store::DocumentsFields,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    postings_lists_store: store::PostingsLists,
-    docs_words_store: store::DocsWords,
-    prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &store::Index,
     addition: Vec<HashMap<String, serde_json::Value>>,
 ) -> MResult<()> {
     let mut documents_additions = HashMap::new();
 
-    let schema = match main_store.schema(writer)? {
+    let schema = match index.main.schema(writer)? {
         Some(schema) => schema,
         None => return Err(Error::SchemaMissing),
     };
@@ -225,7 +195,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
         let mut deserializer = Deserializer {
             document_id,
             reader: writer,
-            documents_fields: documents_fields_store,
+            documents_fields: index.documents_fields,
             schema: &schema,
             attributes: None,
         };
@@ -245,23 +215,14 @@ pub fn apply_documents_partial_addition<'a, 'b>(
     // 2. remove the documents posting lists
     let number_of_inserted_documents = documents_additions.len();
     let documents_ids = documents_additions.iter().map(|(id, _)| *id).collect();
-    apply_documents_deletion(
-        writer,
-        main_store,
-        documents_fields_store,
-        documents_fields_counts_store,
-        postings_lists_store,
-        docs_words_store,
-        prefix_postings_lists_cache_store,
-        documents_ids,
-    )?;
+    apply_documents_deletion(writer, index, documents_ids)?;
 
-    let mut ranked_map = match main_store.ranked_map(writer)? {
+    let mut ranked_map = match index.main.ranked_map(writer)? {
         Some(ranked_map) => ranked_map,
         None => RankedMap::default(),
     };
 
-    let stop_words = match main_store.stop_words_fst(writer)? {
+    let stop_words = match index.main.stop_words_fst(writer)? {
         Some(stop_words) => stop_words,
         None => fst::Set::default(),
     };
@@ -273,8 +234,8 @@ pub fn apply_documents_partial_addition<'a, 'b>(
         let serializer = Serializer {
             txn: writer,
             schema: &schema,
-            document_store: documents_fields_store,
-            document_fields_counts: documents_fields_counts_store,
+            document_store: index.documents_fields,
+            document_fields_counts: index.documents_fields_counts,
             indexer: &mut indexer,
             ranked_map: &mut ranked_map,
             document_id,
@@ -285,37 +246,19 @@ pub fn apply_documents_partial_addition<'a, 'b>(
 
     write_documents_addition_index(
         writer,
-        main_store,
-        postings_lists_store,
-        docs_words_store,
-        prefix_documents_cache_store,
-        prefix_postings_lists_cache_store,
+        index,
         &ranked_map,
         number_of_inserted_documents,
         indexer,
     )?;
 
-    compute_short_prefixes(
-        writer,
-        main_store,
-        postings_lists_store,
-        prefix_postings_lists_cache_store,
-    )?;
+    compute_short_prefixes(writer, index)?;
 
     Ok(())
 }
 
-pub fn reindex_all_documents(
-    writer: &mut heed::RwTxn<MainT>,
-    main_store: store::Main,
-    documents_fields_store: store::DocumentsFields,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    postings_lists_store: store::PostingsLists,
-    docs_words_store: store::DocsWords,
-    prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
-) -> MResult<()> {
-    let schema = match main_store.schema(writer)? {
+pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Index) -> MResult<()> {
+    let schema = match index.main.schema(writer)? {
         Some(schema) => schema,
         None => return Err(Error::SchemaMissing),
     };
@@ -324,21 +267,21 @@ pub fn reindex_all_documents(
 
     // 1. retrieve all documents ids
     let mut documents_ids_to_reindex = Vec::new();
-    for result in documents_fields_counts_store.documents_ids(writer)? {
+    for result in index.documents_fields_counts.documents_ids(writer)? {
         let document_id = result?;
         documents_ids_to_reindex.push(document_id);
     }
 
     // 2. remove the documents posting lists
-    main_store.put_words_fst(writer, &fst::Set::default())?;
-    main_store.put_ranked_map(writer, &ranked_map)?;
-    main_store.put_number_of_documents(writer, |_| 0)?;
-    postings_lists_store.clear(writer)?;
-    docs_words_store.clear(writer)?;
+    index.main.put_words_fst(writer, &fst::Set::default())?;
+    index.main.put_ranked_map(writer, &ranked_map)?;
+    index.main.put_number_of_documents(writer, |_| 0)?;
+    index.postings_lists.clear(writer)?;
+    index.docs_words.clear(writer)?;
 
     // 3. re-index chunks of documents (otherwise we make the borrow checker unhappy)
     for documents_ids in documents_ids_to_reindex.chunks(100) {
-        let stop_words = match main_store.stop_words_fst(writer)? {
+        let stop_words = match index.main.stop_words_fst(writer)? {
             Some(stop_words) => stop_words,
             None => fst::Set::default(),
         };
@@ -348,7 +291,7 @@ pub fn reindex_all_documents(
         let mut ram_store = HashMap::new();
 
         for document_id in documents_ids {
-            for result in documents_fields_store.document_fields(writer, *document_id)? {
+            for result in index.documents_fields.document_fields(writer, *document_id)? {
                 let (attr, bytes) = result?;
                 let value: serde_json::Value = serde_json::from_slice(bytes)?;
                 ram_store.insert((document_id, attr), value);
@@ -360,8 +303,8 @@
                 attr,
                 schema.props(attr),
                 *docid,
-                documents_fields_store,
-                documents_fields_counts_store,
+                index.documents_fields,
+                index.documents_fields_counts,
                 &mut indexer,
                 &mut ranked_map,
                 &value,
@@ -372,34 +315,21 @@
         // 4. write the new index in the main store
         write_documents_addition_index(
             writer,
-            main_store,
-            postings_lists_store,
-            docs_words_store,
-            prefix_documents_cache_store,
-            prefix_postings_lists_cache_store,
+            index,
             &ranked_map,
             number_of_inserted_documents,
             indexer,
         )?;
     }
 
-    compute_short_prefixes(
-        writer,
-        main_store,
-        postings_lists_store,
-        prefix_postings_lists_cache_store,
-    )?;
+    compute_short_prefixes(writer, index)?;
 
     Ok(())
 }
 
 pub fn write_documents_addition_index(
     writer: &mut heed::RwTxn<MainT>,
-    main_store: store::Main,
-    postings_lists_store: store::PostingsLists,
-    docs_words_store: store::DocsWords,
-    _prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &store::Index,
     ranked_map: &RankedMap,
     number_of_inserted_documents: usize,
     indexer: RawIndexer,
@@ -410,16 +340,16 @@
     for (word, delta_set) in indexed.words_doc_indexes {
         delta_words_builder.insert(&word).unwrap();
 
-        let set = match postings_lists_store.postings_list(writer, &word)? {
+        let set = match index.postings_lists.postings_list(writer, &word)? {
             Some(postings) => Union::new(&postings.matches, &delta_set).into_set_buf(),
             None => delta_set,
         };
 
-        postings_lists_store.put_postings_list(writer, &word, &set)?;
+        index.postings_lists.put_postings_list(writer, &word, &set)?;
     }
 
     for (id, words) in indexed.docs_words {
-        docs_words_store.put_doc_words(writer, id, &words)?;
+        index.docs_words.put_doc_words(writer, id, &words)?;
     }
 
     let delta_words = delta_words_builder
@@ -427,7 +357,7 @@
         .and_then(fst::Set::from_bytes)
         .unwrap();
 
-    let words = match main_store.words_fst(writer)? {
+    let words = match index.main.words_fst(writer)? {
         Some(words) => {
             let op = OpBuilder::new()
                 .add(words.stream())
@@ -444,16 +374,11 @@
         None => delta_words,
     };
 
-    main_store.put_words_fst(writer, &words)?;
-    main_store.put_ranked_map(writer, ranked_map)?;
-    main_store.put_number_of_documents(writer, |old| old + number_of_inserted_documents as u64)?;
+    index.main.put_words_fst(writer, &words)?;
+    index.main.put_ranked_map(writer, ranked_map)?;
+    index.main.put_number_of_documents(writer, |old| old + number_of_inserted_documents as u64)?;
 
-    compute_short_prefixes(
-        writer,
-        main_store,
-        postings_lists_store,
-        prefix_postings_lists_cache_store,
-    )?;
+    compute_short_prefixes(writer, index)?;
 
     Ok(())
 }
diff --git a/meilisearch-core/src/update/documents_deletion.rs b/meilisearch-core/src/update/documents_deletion.rs
index 110aa5ac0..6efa9bf01 100644
--- a/meilisearch-core/src/update/documents_deletion.rs
+++ b/meilisearch-core/src/update/documents_deletion.rs
@@ -85,22 +85,17 @@ pub fn push_documents_deletion(
 
 pub fn apply_documents_deletion(
     writer: &mut heed::RwTxn<MainT>,
-    main_store: store::Main,
-    documents_fields_store: store::DocumentsFields,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    postings_lists_store: store::PostingsLists,
-    docs_words_store: store::DocsWords,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &store::Index,
     deletion: Vec<DocumentId>,
 ) -> MResult<()> {
     let idset = SetBuf::from_dirty(deletion);
 
-    let schema = match main_store.schema(writer)? {
+    let schema = match index.main.schema(writer)? {
         Some(schema) => schema,
         None => return Err(Error::SchemaMissing),
     };
 
-    let mut ranked_map = match main_store.ranked_map(writer)? {
+    let mut ranked_map = match index.main.ranked_map(writer)? {
         Some(ranked_map) => ranked_map,
         None => RankedMap::default(),
     };
@@ -126,7 +121,7 @@ pub fn apply_documents_deletion(
             ranked_map.remove(id, *ranked_attr);
         }
 
-        if let Some(words) = docs_words_store.doc_words(writer, id)? {
+        if let Some(words) = index.docs_words.doc_words(writer, id)? {
             let mut stream = words.stream();
             while let Some(word) = stream.next() {
                 let word = word.to_vec();
@@ -143,21 +138,21 @@
     for (word, document_ids) in words_document_ids {
         let document_ids = SetBuf::from_dirty(document_ids);
 
-        if let Some(postings) = postings_lists_store.postings_list(writer, &word)? {
+        if let Some(postings) = index.postings_lists.postings_list(writer, &word)? {
             let op = DifferenceByKey::new(&postings.matches, &document_ids, |d| d.document_id, |id| *id);
             let doc_indexes = op.into_set_buf();
 
             if !doc_indexes.is_empty() {
-                postings_lists_store.put_postings_list(writer, &word, &doc_indexes)?;
+                index.postings_lists.put_postings_list(writer, &word, &doc_indexes)?;
             } else {
-                postings_lists_store.del_postings_list(writer, &word)?;
+                index.postings_lists.del_postings_list(writer, &word)?;
                 removed_words.insert(word);
             }
         }
 
         for id in document_ids {
-            documents_fields_counts_store.del_all_document_fields_counts(writer, id)?;
-            if documents_fields_store.del_all_document_fields(writer, id)? != 0 {
+            index.documents_fields_counts.del_all_document_fields_counts(writer, id)?;
+            if index.documents_fields.del_all_document_fields(writer, id)? != 0 {
                 deleted_documents.insert(id);
             }
         }
@@ -165,11 +160,11 @@
 
     let deleted_documents_len = deleted_documents.len() as u64;
     for id in deleted_documents {
-        docs_words_store.del_doc_words(writer, id)?;
+        index.docs_words.del_doc_words(writer, id)?;
     }
 
     let removed_words = fst::Set::from_iter(removed_words).unwrap();
-    let words = match main_store.words_fst(writer)? {
+    let words = match index.main.words_fst(writer)? {
         Some(words_set) => {
             let op = fst::set::OpBuilder::new()
                 .add(words_set.stream())
@@ -186,16 +181,11 @@
         None => fst::Set::default(),
     };
 
-    main_store.put_words_fst(writer, &words)?;
-    main_store.put_ranked_map(writer, &ranked_map)?;
-    main_store.put_number_of_documents(writer, |old| old - deleted_documents_len)?;
+    index.main.put_words_fst(writer, &words)?;
+    index.main.put_ranked_map(writer, &ranked_map)?;
+    index.main.put_number_of_documents(writer, |old| old - deleted_documents_len)?;
 
-    compute_short_prefixes(
-        writer,
-        main_store,
-        postings_lists_store,
-        prefix_postings_lists_cache_store,
-    )?;
+    compute_short_prefixes(writer, index)?;
 
     Ok(())
 }
diff --git a/meilisearch-core/src/update/mod.rs b/meilisearch-core/src/update/mod.rs
index 0ddd5f1be..47df4bf0a 100644
--- a/meilisearch-core/src/update/mod.rs
+++ b/meilisearch-core/src/update/mod.rs
@@ -257,16 +257,7 @@ pub fn update_task<'a, 'b>(
             let start = Instant::now();
 
             let update_type = UpdateType::ClearAll;
-            let result = apply_clear_all(
-                writer,
-                index.main,
-                index.documents_fields,
-                index.documents_fields_counts,
-                index.postings_lists,
-                index.docs_words,
-                index.prefix_documents_cache,
-                index.prefix_postings_lists_cache,
-            );
+            let result = apply_clear_all(writer, index);
 
             (update_type, result, start.elapsed())
         }
@@ -274,17 +265,7 @@ pub fn update_task<'a, 'b>(
             let start = Instant::now();
 
             let update_type = UpdateType::Schema;
-            let result = apply_schema_update(
-                writer,
-                &schema,
-                index.main,
-                index.documents_fields,
-                index.documents_fields_counts,
-                index.postings_lists,
-                index.docs_words,
-                index.prefix_documents_cache,
-                index.prefix_postings_lists_cache,
-            );
+            let result = apply_schema_update(writer, &schema, index);
 
             (update_type, result, start.elapsed())
         }
@@ -303,17 +284,7 @@ pub fn update_task<'a, 'b>(
                 number: documents.len(),
             };
 
-            let result = apply_documents_addition(
-                writer,
-                index.main,
-                index.documents_fields,
-                index.documents_fields_counts,
-                index.postings_lists,
-                index.docs_words,
-                index.prefix_documents_cache,
-                index.prefix_postings_lists_cache,
-                documents,
-            );
+            let result = apply_documents_addition(writer, index, documents);
 
             (update_type, result, start.elapsed())
         }
@@ -324,17 +295,7 @@ pub fn update_task<'a, 'b>(
                 number: documents.len(),
             };
 
-            let result = apply_documents_partial_addition(
-                writer,
-                index.main,
-                index.documents_fields,
-                index.documents_fields_counts,
-                index.postings_lists,
-                index.docs_words,
-                index.prefix_documents_cache,
-                index.prefix_postings_lists_cache,
-                documents,
-            );
+            let result = apply_documents_partial_addition(writer, index, documents);
 
             (update_type, result, start.elapsed())
         }
@@ -345,16 +306,7 @@ pub fn update_task<'a, 'b>(
                 number: documents.len(),
             };
 
-            let result = apply_documents_deletion(
-                writer,
-                index.main,
-                index.documents_fields,
-                index.documents_fields_counts,
-                index.postings_lists,
-                index.docs_words,
-                index.prefix_postings_lists_cache,
-                documents,
-            );
+            let result = apply_documents_deletion(writer, index, documents);
 
             (update_type, result, start.elapsed())
         }
@@ -388,17 +340,7 @@ pub fn update_task<'a, 'b>(
                 number: stop_words.len(),
            };
 
-            let result = apply_stop_words_deletion(
-                writer,
-                index.main,
-                index.documents_fields,
-                index.documents_fields_counts,
-                index.postings_lists,
-                index.docs_words,
-                index.prefix_documents_cache,
-                index.prefix_postings_lists_cache,
-                stop_words,
-            );
+            let result = apply_stop_words_deletion(writer, index, stop_words);
 
             (update_type, result, start.elapsed())
         }
@@ -421,21 +363,15 @@ pub fn update_task<'a, 'b>(
     Ok(status)
 }
 
-fn compute_short_prefixes(
-    writer: &mut heed::RwTxn<MainT>,
-    main_store: store::Main,
-    postings_lists_store: store::PostingsLists,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
-) -> MResult<()>
-{
+fn compute_short_prefixes(writer: &mut heed::RwTxn<MainT>, index: &store::Index) -> MResult<()> {
     // retrieve the words fst to compute all those prefixes
-    let words_fst = match main_store.words_fst(writer)? {
+    let words_fst = match index.main.words_fst(writer)? {
         Some(fst) => fst,
         None => return Ok(()),
     };
 
     // clear the prefixes
-    let pplc_store = prefix_postings_lists_cache_store;
+    let pplc_store = index.prefix_postings_lists_cache;
     pplc_store.clear(writer)?;
 
     for prefix_len in 1..=2 {
@@ -450,7 +386,7 @@ fn compute_short_prefixes(
             // to consider it as an exact match and not as a prefix (=).
             if input.len() <= prefix_len { continue }
 
-            if let Some(postings_list) = postings_lists_store.postings_list(writer, input)?.map(|p| p.matches.into_owned()) {
+            if let Some(postings_list) = index.postings_lists.postings_list(writer, input)?.map(|p| p.matches.into_owned()) {
                 let prefix = &input[..prefix_len];
 
                 let mut arr_prefix = [0; 4];
diff --git a/meilisearch-core/src/update/schema_update.rs b/meilisearch-core/src/update/schema_update.rs
index 3b3a79ac6..fd7b0f513 100644
--- a/meilisearch-core/src/update/schema_update.rs
+++ b/meilisearch-core/src/update/schema_update.rs
@@ -8,13 +8,7 @@ use crate::{error::UnsupportedOperation, store, MResult};
 pub fn apply_schema_update(
     writer: &mut heed::RwTxn<MainT>,
     new_schema: &Schema,
-    main_store: store::Main,
-    documents_fields_store: store::DocumentsFields,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    postings_lists_store: store::PostingsLists,
-    docs_words_store: store::DocsWords,
-    prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &store::Index,
 ) -> MResult<()> {
     use UnsupportedOperation::{
         CanOnlyIntroduceNewSchemaAttributesAtEnd, CannotRemoveSchemaAttribute,
@@ -23,7 +17,7 @@ pub fn apply_schema_update(
 
     let mut need_full_reindexing = false;
 
-    if let Some(old_schema) = main_store.schema(writer)? {
+    if let Some(old_schema) = index.main.schema(writer)? {
         for diff in meilisearch_schema::diff(&old_schema, new_schema) {
             match diff {
                 Diff::IdentChange { .. } => return Err(CannotUpdateSchemaIdentifier.into()),
@@ -47,19 +41,10 @@ pub fn apply_schema_update(
         }
     }
 
-    main_store.put_schema(writer, new_schema)?;
+    index.main.put_schema(writer, new_schema)?;
 
     if need_full_reindexing {
-        reindex_all_documents(
-            writer,
-            main_store,
-            documents_fields_store,
-            documents_fields_counts_store,
-            postings_lists_store,
-            docs_words_store,
-            prefix_documents_cache_store,
-            prefix_postings_lists_cache_store,
-        )?
+        reindex_all_documents(writer, index)?
     }
 
     Ok(())
@@ -67,14 +52,13 @@ pub fn apply_schema_update(
 
 pub fn push_schema_update(
     writer: &mut heed::RwTxn<UpdateT>,
-    updates_store: store::Updates,
-    updates_results_store: store::UpdatesResults,
+    index: &store::Index,
     schema: Schema,
 ) -> MResult<u64> {
-    let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;
+    let last_update_id = next_update_id(writer, index.updates, index.updates_results)?;
 
     let update = Update::schema(schema);
-    updates_store.put_update(writer, last_update_id, &update)?;
+    index.updates.put_update(writer, last_update_id, &update)?;
 
     Ok(last_update_id)
 }
diff --git a/meilisearch-core/src/update/stop_words_deletion.rs b/meilisearch-core/src/update/stop_words_deletion.rs
index 29ec8edf6..39af132ce 100644
--- a/meilisearch-core/src/update/stop_words_deletion.rs
+++ b/meilisearch-core/src/update/stop_words_deletion.rs
@@ -63,13 +63,7 @@ pub fn push_stop_words_deletion(
 
 pub fn apply_stop_words_deletion(
     writer: &mut heed::RwTxn<MainT>,
-    main_store: store::Main,
-    documents_fields_store: store::DocumentsFields,
-    documents_fields_counts_store: store::DocumentsFieldsCounts,
-    postings_lists_store: store::PostingsLists,
-    docs_words_store: store::DocsWords,
-    prefix_documents_cache_store: store::PrefixDocumentsCache,
-    prefix_postings_lists_cache_store: store::PrefixPostingsListsCache,
+    index: &store::Index,
     deletion: BTreeSet<String>,
 ) -> MResult<()> {
     let mut stop_words_builder = SetBuilder::memory();
@@ -85,7 +79,7 @@ pub fn apply_stop_words_deletion(
         .unwrap();
 
     // now we delete all of these stop words from the main store
-    let stop_words_fst = main_store.stop_words_fst(writer)?.unwrap_or_default();
+    let stop_words_fst = index.main.stop_words_fst(writer)?.unwrap_or_default();
 
     let op = OpBuilder::new()
         .add(&stop_words_fst)
@@ -99,22 +93,13 @@
         .and_then(fst::Set::from_bytes)
         .unwrap();
 
-    main_store.put_stop_words_fst(writer, &stop_words_fst)?;
+    index.main.put_stop_words_fst(writer, &stop_words_fst)?;
 
     // now that we have setup the stop words
     // lets reindex everything...
-    if let Ok(number) = main_store.number_of_documents(writer) {
+    if let Ok(number) = index.main.number_of_documents(writer) {
         if number > 0 {
-            reindex_all_documents(
-                writer,
-                main_store,
-                documents_fields_store,
-                documents_fields_counts_store,
-                postings_lists_store,
-                docs_words_store,
-                prefix_documents_cache_store,
-                prefix_postings_lists_cache_store,
-            )?;
+            reindex_all_documents(writer, index)?;
         }
     }