mirror of https://github.com/meilisearch/MeiliSearch
synced 2025-07-03 03:47:02 +02:00

rebase from master

parent 2143226f04
commit dc6907e748

29 changed files with 92 additions and 105 deletions
@@ -109,7 +109,7 @@ pub fn apply_documents_addition<'a, 'b>(
 ) -> MResult<()> {
     let mut documents_additions = HashMap::new();
 
-    let schema = match index.main.schema(writer)? {
+    let mut schema = match index.main.schema(writer)? {
         Some(schema) => schema,
         None => return Err(Error::SchemaMissing),
     };
@@ -147,7 +147,7 @@ pub fn apply_documents_addition<'a, 'b>(
     for (document_id, document) in documents_additions {
         let serializer = Serializer {
             txn: writer,
-            schema: &schema,
+            schema: &mut schema,
             document_store: index.documents_fields,
             document_fields_counts: index.documents_fields_counts,
             indexer: &mut indexer,
@@ -166,7 +166,7 @@ pub fn apply_documents_addition<'a, 'b>(
         indexer,
     )?;
 
-    compute_short_prefixes(writer, index)?;
+    index.main.put_schema(writer, &schema)?;
 
     Ok(())
 }
@@ -178,7 +178,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
 ) -> MResult<()> {
     let mut documents_additions = HashMap::new();
 
-    let schema = match index.main.schema(writer)? {
+    let mut schema = match index.main.schema(writer)? {
         Some(schema) => schema,
         None => return Err(Error::SchemaMissing),
     };
@@ -233,7 +233,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
     for (document_id, document) in documents_additions {
         let serializer = Serializer {
             txn: writer,
-            schema: &schema,
+            schema: &mut schema,
             document_store: index.documents_fields,
             document_fields_counts: index.documents_fields_counts,
             indexer: &mut indexer,
@@ -252,7 +252,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
         indexer,
     )?;
 
-    compute_short_prefixes(writer, index)?;
+    index.main.put_schema(writer, &schema)?;
 
    Ok(())
 }
@@ -292,7 +292,7 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
 
     for document_id in documents_ids {
         for result in index.documents_fields.document_fields(writer, *document_id)? {
-            let (attr, bytes) = result?;
+            let (field_id, bytes) = result?;
             let value: serde_json::Value = serde_json::from_slice(bytes)?;
             ram_store.insert((document_id, field_id), value);
         }
@@ -322,7 +322,7 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
         )?;
     }
 
-    compute_short_prefixes(writer, index)?;
+    index.main.put_schema(writer, &schema)?;
 
     Ok(())
 }
@@ -1 +0,0 @@
@@ -130,22 +130,10 @@ pub fn apply_settings_update(
         _ => (),
     }
 
-    let main_store = index.main;
-    let documents_fields_store = index.documents_fields;
-    let documents_fields_counts_store = index.documents_fields_counts;
-    let postings_lists_store = index.postings_lists;
-    let docs_words_store = index.docs_words;
-
     if must_reindex {
-        reindex_all_documents(
-            writer,
-            main_store,
-            documents_fields_store,
-            documents_fields_counts_store,
-            postings_lists_store,
-            docs_words_store,
-        )?;
+        reindex_all_documents(writer, index)?;
     }
 
     if let UpdateState::Clear = settings.identifier {
         index.main.delete_schema(writer)?;
     }
@@ -158,10 +146,7 @@ pub fn apply_stop_words_update(
     stop_words: BTreeSet<String>,
 ) -> MResult<bool> {
 
-    let main_store = index.main;
-    let mut must_reindex = false;
-
-    let old_stop_words: BTreeSet<String> = main_store
+    let old_stop_words: BTreeSet<String> = index.main
         .stop_words_fst(writer)?
         .unwrap_or_default()
         .stream()
@@ -184,10 +169,9 @@ pub fn apply_stop_words_update(
             index,
             deletion
         )?;
-        must_reindex = true;
+        return Ok(true)
     }
 
-    Ok(must_reindex)
+    Ok(false)
 }
 
 fn apply_stop_words_addition(
@@ -256,8 +240,6 @@ fn apply_stop_words_deletion(
     deletion: BTreeSet<String>,
 ) -> MResult<()> {
 
-    let main_store = index.main;
-
     let mut stop_words_builder = SetBuilder::memory();
 
     for word in deletion {
@@ -271,7 +253,7 @@ fn apply_stop_words_deletion(
         .unwrap();
 
     // now we delete all of these stop words from the main store
-    let stop_words_fst = main_store.stop_words_fst(writer)?.unwrap_or_default();
+    let stop_words_fst = index.main.stop_words_fst(writer)?.unwrap_or_default();
 
     let op = OpBuilder::new()
         .add(&stop_words_fst)
@@ -285,7 +267,7 @@ fn apply_stop_words_deletion(
         .and_then(fst::Set::from_bytes)
         .unwrap();
 
-    Ok(main_store.put_stop_words_fst(writer, &stop_words_fst)?)
+    Ok(index.main.put_stop_words_fst(writer, &stop_words_fst)?)
 }
 
 pub fn apply_synonyms_update(
@@ -1 +0,0 @@
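Note: the hunks above repeat the same pattern in every function. The locally bound store handles (main_store, documents_fields_store, and so on) disappear in favour of going through index directly, reindex_all_documents now takes only the writer and the index, and the schema is made mutable during indexing and written back with index.main.put_schema once indexing finishes. The sketch below illustrates that shape only; Index, MainStore, Schema and the simplified apply_documents_addition here are hypothetical stand-ins, not MeiliSearch's real API, and the whole block is a minimal self-contained illustration under those assumptions.

// Hypothetical, simplified types standing in for MeiliSearch's real ones.
use std::collections::{BTreeSet, HashMap};

#[derive(Debug, Default, Clone)]
struct Schema {
    fields: BTreeSet<String>,
}

#[derive(Debug, Default)]
struct MainStore {
    schema: Option<Schema>,
}

#[derive(Debug, Default)]
struct Index {
    main: MainStore,
    documents_fields: HashMap<(u64, String), String>,
}

// After the refactor, helpers receive the whole `Index` instead of each store
// as a separate argument, mirroring `reindex_all_documents(writer, index)?`.
fn apply_documents_addition(
    index: &mut Index,
    additions: Vec<HashMap<String, String>>,
) -> Result<(), String> {
    // `let mut schema`: indexing may register fields that are not yet known.
    let mut schema = index
        .main
        .schema
        .clone()
        .ok_or_else(|| "schema missing".to_string())?;

    for (document_id, document) in additions.into_iter().enumerate() {
        for (field, value) in document {
            schema.fields.insert(field.clone());
            index
                .documents_fields
                .insert((document_id as u64, field), value);
        }
    }

    // Mirrors `index.main.put_schema(writer, &schema)?`: the possibly extended
    // schema is persisted once indexing is done.
    index.main.schema = Some(schema);
    Ok(())
}

fn main() {
    let mut index = Index::default();
    index.main.schema = Some(Schema::default());

    let mut doc = HashMap::new();
    doc.insert("title".to_string(), "Hello".to_string());
    doc.insert("year".to_string(), "2020".to_string());

    apply_documents_addition(&mut index, vec![doc]).unwrap();
    assert!(index.main.schema.as_ref().unwrap().fields.contains("title"));
    println!("{} stored fields", index.documents_fields.len());
}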