Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-11-23 13:24:27 +01:00)

chore: Simplify the update application

parent 4deee93a55
commit b0be06540a

This commit replaces the two-phase Final* builder types (FinalDocumentsAddition, FinalDocumentsDeletion, FinalSynonymsAddition, FinalSynonymsDeletion) with free apply_* functions (apply_documents_addition, apply_documents_deletion, apply_synonyms_addition, apply_synonyms_deletion) that the update system calls directly. It also generalizes SynonymsAddition::add_synonym from I: Iterator to I: IntoIterator, wraps pushed document deletions and synonym updates in the Update::<()> envelope before serializing them, and keys update results by the raw update key instead of a decoded u64 id. The per-file diffs follow; file names are inferred from the module imports, since the page's file headers were lost in mirroring.
documents_addition.rs

@@ -1,15 +1,15 @@
 use std::collections::HashSet;
 use std::sync::Arc;
 
-use meilidb_core::DocumentId;
 use fst::{SetBuilder, set::OpBuilder};
 use sdset::{SetOperation, duo::Union};
+use serde::Serialize;
 
 use crate::indexer::Indexer;
 use crate::serde::{extract_document_id, Serializer, RamDocumentStore};
 use crate::RankedMap;
 
-use super::{Error, Index, FinalDocumentsDeletion};
+use super::{Error, Index, apply_documents_deletion};
 use super::index::Cache;
 
 pub struct DocumentsAddition<'a, D> {

@@ -33,123 +33,106 @@ impl<'a, D> DocumentsAddition<'a, D> {
     }
 }
 
-pub struct FinalDocumentsAddition<'a> {
-    inner: &'a Index,
-    document_ids: HashSet<DocumentId>,
-    document_store: RamDocumentStore,
-    indexer: Indexer,
-    ranked_map: RankedMap,
-}
-
-impl<'a> FinalDocumentsAddition<'a> {
-    pub fn new(inner: &'a Index, ranked_map: RankedMap) -> FinalDocumentsAddition<'a> {
-        FinalDocumentsAddition {
-            inner,
-            document_ids: HashSet::new(),
-            document_store: RamDocumentStore::new(),
-            indexer: Indexer::new(),
-            ranked_map,
-        }
-    }
-
-    pub fn update_document<D>(&mut self, document: D) -> Result<(), Error>
-    where D: serde::Serialize,
-    {
-        let schema = &self.inner.schema();
-        let identifier = schema.identifier_name();
+pub fn apply_documents_addition(
+    index: &Index,
+    mut ranked_map: RankedMap,
+    addition: Vec<serde_json::Value>,
+) -> Result<(), Error>
+{
+    let mut document_ids = HashSet::new();
+    let mut document_store = RamDocumentStore::new();
+    let mut indexer = Indexer::new();
+
+    let schema = &index.schema();
+    let identifier = schema.identifier_name();
 
+    for document in addition {
         let document_id = match extract_document_id(identifier, &document)? {
             Some(id) => id,
             None => return Err(Error::MissingDocumentId),
         };
 
         // 1. store the document id for future deletion
-        self.document_ids.insert(document_id);
+        document_ids.insert(document_id);
 
         // 2. index the document fields in ram stores
         let serializer = Serializer {
             schema,
-            document_store: &mut self.document_store,
-            indexer: &mut self.indexer,
-            ranked_map: &mut self.ranked_map,
+            document_store: &mut document_store,
+            indexer: &mut indexer,
+            ranked_map: &mut ranked_map,
             document_id,
         };
 
         document.serialize(serializer)?;
-
-        Ok(())
     }
 
-    pub fn finalize(self) -> Result<(), Error> {
-        let ref_index = self.inner.as_ref();
-        let docs_words = ref_index.docs_words_index;
-        let documents = ref_index.documents_index;
-        let main = ref_index.main_index;
-        let words = ref_index.words_index;
-
-        // 1. remove the previous documents match indexes
-        let mut documents_deletion = FinalDocumentsDeletion::new(self.inner, self.ranked_map.clone());
-        documents_deletion.extend(self.document_ids);
-        documents_deletion.finalize()?;
-
-        // 2. insert new document attributes in the database
-        for ((id, attr), value) in self.document_store.into_inner() {
-            documents.set_document_field(id, attr, value)?;
-        }
-
-        let indexed = self.indexer.build();
-        let mut delta_words_builder = SetBuilder::memory();
-
-        for (word, delta_set) in indexed.words_doc_indexes {
-            delta_words_builder.insert(&word).unwrap();
-
-            let set = match words.doc_indexes(&word)? {
-                Some(set) => Union::new(&set, &delta_set).into_set_buf(),
-                None => delta_set,
-            };
-
-            words.set_doc_indexes(&word, &set)?;
-        }
-
-        for (id, words) in indexed.docs_words {
-            docs_words.set_doc_words(id, &words)?;
-        }
-
-        let delta_words = delta_words_builder
-            .into_inner()
-            .and_then(fst::Set::from_bytes)
-            .unwrap();
-
-        let words = match main.words_set()? {
-            Some(words) => {
-                let op = OpBuilder::new()
-                    .add(words.stream())
-                    .add(delta_words.stream())
-                    .r#union();
-
-                let mut words_builder = SetBuilder::memory();
-                words_builder.extend_stream(op).unwrap();
-                words_builder
-                    .into_inner()
-                    .and_then(fst::Set::from_bytes)
-                    .unwrap()
-            },
-            None => delta_words,
-        };
-
-        main.set_words_set(&words)?;
-        main.set_ranked_map(&self.ranked_map)?;
-
-        // update the "consistent" view of the Index
-        let cache = ref_index.cache;
-        let words = Arc::new(words);
-        let ranked_map = self.ranked_map;
-        let synonyms = cache.synonyms.clone();
-        let schema = cache.schema.clone();
-
-        let cache = Cache { words, synonyms, schema, ranked_map };
-        self.inner.cache.store(Arc::new(cache));
-
-        Ok(())
-    }
+    let ref_index = index.as_ref();
+    let docs_words = ref_index.docs_words_index;
+    let documents = ref_index.documents_index;
+    let main = ref_index.main_index;
+    let words = ref_index.words_index;
+
+    // 1. remove the previous documents match indexes
+    let document_ids = document_ids.into_iter().collect();
+    apply_documents_deletion(index, ranked_map.clone(), document_ids)?;
+
+    // 2. insert new document attributes in the database
+    for ((id, attr), value) in document_store.into_inner() {
+        documents.set_document_field(id, attr, value)?;
+    }
+
+    let indexed = indexer.build();
+    let mut delta_words_builder = SetBuilder::memory();
+
+    for (word, delta_set) in indexed.words_doc_indexes {
+        delta_words_builder.insert(&word).unwrap();
+
+        let set = match words.doc_indexes(&word)? {
+            Some(set) => Union::new(&set, &delta_set).into_set_buf(),
+            None => delta_set,
+        };
+
+        words.set_doc_indexes(&word, &set)?;
+    }
+
+    for (id, words) in indexed.docs_words {
+        docs_words.set_doc_words(id, &words)?;
+    }
+
+    let delta_words = delta_words_builder
+        .into_inner()
+        .and_then(fst::Set::from_bytes)
+        .unwrap();
+
+    let words = match main.words_set()? {
+        Some(words) => {
+            let op = OpBuilder::new()
+                .add(words.stream())
+                .add(delta_words.stream())
+                .r#union();
+
+            let mut words_builder = SetBuilder::memory();
+            words_builder.extend_stream(op).unwrap();
+            words_builder
+                .into_inner()
+                .and_then(fst::Set::from_bytes)
+                .unwrap()
+        },
+        None => delta_words,
+    };
+
+    main.set_words_set(&words)?;
+    main.set_ranked_map(&ranked_map)?;
+
+    // update the "consistent" view of the Index
+    let cache = ref_index.cache;
+    let words = Arc::new(words);
+    let synonyms = cache.synonyms.clone();
+    let schema = cache.schema.clone();
+
+    let cache = Cache { words, synonyms, schema, ranked_map };
+    index.cache.store(Arc::new(cache));
+
+    Ok(())
 }
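The core of this change is mechanical: a stateful builder whose fields accumulate across update_document calls and are flushed in finalize() becomes one free function whose state lives in locals. A minimal, self-contained sketch of that shape, using hypothetical stand-ins (Index, Error, plain u64 ids) rather than the real meilidb types:

    use std::collections::HashSet;

    #[derive(Debug)]
    struct Error;

    struct Index; // stand-in for meilidb's Index

    // Before: state accumulated across method calls, applied in finalize().
    struct FinalAddition<'a> {
        index: &'a Index,
        ids: HashSet<u64>,
    }

    impl<'a> FinalAddition<'a> {
        fn new(index: &'a Index) -> FinalAddition<'a> {
            FinalAddition { index, ids: HashSet::new() }
        }

        fn update_document(&mut self, id: u64) -> Result<(), Error> {
            self.ids.insert(id);
            Ok(())
        }

        fn finalize(self) -> Result<(), Error> {
            let _ = (self.index, self.ids); // ... write the ids to the index ...
            Ok(())
        }
    }

    // After: a single free function owning all intermediate state as locals.
    fn apply_addition(index: &Index, addition: Vec<u64>) -> Result<(), Error> {
        let mut ids = HashSet::new();
        for id in addition {
            ids.insert(id);
        }
        let _ = (index, ids); // ... write the ids to the index ...
        Ok(())
    }

    fn main() -> Result<(), Error> {
        let index = Index;

        // old call shape: build, feed, finalize
        let mut addition = FinalAddition::new(&index);
        addition.update_document(1)?;
        addition.finalize()?;

        // new call shape: one call
        apply_addition(&index, vec![1])
    }

The benefit is that there is no longer a half-built value whose finalize() a caller can forget: the whole update either runs to completion or returns an error.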
documents_deletion.rs

@@ -21,10 +21,25 @@ impl<'a> DocumentsDeletion<'a> {
         DocumentsDeletion { index, documents: Vec::new() }
     }
 
-    pub fn delete_document(&mut self, document_id: DocumentId) {
+    pub fn delete_document_by_id(&mut self, document_id: DocumentId) {
         self.documents.push(document_id);
     }
 
+    pub fn delete_document<D>(&mut self, document: D) -> Result<(), Error>
+    where D: serde::Serialize,
+    {
+        let schema = self.index.schema();
+        let identifier = schema.identifier_name();
+        let document_id = match extract_document_id(identifier, &document)? {
+            Some(id) => id,
+            None => return Err(Error::MissingDocumentId),
+        };
+
+        self.delete_document_by_id(document_id);
+
+        Ok(())
+    }
+
     pub fn finalize(self) -> Result<u64, Error> {
         self.index.push_documents_deletion(self.documents)
     }

@@ -36,129 +51,95 @@ impl Extend<DocumentId> for DocumentsDeletion<'_> {
     }
 }
 
-pub struct FinalDocumentsDeletion<'a> {
-    inner: &'a Index,
-    documents: Vec<DocumentId>,
-    ranked_map: RankedMap,
-}
-
-impl<'a> FinalDocumentsDeletion<'a> {
-    pub fn new(inner: &'a Index, ranked_map: RankedMap) -> FinalDocumentsDeletion {
-        FinalDocumentsDeletion { inner, documents: Vec::new(), ranked_map }
-    }
-
-    fn delete_document_by_id(&mut self, id: DocumentId) {
-        self.documents.push(id);
-    }
-
-    pub fn delete_document<D>(&mut self, document: D) -> Result<(), Error>
-    where D: serde::Serialize,
-    {
-        let schema = &self.inner.schema();
-        let identifier = schema.identifier_name();
-
-        let document_id = match extract_document_id(identifier, &document)? {
-            Some(id) => id,
-            None => return Err(Error::MissingDocumentId),
-        };
-
-        self.delete_document_by_id(document_id);
-
-        Ok(())
-    }
-
-    pub fn finalize(mut self) -> Result<(), Error> {
-        let ref_index = self.inner.as_ref();
-        let schema = self.inner.schema();
-        let docs_words = ref_index.docs_words_index;
-        let documents = ref_index.documents_index;
-        let main = ref_index.main_index;
-        let words = ref_index.words_index;
-
-        let idset = SetBuf::from_dirty(self.documents);
-
-        // collect the ranked attributes according to the schema
-        let ranked_attrs: Vec<_> = schema.iter()
-            .filter_map(|(_, attr, prop)| {
-                if prop.is_ranked() { Some(attr) } else { None }
-            })
-            .collect();
-
-        let mut words_document_ids = HashMap::new();
-        for id in idset {
-            // remove all the ranked attributes from the ranked_map
-            for ranked_attr in &ranked_attrs {
-                self.ranked_map.remove(id, *ranked_attr);
-            }
-
-            if let Some(words) = docs_words.doc_words(id)? {
-                let mut stream = words.stream();
-                while let Some(word) = stream.next() {
-                    let word = word.to_vec();
-                    words_document_ids.entry(word).or_insert_with(Vec::new).push(id);
-                }
-            }
-        }
-
-        let mut removed_words = BTreeSet::new();
-        for (word, document_ids) in words_document_ids {
-            let document_ids = SetBuf::from_dirty(document_ids);
-
-            if let Some(doc_indexes) = words.doc_indexes(&word)? {
-                let op = DifferenceByKey::new(&doc_indexes, &document_ids, |d| d.document_id, |id| *id);
-                let doc_indexes = op.into_set_buf();
-
-                if !doc_indexes.is_empty() {
-                    words.set_doc_indexes(&word, &doc_indexes)?;
-                } else {
-                    words.del_doc_indexes(&word)?;
-                    removed_words.insert(word);
-                }
-            }
-
-            for id in document_ids {
-                documents.del_all_document_fields(id)?;
-                docs_words.del_doc_words(id)?;
-            }
-        }
-
-        let removed_words = fst::Set::from_iter(removed_words).unwrap();
-        let words = match main.words_set()? {
-            Some(words_set) => {
-                let op = fst::set::OpBuilder::new()
-                    .add(words_set.stream())
-                    .add(removed_words.stream())
-                    .difference();
-
-                let mut words_builder = SetBuilder::memory();
-                words_builder.extend_stream(op).unwrap();
-                words_builder
-                    .into_inner()
-                    .and_then(fst::Set::from_bytes)
-                    .unwrap()
-            },
-            None => fst::Set::default(),
-        };
-
-        main.set_words_set(&words)?;
-        main.set_ranked_map(&self.ranked_map)?;
-
-        // update the "consistent" view of the Index
-        let cache = ref_index.cache;
-        let words = Arc::new(words);
-        let ranked_map = self.ranked_map;
-        let synonyms = cache.synonyms.clone();
-        let schema = cache.schema.clone();
-
-        let cache = Cache { words, synonyms, schema, ranked_map };
-        self.inner.cache.store(Arc::new(cache));
-
-        Ok(())
-    }
-}
-
-impl Extend<DocumentId> for FinalDocumentsDeletion<'_> {
-    fn extend<T: IntoIterator<Item=DocumentId>>(&mut self, iter: T) {
-        self.documents.extend(iter)
-    }
+pub fn apply_documents_deletion(
+    index: &Index,
+    mut ranked_map: RankedMap,
+    deletion: Vec<DocumentId>,
+) -> Result<(), Error>
+{
+    let ref_index = index.as_ref();
+    let schema = index.schema();
+    let docs_words = ref_index.docs_words_index;
+    let documents = ref_index.documents_index;
+    let main = ref_index.main_index;
+    let words = ref_index.words_index;
+
+    let idset = SetBuf::from_dirty(deletion);
+
+    // collect the ranked attributes according to the schema
+    let ranked_attrs: Vec<_> = schema.iter()
+        .filter_map(|(_, attr, prop)| {
+            if prop.is_ranked() { Some(attr) } else { None }
+        })
+        .collect();
+
+    let mut words_document_ids = HashMap::new();
+    for id in idset {
+        // remove all the ranked attributes from the ranked_map
+        for ranked_attr in &ranked_attrs {
+            ranked_map.remove(id, *ranked_attr);
+        }
+
+        if let Some(words) = docs_words.doc_words(id)? {
+            let mut stream = words.stream();
+            while let Some(word) = stream.next() {
+                let word = word.to_vec();
+                words_document_ids.entry(word).or_insert_with(Vec::new).push(id);
+            }
+        }
+    }
+
+    let mut removed_words = BTreeSet::new();
+    for (word, document_ids) in words_document_ids {
+        let document_ids = SetBuf::from_dirty(document_ids);
+
+        if let Some(doc_indexes) = words.doc_indexes(&word)? {
+            let op = DifferenceByKey::new(&doc_indexes, &document_ids, |d| d.document_id, |id| *id);
+            let doc_indexes = op.into_set_buf();
+
+            if !doc_indexes.is_empty() {
+                words.set_doc_indexes(&word, &doc_indexes)?;
+            } else {
+                words.del_doc_indexes(&word)?;
+                removed_words.insert(word);
+            }
+        }
+
+        for id in document_ids {
+            documents.del_all_document_fields(id)?;
+            docs_words.del_doc_words(id)?;
+        }
+    }
+
+    let removed_words = fst::Set::from_iter(removed_words).unwrap();
+    let words = match main.words_set()? {
+        Some(words_set) => {
+            let op = fst::set::OpBuilder::new()
+                .add(words_set.stream())
+                .add(removed_words.stream())
+                .difference();
+
+            let mut words_builder = SetBuilder::memory();
+            words_builder.extend_stream(op).unwrap();
+            words_builder
+                .into_inner()
+                .and_then(fst::Set::from_bytes)
+                .unwrap()
+        },
+        None => fst::Set::default(),
+    };
+
+    main.set_words_set(&words)?;
+    main.set_ranked_map(&ranked_map)?;
+
+    // update the "consistent" view of the Index
+    let cache = ref_index.cache;
+    let words = Arc::new(words);
+    let synonyms = cache.synonyms.clone();
+    let schema = cache.schema.clone();
+
+    let cache = Cache { words, synonyms, schema, ranked_map };
+    index.cache.store(Arc::new(cache));
+
+    Ok(())
 }
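DocumentsDeletion now exposes both entry points: delete_document_by_id for a known id, and a serde-based delete_document that extracts the identifier and delegates. A sketch of that delegation shape, with a hypothetical Document trait standing in for the real extract_document_id machinery:

    #[derive(Debug)]
    struct Error; // stand-in for Error::MissingDocumentId

    type DocumentId = u64;

    // Stand-in for the serde-based id extraction of the real code.
    trait Document {
        fn identifier(&self) -> Option<DocumentId>;
    }

    struct DocumentsDeletion {
        documents: Vec<DocumentId>,
    }

    impl DocumentsDeletion {
        fn delete_document_by_id(&mut self, document_id: DocumentId) {
            self.documents.push(document_id);
        }

        // Deleting by full document is just "extract the id, then delegate".
        fn delete_document<D: Document>(&mut self, document: D) -> Result<(), Error> {
            let document_id = document.identifier().ok_or(Error)?;
            self.delete_document_by_id(document_id);
            Ok(())
        }
    }

    struct Movie { id: DocumentId }

    impl Document for Movie {
        fn identifier(&self) -> Option<DocumentId> { Some(self.id) }
    }

    fn main() -> Result<(), Error> {
        let mut deletion = DocumentsDeletion { documents: Vec::new() };
        deletion.delete_document_by_id(7);
        deletion.delete_document(Movie { id: 8 })?;
        assert_eq!(deletion.documents, vec![7, 8]);
        Ok(())
    }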
index/mod.rs

@@ -1,5 +1,4 @@
 use std::collections::{HashSet, BTreeMap};
-use std::convert::TryInto;
 use std::sync::Arc;
 use std::thread;
 

@@ -24,10 +23,10 @@ use self::synonyms_index::SynonymsIndex;
 use self::words_index::WordsIndex;
 
 use super::{
-    DocumentsAddition, FinalDocumentsAddition,
-    DocumentsDeletion, FinalDocumentsDeletion,
-    SynonymsAddition, FinalSynonymsAddition,
-    SynonymsDeletion, FinalSynonymsDeletion,
+    DocumentsAddition, DocumentsDeletion,
+    SynonymsAddition, SynonymsDeletion,
+    apply_documents_addition, apply_documents_deletion,
+    apply_synonyms_addition, apply_synonyms_deletion,
 };
 
 mod custom_settings_index;

@@ -71,33 +70,23 @@ fn spawn_update_system(index: Index) -> thread::JoinHandle<()> {
         let results = &index.updates_results_index;
         (updates, results).transaction(|(updates, results)| {
             let update = updates.remove(&key)?.unwrap();
-            let array_id = key.as_ref().try_into().unwrap();
-            let id = u64::from_be_bytes(array_id);
 
             // this is an emulation of the try block (#31436)
             let result: Result<(), Error> = (|| {
                 match bincode::deserialize(&update)? {
                     UpdateOwned::DocumentsAddition(documents) => {
                         let ranked_map = index.cache.load().ranked_map.clone();
-                        let mut addition = FinalDocumentsAddition::new(&index, ranked_map);
-                        for document in documents {
-                            addition.update_document(document)?;
-                        }
-                        addition.finalize()?;
+                        apply_documents_addition(&index, ranked_map, documents)?;
                     },
                     UpdateOwned::DocumentsDeletion(documents) => {
                         let ranked_map = index.cache.load().ranked_map.clone();
-                        let mut deletion = FinalDocumentsDeletion::new(&index, ranked_map);
-                        deletion.extend(documents);
-                        deletion.finalize()?;
+                        apply_documents_deletion(&index, ranked_map, documents)?;
                     },
                     UpdateOwned::SynonymsAddition(synonyms) => {
-                        let addition = FinalSynonymsAddition::from_map(&index, synonyms);
-                        addition.finalize()?;
+                        apply_synonyms_addition(&index, synonyms)?;
                     },
                     UpdateOwned::SynonymsDeletion(synonyms) => {
-                        let deletion = FinalSynonymsDeletion::from_map(&index, synonyms);
-                        deletion.finalize()?;
+                        apply_synonyms_deletion(&index, synonyms)?;
                     },
                 }
                 Ok(())

@@ -105,7 +94,7 @@ fn spawn_update_system(index: Index) -> thread::JoinHandle<()> {
 
             let result = result.map_err(|e| e.to_string());
             let value = bincode::serialize(&result).unwrap();
-            results.insert(&array_id, value)
+            results.insert(&key, value)
         })
         .unwrap();
     }

@@ -310,7 +299,12 @@ impl Index {
         self.raw_push_update(update)
     }
 
-    pub(crate) fn push_documents_deletion(&self, deletion: Vec<DocumentId>) -> Result<u64, Error> {
+    pub(crate) fn push_documents_deletion(
+        &self,
+        deletion: Vec<DocumentId>,
+    ) -> Result<u64, Error>
+    {
+        let deletion = Update::<()>::DocumentsDeletion(deletion);
         let update = bincode::serialize(&deletion)?;
         self.raw_push_update(update)
     }

@@ -320,6 +314,7 @@ impl Index {
         addition: BTreeMap<String, Vec<String>>,
     ) -> Result<u64, Error>
     {
+        let addition = Update::<()>::SynonymsAddition(addition);
         let update = bincode::serialize(&addition)?;
         self.raw_push_update(update)
     }

@@ -329,6 +324,7 @@ impl Index {
         deletion: BTreeMap<String, Option<Vec<String>>>,
     ) -> Result<u64, Error>
     {
+        let deletion = Update::<()>::SynonymsDeletion(deletion);
         let update = bincode::serialize(&deletion)?;
         self.raw_push_update(update)
     }
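The update loop keeps the closure trick called out in its own comment ("this is an emulation of the try block (#31436)"): an immediately-invoked closure gives the ? operator a place to short-circuit, and the resulting Result is then recorded rather than unwrapped, so a failed update cannot abort the surrounding transaction. A standalone illustration of the same shape:

    fn main() {
        let update = "42";

        // An immediately-invoked closure lets `?` short-circuit a fallible
        // sequence without a dedicated function, like the update loop's
        // `let result: Result<(), Error> = (|| { ... })();`.
        let result: Result<(), String> = (|| {
            let n: i64 = update
                .parse()
                .map_err(|e: std::num::ParseIntError| e.to_string())?;
            if n < 0 {
                return Err("negative update id".to_string());
            }
            Ok(())
        })();

        // The outcome is recorded instead of unwrapped, as the update system
        // does before inserting it into the results tree.
        let result = result.map_err(|e| e.to_string());
        println!("{:?}", result);
    }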
mod.rs

@@ -15,10 +15,15 @@ mod synonyms_deletion;
 pub use self::error::Error;
 pub use self::index::{Index, CustomSettingsIndex};
 
-use self::documents_addition::{DocumentsAddition, FinalDocumentsAddition};
-use self::documents_deletion::{DocumentsDeletion, FinalDocumentsDeletion};
-use self::synonyms_addition::{SynonymsAddition, FinalSynonymsAddition};
-use self::synonyms_deletion::{SynonymsDeletion, FinalSynonymsDeletion};
+pub use self::documents_addition::DocumentsAddition;
+pub use self::documents_deletion::DocumentsDeletion;
+pub use self::synonyms_addition::SynonymsAddition;
+pub use self::synonyms_deletion::SynonymsDeletion;
+
+use self::documents_addition::apply_documents_addition;
+use self::documents_deletion::apply_documents_deletion;
+use self::synonyms_addition::apply_synonyms_addition;
+use self::synonyms_deletion::apply_synonyms_deletion;
 
 fn load_indexes(tree: &sled::Tree) -> Result<HashSet<String>, Error> {
     match tree.get("indexes")? {
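The module surface after this change: the builder types are re-exported with pub use, while the apply_* functions are imported with a plain use, so only code inside this module (the update system) can call them. A toy sketch of that visibility split, with a made-up single-file module:

    mod documents_addition {
        pub struct DocumentsAddition;

        pub fn apply_documents_addition() {
            // ... the actual indexing work would happen here ...
        }
    }

    // visible to users of the parent module
    pub use self::documents_addition::DocumentsAddition;
    // visible only inside this module (the update system)
    use self::documents_addition::apply_documents_addition;

    fn main() {
        let _builder = DocumentsAddition;
        apply_documents_addition(); // callable here, but not re-exported
    }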
synonyms_addition.rs

@@ -21,10 +21,10 @@ impl<'a> SynonymsAddition<'a> {
     pub fn add_synonym<S, T, I>(&mut self, synonym: S, alternatives: I)
     where S: AsRef<str>,
           T: AsRef<str>,
-          I: Iterator<Item=T>,
+          I: IntoIterator<Item=T>,
     {
         let synonym = normalize_str(synonym.as_ref());
-        let alternatives = alternatives.map(|s| s.as_ref().to_lowercase());
+        let alternatives = alternatives.into_iter().map(|s| s.as_ref().to_lowercase());
         self.synonyms.entry(synonym).or_insert_with(Vec::new).extend(alternatives);
     }
 

@@ -33,87 +33,62 @@ impl<'a> SynonymsAddition<'a> {
     }
 }
 
-pub struct FinalSynonymsAddition<'a> {
-    inner: &'a Index,
-    synonyms: BTreeMap<String, Vec<String>>,
-}
-
-impl<'a> FinalSynonymsAddition<'a> {
-    pub fn new(inner: &'a Index) -> FinalSynonymsAddition<'a> {
-        FinalSynonymsAddition { inner, synonyms: BTreeMap::new() }
-    }
-
-    pub fn from_map(
-        inner: &'a Index,
-        synonyms: BTreeMap<String, Vec<String>>,
-    ) -> FinalSynonymsAddition<'a>
-    {
-        FinalSynonymsAddition { inner, synonyms }
-    }
-
-    pub fn add_synonym<S, T, I>(&mut self, synonym: S, alternatives: I)
-    where S: AsRef<str>,
-          T: AsRef<str>,
-          I: IntoIterator<Item=T>,
-    {
-        let synonym = normalize_str(synonym.as_ref());
-        let alternatives = alternatives.into_iter().map(|s| s.as_ref().to_lowercase());
-        self.synonyms.entry(synonym).or_insert_with(Vec::new).extend(alternatives);
-    }
-
-    pub fn finalize(self) -> Result<(), Error> {
-        let ref_index = self.inner.as_ref();
-        let synonyms = ref_index.synonyms_index;
-        let main = ref_index.main_index;
-
-        let mut synonyms_builder = SetBuilder::memory();
-
-        for (synonym, alternatives) in self.synonyms {
-            synonyms_builder.insert(&synonym).unwrap();
-
-            let alternatives = {
-                let alternatives = SetBuf::from_dirty(alternatives);
-                let mut alternatives_builder = SetBuilder::memory();
-                alternatives_builder.extend_iter(alternatives).unwrap();
-                alternatives_builder.into_inner().unwrap()
-            };
-            synonyms.set_alternatives_to(synonym.as_bytes(), alternatives)?;
-        }
-
-        let delta_synonyms = synonyms_builder
-            .into_inner()
-            .and_then(fst::Set::from_bytes)
-            .unwrap();
-
-        let synonyms = match main.synonyms_set()? {
-            Some(synonyms) => {
-                let op = OpBuilder::new()
-                    .add(synonyms.stream())
-                    .add(delta_synonyms.stream())
-                    .r#union();
-
-                let mut synonyms_builder = SetBuilder::memory();
-                synonyms_builder.extend_stream(op).unwrap();
-                synonyms_builder
-                    .into_inner()
-                    .and_then(fst::Set::from_bytes)
-                    .unwrap()
-            },
-            None => delta_synonyms,
-        };
-
-        main.set_synonyms_set(&synonyms)?;
-
-        // update the "consistent" view of the Index
-        let cache = ref_index.cache;
-        let words = Arc::new(main.words_set()?.unwrap_or_default());
-        let ranked_map = cache.ranked_map.clone();
-        let synonyms = Arc::new(synonyms);
-        let schema = cache.schema.clone();
-
-        let cache = Cache { words, synonyms, schema, ranked_map };
-        self.inner.cache.store(Arc::new(cache));
-
-        Ok(())
-    }
+pub fn apply_synonyms_addition(
+    index: &Index,
+    addition: BTreeMap<String, Vec<String>>,
+) -> Result<(), Error>
+{
+    let ref_index = index.as_ref();
+    let synonyms = ref_index.synonyms_index;
+    let main = ref_index.main_index;
+
+    let mut synonyms_builder = SetBuilder::memory();
+
+    for (synonym, alternatives) in addition {
+        synonyms_builder.insert(&synonym).unwrap();
+
+        let alternatives = {
+            let alternatives = SetBuf::from_dirty(alternatives);
+            let mut alternatives_builder = SetBuilder::memory();
+            alternatives_builder.extend_iter(alternatives).unwrap();
+            alternatives_builder.into_inner().unwrap()
+        };
+        synonyms.set_alternatives_to(synonym.as_bytes(), alternatives)?;
+    }
+
+    let delta_synonyms = synonyms_builder
+        .into_inner()
+        .and_then(fst::Set::from_bytes)
+        .unwrap();
+
+    let synonyms = match main.synonyms_set()? {
+        Some(synonyms) => {
+            let op = OpBuilder::new()
+                .add(synonyms.stream())
+                .add(delta_synonyms.stream())
+                .r#union();
+
+            let mut synonyms_builder = SetBuilder::memory();
+            synonyms_builder.extend_stream(op).unwrap();
+            synonyms_builder
+                .into_inner()
+                .and_then(fst::Set::from_bytes)
+                .unwrap()
+        },
+        None => delta_synonyms,
+    };
+
+    main.set_synonyms_set(&synonyms)?;
+
+    // update the "consistent" view of the Index
+    let cache = ref_index.cache;
+    let words = Arc::new(main.words_set()?.unwrap_or_default());
+    let ranked_map = cache.ranked_map.clone();
+    let synonyms = Arc::new(synonyms);
+    let schema = cache.schema.clone();
+
+    let cache = Cache { words, synonyms, schema, ranked_map };
+    index.cache.store(Arc::new(cache));
+
+    Ok(())
 }
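The bound change from I: Iterator to I: IntoIterator (paired with an explicit .into_iter() in the body) is a small ergonomic win: callers can pass a Vec or an array directly instead of producing an iterator themselves. A standalone comparison of the two bounds:

    // Old bound: the caller must produce an iterator explicitly.
    fn add_synonym_old<T, I>(alternatives: I) -> Vec<String>
    where T: AsRef<str>,
          I: Iterator<Item = T>,
    {
        alternatives.map(|s| s.as_ref().to_lowercase()).collect()
    }

    // New bound: any collection works as-is.
    fn add_synonym_new<T, I>(alternatives: I) -> Vec<String>
    where T: AsRef<str>,
          I: IntoIterator<Item = T>,
    {
        alternatives.into_iter().map(|s| s.as_ref().to_lowercase()).collect()
    }

    fn main() {
        let alternatives = vec!["Street", "Avenue"];

        // with Iterator, the .into_iter() call is the caller's burden
        let old = add_synonym_old(alternatives.clone().into_iter());

        // with IntoIterator, the collection can be passed directly
        let new = add_synonym_new(alternatives);

        assert_eq!(old, new);
    }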
synonyms_deletion.rs

@@ -43,110 +43,95 @@ impl<'a> SynonymsDeletion<'a> {
     }
 }
 
-pub struct FinalSynonymsDeletion<'a> {
-    inner: &'a Index,
-    synonyms: BTreeMap<String, Option<Vec<String>>>,
-}
-
-impl<'a> FinalSynonymsDeletion<'a> {
-    pub fn new(inner: &'a Index) -> FinalSynonymsDeletion<'a> {
-        FinalSynonymsDeletion { inner, synonyms: BTreeMap::new() }
-    }
-
-    pub fn from_map(
-        inner: &'a Index,
-        synonyms: BTreeMap<String, Option<Vec<String>>>,
-    ) -> FinalSynonymsDeletion<'a>
-    {
-        FinalSynonymsDeletion { inner, synonyms }
-    }
-
-    pub fn finalize(self) -> Result<(), Error> {
-        let ref_index = self.inner.as_ref();
-        let synonyms = ref_index.synonyms_index;
-        let main = ref_index.main_index;
-
-        let mut delete_whole_synonym_builder = SetBuilder::memory();
-
-        for (synonym, alternatives) in self.synonyms {
-            match alternatives {
-                Some(alternatives) => {
-                    let prev_alternatives = synonyms.alternatives_to(synonym.as_bytes())?;
-                    let prev_alternatives = match prev_alternatives {
-                        Some(alternatives) => alternatives,
-                        None => continue,
-                    };
-
-                    let delta_alternatives = {
-                        let alternatives = SetBuf::from_dirty(alternatives);
-                        let mut builder = SetBuilder::memory();
-                        builder.extend_iter(alternatives).unwrap();
-                        builder.into_inner()
-                            .and_then(fst::Set::from_bytes)
-                            .unwrap()
-                    };
-
-                    let op = OpBuilder::new()
-                        .add(prev_alternatives.stream())
-                        .add(delta_alternatives.stream())
-                        .difference();
-
-                    let (alternatives, empty_alternatives) = {
-                        let mut builder = SetBuilder::memory();
-                        let len = builder.get_ref().len();
-                        builder.extend_stream(op).unwrap();
-                        let is_empty = len == builder.get_ref().len();
-                        let alternatives = builder.into_inner().unwrap();
-                        (alternatives, is_empty)
-                    };
-
-                    if empty_alternatives {
-                        delete_whole_synonym_builder.insert(synonym.as_bytes())?;
-                    } else {
-                        synonyms.set_alternatives_to(synonym.as_bytes(), alternatives)?;
-                    }
-                },
-                None => {
-                    delete_whole_synonym_builder.insert(&synonym).unwrap();
-                    synonyms.del_alternatives_of(synonym.as_bytes())?;
-                }
-            }
-        }
-
-        let delta_synonyms = delete_whole_synonym_builder
-            .into_inner()
-            .and_then(fst::Set::from_bytes)
-            .unwrap();
-
-        let synonyms = match main.synonyms_set()? {
-            Some(synonyms) => {
-                let op = OpBuilder::new()
-                    .add(synonyms.stream())
-                    .add(delta_synonyms.stream())
-                    .difference();
-
-                let mut synonyms_builder = SetBuilder::memory();
-                synonyms_builder.extend_stream(op).unwrap();
-                synonyms_builder
-                    .into_inner()
-                    .and_then(fst::Set::from_bytes)
-                    .unwrap()
-            },
-            None => fst::Set::default(),
-        };
-
-        main.set_synonyms_set(&synonyms)?;
-
-        // update the "consistent" view of the Index
-        let cache = ref_index.cache;
-        let words = Arc::new(main.words_set()?.unwrap_or_default());
-        let ranked_map = cache.ranked_map.clone();
-        let synonyms = Arc::new(synonyms);
-        let schema = cache.schema.clone();
-
-        let cache = Cache { words, synonyms, schema, ranked_map };
-        self.inner.cache.store(Arc::new(cache));
-
-        Ok(())
-    }
+pub fn apply_synonyms_deletion(
+    index: &Index,
+    deletion: BTreeMap<String, Option<Vec<String>>>,
+) -> Result<(), Error>
+{
+    let ref_index = index.as_ref();
+    let synonyms = ref_index.synonyms_index;
+    let main = ref_index.main_index;
+
+    let mut delete_whole_synonym_builder = SetBuilder::memory();
+
+    for (synonym, alternatives) in deletion {
+        match alternatives {
+            Some(alternatives) => {
+                let prev_alternatives = synonyms.alternatives_to(synonym.as_bytes())?;
+                let prev_alternatives = match prev_alternatives {
+                    Some(alternatives) => alternatives,
+                    None => continue,
+                };
+
+                let delta_alternatives = {
+                    let alternatives = SetBuf::from_dirty(alternatives);
+                    let mut builder = SetBuilder::memory();
+                    builder.extend_iter(alternatives).unwrap();
+                    builder.into_inner()
+                        .and_then(fst::Set::from_bytes)
+                        .unwrap()
+                };
+
+                let op = OpBuilder::new()
+                    .add(prev_alternatives.stream())
+                    .add(delta_alternatives.stream())
+                    .difference();
+
+                let (alternatives, empty_alternatives) = {
+                    let mut builder = SetBuilder::memory();
+                    let len = builder.get_ref().len();
+                    builder.extend_stream(op).unwrap();
+                    let is_empty = len == builder.get_ref().len();
+                    let alternatives = builder.into_inner().unwrap();
+                    (alternatives, is_empty)
+                };
+
+                if empty_alternatives {
+                    delete_whole_synonym_builder.insert(synonym.as_bytes())?;
+                } else {
+                    synonyms.set_alternatives_to(synonym.as_bytes(), alternatives)?;
+                }
+            },
+            None => {
+                delete_whole_synonym_builder.insert(&synonym).unwrap();
+                synonyms.del_alternatives_of(synonym.as_bytes())?;
+            }
+        }
+    }
+
+    let delta_synonyms = delete_whole_synonym_builder
+        .into_inner()
+        .and_then(fst::Set::from_bytes)
+        .unwrap();
+
+    let synonyms = match main.synonyms_set()? {
+        Some(synonyms) => {
+            let op = OpBuilder::new()
+                .add(synonyms.stream())
+                .add(delta_synonyms.stream())
+                .difference();
+
+            let mut synonyms_builder = SetBuilder::memory();
+            synonyms_builder.extend_stream(op).unwrap();
+            synonyms_builder
+                .into_inner()
+                .and_then(fst::Set::from_bytes)
+                .unwrap()
+        },
+        None => fst::Set::default(),
+    };
+
+    main.set_synonyms_set(&synonyms)?;
+
+    // update the "consistent" view of the Index
+    let cache = ref_index.cache;
+    let words = Arc::new(main.words_set()?.unwrap_or_default());
+    let ranked_map = cache.ranked_map.clone();
+    let synonyms = Arc::new(synonyms);
+    let schema = cache.schema.clone();
+
+    let cache = Cache { words, synonyms, schema, ranked_map };
+    index.cache.store(Arc::new(cache));
+
+    Ok(())
 }
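In plain terms, the deletion logic subtracts the requested alternatives from the stored ones, drops the whole synonym when nothing remains (the delete_whole_synonym_builder path), and writes the reduced set back otherwise (the set_alternatives_to path). The same shape in std collections, with BTreeMap/BTreeSet standing in for the sled/fst storage of the real code:

    use std::collections::{BTreeMap, BTreeSet};

    fn delete_alternatives(
        store: &mut BTreeMap<String, BTreeSet<String>>,
        synonym: &str,
        removed: Option<&BTreeSet<String>>,
    ) {
        match removed {
            // None means: delete the synonym and all of its alternatives
            None => {
                store.remove(synonym);
            }
            Some(removed) => {
                if let Some(prev) = store.get(synonym) {
                    let remaining: BTreeSet<String> =
                        prev.difference(removed).cloned().collect();
                    if remaining.is_empty() {
                        store.remove(synonym); // whole-synonym deletion path
                    } else {
                        store.insert(synonym.to_string(), remaining); // reduced set path
                    }
                }
            }
        }
    }

    fn main() {
        let mut store = BTreeMap::new();
        store.insert(
            "car".to_string(),
            BTreeSet::from(["auto".to_string(), "vehicle".to_string()]),
        );

        let removed = BTreeSet::from(["auto".to_string()]);
        delete_alternatives(&mut store, "car", Some(&removed));
        assert_eq!(store["car"].len(), 1);

        delete_alternatives(&mut store, "car", None);
        assert!(!store.contains_key("car"));
    }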