use std::collections::HashMap;

use fst::{set::OpBuilder, SetBuilder};
use sdset::{duo::Union, SetOperation};
use serde::{Deserialize, Serialize};

use crate::database::{MainT, UpdateT};
use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::raw_indexer::RawIndexer;
use crate::serde::{extract_document_id, serialize_value_with_id, Deserializer, Serializer};
use crate::store;
use crate::update::{apply_documents_deletion, compute_short_prefixes, next_update_id, Update};
use crate::{Error, MResult, RankedMap};
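
/// A batch of documents waiting to be pushed as an index update.
///
/// Documents are buffered in memory and only enqueued into the update store
/// when `finalize` is called. `is_partial` distinguishes a partial update
/// (fields missing from the new document are kept from the stored one) from
/// a full documents addition.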
pub struct DocumentsAddition<D> {
    updates_store: store::Updates,
    updates_results_store: store::UpdatesResults,
    updates_notifier: UpdateEventsEmitter,
    documents: Vec<D>,
    is_partial: bool,
}

impl<D> DocumentsAddition<D> {
    pub fn new(
        updates_store: store::Updates,
        updates_results_store: store::UpdatesResults,
        updates_notifier: UpdateEventsEmitter,
    ) -> DocumentsAddition<D> {
        DocumentsAddition {
            updates_store,
            updates_results_store,
            updates_notifier,
            documents: Vec::new(),
            is_partial: false,
        }
    }

    pub fn new_partial(
        updates_store: store::Updates,
        updates_results_store: store::UpdatesResults,
        updates_notifier: UpdateEventsEmitter,
    ) -> DocumentsAddition<D> {
        DocumentsAddition {
            updates_store,
            updates_results_store,
            updates_notifier,
            documents: Vec::new(),
            is_partial: true,
        }
    }

    pub fn update_document(&mut self, document: D) {
        self.documents.push(document);
    }

    pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64>
    where
        D: serde::Serialize,
    {
        let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
        let update_id = push_documents_addition(
            writer,
            self.updates_store,
            self.updates_results_store,
            self.documents,
            self.is_partial,
        )?;
        Ok(update_id)
    }
}
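
// Illustrative usage (hypothetical caller; assumes an `UpdateT` write
// transaction and the update stores are already at hand):
//
//     let mut addition = DocumentsAddition::new(updates_store, updates_results_store, notifier);
//     addition.update_document(document);
//     let update_id = addition.finalize(&mut update_writer)?;
//
// `finalize` only enqueues the update; the documents themselves are indexed
// later, by `apply_documents_addition` or `apply_documents_partial_addition`.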

impl<D> Extend<D> for DocumentsAddition<D> {
    fn extend<T: IntoIterator<Item = D>>(&mut self, iter: T) {
        self.documents.extend(iter)
    }
}
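
/// Serializes the documents and enqueues them as a new `Update` in the update
/// store, returning the allocated update id. Depending on `is_partial`, the
/// update is recorded as a partial or a full documents addition.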
pub fn push_documents_addition<D: serde::Serialize>(
    writer: &mut heed::RwTxn<UpdateT>,
    updates_store: store::Updates,
    updates_results_store: store::UpdatesResults,
    addition: Vec<D>,
    is_partial: bool,
) -> MResult<u64> {
    let mut values = Vec::with_capacity(addition.len());
    for add in addition {
        let vec = serde_json::to_vec(&add)?;
        let add = serde_json::from_slice(&vec)?;
        values.push(add);
    }

    let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;

    let update = if is_partial {
        Update::documents_partial(values)
    } else {
        Update::documents_addition(values)
    };

    updates_store.put_update(writer, last_update_id, &update)?;

    Ok(last_update_id)
}
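
/// Applies a documents addition to the index: extracts each document id from
/// the schema identifier attribute, deletes any previous version of those
/// documents, re-indexes their fields, and writes the resulting words FST,
/// postings lists and ranked map back to the stores.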
pub fn apply_documents_addition<'a, 'b>(
    writer: &'a mut heed::RwTxn<'b, MainT>,
    index: &store::Index,
    addition: Vec<HashMap<String, serde_json::Value>>,
) -> MResult<()> {
    let mut documents_additions = HashMap::new();

    let schema = match index.main.schema(writer)? {
        Some(schema) => schema,
        None => return Err(Error::SchemaMissing),
    };

    let identifier = schema.identifier();

    // 1. store documents ids for future deletion
    for document in addition {
        let document_id = match extract_document_id(&identifier, &document)? {
            Some(id) => id,
            None => return Err(Error::MissingDocumentId),
        };

        documents_additions.insert(document_id, document);
    }

    // 2. remove the documents posting lists
    let number_of_inserted_documents = documents_additions.len();
    let documents_ids = documents_additions.iter().map(|(id, _)| *id).collect();
    apply_documents_deletion(writer, index, documents_ids)?;

    let mut ranked_map = match index.main.ranked_map(writer)? {
        Some(ranked_map) => ranked_map,
        None => RankedMap::default(),
    };

    let stop_words = match index.main.stop_words_fst(writer)? {
        Some(stop_words) => stop_words,
        None => fst::Set::default(),
    };

    // 3. index the documents fields in the stores
    let mut indexer = RawIndexer::new(stop_words);

    for (document_id, document) in documents_additions {
        let serializer = Serializer {
            txn: writer,
            schema: &schema,
            document_store: index.documents_fields,
            document_fields_counts: index.documents_fields_counts,
            indexer: &mut indexer,
            ranked_map: &mut ranked_map,
            document_id,
        };

        document.serialize(serializer)?;
    }

    write_documents_addition_index(
        writer,
        index,
        &ranked_map,
        number_of_inserted_documents,
        indexer,
    )?;

    compute_short_prefixes(writer, index)?;

    Ok(())
}
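
/// Same as `apply_documents_addition`, but merges each incoming document with
/// the version already stored in the index: keys missing from the new
/// document are copied from the old one before re-indexing, so untouched
/// fields are preserved.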
pub fn apply_documents_partial_addition<'a, 'b>(
    writer: &'a mut heed::RwTxn<'b, MainT>,
    index: &store::Index,
    addition: Vec<HashMap<String, serde_json::Value>>,
) -> MResult<()> {
    let mut documents_additions = HashMap::new();

    let schema = match index.main.schema(writer)? {
        Some(schema) => schema,
        None => return Err(Error::SchemaMissing),
    };

    let identifier = schema.identifier();

    // 1. store documents ids for future deletion
    for mut document in addition {
        let document_id = match extract_document_id(&identifier, &document)? {
            Some(id) => id,
            None => return Err(Error::MissingDocumentId),
        };

        let mut deserializer = Deserializer {
            document_id,
            reader: writer,
            documents_fields: index.documents_fields,
            schema: &schema,
            attributes: None,
        };

        // retrieve the old document and
        // update the new one with missing keys found in the old one
        let result = Option::<HashMap<String, serde_json::Value>>::deserialize(&mut deserializer)?;
        if let Some(old_document) = result {
            for (key, value) in old_document {
                document.entry(key).or_insert(value);
            }
        }

        documents_additions.insert(document_id, document);
    }

    // 2. remove the documents posting lists
    let number_of_inserted_documents = documents_additions.len();
    let documents_ids = documents_additions.iter().map(|(id, _)| *id).collect();
    apply_documents_deletion(writer, index, documents_ids)?;

    let mut ranked_map = match index.main.ranked_map(writer)? {
        Some(ranked_map) => ranked_map,
        None => RankedMap::default(),
    };

    let stop_words = match index.main.stop_words_fst(writer)? {
        Some(stop_words) => stop_words,
        None => fst::Set::default(),
    };

    // 3. index the documents fields in the stores
    let mut indexer = RawIndexer::new(stop_words);

    for (document_id, document) in documents_additions {
        let serializer = Serializer {
            txn: writer,
            schema: &schema,
            document_store: index.documents_fields,
            document_fields_counts: index.documents_fields_counts,
            indexer: &mut indexer,
            ranked_map: &mut ranked_map,
            document_id,
        };

        document.serialize(serializer)?;
    }

    write_documents_addition_index(
        writer,
        index,
        &ranked_map,
        number_of_inserted_documents,
        indexer,
    )?;

    compute_short_prefixes(writer, index)?;

    Ok(())
}
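
/// Rebuilds the whole index from the stored documents: clears the words FST,
/// ranked map, document count, postings lists and docs-words stores, then
/// re-indexes every stored document in chunks of 100, writing the index back
/// after each chunk.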
pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Index) -> MResult<()> {
    let schema = match index.main.schema(writer)? {
        Some(schema) => schema,
        None => return Err(Error::SchemaMissing),
    };

    let mut ranked_map = RankedMap::default();

    // 1. retrieve all documents ids
    let mut documents_ids_to_reindex = Vec::new();
    for result in index.documents_fields_counts.documents_ids(writer)? {
        let document_id = result?;
        documents_ids_to_reindex.push(document_id);
    }

    // 2. remove the documents posting lists
    index.main.put_words_fst(writer, &fst::Set::default())?;
    index.main.put_ranked_map(writer, &ranked_map)?;
    index.main.put_number_of_documents(writer, |_| 0)?;
    index.postings_lists.clear(writer)?;
    index.docs_words.clear(writer)?;

    // 3. re-index chunks of documents (otherwise we make the borrow checker unhappy)
    for documents_ids in documents_ids_to_reindex.chunks(100) {
        let stop_words = match index.main.stop_words_fst(writer)? {
            Some(stop_words) => stop_words,
            None => fst::Set::default(),
        };

        let number_of_inserted_documents = documents_ids.len();
        let mut indexer = RawIndexer::new(stop_words);
        let mut ram_store = HashMap::new();

        for document_id in documents_ids {
            for result in index.documents_fields.document_fields(writer, *document_id)? {
                let (field_id, bytes) = result?;
                let value: serde_json::Value = serde_json::from_slice(bytes)?;
                ram_store.insert((document_id, field_id), value);
            }

            for ((docid, field_id), value) in ram_store.drain() {
                serialize_value_with_id(
                    writer,
                    field_id,
                    &schema,
                    *docid,
                    index.documents_fields,
                    index.documents_fields_counts,
                    &mut indexer,
                    &mut ranked_map,
                    &value,
                )?;
            }
        }

        // 4. write the new index in the main store
        write_documents_addition_index(
            writer,
            index,
            &ranked_map,
            number_of_inserted_documents,
            indexer,
        )?;
    }

    compute_short_prefixes(writer, index)?;

    Ok(())
}
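
/// Merges the output of a `RawIndexer` run into the main store: unions each
/// word's new postings with its existing postings list, records the
/// docs-words sets, unions the delta words FST with the current one, and
/// updates the ranked map and the number of documents.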
pub fn write_documents_addition_index(
    writer: &mut heed::RwTxn<MainT>,
    index: &store::Index,
    ranked_map: &RankedMap,
    number_of_inserted_documents: usize,
    indexer: RawIndexer,
) -> MResult<()> {
    let indexed = indexer.build();
    let mut delta_words_builder = SetBuilder::memory();

    for (word, delta_set) in indexed.words_doc_indexes {
        delta_words_builder.insert(&word).unwrap();

        let set = match index.postings_lists.postings_list(writer, &word)? {
            Some(postings) => Union::new(&postings.matches, &delta_set).into_set_buf(),
            None => delta_set,
        };

        index.postings_lists.put_postings_list(writer, &word, &set)?;
    }

    for (id, words) in indexed.docs_words {
        index.docs_words.put_doc_words(writer, id, &words)?;
    }

    let delta_words = delta_words_builder
        .into_inner()
        .and_then(fst::Set::from_bytes)
        .unwrap();

    let words = match index.main.words_fst(writer)? {
        Some(words) => {
            let op = OpBuilder::new()
                .add(words.stream())
                .add(delta_words.stream())
                .r#union();

            let mut words_builder = SetBuilder::memory();
            words_builder.extend_stream(op).unwrap();
            words_builder
                .into_inner()
                .and_then(fst::Set::from_bytes)
                .unwrap()
        }
        None => delta_words,
    };

    index.main.put_words_fst(writer, &words)?;
    index.main.put_ranked_map(writer, ranked_map)?;
    index.main.put_number_of_documents(writer, |old| old + number_of_inserted_documents as u64)?;

    compute_short_prefixes(writer, index)?;

    Ok(())
}