Rename PrefixCache to PrefixDocumentsCache

This commit is contained in:
Clément Renault 2019-12-30 11:44:42 +01:00
parent 1a5a104f13
commit 58836d89aa
No known key found for this signature in database
GPG Key ID: 0151CDAB43460DAE
8 changed files with 44 additions and 44 deletions

View File

@ -39,7 +39,7 @@ pub fn bucket_sort<'c, FI>(
postings_lists_store: store::PostingsLists, postings_lists_store: store::PostingsLists,
documents_fields_counts_store: store::DocumentsFieldsCounts, documents_fields_counts_store: store::DocumentsFieldsCounts,
synonyms_store: store::Synonyms, synonyms_store: store::Synonyms,
prefix_cache_store: store::PrefixCache, prefix_documents_cache_store: store::PrefixDocumentsCache,
) -> MResult<Vec<Document>> ) -> MResult<Vec<Document>>
where where
FI: Fn(DocumentId) -> bool, FI: Fn(DocumentId) -> bool,
@ -62,7 +62,7 @@ where
postings_lists_store, postings_lists_store,
documents_fields_counts_store, documents_fields_counts_store,
synonyms_store, synonyms_store,
prefix_cache_store, prefix_documents_cache_store,
); );
} }
@ -78,7 +78,7 @@ where
prefix[..len].copy_from_slice(&automaton.query.as_bytes()[..len]); prefix[..len].copy_from_slice(&automaton.query.as_bytes()[..len]);
let mut documents = Vec::new(); let mut documents = Vec::new();
let iter = prefix_cache_store.prefix_documents(reader, prefix)?; let iter = prefix_documents_cache_store.prefix_documents(reader, prefix)?;
for result in iter.skip(range.start).take(range.len()) { for result in iter.skip(range.start).take(range.len()) {
let (docid, highlights) = result?; let (docid, highlights) = result?;
documents.push(Document::from_highlights(docid, &highlights)); documents.push(Document::from_highlights(docid, &highlights));
@ -201,7 +201,7 @@ pub fn bucket_sort_with_distinct<'c, FI, FD>(
postings_lists_store: store::PostingsLists, postings_lists_store: store::PostingsLists,
documents_fields_counts_store: store::DocumentsFieldsCounts, documents_fields_counts_store: store::DocumentsFieldsCounts,
synonyms_store: store::Synonyms, synonyms_store: store::Synonyms,
prefix_cache_store: store::PrefixCache, prefix_documents_cache_store: store::PrefixDocumentsCache,
) -> MResult<Vec<Document>> ) -> MResult<Vec<Document>>
where where
FI: Fn(DocumentId) -> bool, FI: Fn(DocumentId) -> bool,

View File

@ -16,7 +16,7 @@ pub struct QueryBuilder<'c, 'f, 'd> {
postings_lists_store: store::PostingsLists, postings_lists_store: store::PostingsLists,
documents_fields_counts_store: store::DocumentsFieldsCounts, documents_fields_counts_store: store::DocumentsFieldsCounts,
synonyms_store: store::Synonyms, synonyms_store: store::Synonyms,
prefix_cache_store: store::PrefixCache, prefix_cache_store: store::PrefixDocumentsCache,
} }
impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> { impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
@ -25,7 +25,7 @@ impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
postings_lists: store::PostingsLists, postings_lists: store::PostingsLists,
documents_fields_counts: store::DocumentsFieldsCounts, documents_fields_counts: store::DocumentsFieldsCounts,
synonyms: store::Synonyms, synonyms: store::Synonyms,
prefix_cache: store::PrefixCache, prefix_cache: store::PrefixDocumentsCache,
) -> QueryBuilder<'c, 'f, 'd> { ) -> QueryBuilder<'c, 'f, 'd> {
QueryBuilder::with_criteria( QueryBuilder::with_criteria(
main, main,
@ -42,7 +42,7 @@ impl<'c, 'f, 'd> QueryBuilder<'c, 'f, 'd> {
postings_lists: store::PostingsLists, postings_lists: store::PostingsLists,
documents_fields_counts: store::DocumentsFieldsCounts, documents_fields_counts: store::DocumentsFieldsCounts,
synonyms: store::Synonyms, synonyms: store::Synonyms,
prefix_cache: store::PrefixCache, prefix_cache: store::PrefixDocumentsCache,
criteria: Criteria<'c>, criteria: Criteria<'c>,
) -> QueryBuilder<'c, 'f, 'd> { ) -> QueryBuilder<'c, 'f, 'd> {
QueryBuilder { QueryBuilder {

View File

@ -1,5 +1,5 @@
mod docs_words; mod docs_words;
mod prefix_cache; mod prefix_documents_cache;
mod documents_fields; mod documents_fields;
mod documents_fields_counts; mod documents_fields_counts;
mod main; mod main;
@ -9,7 +9,7 @@ mod updates;
mod updates_results; mod updates_results;
pub use self::docs_words::DocsWords; pub use self::docs_words::DocsWords;
pub use self::prefix_cache::PrefixCache; pub use self::prefix_documents_cache::PrefixDocumentsCache;
pub use self::documents_fields::{DocumentFieldsIter, DocumentsFields}; pub use self::documents_fields::{DocumentFieldsIter, DocumentsFields};
pub use self::documents_fields_counts::{ pub use self::documents_fields_counts::{
DocumentFieldsCountsIter, DocumentsFieldsCounts, DocumentsIdsIter, DocumentFieldsCountsIter, DocumentsFieldsCounts, DocumentsIdsIter,
@ -76,7 +76,7 @@ fn docs_words_name(name: &str) -> String {
format!("store-{}-docs-words", name) format!("store-{}-docs-words", name)
} }
fn prefix_cache_name(name: &str) -> String { fn prefix_documents_cache_name(name: &str) -> String {
format!("store-{}-prefix-cache", name) format!("store-{}-prefix-cache", name)
} }
@ -96,7 +96,7 @@ pub struct Index {
pub documents_fields_counts: DocumentsFieldsCounts, pub documents_fields_counts: DocumentsFieldsCounts,
pub synonyms: Synonyms, pub synonyms: Synonyms,
pub docs_words: DocsWords, pub docs_words: DocsWords,
pub prefix_cache: PrefixCache, pub prefix_documents_cache: PrefixDocumentsCache,
pub updates: Updates, pub updates: Updates,
pub updates_results: UpdatesResults, pub updates_results: UpdatesResults,
@ -259,7 +259,7 @@ impl Index {
self.postings_lists, self.postings_lists,
self.documents_fields_counts, self.documents_fields_counts,
self.synonyms, self.synonyms,
self.prefix_cache, self.prefix_documents_cache,
) )
} }
@ -272,7 +272,7 @@ impl Index {
self.postings_lists, self.postings_lists,
self.documents_fields_counts, self.documents_fields_counts,
self.synonyms, self.synonyms,
self.prefix_cache, self.prefix_documents_cache,
criteria, criteria,
) )
} }
@ -291,7 +291,7 @@ pub fn create(
let documents_fields_counts_name = documents_fields_counts_name(name); let documents_fields_counts_name = documents_fields_counts_name(name);
let synonyms_name = synonyms_name(name); let synonyms_name = synonyms_name(name);
let docs_words_name = docs_words_name(name); let docs_words_name = docs_words_name(name);
let prefix_cache_name = prefix_cache_name(name); let prefix_documents_cache_name = prefix_documents_cache_name(name);
let updates_name = updates_name(name); let updates_name = updates_name(name);
let updates_results_name = updates_results_name(name); let updates_results_name = updates_results_name(name);
@ -302,7 +302,7 @@ pub fn create(
let documents_fields_counts = env.create_database(Some(&documents_fields_counts_name))?; let documents_fields_counts = env.create_database(Some(&documents_fields_counts_name))?;
let synonyms = env.create_database(Some(&synonyms_name))?; let synonyms = env.create_database(Some(&synonyms_name))?;
let docs_words = env.create_database(Some(&docs_words_name))?; let docs_words = env.create_database(Some(&docs_words_name))?;
let prefix_cache = env.create_database(Some(&prefix_cache_name))?; let prefix_documents_cache = env.create_database(Some(&prefix_documents_cache_name))?;
let updates = update_env.create_database(Some(&updates_name))?; let updates = update_env.create_database(Some(&updates_name))?;
let updates_results = update_env.create_database(Some(&updates_results_name))?; let updates_results = update_env.create_database(Some(&updates_results_name))?;
@ -315,7 +315,7 @@ pub fn create(
}, },
synonyms: Synonyms { synonyms }, synonyms: Synonyms { synonyms },
docs_words: DocsWords { docs_words }, docs_words: DocsWords { docs_words },
prefix_cache: PrefixCache { prefix_cache }, prefix_documents_cache: PrefixDocumentsCache { prefix_documents_cache },
updates: Updates { updates }, updates: Updates { updates },
updates_results: UpdatesResults { updates_results }, updates_results: UpdatesResults { updates_results },
updates_notifier, updates_notifier,
@ -335,7 +335,7 @@ pub fn open(
let documents_fields_counts_name = documents_fields_counts_name(name); let documents_fields_counts_name = documents_fields_counts_name(name);
let synonyms_name = synonyms_name(name); let synonyms_name = synonyms_name(name);
let docs_words_name = docs_words_name(name); let docs_words_name = docs_words_name(name);
let prefix_cache_name = prefix_cache_name(name); let prefix_documents_cache_name = prefix_documents_cache_name(name);
let updates_name = updates_name(name); let updates_name = updates_name(name);
let updates_results_name = updates_results_name(name); let updates_results_name = updates_results_name(name);
@ -364,8 +364,8 @@ pub fn open(
Some(docs_words) => docs_words, Some(docs_words) => docs_words,
None => return Ok(None), None => return Ok(None),
}; };
let prefix_cache = match env.open_database(Some(&prefix_cache_name))? { let prefix_documents_cache = match env.open_database(Some(&prefix_documents_cache_name))? {
Some(prefix_cache) => prefix_cache, Some(prefix_documents_cache) => prefix_documents_cache,
None => return Ok(None), None => return Ok(None),
}; };
let updates = match update_env.open_database(Some(&updates_name))? { let updates = match update_env.open_database(Some(&updates_name))? {
@ -386,7 +386,7 @@ pub fn open(
}, },
synonyms: Synonyms { synonyms }, synonyms: Synonyms { synonyms },
docs_words: DocsWords { docs_words }, docs_words: DocsWords { docs_words },
prefix_cache: PrefixCache { prefix_cache }, prefix_documents_cache: PrefixDocumentsCache { prefix_documents_cache },
updates: Updates { updates }, updates: Updates { updates },
updates_results: UpdatesResults { updates_results }, updates_results: UpdatesResults { updates_results },
updates_notifier, updates_notifier,
@ -405,7 +405,7 @@ pub fn clear(
index.documents_fields_counts.clear(writer)?; index.documents_fields_counts.clear(writer)?;
index.synonyms.clear(writer)?; index.synonyms.clear(writer)?;
index.docs_words.clear(writer)?; index.docs_words.clear(writer)?;
index.prefix_cache.clear(writer)?; index.prefix_documents_cache.clear(writer)?;
index.updates.clear(update_writer)?; index.updates.clear(update_writer)?;
index.updates_results.clear(update_writer)?; index.updates_results.clear(update_writer)?;
Ok(()) Ok(())

View File

@ -27,11 +27,11 @@ impl PrefixKey {
} }
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub struct PrefixCache { pub struct PrefixDocumentsCache {
pub(crate) prefix_cache: heed::Database<OwnedType<PrefixKey>, CowSlice<Highlight>>, pub(crate) prefix_documents_cache: heed::Database<OwnedType<PrefixKey>, CowSlice<Highlight>>,
} }
impl PrefixCache { impl PrefixDocumentsCache {
pub fn put_prefix_document( pub fn put_prefix_document(
self, self,
writer: &mut heed::RwTxn<MainT>, writer: &mut heed::RwTxn<MainT>,
@ -41,11 +41,11 @@ impl PrefixCache {
highlights: &[Highlight], highlights: &[Highlight],
) -> ZResult<()> { ) -> ZResult<()> {
let key = PrefixKey::new(prefix, index as u64, docid.0); let key = PrefixKey::new(prefix, index as u64, docid.0);
self.prefix_cache.put(writer, &key, highlights) self.prefix_documents_cache.put(writer, &key, highlights)
} }
pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> { pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
self.prefix_cache.clear(writer) self.prefix_documents_cache.clear(writer)
} }
pub fn prefix_documents<'txn>( pub fn prefix_documents<'txn>(
@ -55,7 +55,7 @@ impl PrefixCache {
) -> ZResult<PrefixDocumentsIter<'txn>> { ) -> ZResult<PrefixDocumentsIter<'txn>> {
let start = PrefixKey::new(prefix, 0, 0); let start = PrefixKey::new(prefix, 0, 0);
let end = PrefixKey::new(prefix, u64::max_value(), u64::max_value()); let end = PrefixKey::new(prefix, u64::max_value(), u64::max_value());
let iter = self.prefix_cache.range(reader, &(start..=end))?; let iter = self.prefix_documents_cache.range(reader, &(start..=end))?;
Ok(PrefixDocumentsIter { iter }) Ok(PrefixDocumentsIter { iter })
} }
} }

View File

@ -109,7 +109,7 @@ pub fn apply_documents_addition<'a, 'b>(
documents_fields_counts_store: store::DocumentsFieldsCounts, documents_fields_counts_store: store::DocumentsFieldsCounts,
postings_lists_store: store::PostingsLists, postings_lists_store: store::PostingsLists,
docs_words_store: store::DocsWords, docs_words_store: store::DocsWords,
prefix_cache_store: store::PrefixCache, prefix_documents_cache_store: store::PrefixDocumentsCache,
addition: Vec<HashMap<String, serde_json::Value>>, addition: Vec<HashMap<String, serde_json::Value>>,
) -> MResult<()> { ) -> MResult<()> {
let mut documents_additions = HashMap::new(); let mut documents_additions = HashMap::new();
@ -176,7 +176,7 @@ pub fn apply_documents_addition<'a, 'b>(
main_store, main_store,
postings_lists_store, postings_lists_store,
docs_words_store, docs_words_store,
prefix_cache_store, prefix_documents_cache_store,
&ranked_map, &ranked_map,
number_of_inserted_documents, number_of_inserted_documents,
indexer, indexer,
@ -190,7 +190,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
documents_fields_counts_store: store::DocumentsFieldsCounts, documents_fields_counts_store: store::DocumentsFieldsCounts,
postings_lists_store: store::PostingsLists, postings_lists_store: store::PostingsLists,
docs_words_store: store::DocsWords, docs_words_store: store::DocsWords,
prefix_cache_store: store::PrefixCache, prefix_documents_cache_store: store::PrefixDocumentsCache,
addition: Vec<HashMap<String, serde_json::Value>>, addition: Vec<HashMap<String, serde_json::Value>>,
) -> MResult<()> { ) -> MResult<()> {
let mut documents_additions = HashMap::new(); let mut documents_additions = HashMap::new();
@ -274,7 +274,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
main_store, main_store,
postings_lists_store, postings_lists_store,
docs_words_store, docs_words_store,
prefix_cache_store, prefix_documents_cache_store,
&ranked_map, &ranked_map,
number_of_inserted_documents, number_of_inserted_documents,
indexer, indexer,
@ -288,7 +288,7 @@ pub fn reindex_all_documents(
documents_fields_counts_store: store::DocumentsFieldsCounts, documents_fields_counts_store: store::DocumentsFieldsCounts,
postings_lists_store: store::PostingsLists, postings_lists_store: store::PostingsLists,
docs_words_store: store::DocsWords, docs_words_store: store::DocsWords,
prefix_cache_store: store::PrefixCache, prefix_documents_cache_store: store::PrefixDocumentsCache,
) -> MResult<()> { ) -> MResult<()> {
let schema = match main_store.schema(writer)? { let schema = match main_store.schema(writer)? {
Some(schema) => schema, Some(schema) => schema,
@ -350,7 +350,7 @@ pub fn reindex_all_documents(
main_store, main_store,
postings_lists_store, postings_lists_store,
docs_words_store, docs_words_store,
prefix_cache_store, prefix_documents_cache_store,
&ranked_map, &ranked_map,
number_of_inserted_documents, number_of_inserted_documents,
indexer, indexer,
@ -365,7 +365,7 @@ pub fn write_documents_addition_index(
main_store: store::Main, main_store: store::Main,
postings_lists_store: store::PostingsLists, postings_lists_store: store::PostingsLists,
docs_words_store: store::DocsWords, docs_words_store: store::DocsWords,
prefix_cache_store: store::PrefixCache, prefix_documents_cache_store: store::PrefixDocumentsCache,
ranked_map: &RankedMap, ranked_map: &RankedMap,
number_of_inserted_documents: usize, number_of_inserted_documents: usize,
indexer: RawIndexer, indexer: RawIndexer,

View File

@ -281,7 +281,7 @@ pub fn update_task<'a, 'b>(
index.documents_fields_counts, index.documents_fields_counts,
index.postings_lists, index.postings_lists,
index.docs_words, index.docs_words,
index.prefix_cache, index.prefix_documents_cache,
); );
(update_type, result, start.elapsed()) (update_type, result, start.elapsed())
@ -308,7 +308,7 @@ pub fn update_task<'a, 'b>(
index.documents_fields_counts, index.documents_fields_counts,
index.postings_lists, index.postings_lists,
index.docs_words, index.docs_words,
index.prefix_cache, index.prefix_documents_cache,
documents, documents,
); );
@ -339,7 +339,7 @@ pub fn update_task<'a, 'b>(
index.postings_lists, index.postings_lists,
index.documents_fields_counts, index.documents_fields_counts,
index.synonyms, index.synonyms,
index.prefix_cache, index.prefix_documents_cache,
).unwrap(); ).unwrap();
let mut prefix = [0; 4]; let mut prefix = [0; 4];
@ -347,7 +347,7 @@ pub fn update_task<'a, 'b>(
prefix[..len].copy_from_slice(&s.as_bytes()[..len]); prefix[..len].copy_from_slice(&s.as_bytes()[..len]);
for (i, document) in documents.into_iter().enumerate() { for (i, document) in documents.into_iter().enumerate() {
index.prefix_cache.put_prefix_document( index.prefix_documents_cache.put_prefix_document(
writer, writer,
prefix, prefix,
i, i,
@ -381,7 +381,7 @@ pub fn update_task<'a, 'b>(
index.documents_fields_counts, index.documents_fields_counts,
index.postings_lists, index.postings_lists,
index.docs_words, index.docs_words,
index.prefix_cache, index.prefix_documents_cache,
documents, documents,
); );
@ -443,7 +443,7 @@ pub fn update_task<'a, 'b>(
index.documents_fields_counts, index.documents_fields_counts,
index.postings_lists, index.postings_lists,
index.docs_words, index.docs_words,
index.prefix_cache, index.prefix_documents_cache,
stop_words, stop_words,
); );

View File

@ -13,7 +13,7 @@ pub fn apply_schema_update(
documents_fields_counts_store: store::DocumentsFieldsCounts, documents_fields_counts_store: store::DocumentsFieldsCounts,
postings_lists_store: store::PostingsLists, postings_lists_store: store::PostingsLists,
docs_words_store: store::DocsWords, docs_words_store: store::DocsWords,
prefix_cache_store: store::PrefixCache, prefix_documents_cache_store: store::PrefixDocumentsCache,
) -> MResult<()> { ) -> MResult<()> {
use UnsupportedOperation::{ use UnsupportedOperation::{
CanOnlyIntroduceNewSchemaAttributesAtEnd, CannotRemoveSchemaAttribute, CanOnlyIntroduceNewSchemaAttributesAtEnd, CannotRemoveSchemaAttribute,
@ -56,7 +56,7 @@ pub fn apply_schema_update(
documents_fields_counts_store, documents_fields_counts_store,
postings_lists_store, postings_lists_store,
docs_words_store, docs_words_store,
prefix_cache_store, prefix_documents_cache_store,
)? )?
} }

View File

@ -68,7 +68,7 @@ pub fn apply_stop_words_deletion(
documents_fields_counts_store: store::DocumentsFieldsCounts, documents_fields_counts_store: store::DocumentsFieldsCounts,
postings_lists_store: store::PostingsLists, postings_lists_store: store::PostingsLists,
docs_words_store: store::DocsWords, docs_words_store: store::DocsWords,
prefix_cache_store: store::PrefixCache, prefix_documents_cache_store: store::PrefixDocumentsCache,
deletion: BTreeSet<String>, deletion: BTreeSet<String>,
) -> MResult<()> { ) -> MResult<()> {
let mut stop_words_builder = SetBuilder::memory(); let mut stop_words_builder = SetBuilder::memory();
@ -111,7 +111,7 @@ pub fn apply_stop_words_deletion(
documents_fields_counts_store, documents_fields_counts_store,
postings_lists_store, postings_lists_store,
docs_words_store, docs_words_store,
prefix_cache_store, prefix_documents_cache_store,
)?; )?;
} }
} }