Merge pull request #26 from meilisearch/searchable-attributes

Introduce the searchable attributes
Clément Renault 2020-11-04 09:40:03 +01:00 committed by GitHub
commit 749764f35b
8 changed files with 442 additions and 42 deletions


@@ -18,6 +18,7 @@ pub const DISPLAYED_FIELDS_KEY: &str = "displayed-fields";
pub const DOCUMENTS_IDS_KEY: &str = "documents-ids";
pub const FIELDS_IDS_MAP_KEY: &str = "fields-ids-map";
pub const PRIMARY_KEY_KEY: &str = "primary-key";
pub const SEARCHABLE_FIELDS_KEY: &str = "searchable-fields";
pub const USERS_IDS_DOCUMENTS_IDS_KEY: &str = "users-ids-documents-ids";
pub const WORDS_FST_KEY: &str = "words-fst";
@@ -94,7 +95,7 @@ impl Index {
        self.main.put::<_, Str, OwnedType<u8>>(wtxn, PRIMARY_KEY_KEY, &primary_key)
    }

    /// Deletes the primary key of the documents, this can be done to reset indexes settings.
    pub fn delete_primary_key(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, PRIMARY_KEY_KEY)
    }
@@ -155,6 +156,25 @@ impl Index {
        self.main.get::<_, Str, ByteSlice>(rtxn, DISPLAYED_FIELDS_KEY)
    }
    /* searchable fields */

    /// Writes the searchable fields; when this list is specified, only these fields are indexed.
    pub fn put_searchable_fields(&self, wtxn: &mut RwTxn, fields: &[u8]) -> heed::Result<()> {
        assert!(fields.windows(2).all(|win| win[0] < win[1])); // is sorted
        self.main.put::<_, Str, ByteSlice>(wtxn, SEARCHABLE_FIELDS_KEY, fields)
    }

    /// Deletes the searchable fields; when no fields are specified, all the fields are indexed.
    pub fn delete_searchable_fields(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
        self.main.delete::<_, Str>(wtxn, SEARCHABLE_FIELDS_KEY)
    }

    /// Returns the searchable fields ids; these are the fields that are indexed.
    /// If this list isn't stored, it means that **all** the fields are indexed.
    pub fn searchable_fields<'t>(&self, rtxn: &'t RoTxn) -> heed::Result<Option<&'t [u8]>> {
        self.main.get::<_, Str, ByteSlice>(rtxn, SEARCHABLE_FIELDS_KEY)
    }
    /* words fst */

    /// Writes the FST which is the words dictionary of the engine.
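A hedged sketch of how these new accessors chain together, assuming an open `Index` (the field ids 0 and 2 are illustrative, and the slice must be sorted, see the assert above):

    let mut wtxn = index.write_txn().unwrap();
    index.put_searchable_fields(&mut wtxn, &[0, 2]).unwrap();
    wtxn.commit().unwrap();

    let rtxn = index.read_txn().unwrap();
    match index.searchable_fields(&rtxn).unwrap() {
        Some(fields) => println!("only the fields {:?} are indexed", fields),
        None => println!("all the fields are indexed"),
    }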


@@ -184,6 +184,13 @@ struct Settings {
        skip_serializing_if = "Option::is_none",
    )]
    displayed_attributes: Option<Option<Vec<String>>>,

    #[serde(
        default,
        deserialize_with = "deserialize_some",
        skip_serializing_if = "Option::is_none",
    )]
    searchable_attributes: Option<Option<Vec<String>>>,
}
// Any value that is present is considered Some value, including null.
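The comment above describes the classic serde trick for telling "field absent" apart from "field set to null". The helper's body isn't shown in this diff, but it is usually the following one-liner; treat this as an assumed sketch, not the committed code:

    use serde::{Deserialize, Deserializer};

    // Assumed shape of the helper named in the attribute above: any present value,
    // including `null`, deserializes to `Some(...)`, so a missing field stays `None`.
    fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
    where
        T: Deserialize<'de>,
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer).map(Some)
    }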
@@ -300,6 +307,14 @@ pub fn run(opt: Opt) -> anyhow::Result<()> {
        let mut wtxn = index_cloned.write_txn()?;
        let mut builder = update_builder.settings(&mut wtxn, &index_cloned);

        // We transpose the settings JSON struct into a real setting update.
        if let Some(names) = settings.searchable_attributes {
            match names {
                Some(names) => builder.set_searchable_fields(names),
                None => builder.reset_searchable_fields(),
            }
        }

        // We transpose the settings JSON struct into a real setting update.
        if let Some(names) = settings.displayed_attributes {
            match names {
@@ -308,7 +323,17 @@ pub fn run(opt: Opt) -> anyhow::Result<()> {
            }
        }

        let result = builder.execute(|count, total| {
            let _ = update_status_sender_cloned.send(UpdateStatus::Progressing {
                update_id,
                meta: UpdateMetaProgress::DocumentsAddition {
                    processed_number_of_documents: count,
                    total_number_of_documents: Some(total),
                }
            });
        });

        match result {
            Ok(_count) => wtxn.commit().map_err(Into::into),
            Err(e) => Err(e.into())
        }
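For context, the double-`Option` on `searchable_attributes` distinguishes three request shapes. A sketch with `serde_json` (the JSON key name follows the struct field here; whether the HTTP API renames it isn't visible in this diff):

    use serde_json::json;

    // Absent field: `searchable_attributes` deserializes to `None`, nothing changes.
    let untouched = json!({});
    // Explicit `null`: deserializes to `Some(None)` and triggers `reset_searchable_fields`.
    let reset = json!({ "searchable_attributes": null });
    // A list: deserializes to `Some(Some(...))` and triggers `set_searchable_fields`.
    let replace = json!({ "searchable_attributes": ["name"] });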


@@ -1,4 +1,5 @@
use std::borrow::Cow;
use std::collections::HashSet;
use std::fs::File;
use std::io::{self, Seek, SeekFrom};
use std::sync::mpsc::sync_channel;
@@ -9,8 +10,10 @@ use bstr::ByteSlice as _;
use grenad::{Writer, Sorter, Merger, Reader, FileFuse, CompressionType};
use heed::types::ByteSlice;
use log::{debug, info, error};
use memmap::Mmap;
use rayon::prelude::*;
use rayon::ThreadPool;

use crate::index::Index;
use self::store::Store;
use self::merge_function::{
@@ -248,7 +251,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
        R: io::Read,
        F: Fn(usize, usize) + Sync,
    {
        let before_transform = Instant::now();

        let transform = Transform {
            rtxn: &self.wtxn,
@@ -268,6 +271,17 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
            UpdateFormat::JsonStream => transform.from_json_stream(reader)?,
        };

        info!("Update transformed in {:.02?}", before_transform.elapsed());

        self.execute_raw(output, progress_callback)
    }

    pub fn execute_raw<F>(self, output: TransformOutput, progress_callback: F) -> anyhow::Result<()>
    where
        F: Fn(usize, usize) + Sync
    {
        let before_indexing = Instant::now();

        let TransformOutput {
            primary_key,
            fields_ids_map,
@@ -296,16 +310,14 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
            let _deleted_documents_count = deletion_builder.execute()?;
        }

        let mmap;
        let bytes = if documents_count == 0 {
            &[][..]
        } else {
            mmap = unsafe { Mmap::map(&documents_file).context("mmaping the transform documents file")? };
            &mmap
        };

        let documents = grenad::Reader::new(bytes).unwrap();
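The `let mmap;` dance above is the usual Rust idiom for keeping a conditionally created owner alive long enough to borrow from it, replacing the previous `Option<Mmap>` plus `unwrap_or_default` combination. A standalone sketch of the same pattern (names are illustrative):

    // The owner is declared in the outer scope so the borrow taken in one
    // branch outlives the conditional itself.
    fn pick(data: Option<Vec<u8>>) -> usize {
        let owner;
        let bytes: &[u8] = match data {
            None => &[][..],
            Some(vec) => {
                owner = vec; // initialized on this path only; dropped at end of `pick`
                &owner
            }
        };
        bytes.len()
    }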
        // The enum which indicates the type of the readers
@@ -316,6 +328,11 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
            WordsPairsProximitiesDocids,
        }

        let searchable_fields: HashSet<_> = match self.index.searchable_fields(self.wtxn)? {
            Some(fields) => fields.iter().copied().collect(),
            None => fields_ids_map.iter().map(|(id, _name)| id).collect(),
        };

        let linked_hash_map_size = self.linked_hash_map_size;
        let max_nb_chunks = self.max_nb_chunks;
        let max_memory = self.max_memory;
@@ -343,6 +360,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
            .enumerate()
            .map(|(i, documents)| {
                let store = Store::new(
                    searchable_fields.clone(),
                    linked_hash_map_size,
                    max_nb_chunks,
                    max_memory_by_job,
@@ -492,7 +510,7 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
            }
        }

        info!("Transform output indexed in {:.02?}", before_indexing.elapsed());

        Ok(())
    }


@@ -1,5 +1,5 @@
use std::borrow::Cow;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::convert::{TryFrom, TryInto};
use std::fs::File;
use std::iter::FromIterator;
@@ -37,6 +37,9 @@ pub struct Readers {
}

pub struct Store {
    // Indexing parameters
    searchable_fields: HashSet<u8>,
    // Caches
    word_docids: LinkedHashMap<SmallVec32<u8>, RoaringBitmap>,
    word_docids_limit: usize,
    words_pairs_proximities_docids: LinkedHashMap<(SmallVec32<u8>, SmallVec32<u8>, u8), RoaringBitmap>,
@@ -56,6 +59,7 @@ pub struct Store {
impl Store {
    pub fn new(
        searchable_fields: HashSet<u8>,
        linked_hash_map_size: Option<usize>,
        max_nb_chunks: Option<usize>,
        max_memory: Option<usize>,
@@ -101,18 +105,22 @@ impl Store {
        })?;

        Ok(Store {
            // Indexing parameters.
            searchable_fields,
            // Caches
            word_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
            word_docids_limit: linked_hash_map_size,
            words_pairs_proximities_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
            words_pairs_proximities_docids_limit: linked_hash_map_size,
            // MTBL parameters
            chunk_compression_type,
            chunk_compression_level,
            chunk_fusing_shrink_size,
            // MTBL sorters
            main_sorter,
            word_docids_sorter,
            words_pairs_proximities_docids_sorter,
            // MTBL writers
            docid_word_positions_writer,
            documents_writer,
        })
@@ -309,6 +317,7 @@ impl Store {
        }

        for (attr, content) in document.iter() {
            if self.searchable_fields.contains(&attr) {
                use serde_json::Value;
                let content: Cow<str> = match serde_json::from_slice(content) {
                    Ok(string) => string,
@@ -328,6 +337,7 @@ impl Store {
                    words_positions.entry(word).or_insert_with(SmallVec32::new).push(position);
                }
            }
        }

        // We write the document in the documents store.
        self.write_document(document_id, &words_positions, value)?;


@@ -404,6 +404,59 @@ impl Transform<'_, '_> {
            documents_file,
        })
    }

    /// Returns a `TransformOutput` with a file that contains the documents of the index
    /// with the attributes reordered according to the `FieldsIdsMap` given as argument.
    // TODO this can be done in parallel by using the rayon `ThreadPool`.
    pub fn remap_index_documents(
        self,
        primary_key: u8,
        fields_ids_map: FieldsIdsMap,
    ) -> anyhow::Result<TransformOutput>
    {
        let current_fields_ids_map = self.index.fields_ids_map(self.rtxn)?;
        let users_ids_documents_ids = self.index.users_ids_documents_ids(self.rtxn)?;
        let documents_ids = self.index.documents_ids(self.rtxn)?;
        let documents_count = documents_ids.len() as usize;

        // We create a final writer to write the new documents in order from the sorter.
        let file = tempfile::tempfile()?;
        let mut writer = create_writer(self.chunk_compression_type, self.chunk_compression_level, file)?;

        let mut obkv_buffer = Vec::new();
        for result in self.index.documents.iter(self.rtxn)? {
            let (docid, obkv) = result?;
            let docid = docid.get();

            obkv_buffer.clear();
            let mut obkv_writer = obkv::KvWriter::new(&mut obkv_buffer);

            // We iterate over the new `FieldsIdsMap` ids in order and construct the new obkv.
            for (id, name) in fields_ids_map.iter() {
                if let Some(val) = current_fields_ids_map.id(name).and_then(|id| obkv.get(id)) {
                    obkv_writer.insert(id, val)?;
                }
            }

            let buffer = obkv_writer.into_inner()?;
            writer.insert(docid.to_be_bytes(), buffer)?;
        }

        // Once we have written all the documents, we extract
        // the file and reset the seek to be able to read it again.
        let mut documents_file = writer.into_inner()?;
        documents_file.seek(SeekFrom::Start(0))?;

        Ok(TransformOutput {
            primary_key,
            fields_ids_map,
            users_ids_documents_ids: users_ids_documents_ids.map_data(Cow::into_owned)?,
            new_documents_ids: documents_ids,
            replaced_documents_ids: RoaringBitmap::default(),
            documents_count,
            documents_file,
        })
    }
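Since the remapping above leans entirely on obkv's sorted key/value layout, here is a minimal round-trip sketch of that layout, written against the same calls the method uses (`KvWriter::new`, `insert`, `into_inner`, `get`); the field ids and values are illustrative:

    use anyhow::Result;

    // Keys are u8 field ids, values raw bytes; keys are inserted in ascending order.
    fn obkv_roundtrip() -> Result<()> {
        let mut buffer = Vec::new();
        let mut writer = obkv::KvWriter::new(&mut buffer);
        writer.insert(0, b"kevin")?; // field id 0
        writer.insert(2, b"23")?;    // field id 2, id 1 is simply absent
        writer.into_inner()?;

        let reader = obkv::KvReader::new(&buffer);
        assert_eq!(reader.get(0), Some(&b"kevin"[..]));
        assert_eq!(reader.get(1), None);
        Ok(())
    }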
}
/// Only the last value associated with an id is kept.


@@ -1,17 +1,53 @@
use anyhow::Context;
use grenad::CompressionType;
use rayon::ThreadPool;

use crate::update::index_documents::{Transform, IndexDocumentsMethod};
use crate::update::{ClearDocuments, IndexDocuments};
use crate::{Index, FieldsIdsMap};

pub struct Settings<'a, 't, 'u, 'i> {
    wtxn: &'t mut heed::RwTxn<'i, 'u>,
    index: &'i Index,

    pub(crate) log_every_n: Option<usize>,
    pub(crate) max_nb_chunks: Option<usize>,
    pub(crate) max_memory: Option<usize>,
    pub(crate) linked_hash_map_size: Option<usize>,
    pub(crate) chunk_compression_type: CompressionType,
    pub(crate) chunk_compression_level: Option<u32>,
    pub(crate) chunk_fusing_shrink_size: Option<u64>,
    pub(crate) thread_pool: Option<&'a ThreadPool>,

    // If a struct field is set to `None` it means that it hasn't been set by the user,
    // however if it is `Some(None)` it means that the user forced a reset of the setting.
    searchable_fields: Option<Option<Vec<String>>>,
    displayed_fields: Option<Option<Vec<String>>>,
}
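The double-`Option` encoding called out in that comment distinguishes three caller intents; a quick illustrative sketch (not part of the diff):

    // Illustrative only: the three states a settings field can take.
    let untouched: Option<Option<Vec<String>>> = None;       // user said nothing
    let reset: Option<Option<Vec<String>>> = Some(None);     // user forced a reset
    let replaced: Option<Option<Vec<String>>> =
        Some(Some(vec!["name".into()]));                     // user sent a new list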
impl<'a, 't, 'u, 'i> Settings<'a, 't, 'u, 'i> {
    pub fn new(wtxn: &'t mut heed::RwTxn<'i, 'u>, index: &'i Index) -> Settings<'a, 't, 'u, 'i> {
        Settings {
            wtxn,
            index,
            log_every_n: None,
            max_nb_chunks: None,
            max_memory: None,
            linked_hash_map_size: None,
            chunk_compression_type: CompressionType::None,
            chunk_compression_level: None,
            chunk_fusing_shrink_size: None,
            thread_pool: None,
            searchable_fields: None,
            displayed_fields: None,
        }
    }

    pub fn reset_searchable_fields(&mut self) {
        self.searchable_fields = Some(None);
    }

    pub fn set_searchable_fields(&mut self, names: Vec<String>) {
        self.searchable_fields = Some(Some(names));
    }

    pub fn reset_displayed_fields(&mut self) {
@@ -22,8 +58,131 @@ impl<'t, 'u, 'i> Settings<'t, 'u, 'i> {
        self.displayed_fields = Some(Some(names));
    }

    pub fn execute<F>(self, progress_callback: F) -> anyhow::Result<()>
    where
        F: Fn(usize, usize) + Sync
    {
        // Check that the searchable attributes have been specified.
        if let Some(value) = self.searchable_fields {
            let current_displayed_fields = self.index.displayed_fields(self.wtxn)?;
            let current_fields_ids_map = self.index.fields_ids_map(self.wtxn)?;

            let result = match value {
                Some(fields_names) => {
                    let mut fields_ids_map = current_fields_ids_map.clone();
                    let searchable_fields: Vec<_> =
                        fields_names.iter()
                            .map(|name| fields_ids_map.insert(name))
                            .collect::<Option<Vec<_>>>()
                            .context("field id limit reached")?;

                    // If the searchable fields are ordered we don't have to generate a new `FieldsIdsMap`.
                    if searchable_fields.windows(2).all(|win| win[0] < win[1]) {
                        (
                            fields_ids_map,
                            Some(searchable_fields),
                            current_displayed_fields.map(ToOwned::to_owned),
                        )
                    } else {
                        // We create or generate the fields ids corresponding to those names.
                        let mut fields_ids_map = FieldsIdsMap::new();
                        let mut searchable_fields = Vec::new();
                        for name in fields_names {
                            let id = fields_ids_map.insert(&name).context("field id limit reached")?;
                            searchable_fields.push(id);
                        }

                        // We complete the new FieldsIdsMap with the previous names.
                        for (_id, name) in current_fields_ids_map.iter() {
                            fields_ids_map.insert(name).context("field id limit reached")?;
                        }

                        // We must also update the displayed fields according to the new `FieldsIdsMap`.
                        let displayed_fields = match current_displayed_fields {
                            Some(fields) => {
                                let mut displayed_fields = Vec::new();
                                for id in fields {
                                    let name = current_fields_ids_map.name(*id).unwrap();
                                    let id = fields_ids_map.id(name).context("field id limit reached")?;
                                    displayed_fields.push(id);
                                }
                                Some(displayed_fields)
                            },
                            None => None,
                        };

                        (fields_ids_map, Some(searchable_fields), displayed_fields)
                    }
                },
                None => (
                    current_fields_ids_map.clone(),
                    None,
                    current_displayed_fields.map(ToOwned::to_owned),
                ),
            };

            let (mut fields_ids_map, searchable_fields, displayed_fields) = result;

            let transform = Transform {
                rtxn: &self.wtxn,
                index: self.index,
                chunk_compression_type: self.chunk_compression_type,
                chunk_compression_level: self.chunk_compression_level,
                chunk_fusing_shrink_size: self.chunk_fusing_shrink_size,
                max_nb_chunks: self.max_nb_chunks,
                max_memory: self.max_memory,
                index_documents_method: IndexDocumentsMethod::ReplaceDocuments,
                autogenerate_docids: false,
            };

            // We compute or generate the new primary key field id.
            let primary_key = match self.index.primary_key(&self.wtxn)? {
                Some(id) => {
                    let name = current_fields_ids_map.name(id).unwrap();
                    fields_ids_map.insert(name).context("field id limit reached")?
                },
                None => fields_ids_map.insert("id").context("field id limit reached")?,
            };

            // We remap the documents fields based on the new `FieldsIdsMap`.
            let output = transform.remap_index_documents(primary_key, fields_ids_map.clone())?;

            // We write the new FieldsIdsMap to the database
            // this way next indexing methods will be based on that.
            self.index.put_fields_ids_map(self.wtxn, &fields_ids_map)?;

            // The new searchable fields are also written down to make sure
            // that the IndexDocuments system takes only these ones into account.
            match searchable_fields {
                Some(fields) => self.index.put_searchable_fields(self.wtxn, &fields)?,
                None => self.index.delete_searchable_fields(self.wtxn).map(drop)?,
            }

            // We write the displayed fields into the database here
            // to make sure that the right fields are displayed.
            match displayed_fields {
                Some(fields) => self.index.put_displayed_fields(self.wtxn, &fields)?,
                None => self.index.delete_displayed_fields(self.wtxn).map(drop)?,
            }

            // We clear the full database (words-fst, documents ids and documents content).
            ClearDocuments::new(self.wtxn, self.index).execute()?;

            // We index the generated `TransformOutput` which must contain
            // all the documents with fields in the newly defined searchable order.
            let mut indexing_builder = IndexDocuments::new(self.wtxn, self.index);
            indexing_builder.log_every_n = self.log_every_n;
            indexing_builder.max_nb_chunks = self.max_nb_chunks;
            indexing_builder.max_memory = self.max_memory;
            indexing_builder.linked_hash_map_size = self.linked_hash_map_size;
            indexing_builder.chunk_compression_type = self.chunk_compression_type;
            indexing_builder.chunk_compression_level = self.chunk_compression_level;
            indexing_builder.chunk_fusing_shrink_size = self.chunk_fusing_shrink_size;
            indexing_builder.thread_pool = self.thread_pool;
            indexing_builder.execute_raw(output, progress_callback)?;
        }

        // Check that the displayed attributes have been specified.
        if let Some(value) = self.displayed_fields {
            match value {
                // If it has been set, and it was a list of fields names, we create
@@ -60,6 +219,109 @@ mod tests {
    use crate::update::{IndexDocuments, UpdateFormat};
    use heed::EnvOpenOptions;
    #[test]
    fn set_and_reset_searchable_fields() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // First we send 3 documents with ids from 1 to 3.
        let mut wtxn = index.write_txn().unwrap();
        let content = &b"name,age\nkevin,23\nkevina,21\nbenoit,34\n"[..];
        let mut builder = IndexDocuments::new(&mut wtxn, &index);
        builder.update_format(UpdateFormat::Csv);
        builder.execute(content, |_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // We change the searchable fields to be the "name" field only.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index);
        builder.set_searchable_fields(vec!["name".into()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the searchable field is correctly set to "name" only.
        let rtxn = index.read_txn().unwrap();
        // When we search for something that is not in
        // the searchable fields it must not return any document.
        let result = index.search(&rtxn).query("23").execute().unwrap();
        assert!(result.documents_ids.is_empty());

        // When we search for something that is in the searchable fields
        // we must find the appropriate document.
        let result = index.search(&rtxn).query(r#""kevin""#).execute().unwrap();
        let documents = index.documents(&rtxn, result.documents_ids).unwrap();
        assert_eq!(documents.len(), 1);
        assert_eq!(documents[0].1.get(0), Some(&br#""kevin""#[..]));
        drop(rtxn);

        // We reset the searchable fields.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index);
        builder.reset_searchable_fields();
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the searchable fields have been reset and documents are found again.
        let rtxn = index.read_txn().unwrap();
        let searchable_fields = index.searchable_fields(&rtxn).unwrap();
        assert_eq!(searchable_fields, None);
        let result = index.search(&rtxn).query("23").execute().unwrap();
        assert_eq!(result.documents_ids.len(), 1);
        let documents = index.documents(&rtxn, result.documents_ids).unwrap();
        assert_eq!(documents[0].1.get(0), Some(&br#""kevin""#[..]));
        drop(rtxn);
    }
    #[test]
    fn mixup_searchable_with_displayed_fields() {
        let path = tempfile::tempdir().unwrap();
        let mut options = EnvOpenOptions::new();
        options.map_size(10 * 1024 * 1024); // 10 MB
        let index = Index::new(options, &path).unwrap();

        // First we send 3 documents with ids from 1 to 3.
        let mut wtxn = index.write_txn().unwrap();
        let content = &b"name,age\nkevin,23\nkevina,21\nbenoit,34\n"[..];
        let mut builder = IndexDocuments::new(&mut wtxn, &index);
        builder.update_format(UpdateFormat::Csv);
        builder.execute(content, |_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // In the same transaction we change the displayed fields to be only the "age" field.
        // We also change the searchable fields to be the "name" field only.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index);
        builder.set_displayed_fields(vec!["age".into()]);
        builder.set_searchable_fields(vec!["name".into()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the displayed fields are correctly set to only the "age" field.
        let rtxn = index.read_txn().unwrap();
        let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
        let fields_ids = index.displayed_fields(&rtxn).unwrap();
        let age_id = fields_ids_map.id("age").unwrap();
        assert_eq!(fields_ids, Some(&[age_id][..]));
        drop(rtxn);

        // We reset the searchable fields.
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index);
        builder.reset_searchable_fields();
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the displayed fields still contain only the "age" field.
        let rtxn = index.read_txn().unwrap();
        let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
        let fields_ids = index.displayed_fields(&rtxn).unwrap();
        let age_id = fields_ids_map.id("age").unwrap();
        assert_eq!(fields_ids, Some(&[age_id][..]));
        drop(rtxn);
    }
    #[test]
    fn default_displayed_fields() {
        let path = tempfile::tempdir().unwrap();
@@ -99,7 +361,7 @@ mod tests {
        // In the same transaction we change the displayed fields to be only the age.
        let mut builder = Settings::new(&mut wtxn, &index);
        builder.set_displayed_fields(vec!["age".into()]);
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the displayed fields are correctly set to only the "age" field.
@@ -114,7 +376,7 @@ mod tests {
        let mut wtxn = index.write_txn().unwrap();
        let mut builder = Settings::new(&mut wtxn, &index);
        builder.reset_displayed_fields();
        builder.execute(|_, _| ()).unwrap();
        wtxn.commit().unwrap();

        // Check that the displayed fields are correctly set to `None` (default value).


@@ -103,8 +103,19 @@ impl<'a> UpdateBuilder<'a> {
        self,
        wtxn: &'t mut heed::RwTxn<'i, 'u>,
        index: &'i Index,
    ) -> Settings<'a, 't, 'u, 'i>
    {
        let mut builder = Settings::new(wtxn, index);

        builder.log_every_n = self.log_every_n;
        builder.max_nb_chunks = self.max_nb_chunks;
        builder.max_memory = self.max_memory;
        builder.linked_hash_map_size = self.linked_hash_map_size;
        builder.chunk_compression_type = self.chunk_compression_type;
        builder.chunk_compression_level = self.chunk_compression_level;
        builder.chunk_fusing_shrink_size = self.chunk_fusing_shrink_size;
        builder.thread_pool = self.thread_pool;

        builder
    }
}
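With this forwarding in place, a settings update inherits the builder's tuning knobs. A hedged usage sketch, inside a function returning `anyhow::Result<()>` (how `update_builder` itself is constructed isn't shown in this diff, so that part is assumed):

    let mut wtxn = index.write_txn()?;
    let mut builder = update_builder.settings(&mut wtxn, &index);
    builder.set_searchable_fields(vec!["name".into()]);
    builder.execute(|count, total| println!("{}/{}", count, total))?;
    wtxn.commit()?;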


@@ -220,6 +220,7 @@ mod tests {
    }

    #[test]
    #[ignore]
    fn long_running_update() {
        let dir = tempfile::tempdir().unwrap();
        let options = EnvOpenOptions::new();
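Marking the test `#[ignore]` keeps it out of the default `cargo test` run; it can still be executed on demand with `cargo test -- --ignored`.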