Mirror of https://github.com/meilisearch/MeiliSearch
Introduce the UpdateBuilder type along with some update operations
commit b14cca2ad9 (parent adacc7977d)
9 changed files with 382 additions and 39 deletions
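Note: the UpdateBuilder type itself lives in files whose hunks are not shown below. As a rough, hypothetical sketch of the builder shape the commit title describes (the struct name comes from the title, but every field and method here is an assumption for illustration, not code from this commit):

    // Hypothetical sketch only: fields and methods are illustrative assumptions.
    #[derive(Default)]
    pub struct UpdateBuilder {
        log_every_n: Option<usize>,
        max_memory: Option<usize>,
    }

    impl UpdateBuilder {
        pub fn new() -> UpdateBuilder {
            UpdateBuilder::default()
        }

        // Each setter returns &mut Self so calls can be chained.
        pub fn log_every_n(&mut self, n: usize) -> &mut UpdateBuilder {
            self.log_every_n = Some(n);
            self
        }

        pub fn max_memory(&mut self, bytes: usize) -> &mut UpdateBuilder {
            self.max_memory = Some(bytes);
            self
        }
    }

    fn main() {
        let mut update = UpdateBuilder::new();
        update.log_every_n(100_000).max_memory(512 * 1024 * 1024);
    }

A builder in this style lets callers set only the options they care about before launching an update operation.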
@@ -199,10 +199,10 @@ fn biggest_value_sizes(index: &Index, rtxn: &heed::RoTxn, limit: usize) -> anyho
     let mut heap = BinaryHeap::with_capacity(limit + 1);
 
     if limit > 0 {
-        if let Some(fst) = index.fst(rtxn)? {
-            heap.push(Reverse((fst.as_fst().as_bytes().len(), format!("words-fst"), main_name)));
-            if heap.len() > limit { heap.pop(); }
-        }
+        let words_fst = index.words_fst(rtxn)?;
+
+        heap.push(Reverse((words_fst.as_fst().as_bytes().len(), format!("words-fst"), main_name)));
+        if heap.len() > limit { heap.pop(); }
 
         if let Some(documents) = index.main.get::<_, Str, ByteSlice>(rtxn, "documents")? {
             heap.push(Reverse((documents.len(), format!("documents"), main_name)));
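The hunk above replaces an explicit match on an optional fst with a direct words_fst call, which implies the accessor now falls back to an empty set on its own. A minimal, self-contained sketch of that fallback pattern (the function name set_or_default and the assumption of fst 0.4 with its map_data API are mine, not the commit's):

    use std::borrow::Cow;

    // Sketch of the "empty set when nothing is stored" fallback that the new
    // words_fst call site relies on. set_or_default is an illustrative name,
    // not a function from this commit.
    fn set_or_default(bytes: Option<&[u8]>) -> Result<fst::Set<Cow<'_, [u8]>>, fst::Error> {
        match bytes {
            Some(bytes) => fst::Set::new(bytes)?.map_data(Cow::Borrowed),
            None => fst::Set::default().map_data(Cow::Owned),
        }
    }

    fn main() -> Result<(), fst::Error> {
        // Nothing stored yet: callers still get a usable, empty set.
        let empty = set_or_default(None)?;
        assert_eq!(empty.len(), 0);
        Ok(())
    }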
@@ -265,13 +265,8 @@ fn export_words_fst(index: &Index, rtxn: &heed::RoTxn, output: PathBuf) -> anyho
     let mut output = File::create(&output)
         .with_context(|| format!("failed to create {} file", output.display()))?;
 
-    match index.fst(rtxn)? {
-        Some(fst) => output.write_all(fst.as_fst().as_bytes())?,
-        None => {
-            let fst = fst::Set::default();
-            output.write_all(fst.as_fst().as_bytes())?;
-        },
-    }
+    let words_fst = index.words_fst(rtxn)?;
+    output.write_all(words_fst.as_fst().as_bytes())?;
 
     Ok(())
 }
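With the set always available, export_words_fst becomes a single write_all of the fst's backing bytes. A small, standalone round-trip sketch of that serialization idea (the words.fst file name and the fst 0.4 and anyhow dependencies are assumptions for the example):

    use std::fs::File;
    use std::io::{Read, Write};

    fn main() -> anyhow::Result<()> {
        // Build a tiny set; from_iter requires the keys in lexicographic order.
        let words = fst::Set::from_iter(["hello", "world"])?;

        // Export: an fst is just its backing bytes, so dumping it is one
        // write_all, like the simplified export_words_fst above.
        let mut output = File::create("words.fst")?;
        output.write_all(words.as_fst().as_bytes())?;

        // Import: read the bytes back and wrap them in a Set again.
        let mut bytes = Vec::new();
        File::open("words.fst")?.read_to_end(&mut bytes)?;
        let words = fst::Set::new(bytes)?;
        assert!(words.contains("hello"));

        Ok(())
    }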
@@ -62,7 +62,7 @@ pub fn run(opt: Opt) -> anyhow::Result<()> {
     let result = index.search(&rtxn).query(query).execute().unwrap();
 
     let mut stdout = io::stdout();
-    let fields_ids_map = index.fields_ids_map(&rtxn)?.unwrap_or_default();
+    let fields_ids_map = index.fields_ids_map(&rtxn)?;
     let documents = index.documents(&rtxn, result.documents_ids.iter().cloned())?;
 
     for (_id, record) in documents {
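This hunk, like the later ones, moves the unwrap_or_default fallback out of the call site and into the accessor, so fields_ids_map and documents_ids now hand back a ready-to-use value rather than an Option. A toy sketch of that API shape (the Store and FieldsIdsMap types here are stand-ins, not milli's):

    use std::collections::HashMap;

    // Hypothetical stand-in types used only to illustrate the accessor change.
    #[derive(Debug, Default, Clone)]
    struct FieldsIdsMap {
        names_ids: HashMap<String, u8>,
    }

    struct Store {
        main: HashMap<&'static str, FieldsIdsMap>,
    }

    impl Store {
        // Old shape: every caller had to append .unwrap_or_default() itself.
        fn fields_ids_map_opt(&self) -> Option<FieldsIdsMap> {
            self.main.get("fields-ids-map").cloned()
        }

        // New shape implied by the diff: the fallback lives inside the accessor,
        // so call sites receive a ready-to-use (possibly empty) map.
        fn fields_ids_map(&self) -> FieldsIdsMap {
            self.main.get("fields-ids-map").cloned().unwrap_or_default()
        }
    }

    fn main() {
        let store = Store { main: HashMap::new() };
        assert!(store.fields_ids_map_opt().is_none());
        assert_eq!(store.fields_ids_map().names_ids.len(), 0);
    }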
@@ -1,4 +1,3 @@
-use std::borrow::Cow;
 use std::collections::HashSet;
 use std::fs::{File, create_dir_all};
 use std::{mem, io};
@@ -156,13 +155,10 @@ pub fn run(opt: Opt) -> anyhow::Result<()> {
         UpdateMeta::DocumentsAddition => {
             // We must use the write transaction of the update here.
             let rtxn = env_cloned.read_txn()?;
-            let fields_ids_map = index_cloned.fields_ids_map(&rtxn)?.unwrap_or_default();
-            let documents_ids = index_cloned.documents_ids(&rtxn)?.unwrap_or_default();
+            let fields_ids_map = index_cloned.fields_ids_map(&rtxn)?;
+            let documents_ids = index_cloned.documents_ids(&rtxn)?;
             let available_documents_ids = AvailableDocumentsIds::from_documents_ids(&documents_ids);
-            let users_ids_documents_ids = match index_cloned.users_ids_documents_ids(&rtxn).unwrap() {
-                Some(map) => map.map_data(Cow::Borrowed).unwrap(),
-                None => fst::Map::default().map_data(Cow::Owned).unwrap(),
-            };
+            let users_ids_documents_ids = index_cloned.users_ids_documents_ids(&rtxn).unwrap();
 
             let transform = Transform {
                 fields_ids_map,
@@ -395,7 +391,7 @@ pub fn run(opt: Opt) -> anyhow::Result<()> {
     let SearchResult { found_words, documents_ids } = search.execute().unwrap();
 
     let mut documents = Vec::new();
-    let fields_ids_map = index.fields_ids_map(&rtxn).unwrap().unwrap_or_default();
+    let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
 
     for (_id, record) in index.documents(&rtxn, documents_ids).unwrap() {
         let mut record = record.iter()