Rewrite the indexer to use one MTBL per database

This allows us to avoid prefixing keys with a database discriminant and lets us append sorted entries directly into the LMDB databases.
Clément Renault 2020-10-04 13:17:58 +02:00
parent 770f29fd05
commit ce8e56ee18
3 changed files with 227 additions and 162 deletions
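
Before this change, every key space went through a single MTBL sorter, disambiguated by a discriminant byte prepended to each key and stripped again when routing entries into their LMDB database. With one MTBL (and one sorter) per database the prefix disappears, and each merged stream comes out sorted exactly as its target database expects, which is what later allows append-mode insertion into LMDB. A minimal before/after sketch of the key scheme (the two helper functions are hypothetical, written for this note; only the WORD_DOCIDS_BYTE discriminant value comes from the diff below):

// A minimal before/after sketch of the key scheme; the helper functions
// are illustrative only and do not appear in this commit.

const WORD_DOCIDS_BYTE: u8 = 2; // old scheme: one discriminant byte per target database

// Before: a single sorter held every entry, so each key carried a prefix
// byte naming its LMDB database, stripped again at write time.
fn old_word_docids_key(word: &[u8]) -> Vec<u8> {
    let mut key = vec![WORD_DOCIDS_BYTE];
    key.extend_from_slice(word);
    key
}

// After: one sorter and one MTBL per database, so the key is the word itself
// and each merged stream is already in the exact order its database expects.
fn new_word_docids_key(word: &[u8]) -> Vec<u8> {
    word.to_vec()
}

fn main() {
    assert_eq!(old_word_docids_key(b"hello"), b"\x02hello");
    assert_eq!(new_word_docids_key(b"hello"), b"hello");
}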

View File

@@ -1,5 +1,5 @@
 use std::collections::HashMap;
-use std::convert::{TryFrom, TryInto};
+use std::convert::TryFrom;
 use std::fs::File;
 use std::io::{self, Read, Write};
 use std::iter::FromIterator;
@@ -7,12 +7,12 @@ use std::path::PathBuf;
 use std::{iter, thread};
 use std::time::Instant;

-use anyhow::Context;
+use anyhow::{Context, bail};
 use bstr::ByteSlice as _;
 use csv::StringRecord;
 use flate2::read::GzDecoder;
 use fst::IntoStreamer;
-use heed::{EnvOpenOptions, BytesEncode, types::*};
+use heed::{EnvOpenOptions, BytesEncode, types::ByteSlice};
 use linked_hash_map::LinkedHashMap;
 use log::{debug, info};
 use memmap::Mmap;
@@ -20,24 +20,20 @@ use oxidized_mtbl::{Reader, Writer, Merger, Sorter, CompressionType};
 use rayon::prelude::*;
 use roaring::RoaringBitmap;
 use structopt::StructOpt;
+use tempfile::tempfile;

 use milli::heed_codec::{CsvStringRecordCodec, BoRoaringBitmapCodec, CboRoaringBitmapCodec};
 use milli::tokenizer::{simple_tokenizer, only_token};
-use milli::{SmallVec32, Index, Position, DocumentId, BEU32};
+use milli::{SmallVec32, Index, Position, DocumentId};

 const LMDB_MAX_KEY_LENGTH: usize = 511;
 const MAX_POSITION: usize = 1000;
 const MAX_ATTRIBUTES: usize = u32::max_value() as usize / MAX_POSITION;

-const HEADERS_KEY: &[u8] = b"\0headers";
-const DOCUMENTS_IDS_KEY: &[u8] = b"\x04documents-ids";
-const WORDS_FST_KEY: &[u8] = b"\x06words-fst";
-
-const HEADERS_BYTE: u8 = 0;
-const WORD_DOCID_POSITIONS_BYTE: u8 = 1;
-const WORD_DOCIDS_BYTE: u8 = 2;
-const WORDS_PROXIMITIES_BYTE: u8 = 5;
-const DOCUMENTS_IDS_BYTE: u8 = 4;
+const WORDS_FST_KEY: &[u8] = milli::WORDS_FST_KEY.as_bytes();
+const HEADERS_KEY: &[u8] = milli::HEADERS_KEY.as_bytes();
+const DOCUMENTS_IDS_KEY: &[u8] = milli::DOCUMENTS_IDS_KEY.as_bytes();

 #[cfg(target_os = "linux")]
 #[global_allocator]
@@ -90,7 +86,7 @@ struct IndexerOpt {
     max_nb_chunks: Option<usize>,

     /// MTBL max memory in bytes.
-    #[structopt(long, default_value = "1335885824")] // 1.25 GB
+    #[structopt(long, default_value = "346030080")] // 330 MB
     max_memory: usize,

     /// Size of the linked hash map cache when indexing.
@@ -129,15 +125,43 @@ fn lmdb_key_valid_size(key: &[u8]) -> bool {
     !key.is_empty() && key.len() <= LMDB_MAX_KEY_LENGTH
 }

-fn create_writer(type_: CompressionType, level: Option<u32>, file: File) -> Writer<File> {
+fn create_writer(typ: CompressionType, level: Option<u32>, file: File) -> Writer<File> {
     let mut builder = Writer::builder();
-    builder.compression_type(type_);
+    builder.compression_type(typ);
     if let Some(level) = level {
         builder.compression_level(level);
     }
     builder.build(file)
 }

+fn writer_into_reader(writer: Writer<File>) -> anyhow::Result<Reader<Mmap>> {
+    let file = writer.into_inner()?;
+    let mmap = unsafe { Mmap::map(&file)? };
+    Reader::new(mmap).map_err(Into::into)
+}
+
+fn create_sorter(
+    merge: MergeFn,
+    chunk_compression_type: CompressionType,
+    chunk_compression_level: Option<u32>,
+    max_nb_chunks: Option<usize>,
+    max_memory: Option<usize>,
+) -> Sorter<MergeFn>
+{
+    let mut builder = Sorter::builder(merge);
+    builder.chunk_compression_type(chunk_compression_type);
+    if let Some(level) = chunk_compression_level {
+        builder.chunk_compression_level(level);
+    }
+    if let Some(nb_chunks) = max_nb_chunks {
+        builder.max_nb_chunks(nb_chunks);
+    }
+    if let Some(memory) = max_memory {
+        builder.max_memory(memory);
+    }
+    builder.build()
+}
+
 /// Outputs a list of all pairs of words with the shortest proximity between 1 and 7 inclusive.
 ///
 /// This list is used by the engine to calculate the documents containing words that are
@@ -180,10 +204,24 @@ struct Store {
     words_pairs_proximities_docids: LinkedHashMap<(SmallVec32<u8>, SmallVec32<u8>, u8), RoaringBitmap>,
     words_pairs_proximities_docids_limit: usize,
     documents_ids: RoaringBitmap,
-    sorter: Sorter<MergeFn>,
-    documents_writer: Writer<File>,
+    // MTBL parameters
     chunk_compression_type: CompressionType,
     chunk_compression_level: Option<u32>,
+    // MTBL sorters
+    main_sorter: Sorter<MergeFn>,
+    word_docids_sorter: Sorter<MergeFn>,
+    docid_word_positions_sorter: Sorter<MergeFn>,
+    words_pairs_proximities_docids_sorter: Sorter<MergeFn>,
+    // MTBL writers
+    documents_writer: Writer<File>,
+}
+
+struct Readers {
+    main: Reader<Mmap>,
+    word_docids: Reader<Mmap>,
+    docid_word_positions: Reader<Mmap>,
+    words_pairs_proximities_docids: Reader<Mmap>,
+    documents: Reader<Mmap>,
 }
 impl Store {
@@ -195,24 +233,41 @@ impl Store {
         chunk_compression_level: Option<u32>,
     ) -> anyhow::Result<Store>
     {
-        let mut builder = Sorter::builder(merge as MergeFn);
-        builder.chunk_compression_type(chunk_compression_type);
-        if let Some(level) = chunk_compression_level {
-            builder.chunk_compression_level(level);
-        }
-        if let Some(nb_chunks) = max_nb_chunks {
-            builder.max_nb_chunks(nb_chunks);
-        }
-        if let Some(memory) = max_memory {
-            builder.max_memory(memory);
-        }
+        let main_sorter = create_sorter(
+            main_merge,
+            chunk_compression_type,
+            chunk_compression_level,
+            max_nb_chunks,
+            max_memory,
+        );
+        let word_docids_sorter = create_sorter(
+            word_docids_merge,
+            chunk_compression_type,
+            chunk_compression_level,
+            max_nb_chunks,
+            max_memory,
+        );
+        let docid_word_positions_sorter = create_sorter(
+            docid_word_positions_merge,
+            chunk_compression_type,
+            chunk_compression_level,
+            max_nb_chunks,
+            max_memory,
+        );
+        let words_pairs_proximities_docids_sorter = create_sorter(
+            words_pairs_proximities_docids_merge,
+            chunk_compression_type,
+            chunk_compression_level,
+            max_nb_chunks,
+            max_memory,
+        );

         let mut documents_builder = Writer::builder();
         documents_builder.compression_type(chunk_compression_type);
         if let Some(level) = chunk_compression_level {
             documents_builder.compression_level(level);
         }
-        let documents_writer = tempfile::tempfile().map(|f| documents_builder.build(f))?;
+        let documents_writer = tempfile().map(|f| documents_builder.build(f))?;

         Ok(Store {
             word_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
@@ -220,10 +275,15 @@ impl Store {
             words_pairs_proximities_docids: LinkedHashMap::with_capacity(linked_hash_map_size),
             words_pairs_proximities_docids_limit: linked_hash_map_size,
             documents_ids: RoaringBitmap::new(),
-            sorter: builder.build(),
-            documents_writer,
             chunk_compression_type,
             chunk_compression_level,
+            main_sorter,
+            word_docids_sorter,
+            docid_word_positions_sorter,
+            words_pairs_proximities_docids_sorter,
+            documents_writer,
         })
     }
@@ -241,7 +301,7 @@ impl Store {
                 if self.word_docids.len() == self.word_docids_limit {
                     // Removing the front element is equivalent to removing the LRU element.
                     let lru = self.word_docids.pop_front();
-                    Self::write_word_docids(&mut self.sorter, lru)?;
+                    Self::write_word_docids(&mut self.word_docids_sorter, lru)?;
                 }
             }
         }
@@ -279,7 +339,7 @@ impl Store {
             // Removing front elements is equivalent to removing the LRUs.
             let iter = iter::from_fn(|| self.words_pairs_proximities_docids.pop_front());
             iter.take(overflow).for_each(|x| lrus.push(x));
-            Self::write_words_pairs_proximities(&mut self.sorter, lrus)?;
+            Self::write_words_pairs_proximities(&mut self.words_pairs_proximities_docids_sorter, lrus)?;
         }

         Ok(())
@@ -288,7 +348,7 @@ impl Store {
     fn write_headers(&mut self, headers: &StringRecord) -> anyhow::Result<()> {
         let headers = CsvStringRecordCodec::bytes_encode(headers)
            .with_context(|| format!("could not encode csv record"))?;
-        Ok(self.sorter.insert(HEADERS_KEY, headers)?)
+        Ok(self.main_sorter.insert(HEADERS_KEY, headers)?)
     }

     fn write_document(
@@ -312,7 +372,7 @@ impl Store {
         self.documents_ids.insert(document_id);
         self.documents_writer.insert(document_id.to_be_bytes(), record)?;
-        Self::write_docid_word_positions(&mut self.sorter, document_id, words_positions)?;
+        Self::write_docid_word_positions(&mut self.docid_word_positions_sorter, document_id, words_positions)?;

         Ok(())
     }
@@ -322,12 +382,11 @@ impl Store {
         iter: impl IntoIterator<Item=((SmallVec32<u8>, SmallVec32<u8>, u8), RoaringBitmap)>,
     ) -> anyhow::Result<()>
     {
-        // words proximities keys are all prefixed
-        let mut key = vec![WORDS_PROXIMITIES_BYTE];
+        let mut key = Vec::new();
         let mut buffer = Vec::new();

         for ((w1, w2, min_prox), docids) in iter {
-            key.truncate(1);
+            key.clear();
             key.extend_from_slice(w1.as_bytes());
             key.push(0);
             key.extend_from_slice(w2.as_bytes());
@@ -352,11 +411,8 @@ impl Store {
         words_positions: &HashMap<String, SmallVec32<Position>>,
     ) -> anyhow::Result<()>
     {
-        // postings positions ids keys are all prefixed
-        let mut key = vec![WORD_DOCID_POSITIONS_BYTE];
         // We prefix the words by the document id.
-        key.extend_from_slice(&id.to_be_bytes());
+        let mut key = id.to_be_bytes().to_vec();
         let base_size = key.len();

         for (word, positions) in words_positions {
@@ -378,12 +434,11 @@ impl Store {
     fn write_word_docids<I>(sorter: &mut Sorter<MergeFn>, iter: I) -> anyhow::Result<()>
     where I: IntoIterator<Item=(SmallVec32<u8>, RoaringBitmap)>
     {
-        // postings positions ids keys are all prefixed
-        let mut key = vec![WORD_DOCIDS_BYTE];
+        let mut key = Vec::new();
         let mut buffer = Vec::new();

         for (word, ids) in iter {
-            key.truncate(1);
+            key.clear();
             key.extend_from_slice(&word);
             // We serialize the document ids into a buffer
             buffer.clear();
@@ -412,7 +467,7 @@ impl Store {
         thread_index: usize,
         num_threads: usize,
         log_every_n: usize,
-    ) -> anyhow::Result<(Reader<Mmap>, Reader<Mmap>)>
+    ) -> anyhow::Result<Readers>
     {
         debug!("{:?}: Indexing in a Store...", thread_index);
@@ -453,50 +508,63 @@ impl Store {
             document_id = document_id + 1;
         }

-        let (reader, docs_reader) = self.finish()?;
+        let readers = self.finish()?;
         debug!("{:?}: Store created!", thread_index);
-        Ok((reader, docs_reader))
+        Ok(readers)
     }

-    fn finish(mut self) -> anyhow::Result<(Reader<Mmap>, Reader<Mmap>)> {
-        let compression_type = self.chunk_compression_type;
-        let compression_level = self.chunk_compression_level;
-        Self::write_word_docids(&mut self.sorter, self.word_docids)?;
-        Self::write_documents_ids(&mut self.sorter, self.documents_ids)?;
-        Self::write_words_pairs_proximities(&mut self.sorter, self.words_pairs_proximities_docids)?;
+    fn finish(mut self) -> anyhow::Result<Readers> {
+        let comp_type = self.chunk_compression_type;
+        let comp_level = self.chunk_compression_level;
+        Self::write_word_docids(&mut self.word_docids_sorter, self.word_docids)?;
+        Self::write_documents_ids(&mut self.main_sorter, self.documents_ids)?;
+        Self::write_words_pairs_proximities(
+            &mut self.words_pairs_proximities_docids_sorter,
+            self.words_pairs_proximities_docids,
+        )?;

-        let wtr_file = tempfile::tempfile()?;
-        let mut wtr = create_writer(compression_type, compression_level, wtr_file);
+        let mut word_docids_wtr = tempfile().map(|f| create_writer(comp_type, comp_level, f))?;
         let mut builder = fst::SetBuilder::memory();

-        let mut iter = self.sorter.into_iter()?;
+        let mut iter = self.word_docids_sorter.into_iter()?;
         while let Some(result) = iter.next() {
-            let (key, val) = result?;
-            if let Some((&WORD_DOCIDS_BYTE, word)) = key.split_first() {
-                // This is a lexicographically ordered word position
-                // we use the key to construct the words fst.
-                builder.insert(word)?;
-            }
-            wtr.insert(key, val)?;
+            let (word, val) = result?;
+            // This is a lexicographically ordered word position
+            // we use the key to construct the words fst.
+            builder.insert(word)?;
+            word_docids_wtr.insert(word, val)?;
         }

         let fst = builder.into_set();
-        wtr.insert(WORDS_FST_KEY, fst.as_fst().as_bytes())?;
+        self.main_sorter.insert(WORDS_FST_KEY, fst.as_fst().as_bytes())?;

-        let docs_file = self.documents_writer.into_inner()?;
-        let docs_mmap = unsafe { Mmap::map(&docs_file)? };
-        let docs_reader = Reader::new(docs_mmap)?;
+        let mut main_wtr = tempfile().map(|f| create_writer(comp_type, comp_level, f))?;
+        self.main_sorter.write_into(&mut main_wtr)?;

-        let file = wtr.into_inner()?;
-        let mmap = unsafe { Mmap::map(&file)? };
-        let reader = Reader::new(mmap)?;
+        let mut docid_word_positions_wtr = tempfile().map(|f| create_writer(comp_type, comp_level, f))?;
+        self.docid_word_positions_sorter.write_into(&mut docid_word_positions_wtr)?;

-        Ok((reader, docs_reader))
+        let mut words_pairs_proximities_docids_wtr = tempfile().map(|f| create_writer(comp_type, comp_level, f))?;
+        self.words_pairs_proximities_docids_sorter.write_into(&mut words_pairs_proximities_docids_wtr)?;
+
+        let main = writer_into_reader(main_wtr)?;
+        let word_docids = writer_into_reader(word_docids_wtr)?;
+        let docid_word_positions = writer_into_reader(docid_word_positions_wtr)?;
+        let words_pairs_proximities_docids = writer_into_reader(words_pairs_proximities_docids_wtr)?;
+        let documents = writer_into_reader(self.documents_writer)?;
+
+        Ok(Readers {
+            main,
+            word_docids,
+            docid_word_positions,
+            words_pairs_proximities_docids,
+            documents,
+        })
     }
 }

-fn merge(key: &[u8], values: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
+fn main_merge(key: &[u8], values: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
     match key {
         WORDS_FST_KEY => {
             let fsts: Vec<_> = values.iter().map(|v| fst::Set::new(v).unwrap()).collect();
@@ -510,12 +578,16 @@ fn merge(key: &[u8], values: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
             build.extend_stream(op.into_stream()).unwrap();
             Ok(build.into_inner().unwrap())
         },
-        key => match key[0] {
-            HEADERS_BYTE | WORD_DOCID_POSITIONS_BYTE => {
+        HEADERS_KEY => {
             assert!(values.windows(2).all(|vs| vs[0] == vs[1]));
             Ok(values[0].to_vec())
         },
-            DOCUMENTS_IDS_BYTE | WORD_DOCIDS_BYTE => {
+        DOCUMENTS_IDS_KEY => word_docids_merge(&[], values),
+        otherwise => panic!("wut {:?}", otherwise),
+    }
+}
+
+fn word_docids_merge(_key: &[u8], values: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
     let (head, tail) = values.split_first().unwrap();
     let mut head = RoaringBitmap::deserialize_from(head.as_slice()).unwrap();
@@ -527,8 +599,14 @@ fn merge(key: &[u8], values: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
     let mut vec = Vec::with_capacity(head.serialized_size());
     head.serialize_into(&mut vec).unwrap();
     Ok(vec)
-        },
-        WORDS_PROXIMITIES_BYTE => {
+}
+
+fn docid_word_positions_merge(_key: &[u8], values: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
+    assert!(values.windows(2).all(|vs| vs[0] == vs[1]));
+    Ok(values[0].to_vec())
+}
+
+fn words_pairs_proximities_docids_merge(_key: &[u8], values: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
     let (head, tail) = values.split_first().unwrap();
     let mut head = CboRoaringBitmapCodec::deserialize_from(head.as_slice()).unwrap();
@@ -540,49 +618,18 @@ fn merge(key: &[u8], values: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
     let mut vec = Vec::new();
     CboRoaringBitmapCodec::serialize_into(&head, &mut vec).unwrap();
     Ok(vec)
-        },
-        otherwise => panic!("wut {:?}", otherwise),
-    }
-}
 }

-// TODO merge with the previous values
-// TODO store the documents in a compressed MTBL
-// TODO prefer using iter.append when possible, it is way faster (4x) to inject ordered entries.
-fn lmdb_writer(wtxn: &mut heed::RwTxn, index: &Index, key: &[u8], val: &[u8]) -> anyhow::Result<()> {
-    if key == WORDS_FST_KEY {
-        // Write the words fst
-        index.main.put::<_, Str, ByteSlice>(wtxn, "words-fst", val)?;
-    }
-    else if key == HEADERS_KEY {
-        // Write the headers
-        index.main.put::<_, Str, ByteSlice>(wtxn, "headers", val)?;
-    }
-    else if key == DOCUMENTS_IDS_KEY {
-        // Write the documents ids list
-        index.main.put::<_, Str, ByteSlice>(wtxn, "documents-ids", val)?;
-    }
-    else if key.starts_with(&[WORD_DOCIDS_BYTE]) {
-        // Write the postings lists
-        index.word_docids.as_polymorph()
-            .put::<_, ByteSlice, ByteSlice>(wtxn, &key[1..], val)?;
-    }
-    else if key.starts_with(&[WORD_DOCID_POSITIONS_BYTE]) {
-        // Write the postings lists
-        index.docid_word_positions.as_polymorph()
-            .put::<_, ByteSlice, ByteSlice>(wtxn, &key[1..], val)?;
-    } else if key.starts_with(&[WORDS_PROXIMITIES_BYTE]) {
-        // Write the word pair proximity document ids
-        index.word_pair_proximity_docids.as_polymorph()
-            .put::<_, ByteSlice, ByteSlice>(wtxn, &key[1..], val)?;
-    }
-    Ok(())
-}
+fn documents_merge(key: &[u8], _values: &[Vec<u8>]) -> Result<Vec<u8>, ()> {
+    panic!("impossible to merge documents ({:?})", key.as_bstr())
+}

-fn merge_into_lmdb<F>(sources: Vec<Reader<Mmap>>, mut f: F) -> anyhow::Result<()>
-where F: FnMut(&[u8], &[u8]) -> anyhow::Result<()>
-{
+fn merge_into_lmdb_database(
+    wtxn: &mut heed::RwTxn,
+    database: heed::PolyDatabase,
+    sources: Vec<Reader<Mmap>>,
+    merge: MergeFn,
+) -> anyhow::Result<()> {
     debug!("Merging {} MTBL stores...", sources.len());
     let before = Instant::now();
@@ -590,10 +637,11 @@ where F: FnMut(&[u8], &[u8]) -> anyhow::Result<()>
     builder.extend(sources);
     let merger = builder.build();

-    let mut iter = merger.into_merge_iter()?;
-    while let Some(result) = iter.next() {
+    let mut in_iter = merger.into_merge_iter()?;
+    let mut out_iter = database.iter_mut::<_, ByteSlice, ByteSlice>(wtxn)?;
+    while let Some(result) = in_iter.next() {
         let (k, v) = result?;
-        (f)(&k, &v).with_context(|| format!("writing {:?} into LMDB", k.as_bstr()))?;
+        out_iter.append(k, v).with_context(|| format!("writing {:?} into LMDB", k.as_bstr()))?;
     }

     debug!("MTBL stores merged in {:.02?}!", before.elapsed());
@@ -666,6 +714,10 @@ fn main() -> anyhow::Result<()> {
         rayon::ThreadPoolBuilder::new().num_threads(jobs).build_global()?;
     }

+    if opt.database.exists() {
+        bail!("Database ({}) already exists, delete it to continue.", opt.database.display());
+    }
+
     std::fs::create_dir_all(&opt.database)?;
     let env = EnvOpenOptions::new()
         .map_size(opt.database_size)
@@ -698,27 +750,41 @@ fn main() -> anyhow::Result<()> {
         })
         .collect::<Result<Vec<_>, _>>()?;

-    let mut stores = Vec::with_capacity(readers.len());
-    let mut docs_stores = Vec::with_capacity(readers.len());
-
-    readers.into_iter().for_each(|(s, d)| {
-        stores.push(s);
-        docs_stores.push(d);
+    let mut main_stores = Vec::with_capacity(readers.len());
+    let mut word_docids_stores = Vec::with_capacity(readers.len());
+    let mut docid_word_positions_stores = Vec::with_capacity(readers.len());
+    let mut words_pairs_proximities_docids_stores = Vec::with_capacity(readers.len());
+    let mut documents_stores = Vec::with_capacity(readers.len());
+
+    readers.into_iter().for_each(|readers| {
+        main_stores.push(readers.main);
+        word_docids_stores.push(readers.word_docids);
+        docid_word_positions_stores.push(readers.docid_word_positions);
+        words_pairs_proximities_docids_stores.push(readers.words_pairs_proximities_docids);
+        documents_stores.push(readers.documents);
     });

     let mut wtxn = env.write_txn()?;

-    // We merge the postings lists into LMDB.
-    debug!("We are writing the postings lists into LMDB on disk...");
-    merge_into_lmdb(stores, |k, v| lmdb_writer(&mut wtxn, &index, k, v))?;
-
-    // We merge the documents into LMDB.
-    debug!("We are writing the documents into LMDB on disk...");
-    merge_into_lmdb(docs_stores, |k, v| {
-        let id = k.try_into().map(u32::from_be_bytes)?;
-        Ok(index.documents.put(&mut wtxn, &BEU32::new(id), v)?)
-    })?;
-
-    // Retrieve the number of documents.
+    debug!("Writing the main elements into LMDB on disk...");
+    merge_into_lmdb_database(&mut wtxn, index.main, main_stores, main_merge)?;
+
+    debug!("Writing the words docids into LMDB on disk...");
+    let db = *index.word_docids.as_polymorph();
+    merge_into_lmdb_database(&mut wtxn, db, word_docids_stores, word_docids_merge)?;
+
+    debug!("Writing the docid word positions into LMDB on disk...");
+    let db = *index.docid_word_positions.as_polymorph();
+    merge_into_lmdb_database(&mut wtxn, db, docid_word_positions_stores, docid_word_positions_merge)?;
+
+    debug!("Writing the words pairs proximities docids into LMDB on disk...");
+    let db = *index.word_pair_proximity_docids.as_polymorph();
+    merge_into_lmdb_database(&mut wtxn, db, words_pairs_proximities_docids_stores, words_pairs_proximities_docids_merge)?;
+
+    debug!("Writing the documents into LMDB on disk...");
+    let db = *index.documents.as_polymorph();
+    merge_into_lmdb_database(&mut wtxn, db, documents_stores, documents_merge)?;
+
+    debug!("Retrieving the number of documents...");
     let count = index.number_of_documents(&wtxn)?;
     wtxn.commit()?;
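
The write path above hinges on one property: merging already-sorted MTBL stores yields keys in ascending order, so merge_into_lmdb_database can hand them to out_iter.append(...) instead of doing individual puts (the removed TODO noted that appending ordered entries is roughly 4x faster). Below is a self-contained sketch of that merge-then-append pattern using only the standard library; merge_sorted_runs and its types are invented for this illustration and merely stand in for oxidized-mtbl's Merger and the LMDB cursor:

use std::cmp::Reverse;
use std::collections::BinaryHeap;

// One (key, value) entry; each run below stands in for one thread's sorted MTBL.
type Entry = (Vec<u8>, Vec<u8>);

// Merge several sorted runs into one sorted stream, combining the values of
// duplicate keys with `merge`, the role the MTBL Merger plays in the diff.
fn merge_sorted_runs(
    runs: &[Vec<Entry>],
    merge: impl Fn(&[u8], &[Vec<u8>]) -> Vec<u8>,
) -> Vec<Entry> {
    // Min-heap over (next key, run index, position inside that run).
    let mut heap: BinaryHeap<Reverse<(Vec<u8>, usize, usize)>> = runs
        .iter()
        .enumerate()
        .filter(|(_, run)| !run.is_empty())
        .map(|(i, run)| Reverse((run[0].0.clone(), i, 0)))
        .collect();

    let mut out: Vec<Entry> = Vec::new();
    while let Some(Reverse((key, run, pos))) = heap.pop() {
        let value = runs[run][pos].1.clone();
        if let Some((next_key, _)) = runs[run].get(pos + 1) {
            heap.push(Reverse((next_key.clone(), run, pos + 1)));
        }
        let same_key = out.last().map_or(false, |(last, _)| *last == key);
        if same_key {
            // Duplicate key across runs: fold the two values together.
            let (_, last_val) = out.last_mut().unwrap();
            *last_val = merge(&key, &[last_val.clone(), value]);
        } else {
            // Keys leave the heap in ascending order, so the output stream is
            // sorted and LMDB can take its fast append path (`iter_mut` +
            // `append` in the diff) instead of random-order `put`s.
            out.push((key, value));
        }
    }
    out
}

fn main() {
    let concat = |_key: &[u8], values: &[Vec<u8>]| values.concat();
    let runs = vec![
        vec![(b"apple".to_vec(), b"1".to_vec()), (b"pear".to_vec(), b"2".to_vec())],
        vec![(b"apple".to_vec(), b"3".to_vec()), (b"zebra".to_vec(), b"4".to_vec())],
    ];
    for (key, value) in merge_sorted_runs(&runs, concat) {
        // In the real indexer this is the cursor append into the LMDB database.
        println!("append {:?} => {:?}", key.as_slice(), value.as_slice());
    }
}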

View File

@@ -83,7 +83,6 @@ async fn main() -> anyhow::Result<()> {
         .timestamp(stderrlog::Timestamp::Off)
         .init()?;

-    std::fs::create_dir_all(&opt.database)?;
     let env = EnvOpenOptions::new()
         .map_size(opt.database_size)
         .max_dbs(10)

View File

@@ -32,9 +32,9 @@ pub type DocumentId = u32;
 pub type Attribute = u32;
 pub type Position = u32;

-const WORDS_FST_KEY: &str = "words-fst";
-const HEADERS_KEY: &str = "headers";
-const DOCUMENTS_IDS_KEY: &str = "documents-ids";
+pub const WORDS_FST_KEY: &str = "words-fst";
+pub const HEADERS_KEY: &str = "headers";
+pub const DOCUMENTS_IDS_KEY: &str = "documents-ids";

 #[derive(Clone)]
 pub struct Index {
@@ -53,7 +53,7 @@ pub struct Index {
 impl Index {
     pub fn new(env: &heed::Env) -> anyhow::Result<Index> {
         Ok(Index {
-            main: env.create_poly_database(None)?,
+            main: env.create_poly_database(Some("main"))?,
             word_docids: env.create_database(Some("word-docids"))?,
             docid_word_positions: env.create_database(Some("docid-word-positions"))?,
             word_pair_proximity_docids: env.create_database(Some("word-pair-proximity-docids"))?,