MeiliSearch/crates/milli/examples/index.rs

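//! A small end-to-end indexing example: open (or create) a milli index at the
//! given path, optionally configure searchable and filterable fields, then
//! index the documents from a CSV, JSON, or JSONL dataset.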
use std::error::Error;
use std::fs::File;
use std::io::{BufRead, BufReader, Cursor, Seek};
use std::path::Path;

use heed::EnvOpenOptions;
use milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use milli::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
use milli::{Index, Object};

fn usage(error: &str, program_name: &str) -> String {
    format!(
        "{}. Usage: {} <PATH-TO-INDEX> <PATH-TO-DATASET> [searchable_fields] [filterable_fields]",
        error, program_name
    )
}

fn main() -> Result<(), Box<dyn Error>> {
    let mut args = std::env::args();
    let program_name = args.next().expect("No program name");
    let index_path =
        args.next().unwrap_or_else(|| panic!("{}", usage("Missing path to index.", &program_name)));
    let dataset_path = args
        .next()
        .unwrap_or_else(|| panic!("{}", usage("Missing path to source dataset.", &program_name)));
    // let primary_key = args.next().unwrap_or_else(|| "id".into());

    // Optional space-separated list of searchable fields, e.g. "title overview".
    let searchable_fields: Vec<String> = args
        .next()
        .map(|arg| arg.split_whitespace().map(ToString::to_string).collect())
        .unwrap_or_default();

    println!("{searchable_fields:?}");

    // Optional space-separated list of filterable fields, e.g. "release_date genres".
    let filterable_fields: Vec<String> = args
        .next()
        .map(|arg| arg.split_whitespace().map(ToString::to_string).collect())
        .unwrap_or_default();
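
    // The index lives in an LMDB environment; `map_size` is the maximum size
    // the memory-mapped database file may grow to.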
    let mut options = EnvOpenOptions::new();
    options.map_size(100 * 1024 * 1024 * 1024); // 100 GiB

    std::fs::create_dir_all(&index_path).unwrap();
    let index = Index::new(options, index_path).unwrap();
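
    // Every settings change and document addition below happens inside this single
    // write transaction; nothing is visible to readers until `wtxn.commit()`.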
    let mut wtxn = index.write_txn().unwrap();
    let config = IndexerConfig::default();
    let mut builder = Settings::new(&mut wtxn, &index, &config);
    // builder.set_primary_key(primary_key);

    let searchable_fields = searchable_fields.iter().map(|s| s.to_string()).collect();
    builder.set_searchable_fields(searchable_fields);
    let filterable_fields = filterable_fields.iter().map(|s| s.to_string()).collect();
    builder.set_filterable_fields(filterable_fields);
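
    // Apply the settings update; `|_| ()` ignores progress callbacks and
    // `|| false` means the update is never asked to abort.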
    builder.execute(|_| (), || false).unwrap();

    let config = IndexerConfig::default();
    let indexing_config = IndexDocumentsConfig::default();
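
    // The document indexing pipeline reuses the write transaction opened above;
    // the dataset format is inferred from the file extension by `documents_from`.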
    let builder =
        IndexDocuments::new(&mut wtxn, &index, &config, indexing_config, |_| (), || false).unwrap();
    let documents = documents_from(
        &dataset_path,
        Path::new(&dataset_path).extension().unwrap_or_default().to_str().unwrap_or_default(),
    );
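
    // `add_documents` reports user-facing document errors separately from
    // internal indexing errors, so both results are checked here.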
    let (builder, user_error) = builder.add_documents(documents).unwrap();
    user_error.unwrap();
    builder.execute().unwrap();
    wtxn.commit().unwrap();

    index.prepare_for_closing().wait();

    Ok(())
}
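
/// Reads the dataset at `filename` and returns a `DocumentsBatchReader` over it,
/// dispatching on the file type ("csv", "json", or "jsonl").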
fn documents_from(filename: &str, filetype: &str) -> DocumentsBatchReader<impl BufRead + Seek> {
    let reader = File::open(filename)
        .unwrap_or_else(|_| panic!("could not find the dataset in: {}", filename));
    let reader = BufReader::new(reader);
    let documents = match filetype {
        "csv" => documents_from_csv(reader).unwrap(),
        "json" => documents_from_json(reader).unwrap(),
        "jsonl" => documents_from_jsonl(reader).unwrap(),
        otherwise => panic!("invalid update format {:?}", otherwise),
    };
    DocumentsBatchReader::from_reader(Cursor::new(documents)).unwrap()
}
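
/// Converts newline-delimited JSON objects into milli's document batch format.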
fn documents_from_jsonl(reader: impl BufRead) -> milli::Result<Vec<u8>> {
    let mut documents = DocumentsBatchBuilder::new(Vec::new());
    for result in serde_json::Deserializer::from_reader(reader).into_iter::<Object>() {
        let object = result.unwrap();
        documents.append_json_object(&object)?;
    }
    documents.into_inner().map_err(Into::into)
}
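
/// Converts a top-level JSON array of objects into milli's document batch format.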
fn documents_from_json(reader: impl BufRead) -> milli::Result<Vec<u8>> {
    let mut documents = DocumentsBatchBuilder::new(Vec::new());
    documents.append_json_array(reader)?;
    documents.into_inner().map_err(Into::into)
}
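
/// Converts CSV records (the first row being the headers) into milli's document batch format.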
fn documents_from_csv(reader: impl BufRead) -> milli::Result<Vec<u8>> {
    let csv = csv::Reader::from_reader(reader);
    let mut documents = DocumentsBatchBuilder::new(Vec::new());
    documents.append_csv(csv)?;
    documents.into_inner().map_err(Into::into)
}