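// Builds a `pentium` index dump from a CSV file of products (id, title,
// description, image). Each run produces a randomly named dump made of a
// `.map`/`.idx` pair written by `MetadataBuilder` (the word index) and an
// `.sst` file written by RocksDB's `SstFileWriter` (the raw field values,
// keyed by `<id>-<field>`).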
#[macro_use] extern crate serde_derive;

use std::collections::BTreeMap;
use std::path::PathBuf;
use std::fs::File;
use std::io;

use csv::ReaderBuilder;
use pentium::{MetadataBuilder, DocIndex, Tokenizer, CommonWords};
use rocksdb::{SstFileWriter, EnvOptions, ColumnFamilyOptions};
use structopt::StructOpt;
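
// Expected invocation (assuming the compiled binary is named `csv-indexer`):
//
//     csv-indexer --stop-words stop-words.txt products.csv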
#[derive(Debug, StructOpt)]
pub struct CommandCsv {
    /// The stop word file, each word must be separated by a newline.
    #[structopt(long = "stop-words", parse(from_os_str))]
    pub stop_words: PathBuf,

    /// The csv file to index.
    #[structopt(parse(from_os_str))]
    pub products: PathBuf,
}
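
// One record of the products CSV. With the csv crate's default settings the
// file is expected to have a header row whose column names match these
// field names.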
#[derive(Debug, Deserialize)]
struct Product {
    id: u64,
    title: String,
    description: String,
    image: String,
}
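
// Everything needed for one indexing pass: the stop words to skip and the
// path of the CSV file to read.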
#[derive(Debug)]
pub struct CsvIndexer {
    common_words: CommonWords,
    products: PathBuf,
}
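
// The dump files are built in `index`; `from_command` only resolves the
// command-line arguments into a ready-to-run indexer, so a missing or
// unreadable stop-word file is reported before any indexing work starts.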
impl CsvIndexer {
    pub fn from_command(command: CommandCsv) -> io::Result<CsvIndexer> {
        let common_words = CommonWords::from_file(command.stop_words)?;
        let products = command.products;

        Ok(CsvIndexer { common_words, products })
    }
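
    // Reads the whole CSV and produces the dump: product ids, titles and
    // descriptions are tokenized into the word index, while the raw field
    // values are buffered and written into the SST file at the end.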
    pub fn index(self) {
        let random_name = PathBuf::from(moby_name_gen::random_name());
        let map_file = random_name.with_extension("map");
        let idx_file = random_name.with_extension("idx");
        let sst_file = random_name.with_extension("sst");
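
        // The raw field values go into an SST file that RocksDB can ingest
        // later; `SstFileWriter` wants the destination path as a string,
        // hence the `to_str` conversion.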
        let env_options = EnvOptions::new();
        let cf_options = ColumnFamilyOptions::new();
        let mut sst_file_writer = SstFileWriter::new(env_options, cf_options);
        let sst_file = sst_file.to_str().unwrap();
        sst_file_writer.open(&sst_file).expect("open the sst file");
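
        // The word index itself is written through `MetadataBuilder` into the
        // map/idx pair. The SST writer requires keys in sorted order, which is
        // presumably why the field values are buffered in a BTreeMap first.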
        let map = File::create(&map_file).unwrap();
        let indexes = File::create(&idx_file).unwrap();
        let mut builder = MetadataBuilder::new(map, indexes);
        let mut fields = BTreeMap::new();

        let mut rdr = ReaderBuilder::new().from_path(&self.products).expect("reading product file");
        let mut errors = 0;
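
        // Each product contributes words under three attributes (0 = id,
        // 1 = title, 2 = description) and four stored fields; lines that fail
        // to deserialize are counted and skipped.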
        for result in rdr.deserialize() {
            let product: Product = match result {
                Ok(product) => product,
                Err(e) => { eprintln!("{:?}", e); errors += 1; continue },
            };

            {
                let string_id = product.id.to_string();
                insert_document_words(&mut builder, product.id, 0, Some((0, string_id.as_str())));

                let key = format!("{}-id", product.id);
                let value = string_id;
                fields.insert(key, value);
            }

            {
                let title = Tokenizer::new(&product.title).filter(|&(_, w)| !self.common_words.contains(w));
                insert_document_words(&mut builder, product.id, 1, title);

                let key = format!("{}-title", product.id);
                let value = product.title;
                fields.insert(key, value);
            }

            {
                let description = Tokenizer::new(&product.description).filter(|&(_, w)| !self.common_words.contains(w));
                insert_document_words(&mut builder, product.id, 2, description);

                let key = format!("{}-description", product.id);
                let value = product.description;
                fields.insert(key, value);
            }

            {
                let key = format!("{}-image", product.id);
                let value = product.image;
                fields.insert(key, value);
            }
        }
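
        // Flush the buffered fields into the SST file in key order, then
        // finalize both the SST file and the word index.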
        for (key, value) in fields {
            sst_file_writer.put(key.as_bytes(), value.as_bytes()).unwrap();
        }
        let _sst_file_info = sst_file_writer.finish().unwrap();

        builder.finish().unwrap();

        println!("Found {} erroneous lines", errors);
        println!("Successfully created {:?} dump.", random_name);
    }
}
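
// Inserts every (position, word) pair of one attribute into the word index.
// Each word is stored lowercased, and additionally in a lowercased,
// unidecoded (ASCII-folded) form when that differs, so that accented and
// unaccented spellings both match.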
fn insert_document_words<'a, I, A, B>(builder: &mut MetadataBuilder<A, B>, doc_id: u64, attr: u8, words: I)
where A: io::Write,
      B: io::Write,
      I: IntoIterator<Item=(usize, &'a str)>,
{
    for (index, word) in words {
        let doc_index = DocIndex {
            document_id: doc_id,
            attribute: attr,
            attribute_index: index as u32,
        };
        // insert the exact (lowercased) representation
        let word_lower = word.to_lowercase();

        // and the unidecoded lowercased version
        let word_unidecoded = unidecode::unidecode(word).to_lowercase();
        if word_lower != word_unidecoded {
            builder.insert(word_unidecoded, doc_index);
        }

        builder.insert(word_lower, doc_index);
    }
}
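
// Parse the command-line arguments, build the indexer and run it.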
|
2018-10-09 18:23:35 +02:00
|
|
|
|
|
|
|
fn main() {
|
|
|
|
let command = CommandCsv::from_args();
|
|
|
|
let indexer = CsvIndexer::from_command(command).unwrap();
|
|
|
|
indexer.index();
|
|
|
|
}
|