mirror of
https://github.com/meilisearch/MeiliSearch
synced 2025-07-03 11:57:07 +02:00
feat: Make a CLI that handles compile-time features
This commit is contained in:
parent
a066c084fe
commit
474028145d
22 changed files with 811 additions and 774 deletions
20
src/common_words.rs
Normal file
20
src/common_words.rs
Normal file
|
@ -0,0 +1,20 @@
|
|||
use std::io::{self, BufReader, BufRead};
use std::collections::HashSet;
use std::path::Path;
use std::fs::File;

/// A set of words considered too common to be meaningful for
/// indexing (a.k.a. stop words).
pub type CommonWords = HashSet<String>;

/// Reads a stop-word file and returns the set of words it contains.
///
/// Words may be separated by any whitespace, including newlines.
///
/// # Errors
///
/// Returns an error if the file cannot be opened or if a line
/// cannot be read.
pub fn from_file<P>(path: P) -> io::Result<CommonWords>
where P: AsRef<Path>,
{
    let file = File::open(path)?;
    let file = BufReader::new(file);
    let mut set = HashSet::new();
    for line in file.lines() {
        // Propagate read errors instead of silently skipping them:
        // the signature already advertises `io::Result`.
        let line = line?;
        for word in line.split_whitespace() {
            set.insert(word.to_owned());
        }
    }
    Ok(set)
}
|
122
src/index/csv.rs
Normal file
122
src/index/csv.rs
Normal file
|
@ -0,0 +1,122 @@
|
|||
use std::collections::BTreeMap;
|
||||
use std::path::PathBuf;
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
|
||||
use rocksdb::{SstFileWriter, EnvOptions, ColumnFamilyOptions};
|
||||
use raptor::{MetadataBuilder, DocIndex, Tokenizer};
|
||||
use unidecode::unidecode;
|
||||
use csv::ReaderBuilder;
|
||||
|
||||
use crate::common_words::{self, CommonWords};
|
||||
use crate::index::csv_feature::CommandCsv;
|
||||
|
||||
/// A single record of the product csv file.
///
/// Field names are mapped to the csv column headers through the
/// `serde(rename)` attributes.
#[derive(Debug, Deserialize)]
struct Product {
    // Unique document identifier (csv column `_unit_id`).
    #[serde(rename = "_unit_id")]
    id: u64,
    // Product title, indexed as attribute 0.
    #[serde(rename = "product_title")]
    title: String,
    // Product image, stored but not tokenized.
    #[serde(rename = "product_image")]
    image: String,
    // Product description, indexed as attribute 1.
    #[serde(rename = "product_description")]
    description: String,
}
|
||||
|
||||
/// Indexes a product csv file into a raptor dump
/// (`.map`, `.idx` and `.sst` files).
#[derive(Debug)]
pub struct CsvIndexer {
    // Stop words that must not be indexed.
    common_words: CommonWords,
    // Path of the csv file to index.
    products: PathBuf,
}
|
||||
|
||||
impl CsvIndexer {
|
||||
pub fn from_command(command: CommandCsv) -> io::Result<CsvIndexer> {
|
||||
let common_words = common_words::from_file(command.stop_words)?;
|
||||
let products = command.products;
|
||||
|
||||
Ok(CsvIndexer { common_words, products })
|
||||
}
|
||||
|
||||
pub fn index(self) {
|
||||
let random_name = moby_name_gen::random_name();
|
||||
let map_file = format!("{}.map", random_name);
|
||||
let idx_file = format!("{}.idx", random_name);
|
||||
let sst_file = format!("{}.sst", random_name);
|
||||
|
||||
let env_options = EnvOptions::new();
|
||||
let cf_options = ColumnFamilyOptions::new();
|
||||
let mut sst_file_writer = SstFileWriter::new(env_options, cf_options);
|
||||
sst_file_writer.open(&sst_file).expect("open the sst file");
|
||||
|
||||
let map = File::create(&map_file).unwrap();
|
||||
let indexes = File::create(&idx_file).unwrap();
|
||||
let mut builder = MetadataBuilder::new(map, indexes);
|
||||
let mut fields = BTreeMap::new();
|
||||
|
||||
let mut rdr = ReaderBuilder::new().from_path(&self.products).expect("reading product file");
|
||||
let mut errors = 0;
|
||||
|
||||
for result in rdr.deserialize() {
|
||||
let product: Product = match result {
|
||||
Ok(product) => product,
|
||||
Err(e) => { eprintln!("{:?}", e); errors += 1; continue },
|
||||
};
|
||||
|
||||
let title = Tokenizer::new(&product.title);
|
||||
let title = title.iter().filter(|&(_, w)| !self.common_words.contains(w));
|
||||
insert_document_words(&mut builder, product.id, 0, title);
|
||||
|
||||
let description = Tokenizer::new(&product.description);
|
||||
let description = description.iter().filter(|&(_, w)| !self.common_words.contains(w));
|
||||
insert_document_words(&mut builder, product.id, 1, description);
|
||||
|
||||
// TODO simplify this by using functions and
|
||||
// use the MetadataBuilder internal BTreeMap ?
|
||||
let key = format!("{}-title", product.id);
|
||||
let value = product.title;
|
||||
fields.insert(key, value);
|
||||
|
||||
let key = format!("{}-description", product.id);
|
||||
let value = product.description;
|
||||
fields.insert(key, value);
|
||||
|
||||
let key = format!("{}-image", product.id);
|
||||
let value = product.image;
|
||||
fields.insert(key, value);
|
||||
}
|
||||
|
||||
for (key, value) in fields {
|
||||
sst_file_writer.put(key.as_bytes(), value.as_bytes()).unwrap();
|
||||
}
|
||||
let _sst_file_info = sst_file_writer.finish().unwrap();
|
||||
|
||||
builder.finish().unwrap();
|
||||
|
||||
println!("Found {} errorneous lines", errors);
|
||||
println!("Succesfully created {:?} dump.", random_name);
|
||||
}
|
||||
}
|
||||
|
||||
fn insert_document_words<'a, I, A, B>(builder: &mut MetadataBuilder<A, B>, doc_index: u64, attr: u8, words: I)
|
||||
where A: io::Write,
|
||||
B: io::Write,
|
||||
I: IntoIterator<Item=(usize, &'a str)>,
|
||||
{
|
||||
for (index, word) in words {
|
||||
let doc_index = DocIndex {
|
||||
document: doc_index,
|
||||
attribute: attr,
|
||||
attribute_index: index as u32,
|
||||
};
|
||||
// insert the exact representation
|
||||
let word_lower = word.to_lowercase();
|
||||
|
||||
// and the unidecoded lowercased version
|
||||
let word_unidecoded = unidecode(word).to_lowercase();
|
||||
if word_lower != word_unidecoded {
|
||||
builder.insert(word_unidecoded, doc_index);
|
||||
}
|
||||
|
||||
builder.insert(word_lower, doc_index);
|
||||
}
|
||||
}
|
112
src/index/json_lines.rs
Normal file
112
src/index/json_lines.rs
Normal file
|
@ -0,0 +1,112 @@
|
|||
use std::collections::BTreeMap;
|
||||
use std::path::PathBuf;
|
||||
use std::fs::File;
|
||||
use std::io::{self, BufReader, BufRead};
|
||||
|
||||
use serde_json::from_str;
|
||||
use rocksdb::{SstFileWriter, EnvOptions, ColumnFamilyOptions};
|
||||
use raptor::{MetadataBuilder, DocIndex, Tokenizer};
|
||||
use unidecode::unidecode;
|
||||
|
||||
use crate::common_words::{self, CommonWords};
|
||||
use crate::index::jsonlines_feature::CommandJsonLines;
|
||||
|
||||
/// A single record of the json lines product file.
#[derive(Debug, Deserialize)]
struct Product {
    // Product title, indexed as attribute 0.
    title: String,
    // Used as the document identifier when indexing.
    group_id: u64,
    // Free text of the product, indexed as the description attribute.
    ft: String,
}
|
||||
|
||||
/// Indexes a json lines product file into a raptor dump
/// (`.map`, `.idx` and `.sst` files).
#[derive(Debug)]
pub struct JsonLinesIndexer {
    // Stop words that must not be indexed.
    common_words: CommonWords,
    // Path of the json lines file to index.
    products: PathBuf,
}
|
||||
|
||||
impl JsonLinesIndexer {
|
||||
pub fn from_command(command: CommandJsonLines) -> io::Result<JsonLinesIndexer> {
|
||||
let common_words = common_words::from_file(command.stop_words)?;
|
||||
let products = command.products;
|
||||
|
||||
Ok(JsonLinesIndexer { common_words, products })
|
||||
}
|
||||
|
||||
pub fn index(self) {
|
||||
let data = File::open(&self.products).unwrap();
|
||||
let data = BufReader::new(data);
|
||||
|
||||
// TODO add a subcommand to pack these files in a tar.xxx archive
|
||||
let random_name = moby_name_gen::random_name();
|
||||
let map_file = format!("{}.map", random_name);
|
||||
let idx_file = format!("{}.idx", random_name);
|
||||
let sst_file = format!("{}.sst", random_name);
|
||||
|
||||
let env_options = EnvOptions::new();
|
||||
let cf_options = ColumnFamilyOptions::new();
|
||||
let mut sst_file_writer = SstFileWriter::new(env_options, cf_options);
|
||||
sst_file_writer.open(&sst_file).expect("open the sst file");
|
||||
|
||||
let map = File::create(&map_file).unwrap();
|
||||
let indexes = File::create(&idx_file).unwrap();
|
||||
let mut builder = MetadataBuilder::new(map, indexes);
|
||||
let mut fields = BTreeMap::new();
|
||||
|
||||
for line in data.lines() {
|
||||
let line = line.unwrap();
|
||||
|
||||
let product: Product = from_str(&line).unwrap();
|
||||
|
||||
let title = Tokenizer::new(&product.title);
|
||||
let title = title.iter().filter(|&(_, w)| !self.common_words.contains(w));
|
||||
insert_document_words(&mut builder, product.group_id, 0, title);
|
||||
|
||||
let description = Tokenizer::new(&product.ft);
|
||||
let description = description.iter().filter(|&(_, w)| !self.common_words.contains(w));
|
||||
insert_document_words(&mut builder, product.group_id, 1, description);
|
||||
|
||||
// TODO simplify this by using functions and
|
||||
// use the MetadataBuilder internal BTreeMap ?
|
||||
let key = format!("{}-title", product.group_id);
|
||||
let value = product.title;
|
||||
fields.insert(key, value);
|
||||
|
||||
let key = format!("{}-description", product.group_id);
|
||||
let value = product.ft;
|
||||
fields.insert(key, value);
|
||||
}
|
||||
|
||||
for (key, value) in fields {
|
||||
sst_file_writer.put(key.as_bytes(), value.as_bytes()).unwrap();
|
||||
}
|
||||
let _sst_file_info = sst_file_writer.finish().unwrap();
|
||||
|
||||
builder.finish().unwrap();
|
||||
|
||||
println!("Succesfully created {:?} dump.", random_name);
|
||||
}
|
||||
}
|
||||
|
||||
fn insert_document_words<'a, I, A, B>(builder: &mut MetadataBuilder<A, B>, doc_index: u64, attr: u8, words: I)
|
||||
where A: io::Write,
|
||||
B: io::Write,
|
||||
I: IntoIterator<Item=(usize, &'a str)>,
|
||||
{
|
||||
for (index, word) in words {
|
||||
let doc_index = DocIndex {
|
||||
document: doc_index,
|
||||
attribute: attr,
|
||||
attribute_index: index as u32,
|
||||
};
|
||||
// insert the exact representation
|
||||
let word_lower = word.to_lowercase();
|
||||
|
||||
// and the unidecoded lowercased version
|
||||
let word_unidecoded = unidecode(word).to_lowercase();
|
||||
if word_lower != word_unidecoded {
|
||||
builder.insert(word_unidecoded, doc_index);
|
||||
}
|
||||
|
||||
builder.insert(word_lower, doc_index);
|
||||
}
|
||||
}
|
71
src/index/mod.rs
Normal file
71
src/index/mod.rs
Normal file
|
@ -0,0 +1,71 @@
|
|||
#[cfg(feature = "index-csv")]
|
||||
mod csv;
|
||||
|
||||
#[cfg(feature = "index-jsonlines")]
|
||||
mod json_lines;
|
||||
|
||||
use structopt::StructOpt;
|
||||
|
||||
// The `index` subcommands, one variant per supported input format.
// Each variant is compiled in only when the matching cargo feature is
// enabled. (Plain `//` comments on purpose: structopt turns `///` doc
// comments into CLI help text.)
#[derive(Debug, StructOpt)]
pub enum CommandIndex {

    #[cfg(feature = "index-jsonlines")]
    /// Index files encoded as json lines.
    #[structopt(name = "json-lines")]
    JsonLines(self::jsonlines_feature::CommandJsonLines),

    #[cfg(feature = "index-csv")]
    /// Index files encoded as csv.
    #[structopt(name = "csv")]
    Csv(self::csv_feature::CommandCsv),
}
|
||||
|
||||
#[cfg(feature = "index-jsonlines")]
pub mod jsonlines_feature {
    use std::error;
    use std::path::PathBuf;
    use structopt::StructOpt;

    // Command-line arguments of the `index json-lines` subcommand.
    // (`///` comments below are the CLI help text.)
    #[derive(Debug, StructOpt)]
    pub struct CommandJsonLines {
        /// The stop word file, each word must be separated by a newline.
        #[structopt(long = "stop-words", parse(from_os_str))]
        pub stop_words: PathBuf,

        /// The json lines file to index.
        #[structopt(parse(from_os_str))]
        pub products: PathBuf,
    }

    // Builds the json lines indexer from the command and runs it
    // to completion.
    pub fn json_lines(command: CommandJsonLines) -> Result<(), Box<dyn error::Error>> {
        use super::json_lines::JsonLinesIndexer;

        let indexer = JsonLinesIndexer::from_command(command)?;
        Ok(indexer.index())
    }
}
|
||||
|
||||
#[cfg(feature = "index-csv")]
pub mod csv_feature {
    use std::error;
    use std::path::PathBuf;
    use structopt::StructOpt;

    // Command-line arguments of the `index csv` subcommand.
    // (`///` comments below are the CLI help text.)
    #[derive(Debug, StructOpt)]
    pub struct CommandCsv {
        /// The stop word file, each word must be separated by a newline.
        #[structopt(long = "stop-words", parse(from_os_str))]
        pub stop_words: PathBuf,

        /// The csv file to index.
        #[structopt(parse(from_os_str))]
        pub products: PathBuf,
    }

    // Builds the csv indexer from the command and runs it to completion.
    pub fn csv(command: CommandCsv) -> Result<(), Box<dyn error::Error>> {
        use super::csv::CsvIndexer;

        let indexer = CsvIndexer::from_command(command)?;
        Ok(indexer.index())
    }
}
|
50
src/main.rs
Normal file
50
src/main.rs
Normal file
|
@ -0,0 +1,50 @@
|
|||
#[macro_use] extern crate serde_derive;
|
||||
|
||||
#[cfg(feature = "index")]
|
||||
mod index;
|
||||
#[cfg(feature = "serve")]
|
||||
mod serve;
|
||||
mod common_words;
|
||||
|
||||
use structopt::StructOpt;
|
||||
|
||||
// Top-level commands of the raptor-cli binary. Each variant (and the
// whole subcommand tree behind it) is compiled in only when the
// matching cargo feature is enabled. (Plain `//` comments on purpose:
// structopt turns `///` doc comments into CLI help text.)
#[derive(Debug, StructOpt)]
#[structopt(name = "raptor-cli", about = "A command line to do raptor operations.")]
enum Commands {
    #[cfg(feature = "index")]
    /// Index files of different format.
    #[structopt(name = "index")]
    Index(index::CommandIndex),

    #[cfg(feature = "serve")]
    /// Serve indexes.
    #[structopt(name = "serve")]
    Serve(serve::CommandServe),
}
|
||||
|
||||
/// Parses the command-line arguments and dispatches to the subcommand
/// handler selected at compile time through cargo features, printing
/// any returned error on stderr.
fn main() {
    let ret = match Commands::from_args() {

        #[cfg(feature = "index")]
        Commands::Index(i) => match i {

            #[cfg(feature = "index-jsonlines")]
            index::CommandIndex::JsonLines(command) => index::jsonlines_feature::json_lines(command),

            #[cfg(feature = "index-csv")]
            index::CommandIndex::Csv(command) => index::csv_feature::csv(command),
        },

        #[cfg(feature = "serve")]
        Commands::Serve(s) => match s {

            #[cfg(feature = "serve-http")]
            serve::CommandServe::Http(command) => serve::http_feature::http(command),

            #[cfg(feature = "serve-console")]
            serve::CommandServe::Console(command) => serve::console_feature::console(command),
        },
    };

    // Every handler returns a boxed error; report it and exit normally.
    if let Err(e) = ret { eprintln!("{}", e) }
}
|
72
src/serve/console.rs
Normal file
72
src/serve/console.rs
Normal file
|
@ -0,0 +1,72 @@
|
|||
use std::str::from_utf8_unchecked;
|
||||
use std::io::{self, Write};
|
||||
|
||||
use fst::Streamer;
|
||||
use elapsed::measure_time;
|
||||
use rocksdb::{DB, DBOptions, IngestExternalFileOptions};
|
||||
use raptor::{automaton, Metadata, RankedStream};
|
||||
|
||||
use crate::serve::console_feature::CommandConsole;
|
||||
use crate::common_words::{self, CommonWords};
|
||||
|
||||
/// An interactive console that reads queries from stdin and prints
/// the matching documents on stdout.
pub struct ConsoleSearch {
    // Stop words stripped from incoming queries.
    common_words: CommonWords,
    // The index metadata, opened from the `.map` and `.idx` files.
    metadata: Metadata,
    // Read-only rocksdb storage holding the document fields.
    db: DB,
}
|
||||
|
||||
impl ConsoleSearch {
    /// Builds the console from the parsed `console` subcommand
    /// arguments: loads the stop words, opens the index metadata and
    /// prepares a read-only rocksdb storage.
    pub fn from_command(command: CommandConsole) -> io::Result<ConsoleSearch> {
        let common_words = common_words::from_file(command.stop_words)?;

        // The meta name is the base path of the three dump files.
        let meta_name = command.meta_name.display();
        let map_file = format!("{}.map", meta_name);
        let idx_file = format!("{}.idx", meta_name);
        let sst_file = format!("{}.sst", meta_name);
        // NOTE(review): unsafe constructor — presumably memory-maps the
        // files; confirm the safety invariants documented by raptor.
        let metadata = unsafe { Metadata::from_paths(map_file, idx_file).unwrap() };

        let rocksdb = "rocksdb/storage";
        // Ingest the sst dump with a read-write handle, drop it, then
        // reopen the database read-only for querying.
        let db = DB::open_default(rocksdb).unwrap();
        db.ingest_external_file(&IngestExternalFileOptions::new(), &[&sst_file]).unwrap();
        drop(db);
        let db = DB::open_for_read_only(DBOptions::default(), rocksdb, false).unwrap();

        Ok(ConsoleSearch { common_words, metadata, db })
    }

    /// Prompts for queries in a loop; an empty query exits the loop.
    /// Each search is timed and the elapsed time printed.
    pub fn serve(self) {
        loop {
            print!("Searching for: ");
            // `print!` does not flush, so force the prompt out.
            io::stdout().flush().unwrap();

            let mut query = String::new();
            io::stdin().read_line(&mut query).unwrap();
            // Queries are matched lowercased.
            let query = query.trim().to_lowercase();

            if query.is_empty() { break }

            let (elapsed, _) = measure_time(|| search(&self.metadata, &self.db, &self.common_words, &query));
            println!("Finished in {}", elapsed);
        }
    }
}
|
||||
|
||||
fn search(metadata: &Metadata, database: &DB, common_words: &CommonWords, query: &str) {
|
||||
let mut automatons = Vec::new();
|
||||
for query in query.split_whitespace().filter(|q| !common_words.contains(*q)) {
|
||||
let lev = automaton::build(query);
|
||||
automatons.push(lev);
|
||||
}
|
||||
|
||||
let mut stream = RankedStream::new(&metadata, automatons, 20);
|
||||
while let Some(document) = stream.next() {
|
||||
print!("{:?}", document.document_id);
|
||||
|
||||
let title_key = format!("{}-title", document.document_id);
|
||||
let title = database.get(title_key.as_bytes()).unwrap().unwrap();
|
||||
let title = unsafe { from_utf8_unchecked(&title) };
|
||||
print!(" {:?}", title);
|
||||
|
||||
println!();
|
||||
}
|
||||
}
|
120
src/serve/http.rs
Normal file
120
src/serve/http.rs
Normal file
|
@ -0,0 +1,120 @@
|
|||
use std::str::from_utf8_unchecked;
|
||||
use std::io::{self, Write};
|
||||
use std::net::SocketAddr;
|
||||
use std::error::Error;
|
||||
use std::sync::Arc;
|
||||
|
||||
use rocksdb::{DB, DBOptions, IngestExternalFileOptions};
|
||||
use raptor::{automaton, Metadata};
|
||||
use raptor::rank::RankedStream;
|
||||
use fst::Streamer;
|
||||
use warp::Filter;
|
||||
|
||||
use crate::serve::http_feature::CommandHttp;
|
||||
use crate::common_words::{self, CommonWords};
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
/// The json representation of one matching document, borrowing its
/// text fields from the database values.
#[derive(Debug, Serialize)]
struct Document<'a> {
    // The matched document identifier.
    id: u64,
    title: &'a str,
    description: &'a str,
    image: &'a str,
}
|
||||
|
||||
/// The http search server, answering `GET /search?q=…` through warp.
///
/// Shared state is wrapped in `Arc` so the request handler closure
/// can own cheap clones of it.
pub struct HttpServer {
    // Address and port the server binds to.
    listen_addr: SocketAddr,
    // Stop words stripped from incoming queries.
    common_words: Arc<CommonWords>,
    // The index metadata, opened from the `.map` and `.idx` files.
    metadata: Arc<Metadata>,
    // Read-only rocksdb storage holding the document fields.
    db: Arc<DB>,
}
|
||||
|
||||
// The query-string parameters of `GET /search` (e.g. `?q=hello`).
#[derive(Debug, Deserialize)]
struct SearchQuery { q: String }
|
||||
|
||||
impl HttpServer {
    /// Builds the server from the parsed `http` subcommand arguments:
    /// loads the stop words, opens the index metadata and prepares a
    /// read-only rocksdb storage.
    pub fn from_command(command: CommandHttp) -> io::Result<HttpServer> {
        let common_words = common_words::from_file(command.stop_words)?;

        // The meta name is the base path of the three dump files.
        let meta_name = command.meta_name.display();
        let map_file = format!("{}.map", meta_name);
        let idx_file = format!("{}.idx", meta_name);
        let sst_file = format!("{}.sst", meta_name);
        // NOTE(review): unsafe constructor — presumably memory-maps the
        // files; confirm the safety invariants documented by raptor.
        let metadata = unsafe { Metadata::from_paths(map_file, idx_file).unwrap() };

        let rocksdb = "rocksdb/storage";
        // Ingest the sst dump with a read-write handle, drop it, then
        // reopen the database read-only for serving.
        let db = DB::open_default(rocksdb).unwrap();
        db.ingest_external_file(&IngestExternalFileOptions::new(), &[&sst_file]).unwrap();
        drop(db);
        let db = DB::open_for_read_only(DBOptions::default(), rocksdb, false).unwrap();

        Ok(HttpServer {
            listen_addr: command.listen_addr,
            common_words: Arc::new(common_words),
            metadata: Arc::new(metadata),
            db: Arc::new(db),
        })
    }

    /// Consumes the server and blocks forever, serving
    /// `GET /search?q=…` as a json array of matching documents.
    pub fn serve(self) {
        let HttpServer { listen_addr, common_words, metadata, db } = self;

        let routes = warp::path("search")
            .and(warp::query())
            .map(move |query: SearchQuery| {
                // NOTE(review): unwrap panics the request on a search
                // error — confirm this is acceptable for this server.
                let body = search(metadata.clone(), db.clone(), common_words.clone(), &query.q).unwrap();
                body
            })
            .with(warp::reply::with::header("Content-Type", "application/json"))
            .with(warp::reply::with::header("Access-Control-Allow-Origin", "*"));

        warp::serve(routes).run(listen_addr)
    }
}
|
||||
|
||||
fn search<M, D, C>(metadata: M, database: D, common_words: C, query: &str) -> Result<String, Box<Error>>
|
||||
where M: AsRef<Metadata>,
|
||||
D: AsRef<DB>,
|
||||
C: AsRef<CommonWords>,
|
||||
{
|
||||
let mut automatons = Vec::new();
|
||||
for query in query.split_whitespace().map(str::to_lowercase) {
|
||||
if common_words.as_ref().contains(&query) { continue }
|
||||
let lev = automaton::build(&query);
|
||||
automatons.push(lev);
|
||||
}
|
||||
|
||||
let mut stream = RankedStream::new(metadata.as_ref(), automatons, 20);
|
||||
let mut body = Vec::new();
|
||||
write!(&mut body, "[")?;
|
||||
|
||||
let mut first = true;
|
||||
while let Some(document) = stream.next() {
|
||||
let title_key = format!("{}-title", document.document_id);
|
||||
let title = database.as_ref().get(title_key.as_bytes()).unwrap().unwrap();
|
||||
let title = unsafe { from_utf8_unchecked(&title) };
|
||||
|
||||
let description_key = format!("{}-description", document.document_id);
|
||||
let description = database.as_ref().get(description_key.as_bytes()).unwrap().unwrap();
|
||||
let description = unsafe { from_utf8_unchecked(&description) };
|
||||
|
||||
let image_key = format!("{}-image", document.document_id);
|
||||
let image = database.as_ref().get(image_key.as_bytes()).unwrap().unwrap();
|
||||
let image = unsafe { from_utf8_unchecked(&image) };
|
||||
|
||||
let document = Document {
|
||||
id: document.document_id,
|
||||
title: title,
|
||||
description: description,
|
||||
image: image,
|
||||
};
|
||||
|
||||
if !first { write!(&mut body, ",")? }
|
||||
serde_json::to_writer(&mut body, &document)?;
|
||||
|
||||
first = false;
|
||||
}
|
||||
|
||||
write!(&mut body, "]")?;
|
||||
|
||||
Ok(String::from_utf8(body)?)
|
||||
}
|
76
src/serve/mod.rs
Normal file
76
src/serve/mod.rs
Normal file
|
@ -0,0 +1,76 @@
|
|||
#[cfg(feature = "serve-http")]
|
||||
mod http;
|
||||
|
||||
#[cfg(feature = "serve-console")]
|
||||
mod console;
|
||||
|
||||
use structopt::StructOpt;
|
||||
|
||||
// The `serve` subcommands, one variant per serving front-end. Each
// variant is compiled in only when the matching cargo feature is
// enabled. (Plain `//` comments on purpose: structopt turns `///`
// doc comments into CLI help text.)
#[derive(Debug, StructOpt)]
pub enum CommandServe {

    #[cfg(feature = "serve-http")]
    /// Serve an index under an http protocol.
    #[structopt(name = "http")]
    Http(self::http_feature::CommandHttp),

    #[cfg(feature = "serve-console")]
    /// Serve an index under a simple console.
    #[structopt(name = "console")]
    Console(self::console_feature::CommandConsole),
}
|
||||
|
||||
#[cfg(feature = "serve-http")]
pub mod http_feature {
    use std::error;
    use std::path::PathBuf;
    use std::net::SocketAddr;
    use structopt::StructOpt;

    // Command-line arguments of the `serve http` subcommand.
    // (`///` comments below are the CLI help text.)
    #[derive(Debug, StructOpt)]
    pub struct CommandHttp {
        /// The address and port to bind the server to.
        #[structopt(short = "l", default_value = "127.0.0.1:3030")]
        pub listen_addr: SocketAddr,

        /// The stop word file, each word must be separated by a newline.
        #[structopt(long = "stop-words", parse(from_os_str))]
        pub stop_words: PathBuf,

        /// Meta file name (e.g. relaxed-colden).
        #[structopt(parse(from_os_str))]
        pub meta_name: PathBuf,
    }

    // Builds the http server from the command and serves forever.
    pub fn http(command: CommandHttp) -> Result<(), Box<dyn error::Error>> {
        use super::http::HttpServer;

        let server = HttpServer::from_command(command)?;
        Ok(server.serve())
    }
}
|
||||
|
||||
#[cfg(feature = "serve-console")]
pub mod console_feature {
    use std::error;
    use std::path::PathBuf;
    use structopt::StructOpt;

    // Command-line arguments of the `serve console` subcommand.
    // (`///` comments below are the CLI help text.)
    #[derive(Debug, StructOpt)]
    pub struct CommandConsole {
        /// The stop word file, each word must be separated by a newline.
        #[structopt(long = "stop-words", parse(from_os_str))]
        pub stop_words: PathBuf,

        /// Meta file name (e.g. relaxed-colden).
        #[structopt(parse(from_os_str))]
        pub meta_name: PathBuf,
    }

    // Builds the interactive console from the command and runs its
    // prompt loop until an empty query is entered.
    pub fn console(command: CommandConsole) -> Result<(), Box<dyn error::Error>> {
        use super::console::ConsoleSearch;

        let search = ConsoleSearch::from_command(command)?;
        Ok(search.serve())
    }
}
|
Loading…
Add table
Add a link
Reference in a new issue