Merge pull request #71 from meilisearch/cleanup-useless-build-rs

Cleanup useless custom build file
Clément Renault 2021-01-15 15:45:47 +01:00 committed by GitHub
commit 2fa5808e3f
4 changed files with 1 addition and 349075 deletions


@@ -1,27 +0,0 @@
-use std::env;
-use std::fs::File;
-use std::io::{BufRead, BufReader};
-use std::path::PathBuf;
-
-use fst::SetBuilder;
-
-fn main() {
-    let chinese_words_txt = "chinese-words.txt";
-    let out_dir = env::var("OUT_DIR").unwrap();
-    let chinese_words_fst = PathBuf::from(out_dir).join("chinese-words.fst");
-
-    // Tell Cargo to rerun this build script whenever the given file changes.
-    println!("cargo:rerun-if-changed={}", chinese_words_txt);
-
-    let chinese_words_txt = File::open(chinese_words_txt).map(BufReader::new).unwrap();
-    let chinese_words_fst = File::create(chinese_words_fst).unwrap();
-
-    let mut builder = SetBuilder::new(chinese_words_fst).unwrap();
-    for result in chinese_words_txt.lines() {
-        let line = result.unwrap();
-        if let Some(s) = line.split(' ').next() {
-            builder.insert(s).unwrap();
-        }
-    }
-    builder.finish().unwrap();
-}
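For context, a build script like this is normally paired with runtime code that embeds the generated artifact from OUT_DIR. A minimal sketch of what that consuming side typically looks like — the include_bytes!/env!("OUT_DIR") pattern is standard Cargo practice, but the names and the helper function here are assumptions, not code taken from this diff:

use fst::Set;

// Embed the FST that the build script above wrote into OUT_DIR.
// (Hypothetical consumer: this diff does not show the actual call site.)
static CHINESE_WORDS_FST: &[u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/chinese-words.fst"));

fn chinese_words() -> Set<&'static [u8]> {
    // Set::new only validates the embedded bytes, so this is cheap at startup.
    Set::new(CHINESE_WORDS_FST).expect("invalid chinese-words.fst")
}

With the build script gone, no such embedded set is produced anymore; relatedly, a use fst::Set; import is removed in the last file of this diff.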

chinese-words.txt: file deleted; diff suppressed because it is too large.

http-ui/Cargo.lock (generated)

@@ -1041,7 +1041,7 @@ checksum = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
 [[package]]
 name = "meilisearch-tokenizer"
 version = "0.1.1"
-source = "git+https://github.com/meilisearch/Tokenizer.git?branch=token-eq#daeb4a4ac91081f1c592e3ebb3ec5d8dcb4e6976"
+source = "git+https://github.com/meilisearch/Tokenizer.git?branch=main#147b6154b1b34cb8f5da2df6a416b7da191bc850"
 dependencies = [
  "character_converter",
  "cow-utils",


@@ -28,7 +28,6 @@ use tokio::sync::broadcast;
 use warp::filters::ws::Message;
 use warp::{Filter, http::Response};
 use meilisearch_tokenizer::{Analyzer, AnalyzerConfig};
-use fst::Set;
 use milli::update::UpdateIndexingStep::*;
 use milli::update::{UpdateBuilder, IndexDocumentsMethod, UpdateFormat};