Cargo fmt pass

Clément Renault 2019-10-18 13:05:28 +02:00
parent 47d777c8f7
commit ca26a0f2e4
No known key found for this signature in database
GPG key ID: 92ADA4E935E71FA4
48 changed files with 1599 additions and 979 deletions
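The hunks below are purely mechanical: rustfmt's default style sorts `use` statements and the names inside their braces, moves `where` clauses onto their own line with one bound per line, pulls the opening brace up onto the signature line when there is no `where` clause, adds spaces around `=` in associated-type bounds such as `IntoIterator<Item = T>`, and breaks long struct literals, call chains, and macro invocations across lines. As a minimal, hypothetical sketch of the resulting shape (the names here are invented for illustration and do not appear in this commit):

// Hypothetical sketch, not code from this commit: a small function written in
// the post-fmt style that the diffs below converge on.
use std::collections::BTreeMap;

#[derive(Debug)]
struct AdditionStatus {
    number: usize,
}

fn collect_items<T, I>(store: &mut BTreeMap<usize, T>, items: I) -> AdditionStatus
where
    T: Clone,
    I: IntoIterator<Item = T>,
{
    let mut number = 0;
    for (position, item) in items.into_iter().enumerate() {
        store.entry(position).or_insert(item);
        number += 1;
    }
    AdditionStatus { number }
}

fn main() {
    let mut store = BTreeMap::new();
    let status = collect_items(&mut store, vec!["a", "b", "c"]);
    println!("collected {:?}", status);
}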

View file

@ -1,13 +1,12 @@
use zlmdb::Result as ZResult;
use crate::update::{Update, next_update_id};
use crate::store;
use crate::update::{next_update_id, Update};
use zlmdb::Result as ZResult;
pub fn apply_customs_update(
writer: &mut zlmdb::RwTxn,
main_store: store::Main,
customs: &[u8],
) -> ZResult<()>
{
) -> ZResult<()> {
main_store.put_customs(writer, customs)
}
@ -16,8 +15,7 @@ pub fn push_customs_update(
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
customs: Vec<u8>,
) -> ZResult<u64>
{
) -> ZResult<u64> {
let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;
let update = Update::Customs(customs);

View file

@ -1,14 +1,14 @@
use std::collections::{HashMap, HashSet};
use fst::{SetBuilder, set::OpBuilder};
use sdset::{SetOperation, duo::Union};
use fst::{set::OpBuilder, SetBuilder};
use sdset::{duo::Union, SetOperation};
use serde::Serialize;
use crate::raw_indexer::RawIndexer;
use crate::serde::{extract_document_id, Serializer, RamDocumentStore};
use crate::serde::{extract_document_id, RamDocumentStore, Serializer};
use crate::store;
use crate::update::{Update, next_update_id, apply_documents_deletion};
use crate::{MResult, Error, RankedMap};
use crate::update::{apply_documents_deletion, next_update_id, Update};
use crate::{Error, MResult, RankedMap};
pub struct DocumentsAddition<D> {
updates_store: store::Updates,
@ -22,8 +22,7 @@ impl<D> DocumentsAddition<D> {
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
updates_notifier: crossbeam_channel::Sender<()>,
) -> DocumentsAddition<D>
{
) -> DocumentsAddition<D> {
DocumentsAddition {
updates_store,
updates_results_store,
@ -37,7 +36,8 @@ impl<D> DocumentsAddition<D> {
}
pub fn finalize(self, writer: &mut zlmdb::RwTxn) -> MResult<u64>
where D: serde::Serialize
where
D: serde::Serialize,
{
let _ = self.updates_notifier.send(());
let update_id = push_documents_addition(
@ -51,7 +51,7 @@ impl<D> DocumentsAddition<D> {
}
impl<D> Extend<D> for DocumentsAddition<D> {
fn extend<T: IntoIterator<Item=D>>(&mut self, iter: T) {
fn extend<T: IntoIterator<Item = D>>(&mut self, iter: T) {
self.documents.extend(iter)
}
}
@ -61,8 +61,7 @@ pub fn push_documents_addition<D: serde::Serialize>(
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
addition: Vec<D>,
) -> MResult<u64>
{
) -> MResult<u64> {
let mut values = Vec::with_capacity(addition.len());
for add in addition {
let vec = serde_json::to_vec(&add)?;
@ -87,8 +86,7 @@ pub fn apply_documents_addition(
docs_words_store: store::DocsWords,
mut ranked_map: RankedMap,
addition: Vec<serde_json::Value>,
) -> MResult<()>
{
) -> MResult<()> {
let mut document_ids = HashSet::new();
let mut document_store = RamDocumentStore::new();
let mut document_fields_counts = HashMap::new();
@ -182,7 +180,7 @@ pub fn apply_documents_addition(
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap()
},
}
None => delta_words,
};

View file

@ -1,13 +1,13 @@
use std::collections::{HashMap, HashSet, BTreeSet};
use std::collections::{BTreeSet, HashMap, HashSet};
use fst::{SetBuilder, Streamer};
use meilidb_schema::Schema;
use sdset::{SetBuf, SetOperation, duo::DifferenceByKey};
use sdset::{duo::DifferenceByKey, SetBuf, SetOperation};
use crate::{DocumentId, RankedMap, MResult, Error};
use crate::serde::extract_document_id;
use crate::update::{Update, next_update_id};
use crate::store;
use crate::update::{next_update_id, Update};
use crate::{DocumentId, Error, MResult, RankedMap};
pub struct DocumentsDeletion {
updates_store: store::Updates,
@ -21,8 +21,7 @@ impl DocumentsDeletion {
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
updates_notifier: crossbeam_channel::Sender<()>,
) -> DocumentsDeletion
{
) -> DocumentsDeletion {
DocumentsDeletion {
updates_store,
updates_results_store,
@ -36,7 +35,8 @@ impl DocumentsDeletion {
}
pub fn delete_document<D>(&mut self, schema: &Schema, document: D) -> MResult<()>
where D: serde::Serialize,
where
D: serde::Serialize,
{
let identifier = schema.identifier_name();
let document_id = match extract_document_id(identifier, &document)? {
@ -62,7 +62,7 @@ impl DocumentsDeletion {
}
impl Extend<DocumentId> for DocumentsDeletion {
fn extend<T: IntoIterator<Item=DocumentId>>(&mut self, iter: T) {
fn extend<T: IntoIterator<Item = DocumentId>>(&mut self, iter: T) {
self.documents.extend(iter)
}
}
@ -72,8 +72,7 @@ pub fn push_documents_deletion(
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
deletion: Vec<DocumentId>,
) -> MResult<u64>
{
) -> MResult<u64> {
let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;
let update = Update::DocumentsDeletion(deletion);
@ -91,8 +90,7 @@ pub fn apply_documents_deletion(
docs_words_store: store::DocsWords,
mut ranked_map: RankedMap,
deletion: Vec<DocumentId>,
) -> MResult<()>
{
) -> MResult<()> {
let idset = SetBuf::from_dirty(deletion);
let schema = match main_store.schema(writer)? {
@ -101,10 +99,17 @@ pub fn apply_documents_deletion(
};
// collect the ranked attributes according to the schema
let ranked_attrs: Vec<_> = schema.iter()
.filter_map(|(_, attr, prop)| {
if prop.is_ranked() { Some(attr) } else { None }
})
let ranked_attrs: Vec<_> = schema
.iter()
.filter_map(
|(_, attr, prop)| {
if prop.is_ranked() {
Some(attr)
} else {
None
}
},
)
.collect();
let mut words_document_ids = HashMap::new();
@ -118,7 +123,10 @@ pub fn apply_documents_deletion(
let mut stream = words.stream();
while let Some(word) = stream.next() {
let word = word.to_vec();
words_document_ids.entry(word).or_insert_with(Vec::new).push(id);
words_document_ids
.entry(word)
.or_insert_with(Vec::new)
.push(id);
}
}
}
@ -167,7 +175,7 @@ pub fn apply_documents_deletion(
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap()
},
}
None => fst::Set::default(),
};

View file

@ -6,21 +6,21 @@ mod synonyms_addition;
mod synonyms_deletion;
pub use self::customs_update::{apply_customs_update, push_customs_update};
pub use self::documents_addition::{DocumentsAddition, apply_documents_addition};
pub use self::documents_deletion::{DocumentsDeletion, apply_documents_deletion};
pub use self::documents_addition::{apply_documents_addition, DocumentsAddition};
pub use self::documents_deletion::{apply_documents_deletion, DocumentsDeletion};
pub use self::schema_update::{apply_schema_update, push_schema_update};
pub use self::synonyms_addition::{SynonymsAddition, apply_synonyms_addition};
pub use self::synonyms_deletion::{SynonymsDeletion, apply_synonyms_deletion};
pub use self::synonyms_addition::{apply_synonyms_addition, SynonymsAddition};
pub use self::synonyms_deletion::{apply_synonyms_deletion, SynonymsDeletion};
use std::time::{Duration, Instant};
use std::collections::BTreeMap;
use std::cmp;
use std::collections::BTreeMap;
use std::time::{Duration, Instant};
use log::debug;
use serde::{Serialize, Deserialize};
use serde::{Deserialize, Serialize};
use zlmdb::Result as ZResult;
use crate::{store, MResult, DocumentId, RankedMap};
use crate::{store, DocumentId, MResult, RankedMap};
use meilidb_schema::Schema;
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -68,8 +68,7 @@ pub fn update_status(
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
update_id: u64,
) -> MResult<UpdateStatus>
{
) -> MResult<UpdateStatus> {
match updates_results_store.update_result(reader, update_id)? {
Some(result) => Ok(UpdateStatus::Processed(result)),
None => {
@ -86,8 +85,7 @@ pub fn next_update_id(
writer: &mut zlmdb::RwTxn,
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
) -> ZResult<u64>
{
) -> ZResult<u64> {
let last_update_id = updates_store.last_update_id(writer)?;
let last_update_id = last_update_id.map(|(n, _)| n);
@ -100,7 +98,10 @@ pub fn next_update_id(
Ok(new_update_id)
}
pub fn update_task(writer: &mut zlmdb::RwTxn, index: store::Index) -> MResult<Option<UpdateResult>> {
pub fn update_task(
writer: &mut zlmdb::RwTxn,
index: store::Index,
) -> MResult<Option<UpdateResult>> {
let (update_id, update) = match index.updates.pop_front(writer)? {
Some(value) => value,
None => return Ok(None),
@ -112,11 +113,13 @@ pub fn update_task(writer: &mut zlmdb::RwTxn, index: store::Index) -> MResult<Op
Update::Schema(schema) => {
let start = Instant::now();
let update_type = UpdateType::Schema { schema: schema.clone() };
let update_type = UpdateType::Schema {
schema: schema.clone(),
};
let result = apply_schema_update(writer, index.main, &schema);
(update_type, result, start.elapsed())
},
}
Update::Customs(customs) => {
let start = Instant::now();
@ -133,7 +136,9 @@ pub fn update_task(writer: &mut zlmdb::RwTxn, index: store::Index) -> MResult<Op
None => RankedMap::default(),
};
let update_type = UpdateType::DocumentsAddition { number: documents.len() };
let update_type = UpdateType::DocumentsAddition {
number: documents.len(),
};
let result = apply_documents_addition(
writer,
@ -147,7 +152,7 @@ pub fn update_task(writer: &mut zlmdb::RwTxn, index: store::Index) -> MResult<Op
);
(update_type, result, start.elapsed())
},
}
Update::DocumentsDeletion(documents) => {
let start = Instant::now();
@ -156,7 +161,9 @@ pub fn update_task(writer: &mut zlmdb::RwTxn, index: store::Index) -> MResult<Op
None => RankedMap::default(),
};
let update_type = UpdateType::DocumentsDeletion { number: documents.len() };
let update_type = UpdateType::DocumentsDeletion {
number: documents.len(),
};
let result = apply_documents_deletion(
writer,
@ -170,38 +177,35 @@ pub fn update_task(writer: &mut zlmdb::RwTxn, index: store::Index) -> MResult<Op
);
(update_type, result, start.elapsed())
},
}
Update::SynonymsAddition(synonyms) => {
let start = Instant::now();
let update_type = UpdateType::SynonymsAddition { number: synonyms.len() };
let update_type = UpdateType::SynonymsAddition {
number: synonyms.len(),
};
let result = apply_synonyms_addition(
writer,
index.main,
index.synonyms,
synonyms,
);
let result = apply_synonyms_addition(writer, index.main, index.synonyms, synonyms);
(update_type, result, start.elapsed())
},
}
Update::SynonymsDeletion(synonyms) => {
let start = Instant::now();
let update_type = UpdateType::SynonymsDeletion { number: synonyms.len() };
let update_type = UpdateType::SynonymsDeletion {
number: synonyms.len(),
};
let result = apply_synonyms_deletion(
writer,
index.main,
index.synonyms,
synonyms,
);
let result = apply_synonyms_deletion(writer, index.main, index.synonyms, synonyms);
(update_type, result, start.elapsed())
},
}
};
debug!("Processed update number {} {:?} {:?}", update_id, update_type, result);
debug!(
"Processed update number {} {:?} {:?}",
update_id, update_type, result
);
let detailed_duration = DetailedDuration { main: duration };
let status = UpdateResult {
@ -211,7 +215,9 @@ pub fn update_task(writer: &mut zlmdb::RwTxn, index: store::Index) -> MResult<Op
detailed_duration,
};
index.updates_results.put_update_result(writer, update_id, &status)?;
index
.updates_results
.put_update_result(writer, update_id, &status)?;
Ok(Some(status))
}
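For orientation, the update_task hunks above keep the same flow after formatting: pop the next update off the queue, time the matching apply_* call, and persist an UpdateResult under the update id. A compressed, self-contained sketch of that shape follows; the types are stand-ins, not the real store/zlmdb API.

// Stand-in types only; a sketch of the pop / time / apply / persist flow,
// not the actual meilidb-core stores.
use std::collections::{HashMap, VecDeque};
use std::time::{Duration, Instant};

enum Update {
    Customs(Vec<u8>),
    DocumentsDeletion(Vec<u64>),
}

#[derive(Debug)]
struct UpdateResult {
    update_id: u64,
    duration: Duration,
}

fn update_task(
    queue: &mut VecDeque<(u64, Update)>,
    results: &mut HashMap<u64, UpdateResult>,
) -> Option<u64> {
    let (update_id, update) = queue.pop_front()?;
    let start = Instant::now();
    // Each variant would dispatch to its own apply_* function.
    match update {
        Update::Customs(customs) => drop(customs),
        Update::DocumentsDeletion(ids) => drop(ids),
    }
    let status = UpdateResult {
        update_id,
        duration: start.elapsed(),
    };
    results.insert(update_id, status);
    Some(update_id)
}

fn main() {
    let mut queue = VecDeque::from(vec![(0, Update::Customs(vec![1, 2, 3]))]);
    let mut results = HashMap::new();
    update_task(&mut queue, &mut results);
    println!("{:?}", results.get(&0));
}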

View file

@ -1,18 +1,19 @@
use crate::update::{next_update_id, Update};
use crate::{error::UnsupportedOperation, store, MResult};
use meilidb_schema::Schema;
use crate::{store, error::UnsupportedOperation, MResult};
use crate::update::{Update, next_update_id};
pub fn apply_schema_update(
writer: &mut zlmdb::RwTxn,
main_store: store::Main,
new_schema: &Schema,
) -> MResult<()>
{
) -> MResult<()> {
if let Some(_) = main_store.schema(writer)? {
return Err(UnsupportedOperation::SchemaAlreadyExists.into())
return Err(UnsupportedOperation::SchemaAlreadyExists.into());
}
main_store.put_schema(writer, new_schema).map_err(Into::into)
main_store
.put_schema(writer, new_schema)
.map_err(Into::into)
}
pub fn push_schema_update(
@ -20,8 +21,7 @@ pub fn push_schema_update(
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
schema: Schema,
) -> MResult<u64>
{
) -> MResult<u64> {
let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;
let update = Update::Schema(schema);

View file

@ -1,10 +1,10 @@
use std::collections::BTreeMap;
use fst::{SetBuilder, set::OpBuilder};
use fst::{set::OpBuilder, SetBuilder};
use sdset::SetBuf;
use crate::automaton::normalize_str;
use crate::update::{Update, next_update_id};
use crate::update::{next_update_id, Update};
use crate::{store, MResult};
pub struct SynonymsAddition {
@ -19,8 +19,7 @@ impl SynonymsAddition {
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
updates_notifier: crossbeam_channel::Sender<()>,
) -> SynonymsAddition
{
) -> SynonymsAddition {
SynonymsAddition {
updates_store,
updates_results_store,
@ -30,13 +29,17 @@ impl SynonymsAddition {
}
pub fn add_synonym<S, T, I>(&mut self, synonym: S, alternatives: I)
where S: AsRef<str>,
T: AsRef<str>,
I: IntoIterator<Item=T>,
where
S: AsRef<str>,
T: AsRef<str>,
I: IntoIterator<Item = T>,
{
let synonym = normalize_str(synonym.as_ref());
let alternatives = alternatives.into_iter().map(|s| s.as_ref().to_lowercase());
self.synonyms.entry(synonym).or_insert_with(Vec::new).extend(alternatives);
self.synonyms
.entry(synonym)
.or_insert_with(Vec::new)
.extend(alternatives);
}
pub fn finalize(self, writer: &mut zlmdb::RwTxn) -> MResult<u64> {
@ -56,8 +59,7 @@ pub fn push_synonyms_addition(
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
addition: BTreeMap<String, Vec<String>>,
) -> MResult<u64>
{
) -> MResult<u64> {
let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;
let update = Update::SynonymsAddition(addition);
@ -71,8 +73,7 @@ pub fn apply_synonyms_addition(
main_store: store::Main,
synonyms_store: store::Synonyms,
addition: BTreeMap<String, Vec<String>>,
) -> MResult<()>
{
) -> MResult<()> {
let mut synonyms_builder = SetBuilder::memory();
for (word, alternatives) in addition {
@ -107,7 +108,7 @@ pub fn apply_synonyms_addition(
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap()
},
}
None => delta_synonyms,
};

View file

@ -1,11 +1,11 @@
use std::collections::BTreeMap;
use std::iter::FromIterator;
use fst::{SetBuilder, set::OpBuilder};
use fst::{set::OpBuilder, SetBuilder};
use sdset::SetBuf;
use crate::automaton::normalize_str;
use crate::update::{Update, next_update_id};
use crate::update::{next_update_id, Update};
use crate::{store, MResult};
pub struct SynonymsDeletion {
@ -20,8 +20,7 @@ impl SynonymsDeletion {
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
updates_notifier: crossbeam_channel::Sender<()>,
) -> SynonymsDeletion
{
) -> SynonymsDeletion {
SynonymsDeletion {
updates_store,
updates_results_store,
@ -36,9 +35,10 @@ impl SynonymsDeletion {
}
pub fn delete_specific_alternatives_of<S, T, I>(&mut self, synonym: S, alternatives: I)
where S: AsRef<str>,
T: AsRef<str>,
I: Iterator<Item=T>,
where
S: AsRef<str>,
T: AsRef<str>,
I: Iterator<Item = T>,
{
let synonym = normalize_str(synonym.as_ref());
let value = self.synonyms.entry(synonym).or_insert(None);
@ -66,8 +66,7 @@ pub fn push_synonyms_deletion(
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
deletion: BTreeMap<String, Option<Vec<String>>>,
) -> MResult<u64>
{
) -> MResult<u64> {
let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;
let update = Update::SynonymsDeletion(deletion);
@ -81,8 +80,7 @@ pub fn apply_synonyms_deletion(
main_store: store::Main,
synonyms_store: store::Synonyms,
deletion: BTreeMap<String, Option<Vec<String>>>,
) -> MResult<()>
{
) -> MResult<()> {
let mut delete_whole_synonym_builder = SetBuilder::memory();
for (synonym, alternatives) in deletion {
@ -98,9 +96,7 @@ pub fn apply_synonyms_deletion(
let alternatives = SetBuf::from_dirty(alternatives);
let mut builder = SetBuilder::memory();
builder.extend_iter(alternatives).unwrap();
builder.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap()
builder.into_inner().and_then(fst::Set::from_bytes).unwrap()
};
let op = OpBuilder::new()
@ -124,7 +120,7 @@ pub fn apply_synonyms_deletion(
} else {
synonyms_store.put_synonyms(writer, synonym.as_bytes(), &alternatives)?;
}
},
}
None => {
delete_whole_synonym_builder.insert(&synonym).unwrap();
synonyms_store.del_synonyms(writer, synonym.as_bytes())?;
@ -150,7 +146,7 @@ pub fn apply_synonyms_deletion(
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap()
},
}
None => fst::Set::default(),
};