Cargo clippy pass

This commit is contained in:
Clément Renault 2019-10-18 13:21:41 +02:00
parent ca26a0f2e4
commit 9dce41ed6b
No known key found for this signature in database
GPG Key ID: 92ADA4E935E71FA4
16 changed files with 82 additions and 66 deletions

View File

@@ -102,7 +102,7 @@ pub struct QueryEnhancerBuilder<'a, S> {
impl<S: AsRef<str>> QueryEnhancerBuilder<'_, S> { impl<S: AsRef<str>> QueryEnhancerBuilder<'_, S> {
pub fn new(query: &[S]) -> QueryEnhancerBuilder<S> { pub fn new(query: &[S]) -> QueryEnhancerBuilder<S> {
// we initialize origins query indices based on their positions // we initialize origins query indices based on their positions
let origins: Vec<_> = (0..query.len() + 1).collect(); let origins: Vec<_> = (0..=query.len()).collect();
let real_to_origin = origins.iter().map(|&o| (o..o + 1, (o, 1))).collect(); let real_to_origin = origins.iter().map(|&o| (o..o + 1, (o, 1))).collect();
QueryEnhancerBuilder { QueryEnhancerBuilder {

View File

@@ -57,7 +57,7 @@ fn multiword_rewrite_matches(
let match_ = TmpMatch { let match_ = TmpMatch {
query_index, query_index,
word_index, word_index,
..match_.clone() ..*match_
}; };
padded_matches.push((*id, match_)); padded_matches.push((*id, match_));
} }
@@ -72,7 +72,7 @@ fn multiword_rewrite_matches(
let padmatch = TmpMatch { let padmatch = TmpMatch {
query_index, query_index,
word_index, word_index,
..match_.clone() ..*match_
}; };
for (_, nmatch_) in next_group { for (_, nmatch_) in next_group {
@@ -89,7 +89,7 @@ fn multiword_rewrite_matches(
let match_ = TmpMatch { let match_ = TmpMatch {
query_index, query_index,
word_index, word_index,
..match_.clone() ..*match_
}; };
padded_matches.push((*id, match_)); padded_matches.push((*id, match_));
biggest = biggest.max(i + 1); biggest = biggest.max(i + 1);
@@ -116,7 +116,7 @@ fn multiword_rewrite_matches(
let match_ = TmpMatch { let match_ = TmpMatch {
query_index, query_index,
word_index, word_index,
..match_.clone() ..*match_
}; };
padded_matches.push((*id, match_)); padded_matches.push((*id, match_));
} }
@@ -141,9 +141,9 @@ fn fetch_raw_documents(
automatons: &[Automaton], automatons: &[Automaton],
query_enhancer: &QueryEnhancer, query_enhancer: &QueryEnhancer,
searchables: Option<&ReorderedAttrs>, searchables: Option<&ReorderedAttrs>,
main_store: &store::Main, main_store: store::Main,
postings_lists_store: &store::PostingsLists, postings_lists_store: store::PostingsLists,
documents_fields_counts_store: &store::DocumentsFieldsCounts, documents_fields_counts_store: store::DocumentsFieldsCounts,
) -> MResult<Vec<RawDocument>> { ) -> MResult<Vec<RawDocument>> {
let mut matches = Vec::new(); let mut matches = Vec::new();
let mut highlights = Vec::new(); let mut highlights = Vec::new();
@@ -370,11 +370,11 @@ where
let (automaton_producer, query_enhancer) = let (automaton_producer, query_enhancer) =
AutomatonProducer::new(reader, query, main_store, synonyms_store)?; AutomatonProducer::new(reader, query, main_store, synonyms_store)?;
let mut automaton_producer = automaton_producer.into_iter(); let automaton_producer = automaton_producer.into_iter();
let mut automatons = Vec::new(); let mut automatons = Vec::new();
// aggregate automatons groups by groups after time // aggregate automatons groups by groups after time
while let Some(auts) = automaton_producer.next() { for auts in automaton_producer {
automatons.extend(auts); automatons.extend(auts);
// we must retrieve the documents associated // we must retrieve the documents associated
@@ -384,9 +384,9 @@ where
&automatons, &automatons,
&query_enhancer, &query_enhancer,
searchable_attrs.as_ref(), searchable_attrs.as_ref(),
&main_store, main_store,
&postings_lists_store, postings_lists_store,
&documents_fields_counts_store, documents_fields_counts_store,
)?; )?;
// stop processing when time is running out // stop processing when time is running out
@@ -447,7 +447,7 @@ where
// those must be returned // those must be returned
let documents = raw_documents_processed let documents = raw_documents_processed
.into_iter() .into_iter()
.map(|d| Document::from_raw(d)) .map(Document::from_raw)
.collect(); .collect();
Ok(documents) Ok(documents)
@@ -483,11 +483,11 @@ where
let (automaton_producer, query_enhancer) = let (automaton_producer, query_enhancer) =
AutomatonProducer::new(reader, query, main_store, synonyms_store)?; AutomatonProducer::new(reader, query, main_store, synonyms_store)?;
let mut automaton_producer = automaton_producer.into_iter(); let automaton_producer = automaton_producer.into_iter();
let mut automatons = Vec::new(); let mut automatons = Vec::new();
// aggregate automatons groups by groups after time // aggregate automatons groups by groups after time
while let Some(auts) = automaton_producer.next() { for auts in automaton_producer {
automatons.extend(auts); automatons.extend(auts);
// we must retrieve the documents associated // we must retrieve the documents associated
@@ -497,9 +497,9 @@ where
&automatons, &automatons,
&query_enhancer, &query_enhancer,
searchable_attrs.as_ref(), searchable_attrs.as_ref(),
&main_store, main_store,
&postings_lists_store, postings_lists_store,
&documents_fields_counts_store, documents_fields_counts_store,
)?; )?;
// stop processing when time is running out // stop processing when time is running out
@@ -620,7 +620,7 @@ where
// those must be returned // those must be returned
let documents = raw_documents_processed let documents = raw_documents_processed
.into_iter() .into_iter()
.map(|d| Document::from_raw(d)) .map(Document::from_raw)
.collect(); .collect();
Ok(documents) Ok(documents)

View File

@@ -15,6 +15,10 @@ impl RankedMap {
self.0.len() self.0.len()
} }
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
pub fn insert(&mut self, document: DocumentId, attribute: SchemaAttr, number: Number) { pub fn insert(&mut self, document: DocumentId, attribute: SchemaAttr, number: Number) {
self.0.insert((document, attribute), number); self.0.insert((document, attribute), number);
} }

View File

@@ -152,6 +152,12 @@ impl RawIndexer {
} }
} }
impl Default for RawIndexer {
fn default() -> Self {
Self::new()
}
}
fn index_token( fn index_token(
token: Token, token: Token,
id: DocumentId, id: DocumentId,

View File

@@ -119,3 +119,9 @@ impl RamDocumentStore {
self.0 self.0
} }
} }
impl Default for RamDocumentStore {
fn default() -> Self {
Self::new()
}
}

View File

@@ -11,7 +11,7 @@ pub struct DocsWords {
impl DocsWords { impl DocsWords {
pub fn put_doc_words( pub fn put_doc_words(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
document_id: DocumentId, document_id: DocumentId,
words: &fst::Set, words: &fst::Set,
@@ -22,7 +22,7 @@ impl DocsWords {
} }
pub fn del_doc_words( pub fn del_doc_words(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<bool> { ) -> ZResult<bool> {
@@ -31,7 +31,7 @@ impl DocsWords {
} }
pub fn doc_words( pub fn doc_words(
&self, self,
reader: &zlmdb::RoTxn, reader: &zlmdb::RoTxn,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<Option<fst::Set>> { ) -> ZResult<Option<fst::Set>> {

View File

@@ -12,7 +12,7 @@ pub struct DocumentsFields {
impl DocumentsFields { impl DocumentsFields {
pub fn put_document_field( pub fn put_document_field(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
document_id: DocumentId, document_id: DocumentId,
attribute: SchemaAttr, attribute: SchemaAttr,
@@ -23,7 +23,7 @@ impl DocumentsFields {
} }
pub fn del_all_document_fields( pub fn del_all_document_fields(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<usize> { ) -> ZResult<usize> {
@@ -33,7 +33,7 @@ impl DocumentsFields {
} }
pub fn document_attribute<'txn>( pub fn document_attribute<'txn>(
&self, self,
reader: &'txn zlmdb::RoTxn, reader: &'txn zlmdb::RoTxn,
document_id: DocumentId, document_id: DocumentId,
attribute: SchemaAttr, attribute: SchemaAttr,
@@ -43,7 +43,7 @@ impl DocumentsFields {
} }
pub fn document_fields<'txn>( pub fn document_fields<'txn>(
&self, self,
reader: &'txn zlmdb::RoTxn, reader: &'txn zlmdb::RoTxn,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<DocumentFieldsIter<'txn>> { ) -> ZResult<DocumentFieldsIter<'txn>> {
@@ -67,7 +67,7 @@ impl<'txn> Iterator for DocumentFieldsIter<'txn> {
let attr = SchemaAttr(key.attr.get()); let attr = SchemaAttr(key.attr.get());
Some(Ok((attr, bytes))) Some(Ok((attr, bytes)))
} }
Some(Err(e)) => Some(Err(e.into())), Some(Err(e)) => Some(Err(e)),
None => None, None => None,
} }
} }

View File

@@ -11,7 +11,7 @@ pub struct DocumentsFieldsCounts {
impl DocumentsFieldsCounts { impl DocumentsFieldsCounts {
pub fn put_document_field_count( pub fn put_document_field_count(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
document_id: DocumentId, document_id: DocumentId,
attribute: SchemaAttr, attribute: SchemaAttr,
@@ -22,7 +22,7 @@ impl DocumentsFieldsCounts {
} }
pub fn del_all_document_fields_counts( pub fn del_all_document_fields_counts(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<usize> { ) -> ZResult<usize> {
@@ -33,7 +33,7 @@ impl DocumentsFieldsCounts {
} }
pub fn document_field_count( pub fn document_field_count(
&self, self,
reader: &zlmdb::RoTxn, reader: &zlmdb::RoTxn,
document_id: DocumentId, document_id: DocumentId,
attribute: SchemaAttr, attribute: SchemaAttr,
@@ -46,7 +46,7 @@ impl DocumentsFieldsCounts {
} }
pub fn document_fields_counts<'txn>( pub fn document_fields_counts<'txn>(
&self, self,
reader: &'txn zlmdb::RoTxn, reader: &'txn zlmdb::RoTxn,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<DocumentFieldsCountsIter<'txn>> { ) -> ZResult<DocumentFieldsCountsIter<'txn>> {
@@ -57,7 +57,7 @@ impl DocumentsFieldsCounts {
} }
pub fn documents_ids<'txn>( pub fn documents_ids<'txn>(
&self, self,
reader: &'txn zlmdb::RoTxn, reader: &'txn zlmdb::RoTxn,
) -> ZResult<DocumentsIdsIter<'txn>> { ) -> ZResult<DocumentsIdsIter<'txn>> {
let iter = self.documents_fields_counts.iter(reader)?; let iter = self.documents_fields_counts.iter(reader)?;
@@ -68,7 +68,7 @@ impl DocumentsFieldsCounts {
} }
pub fn all_documents_fields_counts<'txn>( pub fn all_documents_fields_counts<'txn>(
&self, self,
reader: &'txn zlmdb::RoTxn, reader: &'txn zlmdb::RoTxn,
) -> ZResult<AllDocumentsFieldsCountsIter<'txn>> { ) -> ZResult<AllDocumentsFieldsCountsIter<'txn>> {
let iter = self.documents_fields_counts.iter(reader)?; let iter = self.documents_fields_counts.iter(reader)?;
@@ -89,7 +89,7 @@ impl Iterator for DocumentFieldsCountsIter<'_> {
let attr = SchemaAttr(key.attr.get()); let attr = SchemaAttr(key.attr.get());
Some(Ok((attr, count))) Some(Ok((attr, count)))
} }
Some(Err(e)) => Some(Err(e.into())), Some(Err(e)) => Some(Err(e)),
None => None, None => None,
} }
} }
@@ -113,7 +113,7 @@ impl Iterator for DocumentsIdsIter<'_> {
return Some(Ok(document_id)); return Some(Ok(document_id));
} }
} }
Err(e) => return Some(Err(e.into())), Err(e) => return Some(Err(e)),
} }
} }
None None
@@ -134,7 +134,7 @@ impl<'r> Iterator for AllDocumentsFieldsCountsIter<'r> {
let attr = SchemaAttr(key.attr.get()); let attr = SchemaAttr(key.attr.get());
Some(Ok((docid, attr, count))) Some(Ok((docid, attr, count)))
} }
Some(Err(e)) => Some(Err(e.into())), Some(Err(e)) => Some(Err(e)),
None => None, None => None,
} }
} }

View File

@@ -17,12 +17,12 @@ pub struct Main {
} }
impl Main { impl Main {
pub fn put_words_fst(&self, writer: &mut zlmdb::RwTxn, fst: &fst::Set) -> ZResult<()> { pub fn put_words_fst(self, writer: &mut zlmdb::RwTxn, fst: &fst::Set) -> ZResult<()> {
let bytes = fst.as_fst().as_bytes(); let bytes = fst.as_fst().as_bytes();
self.main.put::<Str, ByteSlice>(writer, WORDS_KEY, bytes) self.main.put::<Str, ByteSlice>(writer, WORDS_KEY, bytes)
} }
pub fn words_fst(&self, reader: &zlmdb::RoTxn) -> ZResult<Option<fst::Set>> { pub fn words_fst(self, reader: &zlmdb::RoTxn) -> ZResult<Option<fst::Set>> {
match self.main.get::<Str, ByteSlice>(reader, WORDS_KEY)? { match self.main.get::<Str, ByteSlice>(reader, WORDS_KEY)? {
Some(bytes) => { Some(bytes) => {
let len = bytes.len(); let len = bytes.len();
@@ -34,31 +34,31 @@ impl Main {
} }
} }
pub fn put_schema(&self, writer: &mut zlmdb::RwTxn, schema: &Schema) -> ZResult<()> { pub fn put_schema(self, writer: &mut zlmdb::RwTxn, schema: &Schema) -> ZResult<()> {
self.main self.main
.put::<Str, Serde<Schema>>(writer, SCHEMA_KEY, schema) .put::<Str, Serde<Schema>>(writer, SCHEMA_KEY, schema)
} }
pub fn schema(&self, reader: &zlmdb::RoTxn) -> ZResult<Option<Schema>> { pub fn schema(self, reader: &zlmdb::RoTxn) -> ZResult<Option<Schema>> {
self.main.get::<Str, Serde<Schema>>(reader, SCHEMA_KEY) self.main.get::<Str, Serde<Schema>>(reader, SCHEMA_KEY)
} }
pub fn put_ranked_map(&self, writer: &mut zlmdb::RwTxn, ranked_map: &RankedMap) -> ZResult<()> { pub fn put_ranked_map(self, writer: &mut zlmdb::RwTxn, ranked_map: &RankedMap) -> ZResult<()> {
self.main self.main
.put::<Str, Serde<RankedMap>>(writer, RANKED_MAP_KEY, &ranked_map) .put::<Str, Serde<RankedMap>>(writer, RANKED_MAP_KEY, &ranked_map)
} }
pub fn ranked_map(&self, reader: &zlmdb::RoTxn) -> ZResult<Option<RankedMap>> { pub fn ranked_map(self, reader: &zlmdb::RoTxn) -> ZResult<Option<RankedMap>> {
self.main self.main
.get::<Str, Serde<RankedMap>>(reader, RANKED_MAP_KEY) .get::<Str, Serde<RankedMap>>(reader, RANKED_MAP_KEY)
} }
pub fn put_synonyms_fst(&self, writer: &mut zlmdb::RwTxn, fst: &fst::Set) -> ZResult<()> { pub fn put_synonyms_fst(self, writer: &mut zlmdb::RwTxn, fst: &fst::Set) -> ZResult<()> {
let bytes = fst.as_fst().as_bytes(); let bytes = fst.as_fst().as_bytes();
self.main.put::<Str, ByteSlice>(writer, SYNONYMS_KEY, bytes) self.main.put::<Str, ByteSlice>(writer, SYNONYMS_KEY, bytes)
} }
pub fn synonyms_fst(&self, reader: &zlmdb::RoTxn) -> ZResult<Option<fst::Set>> { pub fn synonyms_fst(self, reader: &zlmdb::RoTxn) -> ZResult<Option<fst::Set>> {
match self.main.get::<Str, ByteSlice>(reader, SYNONYMS_KEY)? { match self.main.get::<Str, ByteSlice>(reader, SYNONYMS_KEY)? {
Some(bytes) => { Some(bytes) => {
let len = bytes.len(); let len = bytes.len();
@@ -70,7 +70,7 @@ impl Main {
} }
} }
pub fn put_number_of_documents<F>(&self, writer: &mut zlmdb::RwTxn, f: F) -> ZResult<u64> pub fn put_number_of_documents<F>(self, writer: &mut zlmdb::RwTxn, f: F) -> ZResult<u64>
where where
F: Fn(u64) -> u64, F: Fn(u64) -> u64,
{ {
@@ -80,7 +80,7 @@ impl Main {
Ok(new) Ok(new)
} }
pub fn number_of_documents(&self, reader: &zlmdb::RoTxn) -> ZResult<u64> { pub fn number_of_documents(self, reader: &zlmdb::RoTxn) -> ZResult<u64> {
match self match self
.main .main
.get::<Str, OwnedType<u64>>(reader, NUMBER_OF_DOCUMENTS_KEY)? .get::<Str, OwnedType<u64>>(reader, NUMBER_OF_DOCUMENTS_KEY)?
@@ -90,12 +90,12 @@ impl Main {
} }
} }
pub fn put_customs(&self, writer: &mut zlmdb::RwTxn, customs: &[u8]) -> ZResult<()> { pub fn put_customs(self, writer: &mut zlmdb::RwTxn, customs: &[u8]) -> ZResult<()> {
self.main self.main
.put::<Str, ByteSlice>(writer, CUSTOMS_KEY, customs) .put::<Str, ByteSlice>(writer, CUSTOMS_KEY, customs)
} }
pub fn customs<'txn>(&self, reader: &'txn zlmdb::RoTxn) -> ZResult<Option<&'txn [u8]>> { pub fn customs<'txn>(self, reader: &'txn zlmdb::RoTxn) -> ZResult<Option<&'txn [u8]>> {
self.main.get::<Str, ByteSlice>(reader, CUSTOMS_KEY) self.main.get::<Str, ByteSlice>(reader, CUSTOMS_KEY)
} }
} }

View File

@@ -106,7 +106,7 @@ impl Index {
let attributes = match attributes { let attributes = match attributes {
Some(attributes) => attributes Some(attributes) => attributes
.into_iter() .iter()
.map(|name| schema.attribute(name)) .map(|name| schema.attribute(name))
.collect(), .collect(),
None => None, None => None,

View File

@@ -11,7 +11,7 @@ pub struct PostingsLists {
impl PostingsLists { impl PostingsLists {
pub fn put_postings_list( pub fn put_postings_list(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
word: &[u8], word: &[u8],
words_indexes: &Set<DocIndex>, words_indexes: &Set<DocIndex>,
@@ -19,12 +19,12 @@ impl PostingsLists {
self.postings_lists.put(writer, word, words_indexes) self.postings_lists.put(writer, word, words_indexes)
} }
pub fn del_postings_list(&self, writer: &mut zlmdb::RwTxn, word: &[u8]) -> ZResult<bool> { pub fn del_postings_list(self, writer: &mut zlmdb::RwTxn, word: &[u8]) -> ZResult<bool> {
self.postings_lists.delete(writer, word) self.postings_lists.delete(writer, word)
} }
pub fn postings_list<'txn>( pub fn postings_list<'txn>(
&self, self,
reader: &'txn zlmdb::RoTxn, reader: &'txn zlmdb::RoTxn,
word: &[u8], word: &[u8],
) -> ZResult<Option<Cow<'txn, Set<DocIndex>>>> { ) -> ZResult<Option<Cow<'txn, Set<DocIndex>>>> {

View File

@@ -9,7 +9,7 @@ pub struct Synonyms {
impl Synonyms { impl Synonyms {
pub fn put_synonyms( pub fn put_synonyms(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
word: &[u8], word: &[u8],
synonyms: &fst::Set, synonyms: &fst::Set,
@@ -18,11 +18,11 @@ impl Synonyms {
self.synonyms.put(writer, word, bytes) self.synonyms.put(writer, word, bytes)
} }
pub fn del_synonyms(&self, writer: &mut zlmdb::RwTxn, word: &[u8]) -> ZResult<bool> { pub fn del_synonyms(self, writer: &mut zlmdb::RwTxn, word: &[u8]) -> ZResult<bool> {
self.synonyms.delete(writer, word) self.synonyms.delete(writer, word)
} }
pub fn synonyms(&self, reader: &zlmdb::RoTxn, word: &[u8]) -> ZResult<Option<fst::Set>> { pub fn synonyms(self, reader: &zlmdb::RoTxn, word: &[u8]) -> ZResult<Option<fst::Set>> {
match self.synonyms.get(reader, word)? { match self.synonyms.get(reader, word)? {
Some(bytes) => { Some(bytes) => {
let len = bytes.len(); let len = bytes.len();

View File

@@ -36,7 +36,7 @@ pub struct Updates {
impl Updates { impl Updates {
// TODO do not trigger deserialize if possible // TODO do not trigger deserialize if possible
pub fn last_update_id(&self, reader: &zlmdb::RoTxn) -> ZResult<Option<(u64, Update)>> { pub fn last_update_id(self, reader: &zlmdb::RoTxn) -> ZResult<Option<(u64, Update)>> {
match self.updates.last(reader)? { match self.updates.last(reader)? {
Some((key, data)) => Ok(Some((key.get(), data))), Some((key, data)) => Ok(Some((key.get(), data))),
None => Ok(None), None => Ok(None),
@@ -44,7 +44,7 @@ impl Updates {
} }
// TODO do not trigger deserialize if possible // TODO do not trigger deserialize if possible
fn first_update_id(&self, reader: &zlmdb::RoTxn) -> ZResult<Option<(u64, Update)>> { fn first_update_id(self, reader: &zlmdb::RoTxn) -> ZResult<Option<(u64, Update)>> {
match self.updates.first(reader)? { match self.updates.first(reader)? {
Some((key, data)) => Ok(Some((key.get(), data))), Some((key, data)) => Ok(Some((key.get(), data))),
None => Ok(None), None => Ok(None),
@@ -52,13 +52,13 @@ impl Updates {
} }
// TODO do not trigger deserialize if possible // TODO do not trigger deserialize if possible
pub fn contains(&self, reader: &zlmdb::RoTxn, update_id: u64) -> ZResult<bool> { pub fn contains(self, reader: &zlmdb::RoTxn, update_id: u64) -> ZResult<bool> {
let update_id = BEU64::new(update_id); let update_id = BEU64::new(update_id);
self.updates.get(reader, &update_id).map(|v| v.is_some()) self.updates.get(reader, &update_id).map(|v| v.is_some())
} }
pub fn put_update( pub fn put_update(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
update_id: u64, update_id: u64,
update: &Update, update: &Update,
@@ -68,7 +68,7 @@ impl Updates {
self.updates.put(writer, &update_id, update) self.updates.put(writer, &update_id, update)
} }
pub fn pop_front(&self, writer: &mut zlmdb::RwTxn) -> ZResult<Option<(u64, Update)>> { pub fn pop_front(self, writer: &mut zlmdb::RwTxn) -> ZResult<Option<(u64, Update)>> {
match self.first_update_id(writer)? { match self.first_update_id(writer)? {
Some((update_id, update)) => { Some((update_id, update)) => {
let key = BEU64::new(update_id); let key = BEU64::new(update_id);

View File

@@ -9,7 +9,7 @@ pub struct UpdatesResults {
} }
impl UpdatesResults { impl UpdatesResults {
pub fn last_update_id(&self, reader: &zlmdb::RoTxn) -> ZResult<Option<(u64, UpdateResult)>> { pub fn last_update_id(self, reader: &zlmdb::RoTxn) -> ZResult<Option<(u64, UpdateResult)>> {
match self.updates_results.last(reader)? { match self.updates_results.last(reader)? {
Some((key, data)) => Ok(Some((key.get(), data))), Some((key, data)) => Ok(Some((key.get(), data))),
None => Ok(None), None => Ok(None),
@@ -17,7 +17,7 @@ impl UpdatesResults {
} }
pub fn put_update_result( pub fn put_update_result(
&self, self,
writer: &mut zlmdb::RwTxn, writer: &mut zlmdb::RwTxn,
update_id: u64, update_id: u64,
update_result: &UpdateResult, update_result: &UpdateResult,
@@ -27,7 +27,7 @@ impl UpdatesResults {
} }
pub fn update_result( pub fn update_result(
&self, self,
reader: &zlmdb::RoTxn, reader: &zlmdb::RoTxn,
update_id: u64, update_id: u64,
) -> ZResult<Option<UpdateResult>> { ) -> ZResult<Option<UpdateResult>> {

View File

@@ -7,7 +7,7 @@ pub fn apply_schema_update(
main_store: store::Main, main_store: store::Main,
new_schema: &Schema, new_schema: &Schema,
) -> MResult<()> { ) -> MResult<()> {
if let Some(_) = main_store.schema(writer)? { if main_store.schema(writer)?.is_some() {
return Err(UnsupportedOperation::SchemaAlreadyExists.into()); return Err(UnsupportedOperation::SchemaAlreadyExists.into());
} }

View File

@@ -176,7 +176,7 @@ where
let current = iter.next().map(|s| Tokenizer::new(s).peekable()); let current = iter.next().map(|s| Tokenizer::new(s).peekable());
SeqTokenizer { SeqTokenizer {
inner: iter, inner: iter,
current: current, current,
word_offset: 0, word_offset: 0,
char_offset: 0, char_offset: 0,
} }