Mirror of https://github.com/meilisearch/MeiliSearch, synced 2024-11-09 22:48:54 +01:00
Remove unimplemented from document changes

commit da61408e52
parent fe69385bd7
@@ -2,7 +2,7 @@ use heed::RoTxn;
 use obkv::KvReader;
 
 use crate::update::new::KvReaderFieldId;
-use crate::{DocumentId, FieldId, Index};
+use crate::{DocumentId, FieldId, Index, Result};
 
 pub enum DocumentChange {
     Deletion(Deletion),
@@ -52,8 +52,12 @@ impl Deletion {
         self.docid
     }
 
-    pub fn current(&self, rtxn: &RoTxn, index: &Index) -> &KvReader<FieldId> {
-        unimplemented!()
+    pub fn current<'a>(
+        &self,
+        rtxn: &'a RoTxn,
+        index: &'a Index,
+    ) -> Result<Option<&'a KvReader<FieldId>>> {
+        index.documents.get(rtxn, &self.docid).map_err(crate::Error::from)
     }
 }
 
@@ -67,7 +71,7 @@ impl Insertion {
     }
 
     pub fn new(&self) -> &KvReader<FieldId> {
-        unimplemented!()
+        self.new.as_ref()
     }
 }
 
@@ -85,11 +89,15 @@ impl Update {
         self.docid
     }
 
-    pub fn current(&self, rtxn: &RoTxn, index: &Index) -> &KvReader<FieldId> {
-        unimplemented!()
+    pub fn current<'a>(
+        &self,
+        rtxn: &'a RoTxn,
+        index: &'a Index,
+    ) -> Result<Option<&'a KvReader<FieldId>>> {
+        index.documents.get(rtxn, &self.docid).map_err(crate::Error::from)
     }
 
     pub fn new(&self) -> &KvReader<FieldId> {
-        unimplemented!()
+        self.new.as_ref()
     }
 }
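With this change, `current` performs a real read from the `documents` database instead of panicking, so callers must handle both a storage error and a possibly missing entry. A minimal caller sketch, assuming the imports shown in the hunk above and obkv's `KvReader::iter`; the helper name is hypothetical and not part of this commit:

// Hypothetical helper: count the fields of the stored version of a deleted
// document, treating "not found" as zero fields.
fn stored_field_count(deletion: &Deletion, rtxn: &RoTxn, index: &Index) -> Result<usize> {
    match deletion.current(rtxn, index)? {
        // `?` propagates the heed error that `current` wraps into `crate::Error`.
        Some(obkv) => Ok(obkv.iter().count()),
        // The docid is not (or no longer) present in the documents database.
        None => Ok(0),
    }
}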
@@ -115,7 +115,7 @@ impl SearchableExtractor for WordDocidsExtractor {
             cached_sorter.insert_del_u32(word.as_bytes(), inner.docid()).unwrap();
         };
         document_tokenizer.tokenize_document(
-            inner.current(rtxn, index),
+            inner.current(rtxn, index)?.unwrap(),
             fields_ids_map,
             &mut token_fn,
         )?;
@@ -125,7 +125,7 @@ impl SearchableExtractor for WordDocidsExtractor {
             cached_sorter.insert_del_u32(word.as_bytes(), inner.docid()).unwrap();
         };
         document_tokenizer.tokenize_document(
-            inner.current(rtxn, index),
+            inner.current(rtxn, index)?.unwrap(),
             fields_ids_map,
             &mut token_fn,
         )?;
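Both call sites propagate the storage error with `?` and then `.unwrap()` the `Option`, so a change whose document is missing from storage still panics, just without a message. A hedged alternative sketch (hypothetical helper, same types as the diff above) that keeps the invariant but names it:

// Hypothetical helper, not part of this commit: fetch the stored version of a
// deleted document and panic with an explicit message if it is absent,
// instead of a bare `.unwrap()`.
fn current_or_panic<'a>(
    deletion: &Deletion,
    rtxn: &'a RoTxn,
    index: &'a Index,
) -> Result<&'a KvReader<FieldId>> {
    let current = deletion.current(rtxn, index)?; // storage errors still bubble up
    Ok(current.expect("a document referenced by a change must exist in the documents database"))
}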