Mirror of https://github.com/meilisearch/MeiliSearch
Fix the ARC cache
This commit is contained in:
parent 2fcae719ad
commit fc4013a43f
@@ -2,6 +2,7 @@ use std::collections::hash_map::Entry;
 use std::collections::{HashMap, BTreeSet};
 use std::convert::{TryFrom, TryInto};
 use std::io;
+use std::ops::BitOr;
 use std::path::PathBuf;
 use std::sync::atomic::{AtomicUsize, Ordering};
 
@@ -74,7 +75,7 @@ fn index_csv<R: io::Read>(wtxn: &mut heed::RwTxn, mut rdr: csv::Reader<R>, index
             None => {
                 let mut ids = index.word_positions.get(wtxn, &word)?.unwrap_or_default();
                 ids.insert(position);
-                for (word, ids) in word_positions.insert(word.clone(), ids) {
+                for (word, ids) in word_positions.insert(word.clone(), ids, RoaringBitmap::bitor) {
                     index.word_positions.put(wtxn, &word, &ids)?;
                 }
             }
@@ -90,7 +91,7 @@ fn index_csv<R: io::Read>(wtxn: &mut heed::RwTxn, mut rdr: csv::Reader<R>, index
             None => {
                 let mut ids = index.word_position_docids.get(wtxn, &key)?.unwrap_or_default();
                 ids.insert(position);
-                for ((word, position), ids) in word_position_docids.insert((word.clone(), position), ids) {
+                for ((word, position), ids) in word_position_docids.insert((word.clone(), position), ids, RoaringBitmap::bitor) {
                     let mut key = word.as_bytes().to_vec();
                     key.extend_from_slice(&position.to_be_bytes());
                     index.word_position_docids.put(wtxn, &key, &ids)?;
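As an aside on the loop body above: the LMDB key for word_position_docids is the word's UTF-8 bytes followed by the position in big-endian form, so entries sort first by word and then by position. Below is a minimal sketch of that layout, assuming a u32 position (the integer width is not visible in this diff) and a hypothetical helper name:

use std::str;

// Hypothetical helper mirroring the key construction in the hunk above:
// word bytes first, then the big-endian encoding of the position.
fn word_position_key(word: &str, position: u32) -> Vec<u8> {
    let mut key = word.as_bytes().to_vec();
    key.extend_from_slice(&position.to_be_bytes());
    key
}

fn main() {
    let key = word_position_key("hello", 3);
    // The first bytes are the word itself, the last four are the position.
    assert_eq!(&key[..5], b"hello");
    assert_eq!(&key[5..], &3u32.to_be_bytes()[..]);
}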
@@ -123,7 +124,7 @@ fn index_csv<R: io::Read>(wtxn: &mut heed::RwTxn, mut rdr: csv::Reader<R>, index
     let iter = index.word_positions.as_polymorph().iter::<_, Str, DecodeIgnore>(wtxn)?;
     for result in iter {
         let (word, ()) = result?;
-        new_words.insert(word.clone());
+        new_words.insert(word);
     }
 
     let new_words_fst = fst::Set::from_iter(new_words)?;
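Both indexer call sites now pass RoaringBitmap::bitor as the merge function, which is also why `use std::ops::BitOr;` is added at the top of the file: when a key being inserted is already cached, the previously cached position bitmap and the incoming one are unioned rather than one replacing the other. A minimal sketch of that merge, assuming the roaring crate; the variable names are illustrative:

use std::ops::BitOr;
use roaring::RoaringBitmap;

fn main() {
    // Two bitmaps of positions recorded for the same word at different times.
    let first: RoaringBitmap = (0u32..5).collect();
    let second: RoaringBitmap = (3u32..8).collect();

    // The merge function handed to the cache's insert: a plain bitmap union.
    let merged = RoaringBitmap::bitor(first, second);
    assert_eq!(merged.len(), 8);
}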
src/cache.rs
@@ -34,25 +34,6 @@ impl<K: PartialEq> PartialEq for KeyRef<K> {
 
 impl<K: Eq> Eq for KeyRef<K> {}
 
-#[cfg(feature = "nightly")]
-#[doc(hidden)]
-pub auto trait NotKeyRef {}
-
-#[cfg(feature = "nightly")]
-impl<K> !NotKeyRef for KeyRef<K> {}
-
-#[cfg(feature = "nightly")]
-impl<K, D> Borrow<D> for KeyRef<K>
-where
-    K: Borrow<D>,
-    D: NotKeyRef + ?Sized,
-{
-    fn borrow(&self) -> &D {
-        unsafe { &*self.k }.borrow()
-    }
-}
-
-#[cfg(not(feature = "nightly"))]
 impl<K> Borrow<K> for KeyRef<K> {
     fn borrow(&self) -> &K {
         unsafe { &*self.k }
@@ -88,7 +69,7 @@ impl<K, V> LruEntry<K, V> {
     }
 }
 
-/// An LRU Cache
+/// An LRU Cache.
 pub struct LruCache<K, V> {
     map: FastMap8<KeyRef<K>, Box<LruEntry<K, V>>>,
     cap: usize,
@@ -100,13 +81,6 @@ pub struct LruCache<K, V> {
 
 impl<K: Hash + Eq, V> LruCache<K, V> {
     /// Creates a new LRU Cache that holds at most `cap` items.
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use lru::LruCache;
-    /// let mut cache: LruCache<isize, &str> = LruCache::new(10);
-    /// ```
     pub fn new(cap: usize) -> LruCache<K, V> {
         let mut map = FastMap8::default();
         map.reserve(cap);
@@ -114,13 +88,6 @@ impl<K: Hash + Eq, V> LruCache<K, V> {
     }
 
     /// Creates a new LRU Cache that never automatically evicts items.
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// use lru::LruCache;
-    /// let mut cache: LruCache<isize, &str> = LruCache::unbounded();
-    /// ```
     pub fn unbounded() -> LruCache<K, V> {
         LruCache::construct(usize::MAX, HashMap::default())
     }
@@ -443,13 +410,16 @@ where
         }
     }
 
-    pub fn insert(&mut self, key: K, value: V) -> Vec<(K, V)> {
+    pub fn insert<F>(&mut self, key: K, value: V, mut merge: F) -> Vec<(K, V)>
+    where F: FnMut(V, V) -> V
+    {
         let mut evicted = Vec::new();
         if self.frequent_set.contains_key(&key) {
             evicted.extend(self.frequent_set.insert(key, value));
             return evicted;
         }
-        if self.recent_set.remove(&key).is_some() {
+        if let Some(prev_value) = self.recent_set.remove(&key) {
+            let value = (merge)(prev_value, value);
             evicted.extend(self.frequent_set.insert(key, value));
             return evicted;
         }
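The fix itself is in the hunk above: `insert` now takes a `merge` closure, and when a key is promoted from the recent set to the frequent set the previously stored value is merged with the incoming value instead of being dropped. Below is a stripped-down, self-contained sketch of that promotion behaviour using plain HashMaps in place of the cache's internal sets; every name here is illustrative, not the real API:

use std::collections::HashMap;

// Sketch of the promotion path: a key found in the "recent" set is moved to
// the "frequent" set, and its old value is merged with the new one.
fn promote<K: std::hash::Hash + Eq, V>(
    recent: &mut HashMap<K, V>,
    frequent: &mut HashMap<K, V>,
    key: K,
    value: V,
    mut merge: impl FnMut(V, V) -> V,
) {
    let value = match recent.remove(&key) {
        Some(prev) => merge(prev, value),
        None => value,
    };
    frequent.insert(key, value);
}

fn main() {
    let mut recent = HashMap::new();
    let mut frequent = HashMap::new();
    recent.insert("hello", vec![1u32, 2]);
    // Re-inserting "hello" promotes it and merges the two position lists,
    // just as RoaringBitmap::bitor unions bitmaps in the indexer.
    promote(&mut recent, &mut frequent, "hello", vec![3], |mut a, b| { a.extend(b); a });
    assert_eq!(frequent["hello"], vec![1, 2, 3]);
}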