Mirror of https://github.com/meilisearch/MeiliSearch, synced 2024-12-02 09:35:45 +01:00

Run cargo fmt

This commit is contained in:
  parent ecda7af89f
  commit 66c606d7f9
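For context, `cargo fmt` runs rustfmt over the workspace and only reflows layout; every hunk below is of that kind: a signature that no longer fits on one line is wrapped so each parameter sits on its own indented line with a trailing comma before the closing parenthesis. The exact rustfmt configuration is not visible in this diff, though the lines that get wrapped suggest a width limit of roughly 100 characters. The following is a minimal, self-contained sketch of that wrapping rule; `ExampleStore` and its method are invented for illustration and are not part of MeiliSearch.

// Hypothetical type used only for this sketch.
pub struct ExampleStore;

impl ExampleStore {
    // Before formatting, this signature would have been a single over-long line:
    // pub fn lookup_document_by_external_identifier(&self, transaction_name: &str, external_identifier: &str) -> Option<u32> {
    pub fn lookup_document_by_external_identifier(
        &self,
        transaction_name: &str,
        external_identifier: &str,
    ) -> Option<u32> {
        // The body is untouched by `cargo fmt`; only the layout changes.
        let _ = (transaction_name, external_identifier);
        None
    }
}

fn main() {
    let store = ExampleStore;
    assert_eq!(store.lookup_document_by_external_identifier("txn", "doc-1"), None);
}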
@@ -28,7 +28,11 @@ impl ExternalDocumentsIds {
         self.0.is_empty(rtxn).map_err(Into::into)
     }
 
-    pub fn get<A: AsRef<str>>(&self, rtxn: &RoTxn<'_>, external_id: A) -> heed::Result<Option<u32>> {
+    pub fn get<A: AsRef<str>>(
+        &self,
+        rtxn: &RoTxn<'_>,
+        external_id: A,
+    ) -> heed::Result<Option<u32>> {
         self.0.get(rtxn, external_id.as_ref())
     }
 
@@ -51,7 +55,11 @@ impl ExternalDocumentsIds {
     ///
     /// - If attempting to delete a document that doesn't exist
     /// - If attempting to create a document that already exists
-    pub fn apply(&self, wtxn: &mut RwTxn<'_>, operations: Vec<DocumentOperation>) -> heed::Result<()> {
+    pub fn apply(
+        &self,
+        wtxn: &mut RwTxn<'_>,
+        operations: Vec<DocumentOperation>,
+    ) -> heed::Result<()> {
         for DocumentOperation { external_id, internal_id, kind } in operations {
             match kind {
                 DocumentOperationKind::Create => {
@@ -375,7 +375,11 @@ impl Index {
     /* primary key */
 
     /// Writes the documents primary key, this is the field name that is used to store the id.
-    pub(crate) fn put_primary_key(&self, wtxn: &mut RwTxn<'_>, primary_key: &str) -> heed::Result<()> {
+    pub(crate) fn put_primary_key(
+        &self,
+        wtxn: &mut RwTxn<'_>,
+        primary_key: &str,
+    ) -> heed::Result<()> {
         self.set_updated_at(wtxn, &OffsetDateTime::now_utc())?;
         self.main.remap_types::<Str, Str>().put(wtxn, main_key::PRIMARY_KEY_KEY, primary_key)
     }
@@ -531,7 +535,10 @@ impl Index {
     }
 
     /// Delete the documents ids that are faceted with a _geo field.
-    pub(crate) fn delete_geo_faceted_documents_ids(&self, wtxn: &mut RwTxn<'_>) -> heed::Result<bool> {
+    pub(crate) fn delete_geo_faceted_documents_ids(
+        &self,
+        wtxn: &mut RwTxn<'_>,
+    ) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(wtxn, main_key::GEO_FACETED_DOCUMENTS_IDS_KEY)
     }
 
@@ -763,7 +770,10 @@ impl Index {
     }
 
     /// Identical to `user_defined_searchable_fields`, but returns ids instead.
-    pub fn user_defined_searchable_fields_ids(&self, rtxn: &RoTxn<'_>) -> Result<Option<Vec<FieldId>>> {
+    pub fn user_defined_searchable_fields_ids(
+        &self,
+        rtxn: &RoTxn<'_>,
+    ) -> Result<Option<Vec<FieldId>>> {
         match self.user_defined_searchable_fields(rtxn)? {
             Some(fields) => {
                 let fields_ids_map = self.fields_ids_map(rtxn)?;
@@ -1198,7 +1208,10 @@ impl Index {
             .unwrap_or_default())
     }
 
-    pub fn synonyms(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashMap<Vec<String>, Vec<Vec<String>>>> {
+    pub fn synonyms(
+        &self,
+        rtxn: &RoTxn<'_>,
+    ) -> heed::Result<HashMap<Vec<String>, Vec<Vec<String>>>> {
         Ok(self
             .main
             .remap_types::<Str, SerdeBincode<_>>()
@@ -1384,7 +1397,11 @@ impl Index {
             .unwrap_or(DEFAULT_MIN_WORD_LEN_ONE_TYPO))
     }
 
-    pub(crate) fn put_min_word_len_one_typo(&self, txn: &mut RwTxn<'_>, val: u8) -> heed::Result<()> {
+    pub(crate) fn put_min_word_len_one_typo(
+        &self,
+        txn: &mut RwTxn<'_>,
+        val: u8,
+    ) -> heed::Result<()> {
         // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
         // identify 0 as being false, and anything else as true. The absence of a value is true,
         // because by default, we authorize typos.
@@ -1403,7 +1420,11 @@ impl Index {
             .unwrap_or(DEFAULT_MIN_WORD_LEN_TWO_TYPOS))
     }
 
-    pub(crate) fn put_min_word_len_two_typos(&self, txn: &mut RwTxn<'_>, val: u8) -> heed::Result<()> {
+    pub(crate) fn put_min_word_len_two_typos(
+        &self,
+        txn: &mut RwTxn<'_>,
+        val: u8,
+    ) -> heed::Result<()> {
         // It is not possible to put a bool in heed with OwnedType, so we put a u8 instead. We
         // identify 0 as being false, and anything else as true. The absence of a value is true,
         // because by default, we authorize typos.
@@ -1467,7 +1488,11 @@ impl Index {
         self.main.remap_types::<Str, BEU64>().get(txn, main_key::MAX_VALUES_PER_FACET)
     }
 
-    pub(crate) fn put_max_values_per_facet(&self, txn: &mut RwTxn<'_>, val: u64) -> heed::Result<()> {
+    pub(crate) fn put_max_values_per_facet(
+        &self,
+        txn: &mut RwTxn<'_>,
+        val: u64,
+    ) -> heed::Result<()> {
         self.main.remap_types::<Str, BEU64>().put(txn, main_key::MAX_VALUES_PER_FACET, &val)
     }
 
@@ -1508,7 +1533,10 @@ impl Index {
         self.main.remap_types::<Str, BEU64>().put(txn, main_key::PAGINATION_MAX_TOTAL_HITS, &val)
     }
 
-    pub(crate) fn delete_pagination_max_total_hits(&self, txn: &mut RwTxn<'_>) -> heed::Result<bool> {
+    pub(crate) fn delete_pagination_max_total_hits(
+        &self,
+        txn: &mut RwTxn<'_>,
+    ) -> heed::Result<bool> {
         self.main.remap_key_type::<Str>().delete(txn, main_key::PAGINATION_MAX_TOTAL_HITS)
     }
 
@@ -1544,7 +1572,10 @@ impl Index {
         self.script_language_docids.get(rtxn, key)
     }
 
-    pub fn script_language(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashMap<Script, Vec<Language>>> {
+    pub fn script_language(
+        &self,
+        rtxn: &RoTxn<'_>,
+    ) -> heed::Result<HashMap<Script, Vec<Language>>> {
         let mut script_language: HashMap<Script, Vec<Language>> = HashMap::new();
         let mut script_language_doc_count: Vec<(Script, Language, u64)> = Vec::new();
         let mut total = 0;
@@ -229,7 +229,10 @@ pub fn obkv_to_json(
 }
 
 /// Transform every field of a raw obkv store into a JSON Object.
-pub fn all_obkv_to_json(obkv: obkv::KvReaderU16<'_>, fields_ids_map: &FieldsIdsMap) -> Result<Object> {
+pub fn all_obkv_to_json(
+    obkv: obkv::KvReaderU16<'_>,
+    fields_ids_map: &FieldsIdsMap,
+) -> Result<Object> {
     let all_keys = obkv.iter().map(|(k, _v)| k).collect::<Vec<_>>();
     obkv_to_json(all_keys.as_slice(), fields_ids_map, obkv)
 }
@@ -97,7 +97,7 @@ impl SearchLogger<QueryGraph> for VisualSearchLogger {
     fn next_bucket_ranking_rule(
         &mut self,
         ranking_rule_idx: usize,
-        _ranking_rule: &dyn RankingRule<'_,QueryGraph>,
+        _ranking_rule: &dyn RankingRule<'_, QueryGraph>,
         universe: &RoaringBitmap,
         bucket: &RoaringBitmap,
     ) {
@@ -244,7 +244,12 @@ impl<'t> Matcher<'t, '_> {
     }
 
     /// Returns the bounds in byte index of the crop window.
-    fn crop_bounds(&self, tokens: &[Token<'_>], matches: &[Match], crop_size: usize) -> (usize, usize) {
+    fn crop_bounds(
+        &self,
+        tokens: &[Token<'_>],
+        matches: &[Match],
+        crop_size: usize,
+    ) -> (usize, usize) {
         // if there is no match, we start from the beginning of the string by default.
         let first_match_word_position = matches.first().map(|m| m.word_position).unwrap_or(0);
         let first_match_token_position = matches.first().map(|m| m.token_position).unwrap_or(0);
@@ -775,7 +775,10 @@ pub fn execute_search(
     })
 }
 
-fn check_sort_criteria(ctx: &SearchContext<'_>, sort_criteria: Option<&Vec<AscDesc>>) -> Result<()> {
+fn check_sort_criteria(
+    ctx: &SearchContext<'_>,
+    sort_criteria: Option<&Vec<AscDesc>>,
+) -> Result<()> {
     let sort_criteria = if let Some(sort_criteria) = sort_criteria {
         sort_criteria
     } else {
@@ -297,7 +297,12 @@ impl PhraseBuilder {
     }
 
     // precondition: token has kind Word or StopWord
-    fn push_word(&mut self, ctx: &mut SearchContext<'_>, token: &charabia::Token<'_>, position: u16) {
+    fn push_word(
+        &mut self,
+        ctx: &mut SearchContext<'_>,
+        token: &charabia::Token<'_>,
+        position: u16,
+    ) {
         if self.is_empty() {
             self.start = position;
         }
@@ -68,7 +68,7 @@ pub fn extract_geo_points<R: io::Read + io::Seek>(
 
 /// Extract the finite floats lat and lng from two bytes slices.
 fn extract_lat_lng(
-    document: &obkv::KvReader<'_,FieldId>,
+    document: &obkv::KvReader<'_, FieldId>,
     settings: &InnerIndexSettings,
     deladd: DelAdd,
     document_id: impl Fn() -> Value,
@@ -1422,7 +1422,11 @@ impl InnerIndexSettings {
     }
 
     // find and insert the new field ids
-    pub fn recompute_searchables(&mut self, wtxn: &mut heed::RwTxn<'_>, index: &Index) -> Result<()> {
+    pub fn recompute_searchables(
+        &mut self,
+        wtxn: &mut heed::RwTxn<'_>,
+        index: &Index,
+    ) -> Result<()> {
         let searchable_fields = self
             .user_defined_searchable_fields
             .as_ref()