Mirror of https://github.com/meilisearch/MeiliSearch, synced 2025-07-04 20:37:15 +02:00
Replace hardcoded string with constants

Parent: fc23a0ee52
Commit: 44eb153619
26 changed files with 160 additions and 132 deletions
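The commit moves the reserved field-name literals behind constants in a `crate::constants` module. A minimal sketch of what that module could contain, inferred from the diff below: the "_geo" value is confirmed by the literals being replaced, while the "_vectors" value for RESERVED_VECTORS_FIELD_NAME is an assumption, since this commit does not show the constant's definition.

pub mod constants {
    /// Reserved top-level field holding geo coordinates ("_geo" is the literal replaced below).
    pub const RESERVED_GEO_FIELD_NAME: &str = "_geo";
    /// Reserved top-level field holding embedding vectors ("_vectors" is assumed, not shown in this commit).
    pub const RESERVED_VECTORS_FIELD_NAME: &str = "_vectors";
}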
@@ -7,8 +7,8 @@ use serde_json::value::RawValue;
 
 use super::vector_document::VectorDocument;
 use super::{KvReaderFieldId, KvWriterFieldId};
+use crate::constants::{RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME};
 use crate::documents::FieldIdMapper;
-use crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME;
 use crate::{DocumentId, GlobalFieldsIdsMap, Index, InternalError, Result, UserError};
 
 /// A view into a document that can represent either the current version from the DB,
@@ -80,7 +80,7 @@ impl<'t, Mapper: FieldIdMapper> Document<'t> for DocumentFromDb<'t, Mapper> {
                 Err(error) => return Some(Err(error.into())),
             };
 
-            if name == RESERVED_VECTORS_FIELD_NAME || name == "_geo" {
+            if name == RESERVED_VECTORS_FIELD_NAME || name == RESERVED_GEO_FIELD_NAME {
                 continue;
             }
 
@@ -100,7 +100,7 @@ impl<'t, Mapper: FieldIdMapper> Document<'t> for DocumentFromDb<'t, Mapper> {
     }
 
     fn geo_field(&self) -> Result<Option<&'t RawValue>> {
-        self.field("_geo")
+        self.field(RESERVED_GEO_FIELD_NAME)
     }
 
     fn top_level_fields_count(&self) -> usize {
@@ -115,7 +115,7 @@ impl<'t, Mapper: FieldIdMapper> Document<'t> for DocumentFromDb<'t, Mapper> {
     }
 
     fn top_level_field(&self, k: &str) -> Result<Option<&'t RawValue>> {
-        if k == RESERVED_VECTORS_FIELD_NAME || k == "_geo" {
+        if k == RESERVED_VECTORS_FIELD_NAME || k == RESERVED_GEO_FIELD_NAME {
             return Ok(None);
         }
         self.field(k)
@@ -367,7 +367,9 @@ where
     }
 
     if let Some(geo_value) = document.geo_field()? {
-        let fid = fields_ids_map.id_or_insert("_geo").ok_or(UserError::AttributeLimitReached)?;
+        let fid = fields_ids_map
+            .id_or_insert(RESERVED_GEO_FIELD_NAME)
+            .ok_or(UserError::AttributeLimitReached)?;
         fields_ids_map.id_or_insert("_geo.lat").ok_or(UserError::AttributeLimitReached)?;
         fields_ids_map.id_or_insert("_geo.lng").ok_or(UserError::AttributeLimitReached)?;
         unordered_field_buffer.push((fid, geo_value));
@@ -409,7 +411,9 @@ impl<'doc> Versions<'doc> {
     }
 
     pub fn iter_top_level_fields(&self) -> impl Iterator<Item = (&'doc str, &'doc RawValue)> + '_ {
-        self.data.iter().filter(|(k, _)| *k != RESERVED_VECTORS_FIELD_NAME && *k != "_geo")
+        self.data
+            .iter()
+            .filter(|(k, _)| *k != RESERVED_VECTORS_FIELD_NAME && *k != RESERVED_GEO_FIELD_NAME)
     }
 
     pub fn vectors_field(&self) -> Option<&'doc RawValue> {
@@ -417,7 +421,7 @@ impl<'doc> Versions<'doc> {
     }
 
     pub fn geo_field(&self) -> Option<&'doc RawValue> {
-        self.data.get("_geo")
+        self.data.get(RESERVED_GEO_FIELD_NAME)
     }
 
     pub fn len(&self) -> usize {
@@ -429,7 +433,7 @@ impl<'doc> Versions<'doc> {
     }
 
     pub fn top_level_field(&self, k: &str) -> Option<&'doc RawValue> {
-        if k == RESERVED_VECTORS_FIELD_NAME || k == "_geo" {
+        if k == RESERVED_VECTORS_FIELD_NAME || k == RESERVED_GEO_FIELD_NAME {
            return None;
         }
         self.data.get(k)
@@ -4,6 +4,7 @@ use bumpalo::Bump;
 use hashbrown::HashMap;
 
 use super::DelAddRoaringBitmap;
+use crate::constants::RESERVED_GEO_FIELD_NAME;
 use crate::update::new::channel::DocumentsSender;
 use crate::update::new::document::{write_to_obkv, Document as _};
 use crate::update::new::indexer::document_changes::{DocumentChangeContext, Extractor};
@@ -62,8 +63,10 @@ impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
                         context.index,
                         &context.db_fields_ids_map,
                     )?;
-                    let geo_iter =
-                        content.geo_field().transpose().map(|res| res.map(|rv| ("_geo", rv)));
+                    let geo_iter = content
+                        .geo_field()
+                        .transpose()
+                        .map(|res| res.map(|rv| (RESERVED_GEO_FIELD_NAME, rv)));
                     for res in content.iter_top_level_fields().chain(geo_iter) {
                         let (f, _) = res?;
                         let entry = document_extractor_data
@@ -79,8 +82,10 @@ impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
                     let docid = update.docid();
                     let content =
                         update.current(&context.rtxn, context.index, &context.db_fields_ids_map)?;
-                    let geo_iter =
-                        content.geo_field().transpose().map(|res| res.map(|rv| ("_geo", rv)));
+                    let geo_iter = content
+                        .geo_field()
+                        .transpose()
+                        .map(|res| res.map(|rv| (RESERVED_GEO_FIELD_NAME, rv)));
                     for res in content.iter_top_level_fields().chain(geo_iter) {
                         let (f, _) = res?;
                         let entry = document_extractor_data
@@ -90,8 +95,10 @@ impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
                         *entry -= 1;
                     }
                     let content = update.updated();
-                    let geo_iter =
-                        content.geo_field().transpose().map(|res| res.map(|rv| ("_geo", rv)));
+                    let geo_iter = content
+                        .geo_field()
+                        .transpose()
+                        .map(|res| res.map(|rv| (RESERVED_GEO_FIELD_NAME, rv)));
                     for res in content.iter_top_level_fields().chain(geo_iter) {
                         let (f, _) = res?;
                         let entry = document_extractor_data
@@ -121,8 +128,10 @@ impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
                 DocumentChange::Insertion(insertion) => {
                     let docid = insertion.docid();
                     let content = insertion.inserted();
-                    let geo_iter =
-                        content.geo_field().transpose().map(|res| res.map(|rv| ("_geo", rv)));
+                    let geo_iter = content
+                        .geo_field()
+                        .transpose()
+                        .map(|res| res.map(|rv| (RESERVED_GEO_FIELD_NAME, rv)));
                     for res in content.iter_top_level_fields().chain(geo_iter) {
                         let (f, _) = res?;
                         let entry = document_extractor_data
@@ -1,5 +1,6 @@
 use serde_json::Value;
 
+use crate::constants::RESERVED_GEO_FIELD_NAME;
 use crate::update::new::document::Document;
 use crate::update::new::extract::geo::extract_geo_coordinates;
 use crate::update::new::extract::perm_json_p;
@@ -69,7 +70,7 @@ pub fn extract_document_facets<'doc>(
         }
     }
 
-    if attributes_to_extract.contains(&"_geo") {
+    if attributes_to_extract.contains(&RESERVED_GEO_FIELD_NAME) {
         if let Some(geo_value) = document.geo_field()? {
             if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? {
                 let (lat_fid, lng_fid) = field_id_map
@@ -9,6 +9,7 @@ use heed::RoTxn;
 use serde_json::value::RawValue;
 use serde_json::Value;
 
+use crate::constants::RESERVED_GEO_FIELD_NAME;
 use crate::error::GeoError;
 use crate::update::new::document::Document;
 use crate::update::new::indexer::document_changes::{DocumentChangeContext, Extractor};
@@ -28,8 +29,8 @@ impl GeoExtractor {
         index: &Index,
         grenad_parameters: GrenadParameters,
     ) -> Result<Option<Self>> {
-        let is_sortable = index.sortable_fields(rtxn)?.contains("_geo");
-        let is_filterable = index.filterable_fields(rtxn)?.contains("_geo");
+        let is_sortable = index.sortable_fields(rtxn)?.contains(RESERVED_GEO_FIELD_NAME);
+        let is_filterable = index.filterable_fields(rtxn)?.contains(RESERVED_GEO_FIELD_NAME);
         if is_sortable || is_filterable {
             Ok(Some(GeoExtractor { grenad_parameters }))
         } else {
@@ -10,11 +10,10 @@ use serde_json::value::RawValue;
 
 use super::document::{Document, DocumentFromDb, DocumentFromVersions, Versions};
 use super::indexer::de::DeserrRawValue;
+use crate::constants::RESERVED_VECTORS_FIELD_NAME;
 use crate::documents::FieldIdMapper;
 use crate::index::IndexEmbeddingConfig;
-use crate::vector::parsed_vectors::{
-    RawVectors, RawVectorsError, VectorOrArrayOfVectors, RESERVED_VECTORS_FIELD_NAME,
-};
+use crate::vector::parsed_vectors::{RawVectors, RawVectorsError, VectorOrArrayOfVectors};
 use crate::vector::{ArroyWrapper, Embedding, EmbeddingConfigs};
 use crate::{DocumentId, Index, InternalError, Result, UserError};
 
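Because the commit only swaps literals for constants, behavior stays unchanged as long as the constants carry the old values. A hypothetical regression-test sketch (the "_vectors" value is assumed, since its definition is not part of this diff):

#[cfg(test)]
mod tests {
    use crate::constants::{RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME};

    #[test]
    fn reserved_field_names_keep_their_legacy_values() {
        // "_geo" is the literal these constants replace throughout the diff above.
        assert_eq!(RESERVED_GEO_FIELD_NAME, "_geo");
        // "_vectors" is assumed; this commit does not show the constant's definition.
        assert_eq!(RESERVED_VECTORS_FIELD_NAME, "_vectors");
    }
}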