Replace hardcoded string with constants

Gnosnay 2024-12-24 00:39:44 +08:00
parent fc23a0ee52
commit 44eb153619
26 changed files with 160 additions and 132 deletions
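In short, the diff replaces the hardcoded "_geo" literal with RESERVED_GEO_FIELD_NAME and moves RESERVED_VECTORS_FIELD_NAME out of crate::vector::parsed_vectors into a shared crate::constants module. A minimal sketch of what that module plausibly contains follows; the module path and the "_vectors" value are assumptions, while the "_geo" value is confirmed by the literals being replaced below.

// Sketch of crate::constants (assumed layout, not the exact committed file)
pub const RESERVED_GEO_FIELD_NAME: &str = "_geo";
pub const RESERVED_VECTORS_FIELD_NAME: &str = "_vectors"; // assumed value

Call sites then name the constant instead of repeating the string, for example index.sortable_fields(rtxn)?.contains(RESERVED_GEO_FIELD_NAME).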

View file

@@ -103,6 +103,7 @@ impl<'t, 'i> ClearDocuments<'t, 'i> {
#[cfg(test)]
mod tests {
use super::*;
+use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::index::tests::TempIndex;
#[test]
@@ -114,7 +115,7 @@ mod tests {
.add_documents_using_wtxn(&mut wtxn, documents!([
{ "id": 0, "name": "kevin", "age": 20 },
{ "id": 1, "name": "kevina" },
-{ "id": 2, "name": "benoit", "country": "France", "_geo": { "lng": 42, "lat": 35 } }
+{ "id": 2, "name": "benoit", "country": "France", RESERVED_GEO_FIELD_NAME: { "lng": 42, "lat": 35 } }
]))
.unwrap();

View file

@@ -5,6 +5,7 @@ use std::result::Result as StdResult;
use serde::{Deserialize, Serialize};
use serde_json::Value;
+use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::documents::{
DocumentIdExtractionError, DocumentsBatchIndex, DocumentsBatchReader,
EnrichedDocumentsBatchReader, PrimaryKey, DEFAULT_PRIMARY_KEY,
@@ -93,10 +94,10 @@ pub fn enrich_documents_batch<R: Read + Seek>(
// If the settings specifies that a _geo field must be used therefore we must check the
// validity of it in all the documents of this batch and this is when we return `Some`.
-let geo_field_id = match documents_batch_index.id("_geo") {
+let geo_field_id = match documents_batch_index.id(RESERVED_GEO_FIELD_NAME) {
Some(geo_field_id)
-if index.sortable_fields(rtxn)?.contains("_geo")
-|| index.filterable_fields(rtxn)?.contains("_geo") =>
+if index.sortable_fields(rtxn)?.contains(RESERVED_GEO_FIELD_NAME)
+|| index.filterable_fields(rtxn)?.contains(RESERVED_GEO_FIELD_NAME) =>
{
Some(geo_field_id)
}

View file

@@ -13,13 +13,14 @@ use roaring::RoaringBitmap;
use serde_json::Value;
use super::helpers::{create_writer, writer_into_reader, GrenadParameters};
+use crate::constants::RESERVED_VECTORS_FIELD_NAME;
use crate::error::FaultSource;
use crate::index::IndexEmbeddingConfig;
use crate::prompt::{FieldsIdsMapWithMetadata, Prompt};
use crate::update::del_add::{DelAdd, KvReaderDelAdd, KvWriterDelAdd};
use crate::update::settings::InnerIndexSettingsDiff;
use crate::vector::error::{EmbedErrorKind, PossibleEmbeddingMistakes, UnusedVectorsDistribution};
-use crate::vector::parsed_vectors::{ParsedVectorsDiff, VectorState, RESERVED_VECTORS_FIELD_NAME};
+use crate::vector::parsed_vectors::{ParsedVectorsDiff, VectorState};
use crate::vector::settings::ReindexAction;
use crate::vector::{Embedder, Embedding};
use crate::{try_split_array_at, DocumentId, FieldId, Result, ThreadPoolNoAbort};

View file

@@ -27,6 +27,7 @@ pub use self::enrich::{extract_finite_float_from_value, DocumentId};
pub use self::helpers::*;
pub use self::transform::{Transform, TransformOutput};
use super::new::StdResult;
use crate::documents::{obkv_to_object, DocumentsBatchReader};
use crate::error::{Error, InternalError};
use crate::index::{PrefixSearch, PrefixSettings};
@@ -763,6 +764,7 @@ mod tests {
use maplit::hashset;
use super::*;
+use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::documents::mmap_from_objects;
use crate::index::tests::TempIndex;
use crate::index::IndexEmbeddingConfig;
@@ -944,12 +946,12 @@ mod tests {
index.index_documents_config.update_method = IndexDocumentsMethod::ReplaceDocuments;
index.add_documents(documents!([
-{ "id": 2, "title": "Pride and Prejudice", "author": "Jane Austin", "genre": "romance", "price": 3.5, "_geo": { "lat": 12, "lng": 42 } },
+{ "id": 2, "title": "Pride and Prejudice", "author": "Jane Austin", "genre": "romance", "price": 3.5, RESERVED_GEO_FIELD_NAME: { "lat": 12, "lng": 42 } },
{ "id": 456, "title": "Le Petit Prince", "author": "Antoine de Saint-Exupéry", "genre": "adventure" , "price": 10.0 },
{ "id": 1, "title": "Alice In Wonderland", "author": "Lewis Carroll", "genre": "fantasy", "price": 25.99 },
{ "id": 1344, "title": "The Hobbit", "author": "J. R. R. Tolkien", "genre": "fantasy" },
{ "id": 4, "title": "Harry Potter and the Half-Blood Prince", "author": "J. K. Rowling", "genre": "fantasy" },
-{ "id": 42, "title": "The Hitchhiker's Guide to the Galaxy", "author": "Douglas Adams", "_geo": { "lat": 35, "lng": 23 } }
+{ "id": 42, "title": "The Hitchhiker's Guide to the Galaxy", "author": "Douglas Adams", RESERVED_GEO_FIELD_NAME: { "lat": 35, "lng": 23 } }
])).unwrap();
db_snap!(index, word_docids, "initial");
@@ -989,18 +991,18 @@ mod tests {
// We send 6 documents and mix the ones that have _geo and those that don't have it.
index
.add_documents(documents!([
-{ "id": 2, "price": 3.5, "_geo": { "lat": 12, "lng": 42 } },
+{ "id": 2, "price": 3.5, RESERVED_GEO_FIELD_NAME: { "lat": 12, "lng": 42 } },
{ "id": 456 },
{ "id": 1 },
{ "id": 1344 },
{ "id": 4 },
-{ "id": 42, "_geo": { "lat": 35, "lng": 23 } }
+{ "id": 42, RESERVED_GEO_FIELD_NAME: { "lat": 35, "lng": 23 } }
]))
.unwrap();
index
.update_settings(|settings| {
-settings.set_filterable_fields(hashset!(S("_geo")));
+settings.set_filterable_fields(hashset!(S(RESERVED_GEO_FIELD_NAME)));
})
.unwrap();
}
@@ -1012,13 +1014,13 @@ mod tests {
index
.update_settings(|settings| {
-settings.set_filterable_fields(hashset!(S("_geo")));
+settings.set_filterable_fields(hashset!(S(RESERVED_GEO_FIELD_NAME)));
})
.unwrap();
let error = index
.add_documents(documents!([
-{ "id": 0, "_geo": { "lng": 42 } }
+{ "id": 0, RESERVED_GEO_FIELD_NAME: { "lng": 42 } }
]))
.unwrap_err();
assert_eq!(
@@ -1028,7 +1030,7 @@ mod tests {
let error = index
.add_documents(documents!([
-{ "id": 0, "_geo": { "lat": 42 } }
+{ "id": 0, RESERVED_GEO_FIELD_NAME: { "lat": 42 } }
]))
.unwrap_err();
assert_eq!(
@@ -1038,7 +1040,7 @@ mod tests {
let error = index
.add_documents(documents!([
-{ "id": 0, "_geo": { "lat": "lol", "lng": 42 } }
+{ "id": 0, RESERVED_GEO_FIELD_NAME: { "lat": "lol", "lng": 42 } }
]))
.unwrap_err();
assert_eq!(
@@ -1048,7 +1050,7 @@ mod tests {
let error = index
.add_documents(documents!([
-{ "id": 0, "_geo": { "lat": [12, 13], "lng": 42 } }
+{ "id": 0, RESERVED_GEO_FIELD_NAME: { "lat": [12, 13], "lng": 42 } }
]))
.unwrap_err();
assert_eq!(
@@ -1058,7 +1060,7 @@ mod tests {
let error = index
.add_documents(documents!([
-{ "id": 0, "_geo": { "lat": 12, "lng": "hello" } }
+{ "id": 0, RESERVED_GEO_FIELD_NAME: { "lat": 12, "lng": "hello" } }
]))
.unwrap_err();
assert_eq!(
@@ -1076,7 +1078,7 @@ mod tests {
{ "objectId": 123, "title": "Pride and Prejudice", "comment": "A great book" },
{ "objectId": 456, "title": "Le Petit Prince", "comment": "A french book" },
{ "objectId": 1, "title": "Alice In Wonderland", "comment": "A weird book" },
-{ "objectId": 30, "title": "Hamlet", "_geo": { "lat": 12, "lng": 89 } }
+{ "objectId": 30, "title": "Hamlet", RESERVED_GEO_FIELD_NAME: { "lat": 12, "lng": 89 } }
]))
.unwrap();
@@ -1091,7 +1093,7 @@ mod tests {
index
.add_documents(documents!([
-{ "objectId": 30, "title": "Hamlet", "_geo": { "lat": 12, "lng": 89 } }
+{ "objectId": 30, "title": "Hamlet", RESERVED_GEO_FIELD_NAME: { "lat": 12, "lng": 89 } }
]))
.unwrap();
@@ -1102,7 +1104,7 @@ mod tests {
index
.add_documents(documents!([
-{ "objectId": 30, "title": "Hamlet", "_geo": { "lat": 12, "lng": 89 } }
+{ "objectId": 30, "title": "Hamlet", RESERVED_GEO_FIELD_NAME: { "lat": 12, "lng": 89 } }
]))
.unwrap();
}
@@ -3146,34 +3148,34 @@ mod tests {
index
.update_settings_using_wtxn(&mut wtxn, |settings| {
settings.set_primary_key(S("id"));
-settings.set_filterable_fields(hashset!(S("_geo")));
-settings.set_sortable_fields(hashset!(S("_geo")));
+settings.set_filterable_fields(hashset!(S(RESERVED_GEO_FIELD_NAME)));
+settings.set_sortable_fields(hashset!(S(RESERVED_GEO_FIELD_NAME)));
})
.unwrap();
wtxn.commit().unwrap();
let mut wtxn = index.write_txn().unwrap();
index.add_documents_using_wtxn(&mut wtxn, documents!([
-{ "id": "1", "city": "Lille", "_geo": { "lat": 50.6299, "lng": 3.0569 } },
-{ "id": "2", "city": "Mons-en-Barœul", "_geo": { "lat": 50.6415, "lng": 3.1106 } },
-{ "id": "3", "city": "Hellemmes", "_geo": { "lat": 50.6312, "lng": 3.1106 } },
-{ "id": "4", "city": "Villeneuve-d'Ascq", "_geo": { "lat": 50.6224, "lng": 3.1476 } },
-{ "id": "5", "city": "Hem", "_geo": { "lat": 50.6552, "lng": 3.1897 } },
-{ "id": "6", "city": "Roubaix", "_geo": { "lat": 50.6924, "lng": 3.1763 } },
-{ "id": "7", "city": "Tourcoing", "_geo": { "lat": 50.7263, "lng": 3.1541 } },
-{ "id": "8", "city": "Mouscron", "_geo": { "lat": 50.7453, "lng": 3.2206 } },
-{ "id": "9", "city": "Tournai", "_geo": { "lat": 50.6053, "lng": 3.3758 } },
-{ "id": "10", "city": "Ghent", "_geo": { "lat": 51.0537, "lng": 3.6957 } },
-{ "id": "11", "city": "Brussels", "_geo": { "lat": 50.8466, "lng": 4.3370 } },
-{ "id": "12", "city": "Charleroi", "_geo": { "lat": 50.4095, "lng": 4.4347 } },
-{ "id": "13", "city": "Mons", "_geo": { "lat": 50.4502, "lng": 3.9623 } },
-{ "id": "14", "city": "Valenciennes", "_geo": { "lat": 50.3518, "lng": 3.5326 } },
-{ "id": "15", "city": "Arras", "_geo": { "lat": 50.2844, "lng": 2.7637 } },
-{ "id": "16", "city": "Cambrai", "_geo": { "lat": 50.1793, "lng": 3.2189 } },
-{ "id": "17", "city": "Bapaume", "_geo": { "lat": 50.1112, "lng": 2.8547 } },
-{ "id": "18", "city": "Amiens", "_geo": { "lat": 49.9314, "lng": 2.2710 } },
-{ "id": "19", "city": "Compiègne", "_geo": { "lat": 49.4449, "lng": 2.7913 } },
-{ "id": "20", "city": "Paris", "_geo": { "lat": 48.9021, "lng": 2.3708 } }
+{ "id": "1", "city": "Lille", RESERVED_GEO_FIELD_NAME: { "lat": 50.6299, "lng": 3.0569 } },
+{ "id": "2", "city": "Mons-en-Barœul", RESERVED_GEO_FIELD_NAME: { "lat": 50.6415, "lng": 3.1106 } },
+{ "id": "3", "city": "Hellemmes", RESERVED_GEO_FIELD_NAME: { "lat": 50.6312, "lng": 3.1106 } },
+{ "id": "4", "city": "Villeneuve-d'Ascq", RESERVED_GEO_FIELD_NAME: { "lat": 50.6224, "lng": 3.1476 } },
+{ "id": "5", "city": "Hem", RESERVED_GEO_FIELD_NAME: { "lat": 50.6552, "lng": 3.1897 } },
+{ "id": "6", "city": "Roubaix", RESERVED_GEO_FIELD_NAME: { "lat": 50.6924, "lng": 3.1763 } },
+{ "id": "7", "city": "Tourcoing", RESERVED_GEO_FIELD_NAME: { "lat": 50.7263, "lng": 3.1541 } },
+{ "id": "8", "city": "Mouscron", RESERVED_GEO_FIELD_NAME: { "lat": 50.7453, "lng": 3.2206 } },
+{ "id": "9", "city": "Tournai", RESERVED_GEO_FIELD_NAME: { "lat": 50.6053, "lng": 3.3758 } },
+{ "id": "10", "city": "Ghent", RESERVED_GEO_FIELD_NAME: { "lat": 51.0537, "lng": 3.6957 } },
+{ "id": "11", "city": "Brussels", RESERVED_GEO_FIELD_NAME: { "lat": 50.8466, "lng": 4.3370 } },
+{ "id": "12", "city": "Charleroi", RESERVED_GEO_FIELD_NAME: { "lat": 50.4095, "lng": 4.4347 } },
+{ "id": "13", "city": "Mons", RESERVED_GEO_FIELD_NAME: { "lat": 50.4502, "lng": 3.9623 } },
+{ "id": "14", "city": "Valenciennes", RESERVED_GEO_FIELD_NAME: { "lat": 50.3518, "lng": 3.5326 } },
+{ "id": "15", "city": "Arras", RESERVED_GEO_FIELD_NAME: { "lat": 50.2844, "lng": 2.7637 } },
+{ "id": "16", "city": "Cambrai", RESERVED_GEO_FIELD_NAME: { "lat": 50.1793, "lng": 3.2189 } },
+{ "id": "17", "city": "Bapaume", RESERVED_GEO_FIELD_NAME: { "lat": 50.1112, "lng": 2.8547 } },
+{ "id": "18", "city": "Amiens", RESERVED_GEO_FIELD_NAME: { "lat": 49.9314, "lng": 2.2710 } },
+{ "id": "19", "city": "Compiègne", RESERVED_GEO_FIELD_NAME: { "lat": 49.4449, "lng": 2.7913 } },
+{ "id": "20", "city": "Paris", RESERVED_GEO_FIELD_NAME: { "lat": 48.9021, "lng": 2.3708 } }
])).unwrap();
wtxn.commit().unwrap();

View file

@@ -836,10 +836,8 @@ impl<'a, 'i> Transform<'a, 'i> {
})
.collect();
-let old_vectors_fid = settings_diff
-.old
-.fields_ids_map
-.id(crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME);
+let old_vectors_fid =
+settings_diff.old.fields_ids_map.id(crate::constants::RESERVED_VECTORS_FIELD_NAME);
// We initialize the sorter with the user indexing settings.
let mut flattened_sorter =

View file

@@ -137,8 +137,7 @@ pub(crate) fn write_typed_chunk_into_index(
let _entered = span.enter();
let fields_ids_map = index.fields_ids_map(wtxn)?;
-let vectors_fid =
-fields_ids_map.id(crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME);
+let vectors_fid = fields_ids_map.id(crate::constants::RESERVED_VECTORS_FIELD_NAME);
let mut builder = MergerBuilder::new(KeepLatestObkv);
for typed_chunk in typed_chunks {

View file

@@ -7,8 +7,8 @@ use serde_json::value::RawValue;
use super::vector_document::VectorDocument;
use super::{KvReaderFieldId, KvWriterFieldId};
+use crate::constants::{RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME};
use crate::documents::FieldIdMapper;
-use crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME;
use crate::{DocumentId, GlobalFieldsIdsMap, Index, InternalError, Result, UserError};
/// A view into a document that can represent either the current version from the DB,
@@ -80,7 +80,7 @@ impl<'t, Mapper: FieldIdMapper> Document<'t> for DocumentFromDb<'t, Mapper> {
Err(error) => return Some(Err(error.into())),
};
-if name == RESERVED_VECTORS_FIELD_NAME || name == "_geo" {
+if name == RESERVED_VECTORS_FIELD_NAME || name == RESERVED_GEO_FIELD_NAME {
continue;
}
@@ -100,7 +100,7 @@ impl<'t, Mapper: FieldIdMapper> Document<'t> for DocumentFromDb<'t, Mapper> {
}
fn geo_field(&self) -> Result<Option<&'t RawValue>> {
-self.field("_geo")
+self.field(RESERVED_GEO_FIELD_NAME)
}
fn top_level_fields_count(&self) -> usize {
@@ -115,7 +115,7 @@ impl<'t, Mapper: FieldIdMapper> Document<'t> for DocumentFromDb<'t, Mapper> {
}
fn top_level_field(&self, k: &str) -> Result<Option<&'t RawValue>> {
-if k == RESERVED_VECTORS_FIELD_NAME || k == "_geo" {
+if k == RESERVED_VECTORS_FIELD_NAME || k == RESERVED_GEO_FIELD_NAME {
return Ok(None);
}
self.field(k)
@@ -367,7 +367,9 @@ where
}
if let Some(geo_value) = document.geo_field()? {
-let fid = fields_ids_map.id_or_insert("_geo").ok_or(UserError::AttributeLimitReached)?;
+let fid = fields_ids_map
+.id_or_insert(RESERVED_GEO_FIELD_NAME)
+.ok_or(UserError::AttributeLimitReached)?;
fields_ids_map.id_or_insert("_geo.lat").ok_or(UserError::AttributeLimitReached)?;
fields_ids_map.id_or_insert("_geo.lng").ok_or(UserError::AttributeLimitReached)?;
unordered_field_buffer.push((fid, geo_value));
@@ -409,7 +411,9 @@ impl<'doc> Versions<'doc> {
}
pub fn iter_top_level_fields(&self) -> impl Iterator<Item = (&'doc str, &'doc RawValue)> + '_ {
-self.data.iter().filter(|(k, _)| *k != RESERVED_VECTORS_FIELD_NAME && *k != "_geo")
+self.data
+.iter()
+.filter(|(k, _)| *k != RESERVED_VECTORS_FIELD_NAME && *k != RESERVED_GEO_FIELD_NAME)
}
pub fn vectors_field(&self) -> Option<&'doc RawValue> {
@@ -417,7 +421,7 @@ impl<'doc> Versions<'doc> {
}
pub fn geo_field(&self) -> Option<&'doc RawValue> {
-self.data.get("_geo")
+self.data.get(RESERVED_GEO_FIELD_NAME)
}
pub fn len(&self) -> usize {
@@ -429,7 +433,7 @@ impl<'doc> Versions<'doc> {
}
pub fn top_level_field(&self, k: &str) -> Option<&'doc RawValue> {
-if k == RESERVED_VECTORS_FIELD_NAME || k == "_geo" {
+if k == RESERVED_VECTORS_FIELD_NAME || k == RESERVED_GEO_FIELD_NAME {
return None;
}
self.data.get(k)

View file

@@ -4,6 +4,7 @@ use bumpalo::Bump;
use hashbrown::HashMap;
use super::DelAddRoaringBitmap;
+use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::update::new::channel::DocumentsSender;
use crate::update::new::document::{write_to_obkv, Document as _};
use crate::update::new::indexer::document_changes::{DocumentChangeContext, Extractor};
@@ -62,8 +63,10 @@ impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
context.index,
&context.db_fields_ids_map,
)?;
-let geo_iter =
-content.geo_field().transpose().map(|res| res.map(|rv| ("_geo", rv)));
+let geo_iter = content
+.geo_field()
+.transpose()
+.map(|res| res.map(|rv| (RESERVED_GEO_FIELD_NAME, rv)));
for res in content.iter_top_level_fields().chain(geo_iter) {
let (f, _) = res?;
let entry = document_extractor_data
@@ -79,8 +82,10 @@ impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
let docid = update.docid();
let content =
update.current(&context.rtxn, context.index, &context.db_fields_ids_map)?;
-let geo_iter =
-content.geo_field().transpose().map(|res| res.map(|rv| ("_geo", rv)));
+let geo_iter = content
+.geo_field()
+.transpose()
+.map(|res| res.map(|rv| (RESERVED_GEO_FIELD_NAME, rv)));
for res in content.iter_top_level_fields().chain(geo_iter) {
let (f, _) = res?;
let entry = document_extractor_data
@@ -90,8 +95,10 @@ impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
*entry -= 1;
}
let content = update.updated();
-let geo_iter =
-content.geo_field().transpose().map(|res| res.map(|rv| ("_geo", rv)));
+let geo_iter = content
+.geo_field()
+.transpose()
+.map(|res| res.map(|rv| (RESERVED_GEO_FIELD_NAME, rv)));
for res in content.iter_top_level_fields().chain(geo_iter) {
let (f, _) = res?;
let entry = document_extractor_data
@@ -121,8 +128,10 @@ impl<'a, 'b, 'extractor> Extractor<'extractor> for DocumentsExtractor<'a, 'b> {
DocumentChange::Insertion(insertion) => {
let docid = insertion.docid();
let content = insertion.inserted();
-let geo_iter =
-content.geo_field().transpose().map(|res| res.map(|rv| ("_geo", rv)));
+let geo_iter = content
+.geo_field()
+.transpose()
+.map(|res| res.map(|rv| (RESERVED_GEO_FIELD_NAME, rv)));
for res in content.iter_top_level_fields().chain(geo_iter) {
let (f, _) = res?;
let entry = document_extractor_data

View file

@@ -1,5 +1,6 @@
use serde_json::Value;
+use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::update::new::document::Document;
use crate::update::new::extract::geo::extract_geo_coordinates;
use crate::update::new::extract::perm_json_p;
@@ -69,7 +70,7 @@ pub fn extract_document_facets<'doc>(
}
}
-if attributes_to_extract.contains(&"_geo") {
+if attributes_to_extract.contains(&RESERVED_GEO_FIELD_NAME) {
if let Some(geo_value) = document.geo_field()? {
if let Some([lat, lng]) = extract_geo_coordinates(external_document_id, geo_value)? {
let (lat_fid, lng_fid) = field_id_map

View file

@@ -9,6 +9,7 @@ use heed::RoTxn;
use serde_json::value::RawValue;
use serde_json::Value;
+use crate::constants::RESERVED_GEO_FIELD_NAME;
use crate::error::GeoError;
use crate::update::new::document::Document;
use crate::update::new::indexer::document_changes::{DocumentChangeContext, Extractor};
@@ -28,8 +29,8 @@ impl GeoExtractor {
index: &Index,
grenad_parameters: GrenadParameters,
) -> Result<Option<Self>> {
-let is_sortable = index.sortable_fields(rtxn)?.contains("_geo");
-let is_filterable = index.filterable_fields(rtxn)?.contains("_geo");
+let is_sortable = index.sortable_fields(rtxn)?.contains(RESERVED_GEO_FIELD_NAME);
+let is_filterable = index.filterable_fields(rtxn)?.contains(RESERVED_GEO_FIELD_NAME);
if is_sortable || is_filterable {
Ok(Some(GeoExtractor { grenad_parameters }))
} else {

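Since GeoExtractor::new returns Result<Option<Self>>, callers only get an extractor when the reserved geo field is actually sortable or filterable. A hedged sketch of the resulting call pattern, assuming rtxn, index, and grenad_parameters bindings of the expected types (and in this argument order) are already in scope:

// Sketch only: geo extraction is skipped entirely when "_geo" is neither sortable nor filterable
if let Some(geo_extractor) = GeoExtractor::new(rtxn, index, grenad_parameters)? {
    // hand document changes to geo_extractor here
}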
View file

@@ -10,11 +10,10 @@ use serde_json::value::RawValue;
use super::document::{Document, DocumentFromDb, DocumentFromVersions, Versions};
use super::indexer::de::DeserrRawValue;
+use crate::constants::RESERVED_VECTORS_FIELD_NAME;
use crate::documents::FieldIdMapper;
use crate::index::IndexEmbeddingConfig;
-use crate::vector::parsed_vectors::{
-RawVectors, RawVectorsError, VectorOrArrayOfVectors, RESERVED_VECTORS_FIELD_NAME,
-};
+use crate::vector::parsed_vectors::{RawVectors, RawVectorsError, VectorOrArrayOfVectors};
use crate::vector::{ArroyWrapper, Embedding, EmbeddingConfigs};
use crate::{DocumentId, Index, InternalError, Result, UserError};

View file

@@ -14,6 +14,7 @@ use time::OffsetDateTime;
use super::del_add::DelAddOperation;
use super::index_documents::{IndexDocumentsConfig, Transform};
use super::IndexerConfig;
+use crate::constants::{RESERVED_GEO_FIELD_NAME, RESERVED_VECTORS_FIELD_NAME};
use crate::criterion::Criterion;
use crate::error::UserError;
use crate::index::{
@@ -25,7 +26,6 @@ use crate::prompt::default_max_bytes;
use crate::proximity::ProximityPrecision;
use crate::update::index_documents::IndexDocumentsMethod;
use crate::update::{IndexDocuments, UpdateIndexingStep};
-use crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME;
use crate::vector::settings::{
check_set, check_unset, EmbedderAction, EmbedderSource, EmbeddingSettings, ReindexAction,
WriteBackToDocuments,
@@ -1535,7 +1535,7 @@ impl InnerIndexSettings {
.filter_map(|(field, count)| (count != 0).then_some(field))
.collect();
// index.fields_ids_map($a)? ==>> fields_ids_map
-let geo_fields_ids = match fields_ids_map.id("_geo") {
+let geo_fields_ids = match fields_ids_map.id(RESERVED_GEO_FIELD_NAME) {
Some(gfid) => {
let is_sortable = index.sortable_fields_ids(rtxn)?.contains(&gfid);
let is_filterable = index.filterable_fields_ids(rtxn)?.contains(&gfid);