Merge branch 'main' into indexer-edition-2024

ManyTheFish 2024-09-25 07:37:32 +02:00
commit 974272f2e9
94 changed files with 8510 additions and 4616 deletions

View file

@ -258,6 +258,12 @@ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and undersco
},
#[error("`.embedders.{embedder_name}.dimensions`: `dimensions` cannot be zero")]
InvalidSettingsDimensions { embedder_name: String },
#[error(
"`.embedders.{embedder_name}.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors."
)]
InvalidDisableBinaryQuantization { embedder_name: String },
#[error("`.embedders.{embedder_name}.documentTemplateMaxBytes`: `documentTemplateMaxBytes` cannot be zero")]
InvalidSettingsDocumentTemplateMaxBytes { embedder_name: String },
#[error("`.embedders.{embedder_name}.url`: could not parse `{url}`: {inner_error}")]
InvalidUrl { embedder_name: String, inner_error: url::ParseError, url: String },
#[error("Document editions cannot modify a document's primary key")]

View file

@ -21,7 +21,7 @@ use crate::heed_codec::{BEU16StrCodec, FstSetCodec, StrBEU16Codec, StrRefCodec};
use crate::order_by_map::OrderByMap;
use crate::proximity::ProximityPrecision;
use crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME;
use crate::vector::{Embedding, EmbeddingConfig};
use crate::vector::{ArroyWrapper, Embedding, EmbeddingConfig};
use crate::{
default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds,
FacetDistribution, FieldDistribution, FieldId, FieldIdMapMissingEntry, FieldIdWordCountCodec,
@ -162,7 +162,7 @@ pub struct Index {
/// Maps an embedder name to its id in the arroy store.
pub embedder_category_id: Database<Str, U8>,
/// Vector store based on arroy™.
pub vector_arroy: arroy::Database<arroy::distances::Angular>,
pub vector_arroy: arroy::Database<Unspecified>,
/// Maps the document id to the document as an obkv store.
pub(crate) documents: Database<BEU32, ObkvCodec>,
@ -1622,15 +1622,17 @@ impl Index {
&'a self,
rtxn: &'a RoTxn<'a>,
embedder_id: u8,
) -> impl Iterator<Item = Result<arroy::Reader<'a, arroy::distances::Angular>>> + 'a {
quantized: bool,
) -> impl Iterator<Item = Result<ArroyWrapper>> + 'a {
crate::vector::arroy_db_range_for_embedder(embedder_id).map_while(move |k| {
arroy::Reader::open(rtxn, k, self.vector_arroy)
.map(Some)
.or_else(|e| match e {
arroy::Error::MissingMetadata(_) => Ok(None),
e => Err(e.into()),
})
.transpose()
let reader = ArroyWrapper::new(self.vector_arroy, k, quantized);
// Here we don't care about the dimensions, but we want to know whether we can read
// the database, or whether its metadata is missing because no document has that many vectors.
match reader.dimensions(rtxn) {
Ok(_) => Some(Ok(reader)),
Err(arroy::Error::MissingMetadata(_)) => None,
Err(e) => Some(Err(e.into())),
}
})
}
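For reference, `arroy_db_range_for_embedder` is assumed to work roughly like the sketch below: each embedder owns a contiguous block of 256 arroy index ids, obtained by shifting its `u8` id into the high byte of a `u16` (consistent with the `(embedder_id as u16) << 8` arithmetic visible in the next hunk). This is a sketch under that assumption, not the actual milli implementation.

```rust
fn arroy_db_range_for_embedder(embedder_id: u8) -> impl Iterator<Item = u16> {
    let start = (embedder_id as u16) << 8;
    start..=(start | 0xFF)
}

fn main() {
    let mut range = arroy_db_range_for_embedder(1);
    assert_eq!(range.next(), Some(0x0100)); // first index of embedder 1
    assert_eq!(range.last(), Some(0x01FF)); // last of its 256 indexes
}
```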
@ -1652,32 +1654,18 @@ impl Index {
docid: DocumentId,
) -> Result<BTreeMap<String, Vec<Embedding>>> {
let mut res = BTreeMap::new();
for row in self.embedder_category_id.iter(rtxn)? {
let (embedder_name, embedder_id) = row?;
let embedder_id = (embedder_id as u16) << 8;
let mut embeddings = Vec::new();
'vectors: for i in 0..=u8::MAX {
let reader = arroy::Reader::open(rtxn, embedder_id | (i as u16), self.vector_arroy)
.map(Some)
.or_else(|e| match e {
arroy::Error::MissingMetadata(_) => Ok(None),
e => Err(e),
})
.transpose();
let Some(reader) = reader else {
break 'vectors;
};
let embedding = reader?.item_vector(rtxn, docid)?;
if let Some(embedding) = embedding {
embeddings.push(embedding)
} else {
break 'vectors;
}
}
res.insert(embedder_name.to_owned(), embeddings);
let embedding_configs = self.embedding_configs(rtxn)?;
for config in embedding_configs {
let embedder_id = self.embedder_category_id.get(rtxn, &config.name)?.unwrap();
let embeddings = self
.arroy_readers(rtxn, embedder_id, config.config.quantized())
.map_while(|reader| {
reader
.and_then(|r| r.item_vector(rtxn, docid).map_err(|e| e.into()))
.transpose()
})
.collect::<Result<Vec<_>>>()?;
res.insert(config.name.to_owned(), embeddings);
}
Ok(res)
}
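The `map_while`/`transpose` combination above is the core trick: it walks the readers in order, stops at the first one that has no vector for the document, and still propagates errors. A self-contained sketch of the same pattern:

```rust
// Walk Result<Option<T>, E> values: stop at the first Ok(None), surface errors.
fn collect_until_none<T, E>(
    iter: impl Iterator<Item = Result<Option<T>, E>>,
) -> Result<Vec<T>, E> {
    iter.map_while(|item| item.transpose()).collect()
}

fn main() {
    let items = vec![Ok(Some(1)), Ok(Some(2)), Ok(None), Ok(Some(3))];
    let collected: Result<Vec<i32>, ()> = collect_until_none(items.into_iter());
    assert_eq!(collected, Ok(vec![1, 2])); // stops before reaching 3
}
```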

View file

@ -5,7 +5,7 @@ use liquid::{ObjectView, ValueView};
use super::document::Document;
use super::fields::Fields;
use crate::FieldsIdsMap;
use super::FieldsIdsMapWithMetadata;
#[derive(Debug, Clone)]
pub struct Context<'a> {
@ -14,7 +14,7 @@ pub struct Context<'a> {
}
impl<'a> Context<'a> {
pub fn new(document: &'a Document<'a>, field_id_map: &'a FieldsIdsMap) -> Self {
pub fn new(document: &'a Document<'a>, field_id_map: &'a FieldsIdsMapWithMetadata<'a>) -> Self {
Self { document, fields: Fields::new(document, field_id_map) }
}
}

View file

@ -4,16 +4,20 @@ use liquid::model::{
use liquid::{ObjectView, ValueView};
use super::document::Document;
use crate::FieldsIdsMap;
use super::{FieldMetadata, FieldsIdsMapWithMetadata};
#[derive(Debug, Clone)]
pub struct Fields<'a>(Vec<FieldValue<'a>>);
impl<'a> Fields<'a> {
pub fn new(document: &'a Document<'a>, field_id_map: &'a FieldsIdsMap) -> Self {
pub fn new(document: &'a Document<'a>, field_id_map: &'a FieldsIdsMapWithMetadata<'a>) -> Self {
Self(
std::iter::repeat(document)
.zip(field_id_map.iter())
.map(|(document, (_fid, name))| FieldValue { document, name })
.map(|(document, (fid, name))| FieldValue {
document,
name,
metadata: field_id_map.metadata(fid).unwrap_or_default(),
})
.collect(),
)
}
@ -23,6 +27,7 @@ impl<'a> Fields<'a> {
pub struct FieldValue<'a> {
name: &'a str,
document: &'a Document<'a>,
metadata: FieldMetadata,
}
impl<'a> ValueView for FieldValue<'a> {
@ -74,6 +79,10 @@ impl<'a> FieldValue<'a> {
self.document.get(self.name).unwrap_or(&LiquidValue::Nil)
}
pub fn is_searchable(&self) -> &bool {
&self.metadata.searchable
}
pub fn is_empty(&self) -> bool {
self.size() == 0
}
@ -89,12 +98,14 @@ impl<'a> ObjectView for FieldValue<'a> {
}
fn keys<'k>(&'k self) -> Box<dyn Iterator<Item = KStringCow<'k>> + 'k> {
Box::new(["name", "value"].iter().map(|&x| KStringCow::from_static(x)))
Box::new(["name", "value", "is_searchable"].iter().map(|&x| KStringCow::from_static(x)))
}
fn values<'k>(&'k self) -> Box<dyn Iterator<Item = &'k dyn ValueView> + 'k> {
Box::new(
std::iter::once(self.name() as &dyn ValueView).chain(std::iter::once(self.value())),
std::iter::once(self.name() as &dyn ValueView)
.chain(std::iter::once(self.value()))
.chain(std::iter::once(self.is_searchable() as &dyn ValueView)),
)
}
@ -103,13 +114,14 @@ impl<'a> ObjectView for FieldValue<'a> {
}
fn contains_key(&self, index: &str) -> bool {
index == "name" || index == "value"
index == "name" || index == "value" || index == "is_searchable"
}
fn get<'s>(&'s self, index: &str) -> Option<&'s dyn ValueView> {
match index {
"name" => Some(self.name()),
"value" => Some(self.value()),
"is_searchable" => Some(self.is_searchable()),
_ => None,
}
}
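With `is_searchable` registered in `keys`, `values`, `contains_key`, and `get`, Liquid templates can branch on it like any other field attribute. A sketch using the `liquid` crate directly; the globals are hand-built stand-ins for a real `FieldValue`:

```rust
fn main() {
    let template = liquid::ParserBuilder::with_stdlib()
        .build()
        .unwrap()
        .parse("{% if field.is_searchable %}{{ field.name }}: {{ field.value }}{% endif %}")
        .unwrap();
    // Hand-built globals standing in for the FieldValue object exposed by milli.
    let globals = liquid::object!({
        "field": { "name": "title", "value": "Hyperion", "is_searchable": true },
    });
    assert_eq!(template.render(&globals).unwrap(), "title: Hyperion");
}
```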

View file

@ -4,28 +4,33 @@ pub(crate) mod error;
mod fields;
mod template_checker;
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::num::NonZeroUsize;
use std::ops::Deref;
use error::{NewPromptError, RenderPromptError};
use self::context::Context;
use self::document::Document;
use crate::update::del_add::DelAdd;
use crate::FieldsIdsMap;
use crate::{FieldId, FieldsIdsMap};
pub struct Prompt {
template: liquid::Template,
template_text: String,
max_bytes: Option<NonZeroUsize>,
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct PromptData {
pub template: String,
pub max_bytes: Option<NonZeroUsize>,
}
impl From<Prompt> for PromptData {
fn from(value: Prompt) -> Self {
Self { template: value.template_text }
Self { template: value.template_text, max_bytes: value.max_bytes }
}
}
@ -33,14 +38,18 @@ impl TryFrom<PromptData> for Prompt {
type Error = NewPromptError;
fn try_from(value: PromptData) -> Result<Self, Self::Error> {
Prompt::new(value.template)
Prompt::new(value.template, value.max_bytes)
}
}
impl Clone for Prompt {
fn clone(&self) -> Self {
let template_text = self.template_text.clone();
Self { template: new_template(&template_text).unwrap(), template_text }
Self {
template: new_template(&template_text).unwrap(),
template_text,
max_bytes: self.max_bytes,
}
}
}
@ -53,25 +62,35 @@ fn default_template() -> liquid::Template {
}
fn default_template_text() -> &'static str {
"{% for field in fields %} \
"{% for field in fields %}\
{% if field.is_searchable and field.value != nil %}\
{{ field.name }}: {{ field.value }}\n\
{% endif %}\
{% endfor %}"
}
pub fn default_max_bytes() -> NonZeroUsize {
NonZeroUsize::new(400).unwrap()
}
impl Default for Prompt {
fn default() -> Self {
Self { template: default_template(), template_text: default_template_text().into() }
Self {
template: default_template(),
template_text: default_template_text().into(),
max_bytes: Some(default_max_bytes()),
}
}
}
impl Default for PromptData {
fn default() -> Self {
Self { template: default_template_text().into() }
Self { template: default_template_text().into(), max_bytes: Some(default_max_bytes()) }
}
}
impl Prompt {
pub fn new(template: String) -> Result<Self, NewPromptError> {
pub fn new(template: String, max_bytes: Option<NonZeroUsize>) -> Result<Self, NewPromptError> {
let this = Self {
template: liquid::ParserBuilder::with_stdlib()
.build()
@ -79,6 +98,7 @@ impl Prompt {
.parse(&template)
.map_err(NewPromptError::cannot_parse_template)?,
template_text: template,
max_bytes,
};
// render template with special object that's OK with `doc.*` and `fields.*`
@ -93,20 +113,72 @@ impl Prompt {
&self,
document: &obkv::KvReaderU16,
side: DelAdd,
field_id_map: &FieldsIdsMap,
field_id_map: &FieldsIdsMapWithMetadata,
) -> Result<String, RenderPromptError> {
let document = Document::new(document, side, field_id_map);
let context = Context::new(&document, field_id_map);
self.template.render(&context).map_err(RenderPromptError::missing_context)
let mut rendered =
self.template.render(&context).map_err(RenderPromptError::missing_context)?;
if let Some(max_bytes) = self.max_bytes {
truncate(&mut rendered, max_bytes.get());
}
Ok(rendered)
}
}
fn truncate(s: &mut String, max_bytes: usize) {
if max_bytes >= s.len() {
return;
}
for i in (0..=max_bytes).rev() {
if s.is_char_boundary(i) {
s.truncate(i);
break;
}
}
}
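A quick illustration of the boundary handling: when `max_bytes` lands inside a multi-byte code point, `truncate` backs up to the previous character boundary, so the result can be shorter than requested.

```rust
// Assumes the `truncate` helper defined above.
fn main() {
    let mut s = String::from("héllo"); // "é" occupies bytes 1..3
    truncate(&mut s, 2); // byte index 2 falls inside "é"
    assert_eq!(s, "h"); // backed up to the boundary at byte 1
}
```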
pub struct FieldsIdsMapWithMetadata<'a> {
fields_ids_map: &'a FieldsIdsMap,
metadata: BTreeMap<FieldId, FieldMetadata>,
}
impl<'a> FieldsIdsMapWithMetadata<'a> {
pub fn new(fields_ids_map: &'a FieldsIdsMap, searchable_fields_ids: &'_ [FieldId]) -> Self {
let mut metadata: BTreeMap<FieldId, FieldMetadata> =
fields_ids_map.ids().map(|id| (id, Default::default())).collect();
for searchable_field_id in searchable_fields_ids {
let Some(metadata) = metadata.get_mut(searchable_field_id) else { continue };
metadata.searchable = true;
}
Self { fields_ids_map, metadata }
}
pub fn metadata(&self, field_id: FieldId) -> Option<FieldMetadata> {
self.metadata.get(&field_id).copied()
}
}
impl<'a> Deref for FieldsIdsMapWithMetadata<'a> {
type Target = FieldsIdsMap;
fn deref(&self) -> &Self::Target {
self.fields_ids_map
}
}
#[derive(Debug, Default, Clone, Copy)]
pub struct FieldMetadata {
pub searchable: bool,
}
#[cfg(test)]
mod test {
use super::Prompt;
use crate::error::FaultSource;
use crate::prompt::error::{NewPromptError, NewPromptErrorKind};
use crate::prompt::truncate;
#[test]
fn default_template() {
@ -116,18 +188,18 @@ mod test {
#[test]
fn empty_template() {
Prompt::new("".into()).unwrap();
Prompt::new("".into(), None).unwrap();
}
#[test]
fn template_ok() {
Prompt::new("{{doc.title}}: {{doc.overview}}".into()).unwrap();
Prompt::new("{{doc.title}}: {{doc.overview}}".into(), None).unwrap();
}
#[test]
fn template_syntax() {
assert!(matches!(
Prompt::new("{{doc.title: {{doc.overview}}".into()),
Prompt::new("{{doc.title: {{doc.overview}}".into(), None),
Err(NewPromptError {
kind: NewPromptErrorKind::CannotParseTemplate(_),
fault: FaultSource::User
@ -138,7 +210,7 @@ mod test {
#[test]
fn template_missing_doc() {
assert!(matches!(
Prompt::new("{{title}}: {{overview}}".into()),
Prompt::new("{{title}}: {{overview}}".into(), None),
Err(NewPromptError {
kind: NewPromptErrorKind::InvalidFieldsInTemplate(_),
fault: FaultSource::User
@ -148,29 +220,62 @@ mod test {
#[test]
fn template_nested_doc() {
Prompt::new("{{doc.actor.firstName}}: {{doc.actor.lastName}}".into()).unwrap();
Prompt::new("{{doc.actor.firstName}}: {{doc.actor.lastName}}".into(), None).unwrap();
}
#[test]
fn template_fields() {
Prompt::new("{% for field in fields %}{{field}}{% endfor %}".into()).unwrap();
Prompt::new("{% for field in fields %}{{field}}{% endfor %}".into(), None).unwrap();
}
#[test]
fn template_fields_ok() {
Prompt::new("{% for field in fields %}{{field.name}}: {{field.value}}{% endfor %}".into())
.unwrap();
Prompt::new(
"{% for field in fields %}{{field.name}}: {{field.value}}{% endfor %}".into(),
None,
)
.unwrap();
}
#[test]
fn template_fields_invalid() {
assert!(matches!(
// intentionally garbled field
Prompt::new("{% for field in fields %}{{field.vaelu}} {% endfor %}".into()),
Prompt::new("{% for field in fields %}{{field.vaelu}} {% endfor %}".into(), None),
Err(NewPromptError {
kind: NewPromptErrorKind::InvalidFieldsInTemplate(_),
fault: FaultSource::User
})
));
}
// todo: test truncation
#[test]
fn template_truncation() {
let mut s = "インテル ザー ビーグル".to_string();
truncate(&mut s, 42);
assert_eq!(s, "インテル ザー ビーグル");
assert_eq!(s.len(), 32);
truncate(&mut s, 32);
assert_eq!(s, "インテル ザー ビーグル");
truncate(&mut s, 31);
assert_eq!(s, "インテル ザー ビーグ");
truncate(&mut s, 30);
assert_eq!(s, "インテル ザー ビーグ");
truncate(&mut s, 28);
assert_eq!(s, "インテル ザー ビー");
truncate(&mut s, 26);
assert_eq!(s, "インテル ザー ビー");
truncate(&mut s, 25);
assert_eq!(s, "インテル ザー ビ");
assert_eq!("".len(), 3);
truncate(&mut s, 3);
assert_eq!(s, "");
truncate(&mut s, 2);
assert_eq!(s, "");
}
}

View file

@ -1,4 +1,5 @@
use std::collections::{BTreeMap, HashMap, HashSet};
use std::fmt::Display;
use std::ops::ControlFlow;
use std::{fmt, mem};
@ -37,6 +38,15 @@ pub enum OrderBy {
Count,
}
impl Display for OrderBy {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
OrderBy::Lexicographic => f.write_str("alphabetically"),
OrderBy::Count => f.write_str("by count"),
}
}
}
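The new `Display` impl mainly serves human-readable messages; a usage sketch in the style of this file's tests, assuming the `OrderBy` enum above:

```rust
#[test]
fn order_by_display() {
    assert_eq!(OrderBy::Lexicographic.to_string(), "alphabetically");
    assert_eq!(OrderBy::Count.to_string(), "by count");
}
```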
pub struct FacetDistribution<'a> {
facets: Option<HashMap<String, OrderBy>>,
candidates: Option<RoaringBitmap>,
@ -100,7 +110,6 @@ impl<'a> FacetDistribution<'a> {
let mut lexicographic_distribution = BTreeMap::new();
let mut key_buffer: Vec<_> = field_id.to_be_bytes().to_vec();
let distribution_prelength = distribution.len();
let db = self.index.field_id_docid_facet_f64s;
for docid in candidates {
key_buffer.truncate(mem::size_of::<FieldId>());
@ -113,23 +122,21 @@ impl<'a> FacetDistribution<'a> {
for result in iter {
let ((_, _, value), ()) = result?;
*lexicographic_distribution.entry(value.to_string()).or_insert(0) += 1;
if lexicographic_distribution.len() - distribution_prelength
== self.max_values_per_facet
{
break;
}
}
}
distribution.extend(lexicographic_distribution);
distribution.extend(
lexicographic_distribution
.into_iter()
.take(self.max_values_per_facet.saturating_sub(distribution.len())),
);
}
FacetType::String => {
let mut normalized_distribution = BTreeMap::new();
let mut key_buffer: Vec<_> = field_id.to_be_bytes().to_vec();
let db = self.index.field_id_docid_facet_strings;
'outer: for docid in candidates {
for docid in candidates {
key_buffer.truncate(mem::size_of::<FieldId>());
key_buffer.extend_from_slice(&docid.to_be_bytes());
let iter = db
@ -144,14 +151,14 @@ impl<'a> FacetDistribution<'a> {
.or_insert_with(|| (original_value, 0));
*count += 1;
if normalized_distribution.len() == self.max_values_per_facet {
break 'outer;
}
// we'd like to break here once we have enough facet values, but we collect them in increasing docid order,
// so higher-ranked facet values could still come from later docids
}
}
let iter = normalized_distribution
.into_iter()
.take(self.max_values_per_facet.saturating_sub(distribution.len()))
.map(|(_normalized, (original, count))| (original.to_string(), count));
distribution.extend(iter);
}
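Both arms now rely on the same capping idiom: collect everything into an intermediate sorted map, then extend the final distribution by at most the remaining budget. A self-contained sketch of that idiom (the `max` of 2 is hypothetical):

```rust
use std::collections::BTreeMap;

// Extend `dist` with entries from `src`, never exceeding `max` total entries.
fn extend_capped(dist: &mut BTreeMap<String, u64>, src: BTreeMap<String, u64>, max: usize) {
    dist.extend(src.into_iter().take(max.saturating_sub(dist.len())));
}

fn main() {
    let mut dist = BTreeMap::from([("blue".to_string(), 2)]);
    let src = BTreeMap::from([("green".to_string(), 1), ("red".to_string(), 4)]);
    extend_capped(&mut dist, src, 2); // hypothetical max_values_per_facet = 2
    assert_eq!(dist.len(), 2);
    assert!(dist.contains_key("green") && !dist.contains_key("red"));
}
```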
@ -467,7 +474,7 @@ mod tests {
.execute()
.unwrap();
milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 1}}"###);
milli_snap!(format!("{map:?}"), @r###"{"colour": {"Blue": 2}}"###);
let map = FacetDistribution::new(&txn, &index)
.facets(iter::once(("colour", OrderBy::Count)))

View file

@ -12,7 +12,7 @@ use serde_json::Value;
use super::facet_range_search;
use crate::error::{Error, UserError};
use crate::heed_codec::facet::{
FacetGroupKey, FacetGroupKeyCodec, FacetGroupValueCodec, OrderedF64Codec,
FacetGroupKey, FacetGroupKeyCodec, FacetGroupValue, FacetGroupValueCodec, OrderedF64Codec,
};
use crate::index::db_name::FACET_ID_STRING_DOCIDS;
use crate::{
@ -336,6 +336,24 @@ impl<'a> Filter<'a> {
return Ok(docids);
}
Condition::StartsWith { keyword: _, word } => {
let value = crate::normalize_facet(word.value());
let base = FacetGroupKey { field_id, level: 0, left_bound: value.as_str() };
let docids = strings_db
.prefix_iter(rtxn, &base)?
.map(|result| -> Result<RoaringBitmap> {
match result {
Ok((_facet_group_key, FacetGroupValue { bitmap, .. })) => Ok(bitmap),
Err(_e) => Err(InternalError::from(SerializationError::Decoding {
db_name: Some(FACET_ID_STRING_DOCIDS),
})
.into()),
}
})
.union()?;
return Ok(docids);
}
};
let mut output = RoaringBitmap::new();
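The `StartsWith` arm works because level-0 facet keys are stored sorted: every string sharing a prefix sits in one contiguous key range, so a prefix iterator plus a union of per-key bitmaps yields the matching documents. A sketch of the idea over a plain sorted map (standing in for the facet-string database and RoaringBitmap):

```rust
use std::collections::BTreeMap;

// `map` stands in for the sorted facet-string database; values are docid sets.
fn starts_with(map: &BTreeMap<String, Vec<u32>>, prefix: &str) -> Vec<u32> {
    let mut docids: Vec<u32> = map
        .range(prefix.to_string()..)
        .take_while(|(key, _)| key.starts_with(prefix))
        .flat_map(|(_, ids)| ids.iter().copied())
        .collect();
    docids.sort_unstable();
    docids.dedup(); // the union step, without RoaringBitmap
    docids
}

fn main() {
    let map = BTreeMap::from([
        ("blue".to_string(), vec![1, 2]),
        ("blueberry".to_string(), vec![2, 3]),
        ("red".to_string(), vec![4]),
    ]);
    assert_eq!(starts_with(&map, "blue"), vec![1, 2, 3]);
}
```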

View file

@ -190,7 +190,7 @@ impl<'a> Search<'a> {
return Ok(return_keyword_results(self.limit, self.offset, keyword_results));
};
// no embedder, no semantic search
let Some(SemanticSearch { vector, embedder_name, embedder }) = semantic else {
let Some(SemanticSearch { vector, embedder_name, embedder, quantized }) = semantic else {
return Ok(return_keyword_results(self.limit, self.offset, keyword_results));
};
@ -212,7 +212,7 @@ impl<'a> Search<'a> {
};
search.semantic =
Some(SemanticSearch { vector: Some(vector_query), embedder_name, embedder });
Some(SemanticSearch { vector: Some(vector_query), embedder_name, embedder, quantized });
// TODO: would be better to have two distinct functions at this point
let vector_results = search.execute()?;

View file

@ -32,6 +32,7 @@ pub struct SemanticSearch {
vector: Option<Vec<f32>>,
embedder_name: String,
embedder: Arc<Embedder>,
quantized: bool,
}
pub struct Search<'a> {
@ -89,9 +90,10 @@ impl<'a> Search<'a> {
&mut self,
embedder_name: String,
embedder: Arc<Embedder>,
quantized: bool,
vector: Option<Vec<f32>>,
) -> &mut Search<'a> {
self.semantic = Some(SemanticSearch { embedder_name, embedder, vector });
self.semantic = Some(SemanticSearch { embedder_name, embedder, quantized, vector });
self
}
@ -206,7 +208,7 @@ impl<'a> Search<'a> {
degraded,
used_negative_operator,
} = match self.semantic.as_ref() {
Some(SemanticSearch { vector: Some(vector), embedder_name, embedder }) => {
Some(SemanticSearch { vector: Some(vector), embedder_name, embedder, quantized }) => {
execute_vector_search(
&mut ctx,
vector,
@ -219,6 +221,7 @@ impl<'a> Search<'a> {
self.limit,
embedder_name,
embedder,
*quantized,
self.time_budget.clone(),
self.ranking_score_threshold,
)?

View file

@ -312,6 +312,7 @@ fn get_ranking_rules_for_placeholder_search<'ctx>(
Ok(ranking_rules)
}
#[allow(clippy::too_many_arguments)]
fn get_ranking_rules_for_vector<'ctx>(
ctx: &SearchContext<'ctx>,
sort_criteria: &Option<Vec<AscDesc>>,
@ -320,6 +321,7 @@ fn get_ranking_rules_for_vector<'ctx>(
target: &[f32],
embedder_name: &str,
embedder: &Embedder,
quantized: bool,
) -> Result<Vec<BoxRankingRule<'ctx, PlaceholderQuery>>> {
// query graph search
@ -347,6 +349,7 @@ fn get_ranking_rules_for_vector<'ctx>(
limit_plus_offset,
embedder_name,
embedder,
quantized,
)?;
ranking_rules.push(Box::new(vector_sort));
vector = true;
@ -576,6 +579,7 @@ pub fn execute_vector_search(
length: usize,
embedder_name: &str,
embedder: &Embedder,
quantized: bool,
time_budget: TimeBudget,
ranking_score_threshold: Option<f64>,
) -> Result<PartialSearchResult> {
@ -591,6 +595,7 @@ pub fn execute_vector_search(
vector,
embedder_name,
embedder,
quantized,
)?;
let mut placeholder_search_logger = logger::DefaultSearchLogger;

View file

@ -16,6 +16,7 @@ pub struct VectorSort<Q: RankingRuleQueryTrait> {
limit: usize,
distribution_shift: Option<DistributionShift>,
embedder_index: u8,
quantized: bool,
}
impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
@ -26,6 +27,7 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
limit: usize,
embedder_name: &str,
embedder: &Embedder,
quantized: bool,
) -> Result<Self> {
let embedder_index = ctx
.index
@ -41,6 +43,7 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
limit,
distribution_shift: embedder.distribution(),
embedder_index,
quantized,
})
}
@ -49,16 +52,12 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
ctx: &mut SearchContext<'_>,
vector_candidates: &RoaringBitmap,
) -> Result<()> {
let readers: std::result::Result<Vec<_>, _> =
ctx.index.arroy_readers(ctx.txn, self.embedder_index).collect();
let readers = readers?;
let target = &self.target;
let mut results = Vec::new();
for reader in readers.iter() {
for reader in ctx.index.arroy_readers(ctx.txn, self.embedder_index, self.quantized) {
let nns_by_vector =
reader.nns_by_vector(ctx.txn, target, self.limit, None, Some(vector_candidates))?;
reader?.nns_by_vector(ctx.txn, target, self.limit, Some(vector_candidates))?;
results.extend(nns_by_vector.into_iter());
}
results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
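`f32` is not `Ord`, so the merged neighbour lists from each arroy index are sorted through `OrderedFloat`; a sketch with hypothetical distances:

```rust
use ordered_float::OrderedFloat;

fn main() {
    // (docid, distance) pairs gathered from several readers.
    let mut results = vec![(3u32, 0.9f32), (1, 0.1), (2, 0.5)];
    results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
    assert_eq!(results, vec![(1, 0.1), (2, 0.5), (3, 0.9)]);
}
```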

View file

@ -18,9 +18,11 @@ pub struct Similar<'a> {
embedder_name: String,
embedder: Arc<Embedder>,
ranking_score_threshold: Option<f64>,
quantized: bool,
}
impl<'a> Similar<'a> {
#[allow(clippy::too_many_arguments)]
pub fn new(
id: DocumentId,
offset: usize,
@ -29,6 +31,7 @@ impl<'a> Similar<'a> {
rtxn: &'a heed::RoTxn<'a>,
embedder_name: String,
embedder: Arc<Embedder>,
quantized: bool,
) -> Self {
Self {
id,
@ -40,6 +43,7 @@ impl<'a> Similar<'a> {
embedder_name,
embedder,
ranking_score_threshold: None,
quantized,
}
}
@ -67,19 +71,13 @@ impl<'a> Similar<'a> {
.get(self.rtxn, &self.embedder_name)?
.ok_or_else(|| crate::UserError::InvalidEmbedder(self.embedder_name.to_owned()))?;
let readers: std::result::Result<Vec<_>, _> =
self.index.arroy_readers(self.rtxn, embedder_index).collect();
let readers = readers?;
let mut results = Vec::new();
for reader in readers.iter() {
let nns_by_item = reader.nns_by_item(
for reader in self.index.arroy_readers(self.rtxn, embedder_index, self.quantized) {
let nns_by_item = reader?.nns_by_item(
self.rtxn,
self.id,
self.limit + self.offset + 1,
None,
Some(&universe),
)?;
if let Some(mut nns_by_item) = nns_by_item {

View file

@ -15,14 +15,14 @@ use serde_json::Value;
use super::helpers::{create_writer, writer_into_reader, GrenadParameters};
use crate::error::FaultSource;
use crate::index::IndexEmbeddingConfig;
use crate::prompt::Prompt;
use crate::prompt::{FieldsIdsMapWithMetadata, Prompt};
use crate::update::del_add::{DelAdd, KvReaderDelAdd, KvWriterDelAdd};
use crate::update::settings::InnerIndexSettingsDiff;
use crate::vector::error::{EmbedErrorKind, PossibleEmbeddingMistakes, UnusedVectorsDistribution};
use crate::vector::parsed_vectors::{ParsedVectorsDiff, VectorState, RESERVED_VECTORS_FIELD_NAME};
use crate::vector::settings::{EmbedderAction, ReindexAction};
use crate::vector::settings::ReindexAction;
use crate::vector::{Embedder, Embeddings};
use crate::{try_split_array_at, DocumentId, FieldId, FieldsIdsMap, Result, ThreadPoolNoAbort};
use crate::{try_split_array_at, DocumentId, FieldId, Result, ThreadPoolNoAbort};
/// The length of the elements that are always in the buffer when inserting new values.
const TRUNCATE_SIZE: usize = size_of::<DocumentId>();
@ -189,7 +189,13 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
let reindex_vectors = settings_diff.reindex_vectors();
let old_fields_ids_map = &settings_diff.old.fields_ids_map;
let old_fields_ids_map =
FieldsIdsMapWithMetadata::new(old_fields_ids_map, &settings_diff.old.searchable_fields_ids);
let new_fields_ids_map = &settings_diff.new.fields_ids_map;
let new_fields_ids_map =
FieldsIdsMapWithMetadata::new(new_fields_ids_map, &settings_diff.new.searchable_fields_ids);
// the vector field id may have changed
let old_vectors_fid = old_fields_ids_map.id(RESERVED_VECTORS_FIELD_NAME);
@ -202,65 +208,65 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
if reindex_vectors {
for (name, action) in settings_diff.embedding_config_updates.iter() {
match action {
EmbedderAction::WriteBackToDocuments(_) => continue, // already deleted
EmbedderAction::Reindex(action) => {
let Some((embedder_name, (embedder, prompt))) = configs.remove_entry(name)
else {
tracing::error!(embedder = name, "Requested embedder config not found");
continue;
};
if let Some(action) = action.reindex() {
let Some((embedder_name, (embedder, prompt, _quantized))) =
configs.remove_entry(name)
else {
tracing::error!(embedder = name, "Requested embedder config not found");
continue;
};
// (docid, _index) -> KvWriterDelAdd -> Vector
let manual_vectors_writer = create_writer(
indexer.chunk_compression_type,
indexer.chunk_compression_level,
tempfile::tempfile()?,
);
// (docid, _index) -> KvWriterDelAdd -> Vector
let manual_vectors_writer = create_writer(
indexer.chunk_compression_type,
indexer.chunk_compression_level,
tempfile::tempfile()?,
);
// (docid) -> (prompt)
let prompts_writer = create_writer(
indexer.chunk_compression_type,
indexer.chunk_compression_level,
tempfile::tempfile()?,
);
// (docid) -> (prompt)
let prompts_writer = create_writer(
indexer.chunk_compression_type,
indexer.chunk_compression_level,
tempfile::tempfile()?,
);
// (docid) -> ()
let remove_vectors_writer = create_writer(
indexer.chunk_compression_type,
indexer.chunk_compression_level,
tempfile::tempfile()?,
);
// (docid) -> ()
let remove_vectors_writer = create_writer(
indexer.chunk_compression_type,
indexer.chunk_compression_level,
tempfile::tempfile()?,
);
let action = match action {
ReindexAction::FullReindex => ExtractionAction::SettingsFullReindex,
ReindexAction::RegeneratePrompts => {
let Some((_, old_prompt)) = old_configs.get(name) else {
tracing::error!(embedder = name, "Old embedder config not found");
continue;
};
let action = match action {
ReindexAction::FullReindex => ExtractionAction::SettingsFullReindex,
ReindexAction::RegeneratePrompts => {
let Some((_, old_prompt, _quantized)) = old_configs.get(name) else {
tracing::error!(embedder = name, "Old embedder config not found");
continue;
};
ExtractionAction::SettingsRegeneratePrompts { old_prompt }
}
};
ExtractionAction::SettingsRegeneratePrompts { old_prompt }
}
};
extractors.push(EmbedderVectorExtractor {
embedder_name,
embedder,
prompt,
prompts_writer,
remove_vectors_writer,
manual_vectors_writer,
add_to_user_provided: RoaringBitmap::new(),
action,
});
}
extractors.push(EmbedderVectorExtractor {
embedder_name,
embedder,
prompt,
prompts_writer,
remove_vectors_writer,
manual_vectors_writer,
add_to_user_provided: RoaringBitmap::new(),
action,
});
} else {
continue;
}
}
} else {
// document operation
for (embedder_name, (embedder, prompt)) in configs.into_iter() {
for (embedder_name, (embedder, prompt, _quantized)) in configs.into_iter() {
// (docid, _index) -> KvWriterDelAdd -> Vector
let manual_vectors_writer = create_writer(
indexer.chunk_compression_type,
@ -376,7 +382,7 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
);
continue;
}
regenerate_prompt(obkv, prompt, new_fields_ids_map)?
regenerate_prompt(obkv, prompt, &new_fields_ids_map)?
}
},
// prompt regeneration is only triggered for existing embedders
@ -393,7 +399,7 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
regenerate_if_prompt_changed(
obkv,
(old_prompt, prompt),
(old_fields_ids_map, new_fields_ids_map),
(&old_fields_ids_map, &new_fields_ids_map),
)?
} else {
// we can simply ignore user provided vectors as they are not regenerated and are
@ -409,7 +415,7 @@ pub fn extract_vector_points<R: io::Read + io::Seek>(
prompt,
(add_to_user_provided, remove_from_user_provided),
(old, new),
(old_fields_ids_map, new_fields_ids_map),
(&old_fields_ids_map, &new_fields_ids_map),
document_id,
embedder_name,
embedder_is_manual,
@ -479,7 +485,10 @@ fn extract_vector_document_diff(
prompt: &Prompt,
(add_to_user_provided, remove_from_user_provided): (&mut RoaringBitmap, &mut RoaringBitmap),
(old, new): (VectorState, VectorState),
(old_fields_ids_map, new_fields_ids_map): (&FieldsIdsMap, &FieldsIdsMap),
(old_fields_ids_map, new_fields_ids_map): (
&FieldsIdsMapWithMetadata,
&FieldsIdsMapWithMetadata,
),
document_id: impl Fn() -> Value,
embedder_name: &str,
embedder_is_manual: bool,
@ -599,7 +608,10 @@ fn extract_vector_document_diff(
fn regenerate_if_prompt_changed(
obkv: &obkv::KvReader<FieldId>,
(old_prompt, new_prompt): (&Prompt, &Prompt),
(old_fields_ids_map, new_fields_ids_map): (&FieldsIdsMap, &FieldsIdsMap),
(old_fields_ids_map, new_fields_ids_map): (
&FieldsIdsMapWithMetadata,
&FieldsIdsMapWithMetadata,
),
) -> Result<VectorStateDelta> {
let old_prompt =
old_prompt.render(obkv, DelAdd::Deletion, old_fields_ids_map).unwrap_or(Default::default());
@ -614,7 +626,7 @@ fn regenerate_if_prompt_changed(
fn regenerate_prompt(
obkv: &obkv::KvReader<FieldId>,
prompt: &Prompt,
new_fields_ids_map: &FieldsIdsMap,
new_fields_ids_map: &FieldsIdsMapWithMetadata,
) -> Result<VectorStateDelta> {
let prompt = prompt.render(obkv, DelAdd::Addition, new_fields_ids_map)?;

View file

@ -37,7 +37,7 @@ use crate::update::index_documents::parallel::ImmutableObkvs;
use crate::update::{
IndexerConfig, UpdateIndexingStep, WordPrefixDocids, WordPrefixIntegerDocids, WordsPrefixesFst,
};
use crate::vector::EmbeddingConfigs;
use crate::vector::{ArroyWrapper, EmbeddingConfigs};
use crate::{CboRoaringBitmapCodec, Index, Object, Result};
static MERGED_DATABASE_COUNT: usize = 7;
@ -673,6 +673,24 @@ where
let number_of_documents = self.index.number_of_documents(self.wtxn)?;
let mut rng = rand::rngs::StdRng::seed_from_u64(42);
// If an embedder wasn't used in the typed chunk but must be binary quantized,
// we still have to insert its dimensions into `dimension`
for (name, action) in settings_diff.embedding_config_updates.iter() {
if action.is_being_quantized && !dimension.contains_key(name.as_str()) {
let index = self.index.embedder_category_id.get(self.wtxn, name)?.ok_or(
InternalError::DatabaseMissingEntry {
db_name: "embedder_category_id",
key: None,
},
)?;
let first_id = crate::vector::arroy_db_range_for_embedder(index).next().unwrap();
let reader =
ArroyWrapper::new(self.index.vector_arroy, first_id, action.was_quantized);
let dim = reader.dimensions(self.wtxn)?;
dimension.insert(name.to_string(), dim);
}
}
for (embedder_name, dimension) in dimension {
let wtxn = &mut *self.wtxn;
let vector_arroy = self.index.vector_arroy;
@ -680,13 +698,23 @@ where
let embedder_index = self.index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or(
InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
)?;
let embedder_config = settings_diff.embedding_config_updates.get(&embedder_name);
let was_quantized = settings_diff
.old
.embedding_configs
.get(&embedder_name)
.map_or(false, |conf| conf.2);
let is_quantizing = embedder_config.map_or(false, |action| action.is_being_quantized);
pool.install(|| {
for k in crate::vector::arroy_db_range_for_embedder(embedder_index) {
let writer = arroy::Writer::new(vector_arroy, k, dimension);
if writer.need_build(wtxn)? {
writer.build(wtxn, &mut rng, None)?;
} else if writer.is_empty(wtxn)? {
let mut writer = ArroyWrapper::new(vector_arroy, k, was_quantized);
if is_quantizing {
writer.quantize(wtxn, k, dimension)?;
}
if writer.need_build(wtxn, dimension)? {
writer.build(wtxn, &mut rng, dimension)?;
} else if writer.is_empty(wtxn, dimension)? {
break;
}
}
@ -2734,11 +2762,13 @@ mod tests {
api_key: Setting::NotSet,
dimensions: Setting::Set(3),
document_template: Setting::NotSet,
document_template_max_bytes: Setting::NotSet,
url: Setting::NotSet,
request: Setting::NotSet,
response: Setting::NotSet,
distribution: Setting::NotSet,
headers: Setting::NotSet,
binary_quantized: Setting::NotSet,
}),
);
settings.set_embedder_settings(embedders);
@ -2767,7 +2797,7 @@ mod tests {
std::sync::Arc::new(crate::vector::Embedder::new(embedder.embedder_options).unwrap());
let res = index
.search(&rtxn)
.semantic(embedder_name, embedder, Some([0.0, 1.0, 2.0].to_vec()))
.semantic(embedder_name, embedder, false, Some([0.0, 1.0, 2.0].to_vec()))
.execute()
.unwrap();
assert_eq!(res.documents_ids.len(), 3);

View file

@ -29,7 +29,8 @@ use crate::update::index_documents::GrenadParameters;
use crate::update::settings::{InnerIndexSettings, InnerIndexSettingsDiff};
use crate::update::{AvailableIds, UpdateIndexingStep};
use crate::vector::parsed_vectors::{ExplicitVectors, VectorOrArrayOfVectors};
use crate::vector::settings::{EmbedderAction, WriteBackToDocuments};
use crate::vector::settings::WriteBackToDocuments;
use crate::vector::ArroyWrapper;
use crate::{
is_faceted_by, FieldDistribution, FieldId, FieldIdMapMissingEntry, FieldsIdsMap, Index, Result,
};
@ -992,19 +993,17 @@ impl<'a, 'i> Transform<'a, 'i> {
None
};
let readers: Result<
BTreeMap<&str, (Vec<arroy::Reader<'_, arroy::distances::Angular>>, &RoaringBitmap)>,
> = settings_diff
let readers: Result<BTreeMap<&str, (Vec<ArroyWrapper>, &RoaringBitmap)>> = settings_diff
.embedding_config_updates
.iter()
.filter_map(|(name, action)| {
if let EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
embedder_id,
user_provided,
}) = action
if let Some(WriteBackToDocuments { embedder_id, user_provided }) =
action.write_back()
{
let readers: Result<Vec<_>> =
self.index.arroy_readers(wtxn, *embedder_id).collect();
let readers: Result<Vec<_>> = self
.index
.arroy_readers(wtxn, *embedder_id, action.was_quantized)
.collect();
match readers {
Ok(readers) => Some(Ok((name.as_str(), (readers, user_provided)))),
Err(error) => Some(Err(error)),
@ -1107,23 +1106,14 @@ impl<'a, 'i> Transform<'a, 'i> {
}
}
let mut writers = Vec::new();
// delete all vectors from the embedders that need removal
for (_, (readers, _)) in readers {
for reader in readers {
let dimensions = reader.dimensions();
let arroy_index = reader.index();
drop(reader);
let writer = arroy::Writer::new(self.index.vector_arroy, arroy_index, dimensions);
writers.push(writer);
let dimensions = reader.dimensions(wtxn)?;
reader.clear(wtxn, dimensions)?;
}
}
for writer in writers {
writer.clear(wtxn)?;
}
let grenad_params = GrenadParameters {
chunk_compression_type: self.indexer_settings.chunk_compression_type,
chunk_compression_level: self.indexer_settings.chunk_compression_level,

View file

@ -26,6 +26,7 @@ use crate::update::index_documents::helpers::{
as_cloneable_grenad, try_split_array_at, KeepLatestObkv,
};
use crate::update::settings::InnerIndexSettingsDiff;
use crate::vector::ArroyWrapper;
use crate::{
lat_lng_to_xyz, CboRoaringBitmapCodec, DocumentId, FieldId, GeoPoint, Index, InternalError,
Result, SerializationError, U8StrStrCodec,
@ -661,9 +662,14 @@ pub(crate) fn write_typed_chunk_into_index(
let embedder_index = index.embedder_category_id.get(wtxn, &embedder_name)?.ok_or(
InternalError::DatabaseMissingEntry { db_name: "embedder_category_id", key: None },
)?;
let binary_quantized = settings_diff
.old
.embedding_configs
.get(&embedder_name)
.map_or(false, |conf| conf.2);
// FIXME: allow customizing distance
let writers: Vec<_> = crate::vector::arroy_db_range_for_embedder(embedder_index)
.map(|k| arroy::Writer::new(index.vector_arroy, k, expected_dimension))
.map(|k| ArroyWrapper::new(index.vector_arroy, k, binary_quantized))
.collect();
// remove vectors for docids we want them removed
@ -674,7 +680,7 @@ pub(crate) fn write_typed_chunk_into_index(
for writer in &writers {
// Uses invariant: vectors are packed in the first writers.
if !writer.del_item(wtxn, docid)? {
if !writer.del_item(wtxn, expected_dimension, docid)? {
break;
}
}
@ -706,7 +712,7 @@ pub(crate) fn write_typed_chunk_into_index(
)));
}
for (embedding, writer) in embeddings.iter().zip(&writers) {
writer.add_item(wtxn, docid, embedding)?;
writer.add_item(wtxn, expected_dimension, docid, embedding)?;
}
}
@ -729,7 +735,7 @@ pub(crate) fn write_typed_chunk_into_index(
break;
};
if candidate == vector {
writer.del_item(wtxn, docid)?;
writer.del_item(wtxn, expected_dimension, docid)?;
deleted_index = Some(index);
}
}
@ -746,8 +752,13 @@ pub(crate) fn write_typed_chunk_into_index(
if let Some((last_index, vector)) = last_index_with_a_vector {
// unwrap: computed the index from the list of writers
let writer = writers.get(last_index).unwrap();
writer.del_item(wtxn, docid)?;
writers.get(deleted_index).unwrap().add_item(wtxn, docid, &vector)?;
writer.del_item(wtxn, expected_dimension, docid)?;
writers.get(deleted_index).unwrap().add_item(
wtxn,
expected_dimension,
docid,
&vector,
)?;
}
}
}
@ -757,8 +768,8 @@ pub(crate) fn write_typed_chunk_into_index(
// overflow was detected during vector extraction.
for writer in &writers {
if !writer.contains_item(wtxn, docid)? {
writer.add_item(wtxn, docid, &vector)?;
if !writer.contains_item(wtxn, expected_dimension, docid)? {
writer.add_item(wtxn, expected_dimension, docid, &vector)?;
break;
}
}

View file

@ -1,5 +1,6 @@
use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::convert::TryInto;
use std::num::NonZeroUsize;
use std::result::Result as StdResult;
use std::sync::Arc;
@ -19,6 +20,7 @@ use crate::index::{
IndexEmbeddingConfig, DEFAULT_MIN_WORD_LEN_ONE_TYPO, DEFAULT_MIN_WORD_LEN_TWO_TYPOS,
};
use crate::order_by_map::OrderByMap;
use crate::prompt::default_max_bytes;
use crate::proximity::ProximityPrecision;
use crate::update::index_documents::IndexDocumentsMethod;
use crate::update::{IndexDocuments, UpdateIndexingStep};
@ -952,7 +954,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
let old_configs = self.index.embedding_configs(self.wtxn)?;
let remove_all: Result<BTreeMap<String, EmbedderAction>> = old_configs
.into_iter()
.map(|IndexEmbeddingConfig { name, config: _, user_provided }| -> Result<_> {
.map(|IndexEmbeddingConfig { name, config, user_provided }| -> Result<_> {
let embedder_id =
self.index.embedder_category_id.get(self.wtxn, &name)?.ok_or(
crate::InternalError::DatabaseMissingEntry {
@ -962,10 +964,10 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
)?;
Ok((
name,
EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
embedder_id,
user_provided,
}),
EmbedderAction::with_write_back(
WriteBackToDocuments { embedder_id, user_provided },
config.quantized(),
),
))
})
.collect();
@ -1002,7 +1004,8 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
match joined {
// updated config
EitherOrBoth::Both((name, (old, user_provided)), (_, new)) => {
let settings_diff = SettingsDiff::from_settings(old, new);
let was_quantized = old.binary_quantized.set().unwrap_or_default();
let settings_diff = SettingsDiff::from_settings(&name, old, new)?;
match settings_diff {
SettingsDiff::Remove => {
tracing::debug!(
@ -1021,25 +1024,29 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
self.index.embedder_category_id.delete(self.wtxn, &name)?;
embedder_actions.insert(
name,
EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
embedder_id,
user_provided,
}),
EmbedderAction::with_write_back(
WriteBackToDocuments { embedder_id, user_provided },
was_quantized,
),
);
}
SettingsDiff::Reindex { action, updated_settings } => {
SettingsDiff::Reindex { action, updated_settings, quantize } => {
tracing::debug!(
embedder = name,
user_provided = user_provided.len(),
?action,
"reindex embedder"
);
embedder_actions.insert(name.clone(), EmbedderAction::Reindex(action));
embedder_actions.insert(
name.clone(),
EmbedderAction::with_reindex(action, was_quantized)
.with_is_being_quantized(quantize),
);
let new =
validate_embedding_settings(Setting::Set(updated_settings), &name)?;
updated_configs.insert(name, (new, user_provided));
}
SettingsDiff::UpdateWithoutReindex { updated_settings } => {
SettingsDiff::UpdateWithoutReindex { updated_settings, quantize } => {
tracing::debug!(
embedder = name,
user_provided = user_provided.len(),
@ -1047,6 +1054,12 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
);
let new =
validate_embedding_settings(Setting::Set(updated_settings), &name)?;
if quantize {
embedder_actions.insert(
name.clone(),
EmbedderAction::default().with_is_being_quantized(true),
);
}
updated_configs.insert(name, (new, user_provided));
}
}
@ -1065,8 +1078,10 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
&mut setting,
);
let setting = validate_embedding_settings(setting, &name)?;
embedder_actions
.insert(name.clone(), EmbedderAction::Reindex(ReindexAction::FullReindex));
embedder_actions.insert(
name.clone(),
EmbedderAction::with_reindex(ReindexAction::FullReindex, false),
);
updated_configs.insert(name, (setting, RoaringBitmap::new()));
}
}
@ -1080,19 +1095,14 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
let mut find_free_index =
move || free_indices.find(|(_, free)| **free).map(|(index, _)| index as u8);
for (name, action) in embedder_actions.iter() {
match action {
EmbedderAction::Reindex(ReindexAction::RegeneratePrompts) => {
/* cannot be a new embedder, so has to have an id already */
}
EmbedderAction::Reindex(ReindexAction::FullReindex) => {
if self.index.embedder_category_id.get(self.wtxn, name)?.is_none() {
let id = find_free_index()
.ok_or(UserError::TooManyEmbedders(updated_configs.len()))?;
tracing::debug!(embedder = name, id, "assigning free id to new embedder");
self.index.embedder_category_id.put(self.wtxn, name, &id)?;
}
}
EmbedderAction::WriteBackToDocuments(_) => { /* already removed */ }
// ignore actions that are not possible for a new embedder
if matches!(action.reindex(), Some(ReindexAction::FullReindex))
&& self.index.embedder_category_id.get(self.wtxn, name)?.is_none()
{
let id =
find_free_index().ok_or(UserError::TooManyEmbedders(updated_configs.len()))?;
tracing::debug!(embedder = name, id, "assigning free id to new embedder");
self.index.embedder_category_id.put(self.wtxn, name, &id)?;
}
}
let updated_configs: Vec<IndexEmbeddingConfig> = updated_configs
@ -1238,7 +1248,7 @@ impl InnerIndexSettingsDiff {
old_settings: InnerIndexSettings,
new_settings: InnerIndexSettings,
primary_key_id: Option<FieldId>,
embedding_config_updates: BTreeMap<String, EmbedderAction>,
mut embedding_config_updates: BTreeMap<String, EmbedderAction>,
settings_update_only: bool,
) -> Self {
let only_additional_fields = match (
@ -1273,6 +1283,39 @@ impl InnerIndexSettingsDiff {
let cache_user_defined_searchables = old_settings.user_defined_searchable_fields
!= new_settings.user_defined_searchable_fields;
// if the user-defined searchables changed, then we need to reindex prompts.
if cache_user_defined_searchables {
for (embedder_name, (config, _, _quantized)) in
new_settings.embedding_configs.inner_as_ref()
{
let was_quantized =
old_settings.embedding_configs.get(embedder_name).map_or(false, |conf| conf.2);
// skip embedders that don't use document templates
if !config.uses_document_template() {
continue;
}
// note: this could currently be `entry.or_insert(..)`, but the explicit match is future-proof:
// it forces every newly added case to be handled explicitly
match embedding_config_updates.entry(embedder_name.clone()) {
std::collections::btree_map::Entry::Vacant(entry) => {
entry.insert(EmbedderAction::with_reindex(
ReindexAction::RegeneratePrompts,
was_quantized,
));
}
std::collections::btree_map::Entry::Occupied(entry) => {
let EmbedderAction {
was_quantized: _,
is_being_quantized: _,
write_back: _, // We are deleting this embedder, so no point in regeneration
reindex: _, // We are already fully reindexing
} = entry.get();
}
};
}
}
InnerIndexSettingsDiff {
old: old_settings,
new: new_settings,
@ -1518,7 +1561,7 @@ fn embedders(embedding_configs: Vec<IndexEmbeddingConfig>) -> Result<EmbeddingCo
.map(
|IndexEmbeddingConfig {
name,
config: EmbeddingConfig { embedder_options, prompt },
config: EmbeddingConfig { embedder_options, prompt, quantized },
..
}| {
let prompt = Arc::new(prompt.try_into().map_err(crate::Error::from)?);
@ -1528,7 +1571,7 @@ fn embedders(embedding_configs: Vec<IndexEmbeddingConfig>) -> Result<EmbeddingCo
.map_err(crate::vector::Error::from)
.map_err(crate::Error::from)?,
);
Ok((name, (embedder, prompt)))
Ok((name, (embedder, prompt, quantized.unwrap_or_default())))
},
)
.collect();
@ -1547,16 +1590,31 @@ fn validate_prompt(
api_key,
dimensions,
document_template: Setting::Set(template),
document_template_max_bytes,
url,
request,
response,
distribution,
headers,
binary_quantized: binary_quantize,
}) => {
let max_bytes = match document_template_max_bytes.set() {
Some(max_bytes) => NonZeroUsize::new(max_bytes).ok_or_else(|| {
crate::error::UserError::InvalidSettingsDocumentTemplateMaxBytes {
embedder_name: name.to_owned(),
}
})?,
None => default_max_bytes(),
};
// validate
let template = crate::prompt::Prompt::new(template)
.map(|prompt| crate::prompt::PromptData::from(prompt).template)
.map_err(|inner| UserError::InvalidPromptForEmbeddings(name.to_owned(), inner))?;
let template = crate::prompt::Prompt::new(
template,
// always specify a max_bytes
Some(max_bytes),
)
.map(|prompt| crate::prompt::PromptData::from(prompt).template)
.map_err(|inner| UserError::InvalidPromptForEmbeddings(name.to_owned(), inner))?;
Ok(Setting::Set(EmbeddingSettings {
source,
@ -1565,11 +1623,13 @@ fn validate_prompt(
api_key,
dimensions,
document_template: Setting::Set(template),
document_template_max_bytes,
url,
request,
response,
distribution,
headers,
binary_quantized: binary_quantize,
}))
}
new => Ok(new),
@ -1589,11 +1649,13 @@ pub fn validate_embedding_settings(
api_key,
dimensions,
document_template,
document_template_max_bytes,
url,
request,
response,
distribution,
headers,
binary_quantized: binary_quantize,
} = settings;
if let Some(0) = dimensions.set() {
@ -1628,11 +1690,13 @@ pub fn validate_embedding_settings(
api_key,
dimensions,
document_template,
document_template_max_bytes,
url,
request,
response,
distribution,
headers,
binary_quantized: binary_quantize,
}));
};
match inferred_source {
@ -1700,6 +1764,12 @@ pub fn validate_embedding_settings(
inferred_source,
name,
)?;
check_unset(
&document_template_max_bytes,
EmbeddingSettings::DOCUMENT_TEMPLATE_MAX_BYTES,
inferred_source,
name,
)?;
check_set(&dimensions, EmbeddingSettings::DIMENSIONS, inferred_source, name)?;
check_unset(&url, EmbeddingSettings::URL, inferred_source, name)?;
@ -1722,11 +1792,13 @@ pub fn validate_embedding_settings(
api_key,
dimensions,
document_template,
document_template_max_bytes,
url,
request,
response,
distribution,
headers,
binary_quantized: binary_quantize,
}))
}

View file

@ -1,8 +1,12 @@
use std::collections::HashMap;
use std::sync::Arc;
use arroy::distances::{Angular, BinaryQuantizedAngular};
use arroy::ItemId;
use deserr::{DeserializeError, Deserr};
use heed::{RoTxn, RwTxn, Unspecified};
use ordered_float::OrderedFloat;
use roaring::RoaringBitmap;
use serde::{Deserialize, Serialize};
use self::error::{EmbedError, NewEmbedderError};
@ -26,6 +30,171 @@ pub type Embedding = Vec<f32>;
pub const REQUEST_PARALLELISM: usize = 40;
pub struct ArroyWrapper {
quantized: bool,
index: u16,
database: arroy::Database<Unspecified>,
}
impl ArroyWrapper {
pub fn new(database: arroy::Database<Unspecified>, index: u16, quantized: bool) -> Self {
Self { database, index, quantized }
}
pub fn index(&self) -> u16 {
self.index
}
pub fn dimensions(&self, rtxn: &RoTxn) -> Result<usize, arroy::Error> {
if self.quantized {
Ok(arroy::Reader::open(rtxn, self.index, self.quantized_db())?.dimensions())
} else {
Ok(arroy::Reader::open(rtxn, self.index, self.angular_db())?.dimensions())
}
}
pub fn quantize(
&mut self,
wtxn: &mut RwTxn,
index: u16,
dimension: usize,
) -> Result<(), arroy::Error> {
if !self.quantized {
let writer = arroy::Writer::new(self.angular_db(), index, dimension);
writer.prepare_changing_distance::<BinaryQuantizedAngular>(wtxn)?;
self.quantized = true;
}
Ok(())
}
pub fn need_build(&self, rtxn: &RoTxn, dimension: usize) -> Result<bool, arroy::Error> {
if self.quantized {
arroy::Writer::new(self.quantized_db(), self.index, dimension).need_build(rtxn)
} else {
arroy::Writer::new(self.angular_db(), self.index, dimension).need_build(rtxn)
}
}
pub fn build<R: rand::Rng + rand::SeedableRng>(
&self,
wtxn: &mut RwTxn,
rng: &mut R,
dimension: usize,
) -> Result<(), arroy::Error> {
if self.quantized {
arroy::Writer::new(self.quantized_db(), self.index, dimension).build(wtxn, rng, None)
} else {
arroy::Writer::new(self.angular_db(), self.index, dimension).build(wtxn, rng, None)
}
}
pub fn add_item(
&self,
wtxn: &mut RwTxn,
dimension: usize,
item_id: arroy::ItemId,
vector: &[f32],
) -> Result<(), arroy::Error> {
if self.quantized {
arroy::Writer::new(self.quantized_db(), self.index, dimension)
.add_item(wtxn, item_id, vector)
} else {
arroy::Writer::new(self.angular_db(), self.index, dimension)
.add_item(wtxn, item_id, vector)
}
}
pub fn del_item(
&self,
wtxn: &mut RwTxn,
dimension: usize,
item_id: arroy::ItemId,
) -> Result<bool, arroy::Error> {
if self.quantized {
arroy::Writer::new(self.quantized_db(), self.index, dimension).del_item(wtxn, item_id)
} else {
arroy::Writer::new(self.angular_db(), self.index, dimension).del_item(wtxn, item_id)
}
}
pub fn clear(&self, wtxn: &mut RwTxn, dimension: usize) -> Result<(), arroy::Error> {
if self.quantized {
arroy::Writer::new(self.quantized_db(), self.index, dimension).clear(wtxn)
} else {
arroy::Writer::new(self.angular_db(), self.index, dimension).clear(wtxn)
}
}
pub fn is_empty(&self, rtxn: &RoTxn, dimension: usize) -> Result<bool, arroy::Error> {
if self.quantized {
arroy::Writer::new(self.quantized_db(), self.index, dimension).is_empty(rtxn)
} else {
arroy::Writer::new(self.angular_db(), self.index, dimension).is_empty(rtxn)
}
}
pub fn contains_item(
&self,
rtxn: &RoTxn,
dimension: usize,
item: arroy::ItemId,
) -> Result<bool, arroy::Error> {
if self.quantized {
arroy::Writer::new(self.quantized_db(), self.index, dimension).contains_item(rtxn, item)
} else {
arroy::Writer::new(self.angular_db(), self.index, dimension).contains_item(rtxn, item)
}
}
pub fn nns_by_item(
&self,
rtxn: &RoTxn,
item: ItemId,
limit: usize,
filter: Option<&RoaringBitmap>,
) -> Result<Option<Vec<(ItemId, f32)>>, arroy::Error> {
if self.quantized {
arroy::Reader::open(rtxn, self.index, self.quantized_db())?
.nns_by_item(rtxn, item, limit, None, None, filter)
} else {
arroy::Reader::open(rtxn, self.index, self.angular_db())?
.nns_by_item(rtxn, item, limit, None, None, filter)
}
}
pub fn nns_by_vector(
&self,
txn: &RoTxn,
item: &[f32],
limit: usize,
filter: Option<&RoaringBitmap>,
) -> Result<Vec<(ItemId, f32)>, arroy::Error> {
if self.quantized {
arroy::Reader::open(txn, self.index, self.quantized_db())?
.nns_by_vector(txn, item, limit, None, None, filter)
} else {
arroy::Reader::open(txn, self.index, self.angular_db())?
.nns_by_vector(txn, item, limit, None, None, filter)
}
}
pub fn item_vector(&self, rtxn: &RoTxn, docid: u32) -> Result<Option<Vec<f32>>, arroy::Error> {
if self.quantized {
arroy::Reader::open(rtxn, self.index, self.quantized_db())?.item_vector(rtxn, docid)
} else {
arroy::Reader::open(rtxn, self.index, self.angular_db())?.item_vector(rtxn, docid)
}
}
fn angular_db(&self) -> arroy::Database<Angular> {
self.database.remap_data_type()
}
fn quantized_db(&self) -> arroy::Database<BinaryQuantizedAngular> {
self.database.remap_data_type()
}
}
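`ArroyWrapper` exists so callers never branch on quantization themselves: every method checks the flag once and remaps the database to the right codec. A usage sketch for the write path; the caller-supplied database, transaction, and rng are assumptions, and real code builds once per batch rather than per item:

```rust
use heed::RwTxn;

// Hypothetical caller; `ArroyWrapper` is the type defined above.
fn index_one_vector(
    db: arroy::Database<heed::Unspecified>,
    wtxn: &mut RwTxn,
    rng: &mut rand::rngs::StdRng,
    docid: u32,
    vector: &[f32],
    quantized: bool,
) -> Result<(), arroy::Error> {
    let wrapper = ArroyWrapper::new(db, 0, quantized);
    wrapper.add_item(wtxn, vector.len(), docid, vector)?;
    // shown here for completeness; batching amortizes this call
    wrapper.build(wtxn, rng, vector.len())?;
    Ok(())
}
```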
/// One or multiple embeddings stored consecutively in a flat vector.
pub struct Embeddings<F> {
data: Vec<F>,
@ -124,62 +293,48 @@ pub struct EmbeddingConfig {
pub embedder_options: EmbedderOptions,
/// Document template
pub prompt: PromptData,
/// If this embedder is binary quantized
pub quantized: Option<bool>,
// TODO: add metrics and anything needed
}
impl EmbeddingConfig {
pub fn quantized(&self) -> bool {
self.quantized.unwrap_or_default()
}
}
/// Map of embedder configurations.
///
/// Each configuration is mapped to a name.
#[derive(Clone, Default)]
pub struct EmbeddingConfigs(HashMap<String, (Arc<Embedder>, Arc<Prompt>)>);
pub struct EmbeddingConfigs(HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)>);
impl EmbeddingConfigs {
/// Create the map from its internal components.
pub fn new(data: HashMap<String, (Arc<Embedder>, Arc<Prompt>)>) -> Self {
pub fn new(data: HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)>) -> Self {
Self(data)
}
/// Get an embedder configuration and template from its name.
pub fn get(&self, name: &str) -> Option<(Arc<Embedder>, Arc<Prompt>)> {
pub fn get(&self, name: &str) -> Option<(Arc<Embedder>, Arc<Prompt>, bool)> {
self.0.get(name).cloned()
}
/// Get the default embedder configuration, if any.
pub fn get_default(&self) -> Option<(Arc<Embedder>, Arc<Prompt>)> {
self.get(self.get_default_embedder_name())
}
pub fn inner_as_ref(&self) -> &HashMap<String, (Arc<Embedder>, Arc<Prompt>)> {
pub fn inner_as_ref(&self) -> &HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)> {
&self.0
}
pub fn into_inner(self) -> HashMap<String, (Arc<Embedder>, Arc<Prompt>)> {
pub fn into_inner(self) -> HashMap<String, (Arc<Embedder>, Arc<Prompt>, bool)> {
self.0
}
/// Get the name of the default embedder configuration.
///
/// The default embedder is determined as follows:
///
/// - If there is only one embedder, it is always the default.
/// - If there are multiple embedders and one of them is called `default`, then that one is the default embedder.
/// - In all other cases, there is no default embedder.
pub fn get_default_embedder_name(&self) -> &str {
let mut it = self.0.keys();
let first_name = it.next();
let second_name = it.next();
match (first_name, second_name) {
(None, _) => "default",
(Some(first), None) => first,
(Some(_), Some(_)) => "default",
}
}
}
impl IntoIterator for EmbeddingConfigs {
type Item = (String, (Arc<Embedder>, Arc<Prompt>));
type Item = (String, (Arc<Embedder>, Arc<Prompt>, bool));
type IntoIter = std::collections::hash_map::IntoIter<String, (Arc<Embedder>, Arc<Prompt>)>;
type IntoIter =
std::collections::hash_map::IntoIter<String, (Arc<Embedder>, Arc<Prompt>, bool)>;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
@ -305,6 +460,16 @@ impl Embedder {
Embedder::Rest(embedder) => embedder.distribution(),
}
}
pub fn uses_document_template(&self) -> bool {
match self {
Embedder::HuggingFace(_)
| Embedder::OpenAi(_)
| Embedder::Ollama(_)
| Embedder::Rest(_) => true,
Embedder::UserProvided(_) => false,
}
}
}
/// Describes the mean and sigma of distribution of embedding similarity in the embedding space.

View file

@ -66,11 +66,11 @@ pub enum EmbeddingModel {
// # WARNING
//
// If ever adding a model, make sure to add it to the list of supported models below.
#[default]
#[serde(rename = "text-embedding-ada-002")]
#[deserr(rename = "text-embedding-ada-002")]
TextEmbeddingAda002,
#[default]
#[serde(rename = "text-embedding-3-small")]
#[deserr(rename = "text-embedding-3-small")]
TextEmbedding3Small,

View file

@ -1,11 +1,12 @@
use std::collections::BTreeMap;
use std::num::NonZeroUsize;
use deserr::Deserr;
use roaring::RoaringBitmap;
use serde::{Deserialize, Serialize};
use super::{ollama, openai, DistributionShift};
use crate::prompt::PromptData;
use crate::prompt::{default_max_bytes, PromptData};
use crate::update::Setting;
use crate::vector::EmbeddingConfig;
use crate::UserError;
@ -31,9 +32,15 @@ pub struct EmbeddingSettings {
pub dimensions: Setting<usize>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub binary_quantized: Setting<bool>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub document_template: Setting<String>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub document_template_max_bytes: Setting<usize>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub url: Setting<String>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
@ -81,23 +88,63 @@ pub enum ReindexAction {
pub enum SettingsDiff {
Remove,
Reindex { action: ReindexAction, updated_settings: EmbeddingSettings },
UpdateWithoutReindex { updated_settings: EmbeddingSettings },
Reindex { action: ReindexAction, updated_settings: EmbeddingSettings, quantize: bool },
UpdateWithoutReindex { updated_settings: EmbeddingSettings, quantize: bool },
}
pub enum EmbedderAction {
WriteBackToDocuments(WriteBackToDocuments),
Reindex(ReindexAction),
#[derive(Default, Debug)]
pub struct EmbedderAction {
pub was_quantized: bool,
pub is_being_quantized: bool,
pub write_back: Option<WriteBackToDocuments>,
pub reindex: Option<ReindexAction>,
}
impl EmbedderAction {
pub fn is_being_quantized(&self) -> bool {
self.is_being_quantized
}
pub fn write_back(&self) -> Option<&WriteBackToDocuments> {
self.write_back.as_ref()
}
pub fn reindex(&self) -> Option<&ReindexAction> {
self.reindex.as_ref()
}
pub fn with_is_being_quantized(mut self, quantize: bool) -> Self {
self.is_being_quantized = quantize;
self
}
pub fn with_write_back(write_back: WriteBackToDocuments, was_quantized: bool) -> Self {
Self {
was_quantized,
is_being_quantized: false,
write_back: Some(write_back),
reindex: None,
}
}
pub fn with_reindex(reindex: ReindexAction, was_quantized: bool) -> Self {
Self { was_quantized, is_being_quantized: false, write_back: None, reindex: Some(reindex) }
}
}
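A usage sketch of the builder-style constructors, assuming an embedder whose prompts must be regenerated while quantization is being turned on:

let action = EmbedderAction::with_reindex(ReindexAction::RegeneratePrompts, false)
    .with_is_being_quantized(true);
assert!(action.is_being_quantized());
assert!(action.write_back().is_none());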
#[derive(Debug)]
pub struct WriteBackToDocuments {
pub embedder_id: u8,
pub user_provided: RoaringBitmap,
}
impl SettingsDiff {
pub fn from_settings(old: EmbeddingSettings, new: Setting<EmbeddingSettings>) -> Self {
match new {
pub fn from_settings(
embedder_name: &str,
old: EmbeddingSettings,
new: Setting<EmbeddingSettings>,
) -> Result<Self, UserError> {
let ret = match new {
Setting::Set(new) => {
let EmbeddingSettings {
mut source,
@@ -111,6 +158,8 @@ impl SettingsDiff {
mut response,
mut distribution,
mut headers,
mut document_template_max_bytes,
binary_quantized: mut binary_quantize,
} = old;
let EmbeddingSettings {
@@ -125,8 +174,18 @@ impl SettingsDiff {
response: new_response,
distribution: new_distribution,
headers: new_headers,
document_template_max_bytes: new_document_template_max_bytes,
binary_quantized: new_binary_quantize,
} = new;
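// Refuse to turn quantization back off: quantizing already discarded
// precision, so the original vectors cannot be rebuilt from the index.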
if matches!(binary_quantize, Setting::Set(true))
&& matches!(new_binary_quantize, Setting::Set(false))
{
return Err(UserError::InvalidDisableBinaryQuantization {
embedder_name: embedder_name.to_string(),
});
}
let mut reindex_action = None;
// **Warning**: do not use short-circuiting || here, we want all these operations applied
@@ -142,6 +201,7 @@ impl SettingsDiff {
&mut request,
&mut response,
&mut document_template,
&mut document_template_max_bytes,
&mut headers,
)
}
@@ -165,6 +225,7 @@ impl SettingsDiff {
_ => {}
}
}
let binary_quantize_changed = binary_quantize.apply(new_binary_quantize);
if url.apply(new_url) {
match source {
// do not regenerate on an url change in OpenAI
@@ -190,6 +251,23 @@ impl SettingsDiff {
);
}
if document_template_max_bytes.apply(new_document_template_max_bytes) {
let previous_document_template_max_bytes =
document_template_max_bytes.set().unwrap_or(default_max_bytes().get());
let new_document_template_max_bytes =
new_document_template_max_bytes.set().unwrap_or(default_max_bytes().get());
// only reindex if the size increased. Reasoning:
// - size decrease is a performance optimization, so we don't reindex and we keep the more accurate vectors
// - size increase is an accuracy optimization, so we want to reindex
if new_document_template_max_bytes > previous_document_template_max_bytes {
ReindexAction::push_action(
&mut reindex_action,
ReindexAction::RegeneratePrompts,
)
}
}
distribution.apply(new_distribution);
api_key.apply(new_api_key);
headers.apply(new_headers);
@@ -206,16 +284,28 @@ impl SettingsDiff {
response,
distribution,
headers,
document_template_max_bytes,
binary_quantized: binary_quantize,
};
match reindex_action {
Some(action) => Self::Reindex { action, updated_settings },
None => Self::UpdateWithoutReindex { updated_settings },
Some(action) => Self::Reindex {
action,
updated_settings,
quantize: binary_quantize_changed,
},
None => Self::UpdateWithoutReindex {
updated_settings,
quantize: binary_quantize_changed,
},
}
}
Setting::Reset => Self::Remove,
Setting::NotSet => Self::UpdateWithoutReindex { updated_settings: old },
}
Setting::NotSet => {
Self::UpdateWithoutReindex { updated_settings: old, quantize: false }
}
};
Ok(ret)
}
}
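Since the constructor is now fallible, call sites must propagate the quantization error; a hedged sketch, assuming a caller that already returns Result<_, UserError>:

let diff = SettingsDiff::from_settings("default", old_settings, new_settings)?;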
@@ -239,6 +329,7 @@ fn apply_default_for_source(
request: &mut Setting<serde_json::Value>,
response: &mut Setting<serde_json::Value>,
document_template: &mut Setting<String>,
document_template_max_bytes: &mut Setting<usize>,
headers: &mut Setting<BTreeMap<String, String>>,
) {
match source {
@@ -286,6 +377,7 @@ fn apply_default_for_source(
*request = Setting::NotSet;
*response = Setting::NotSet;
*document_template = Setting::NotSet;
*document_template_max_bytes = Setting::NotSet;
*headers = Setting::NotSet;
}
Setting::NotSet => {}
@@ -316,6 +408,7 @@ impl EmbeddingSettings {
pub const API_KEY: &'static str = "apiKey";
pub const DIMENSIONS: &'static str = "dimensions";
pub const DOCUMENT_TEMPLATE: &'static str = "documentTemplate";
pub const DOCUMENT_TEMPLATE_MAX_BYTES: &'static str = "documentTemplateMaxBytes";
pub const URL: &'static str = "url";
pub const REQUEST: &'static str = "request";
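These constants are the user-facing camelCase key names; a sketch of the kind of dotted settings path they can be assembled into (the surrounding format is illustrative, not an API of the crate):

let path = format!(
    ".embedders.{embedder_name}.{}",
    EmbeddingSettings::DOCUMENT_TEMPLATE_MAX_BYTES
);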
@@ -458,7 +551,9 @@ impl std::fmt::Display for EmbedderSource {
impl From<EmbeddingConfig> for EmbeddingSettings {
fn from(value: EmbeddingConfig) -> Self {
let EmbeddingConfig { embedder_options, prompt } = value;
let EmbeddingConfig { embedder_options, prompt, quantized } = value;
let document_template_max_bytes =
Setting::Set(prompt.max_bytes.unwrap_or(default_max_bytes()).get());
match embedder_options {
super::EmbedderOptions::HuggingFace(super::hf::EmbedderOptions {
model,
@@ -471,11 +566,13 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
api_key: Setting::NotSet,
dimensions: Setting::NotSet,
document_template: Setting::Set(prompt.template),
document_template_max_bytes,
url: Setting::NotSet,
request: Setting::NotSet,
response: Setting::NotSet,
headers: Setting::NotSet,
distribution: Setting::some_or_not_set(distribution),
binary_quantized: Setting::some_or_not_set(quantized),
},
super::EmbedderOptions::OpenAi(super::openai::EmbedderOptions {
url,
@@ -490,11 +587,13 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
api_key: Setting::some_or_not_set(api_key),
dimensions: Setting::some_or_not_set(dimensions),
document_template: Setting::Set(prompt.template),
document_template_max_bytes,
url: Setting::some_or_not_set(url),
request: Setting::NotSet,
response: Setting::NotSet,
headers: Setting::NotSet,
distribution: Setting::some_or_not_set(distribution),
binary_quantized: Setting::some_or_not_set(quantized),
},
super::EmbedderOptions::Ollama(super::ollama::EmbedderOptions {
embedding_model,
@@ -509,11 +608,13 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
api_key: Setting::some_or_not_set(api_key),
dimensions: Setting::some_or_not_set(dimensions),
document_template: Setting::Set(prompt.template),
document_template_max_bytes,
url: Setting::some_or_not_set(url),
request: Setting::NotSet,
response: Setting::NotSet,
headers: Setting::NotSet,
distribution: Setting::some_or_not_set(distribution),
binary_quantized: Setting::some_or_not_set(quantized),
},
super::EmbedderOptions::UserProvided(super::manual::EmbedderOptions {
dimensions,
@@ -525,11 +626,13 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
api_key: Setting::NotSet,
dimensions: Setting::Set(dimensions),
document_template: Setting::NotSet,
document_template_max_bytes: Setting::NotSet,
url: Setting::NotSet,
request: Setting::NotSet,
response: Setting::NotSet,
headers: Setting::NotSet,
distribution: Setting::some_or_not_set(distribution),
binary_quantized: Setting::some_or_not_set(quantized),
},
super::EmbedderOptions::Rest(super::rest::EmbedderOptions {
api_key,
@@ -546,11 +649,13 @@ impl From<EmbeddingConfig> for EmbeddingSettings {
api_key: Setting::some_or_not_set(api_key),
dimensions: Setting::some_or_not_set(dimensions),
document_template: Setting::Set(prompt.template),
document_template_max_bytes,
url: Setting::Set(url),
request: Setting::Set(request),
response: Setting::Set(response),
distribution: Setting::some_or_not_set(distribution),
headers: Setting::Set(headers),
binary_quantized: Setting::some_or_not_set(quantized),
},
}
}
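Going from a stored `EmbeddingConfig` to user-facing settings is then a plain conversion; a sketch:

// Sketch: the stored quantized flag carries through to the settings
// via Setting::some_or_not_set in every arm above.
let settings: EmbeddingSettings = embedding_config.into();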
@@ -566,13 +671,17 @@ impl From<EmbeddingSettings> for EmbeddingConfig {
api_key,
dimensions,
document_template,
document_template_max_bytes,
url,
request,
response,
distribution,
headers,
binary_quantized,
} = value;
this.quantized = binary_quantized.set();
if let Some(source) = source.set() {
match source {
EmbedderSource::OpenAi => {
@@ -648,7 +757,12 @@ impl From<EmbeddingSettings> for EmbeddingConfig {
}
if let Setting::Set(template) = document_template {
this.prompt = PromptData { template }
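// NonZeroUsize::new maps an explicit 0 to None, so zero and unset both
// fall back to the default document template byte budget.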
let max_bytes = document_template_max_bytes
.set()
.and_then(NonZeroUsize::new)
.unwrap_or(default_max_bytes());
this.prompt = PromptData { template, max_bytes: Some(max_bytes) }
}
this