use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use std::convert::TryInto;
use std::result::Result as StdResult;
use std::sync::Arc;

use charabia::{Normalize, Tokenizer, TokenizerBuilder};
use deserr::{DeserializeError, Deserr};
use itertools::{EitherOrBoth, Itertools};
use roaring::RoaringBitmap;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use time::OffsetDateTime;

use super::del_add::DelAddOperation;
use super::index_documents::{IndexDocumentsConfig, Transform};
use super::IndexerConfig;
use crate::criterion::Criterion;
use crate::error::UserError;
use crate::index::{
    IndexEmbeddingConfig, DEFAULT_MIN_WORD_LEN_ONE_TYPO, DEFAULT_MIN_WORD_LEN_TWO_TYPOS,
};
use crate::order_by_map::OrderByMap;
use crate::proximity::ProximityPrecision;
use crate::update::index_documents::IndexDocumentsMethod;
use crate::update::{IndexDocuments, UpdateIndexingStep};
use crate::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME;
use crate::vector::settings::{
    check_set, check_unset, EmbedderAction, EmbedderSource, EmbeddingSettings, ReindexAction,
    WriteBackToDocuments,
};
use crate::vector::{Embedder, EmbeddingConfig, EmbeddingConfigs};
use crate::{FieldId, FieldsIdsMap, Index, Result};

#[derive(Debug, Clone, PartialEq, Eq, Copy)]
pub enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}
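
// Note (added commentary): `Set` carries a new value, `Reset` asks for the stored value to be
// cleared back to its default, and `NotSet` leaves the current value untouched. The deserr and
// serde impls below map an explicit JSON `null` to `Reset`, while `Default` yields `NotSet`.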

impl<T, E> Deserr<E> for Setting<T>
where
    T: Deserr<E>,
    E: DeserializeError,
{
    fn deserialize_from_value<V: deserr::IntoValue>(
        value: deserr::Value<V>,
        location: deserr::ValuePointerRef,
    ) -> std::result::Result<Self, E> {
        match value {
            deserr::Value::Null => Ok(Setting::Reset),
            _ => T::deserialize_from_value(value, location).map(Setting::Set),
        }
    }
}

impl<T> Default for Setting<T> {
    fn default() -> Self {
        Self::NotSet
    }
}

impl<T> Setting<T> {
    pub fn set(self) -> Option<T> {
        match self {
            Self::Set(value) => Some(value),
            _ => None,
        }
    }

    pub const fn as_ref(&self) -> Setting<&T> {
        match *self {
            Self::Set(ref value) => Setting::Set(value),
            Self::Reset => Setting::Reset,
            Self::NotSet => Setting::NotSet,
        }
    }

    pub const fn is_not_set(&self) -> bool {
        matches!(self, Self::NotSet)
    }

    /// If `Self` is `Reset`, then map self to `Set` with the provided `val`.
    pub fn or_reset(self, val: T) -> Self {
        match self {
            Self::Reset => Self::Set(val),
            otherwise => otherwise,
        }
    }

    /// Returns `true` if applying the new setting changed this setting
    pub fn apply(&mut self, new: Self) -> bool
    where
        T: PartialEq + Eq,
    {
        if let Setting::NotSet = new {
            return false;
        }
        if self == &new {
            return false;
        }
        *self = new;
        true
    }
}

impl<T: Serialize> Serialize for Setting<T> {
    fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match self {
            Self::Set(value) => Some(value),
            // Usually `NotSet` isn't serialized at all thanks to the `skip_serializing_if` field attribute
            Self::NotSet | Self::Reset => None,
        }
        .serialize(serializer)
    }
}

impl<'de, T: Deserialize<'de>> Deserialize<'de> for Setting<T> {
    fn deserialize<D>(deserializer: D) -> StdResult<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Deserialize::deserialize(deserializer).map(|x| match x {
            Some(x) => Self::Set(x),
            None => Self::Reset, // Reset is forced by sending a null value
        })
    }
}
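
// Illustrative sketch, not part of the original sources: a small test module exercising the
// three-state semantics of `Setting<T>` defined above. It assumes `serde_json` is available to
// the crate; the test names are ours.
#[cfg(test)]
mod setting_state_tests {
    use super::Setting;

    #[test]
    fn apply_ignores_not_set_and_reports_changes() {
        let mut current = Setting::Set(3u8);
        // `NotSet` never overwrites an existing value.
        assert!(!current.apply(Setting::NotSet));
        assert_eq!(current, Setting::Set(3));
        // Applying a different value reports a change.
        assert!(current.apply(Setting::Set(5)));
        assert_eq!(current, Setting::Set(5));
        // `or_reset` maps `Reset` to the provided default and leaves `Set` untouched.
        assert_eq!(Setting::<u8>::Reset.or_reset(2), Setting::Set(2));
        assert_eq!(Setting::Set(5u8).or_reset(2), Setting::Set(5));
    }

    #[test]
    fn json_null_deserializes_to_reset() {
        // An explicit `null` means "reset this setting", while a concrete value means "set it".
        let setting: Setting<u8> = serde_json::from_str("null").unwrap();
        assert_eq!(setting, Setting::Reset);
        let setting: Setting<u8> = serde_json::from_str("4").unwrap();
        assert_eq!(setting, Setting::Set(4));
    }
}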

pub struct Settings<'a, 't, 'i> {
    wtxn: &'t mut heed::RwTxn<'i>,
    index: &'i Index,
    indexer_config: &'a IndexerConfig,

    searchable_fields: Setting<Vec<String>>,
    displayed_fields: Setting<Vec<String>>,
    filterable_fields: Setting<HashSet<String>>,
    sortable_fields: Setting<HashSet<String>>,
    criteria: Setting<Vec<Criterion>>,
    stop_words: Setting<BTreeSet<String>>,
    non_separator_tokens: Setting<BTreeSet<String>>,
    separator_tokens: Setting<BTreeSet<String>>,
    dictionary: Setting<BTreeSet<String>>,
    distinct_field: Setting<String>,
    synonyms: Setting<BTreeMap<String, Vec<String>>>,
    primary_key: Setting<String>,
    authorize_typos: Setting<bool>,
    min_word_len_two_typos: Setting<u8>,
    min_word_len_one_typo: Setting<u8>,
    exact_words: Setting<BTreeSet<String>>,
    /// Attributes on which typo tolerance is disabled.
    exact_attributes: Setting<HashSet<String>>,
    max_values_per_facet: Setting<usize>,
    sort_facet_values_by: Setting<OrderByMap>,
    pagination_max_total_hits: Setting<usize>,
    proximity_precision: Setting<ProximityPrecision>,
    embedder_settings: Setting<BTreeMap<String, Setting<EmbeddingSettings>>>,
    search_cutoff: Setting<u64>,
}

impl<'a, 't, 'i> Settings<'a, 't, 'i> {
    pub fn new(
        wtxn: &'t mut heed::RwTxn<'i>,
        index: &'i Index,
        indexer_config: &'a IndexerConfig,
    ) -> Settings<'a, 't, 'i> {
        Settings {
            wtxn,
            index,
            searchable_fields: Setting::NotSet,
            displayed_fields: Setting::NotSet,
            filterable_fields: Setting::NotSet,
            sortable_fields: Setting::NotSet,
            criteria: Setting::NotSet,
            stop_words: Setting::NotSet,
            non_separator_tokens: Setting::NotSet,
            separator_tokens: Setting::NotSet,
            dictionary: Setting::NotSet,
            distinct_field: Setting::NotSet,
            synonyms: Setting::NotSet,
            primary_key: Setting::NotSet,
            authorize_typos: Setting::NotSet,
            exact_words: Setting::NotSet,
            min_word_len_two_typos: Setting::NotSet,
            min_word_len_one_typo: Setting::NotSet,
            exact_attributes: Setting::NotSet,
            max_values_per_facet: Setting::NotSet,
            sort_facet_values_by: Setting::NotSet,
            pagination_max_total_hits: Setting::NotSet,
            proximity_precision: Setting::NotSet,
            embedder_settings: Setting::NotSet,
            search_cutoff: Setting::NotSet,
            indexer_config,
        }
    }

    pub fn reset_searchable_fields(&mut self) {
        self.searchable_fields = Setting::Reset;
    }

    pub fn set_searchable_fields(&mut self, names: Vec<String>) {
        self.searchable_fields = Setting::Set(names);
    }

    pub fn reset_displayed_fields(&mut self) {
        self.displayed_fields = Setting::Reset;
    }

    pub fn set_displayed_fields(&mut self, names: Vec<String>) {
        self.displayed_fields = Setting::Set(names);
    }

    pub fn reset_filterable_fields(&mut self) {
        self.filterable_fields = Setting::Reset;
    }

    pub fn set_filterable_fields(&mut self, names: HashSet<String>) {
        self.filterable_fields = Setting::Set(names);
    }

    pub fn set_sortable_fields(&mut self, names: HashSet<String>) {
        self.sortable_fields = Setting::Set(names);
    }

    pub fn reset_sortable_fields(&mut self) {
        self.sortable_fields = Setting::Reset;
    }

    pub fn reset_criteria(&mut self) {
        self.criteria = Setting::Reset;
    }

    pub fn set_criteria(&mut self, criteria: Vec<Criterion>) {
        self.criteria = Setting::Set(criteria);
    }

    pub fn reset_stop_words(&mut self) {
        self.stop_words = Setting::Reset;
    }

    pub fn set_stop_words(&mut self, stop_words: BTreeSet<String>) {
        self.stop_words =
            if stop_words.is_empty() { Setting::Reset } else { Setting::Set(stop_words) }
    }

    pub fn reset_non_separator_tokens(&mut self) {
        self.non_separator_tokens = Setting::Reset;
    }

    pub fn set_non_separator_tokens(&mut self, non_separator_tokens: BTreeSet<String>) {
        self.non_separator_tokens = if non_separator_tokens.is_empty() {
            Setting::Reset
        } else {
            Setting::Set(non_separator_tokens)
        }
    }

    pub fn reset_separator_tokens(&mut self) {
        self.separator_tokens = Setting::Reset;
    }

    pub fn set_separator_tokens(&mut self, separator_tokens: BTreeSet<String>) {
        self.separator_tokens = if separator_tokens.is_empty() {
            Setting::Reset
        } else {
            Setting::Set(separator_tokens)
        }
    }

    pub fn reset_dictionary(&mut self) {
        self.dictionary = Setting::Reset;
    }

    pub fn set_dictionary(&mut self, dictionary: BTreeSet<String>) {
        self.dictionary =
            if dictionary.is_empty() { Setting::Reset } else { Setting::Set(dictionary) }
    }

    pub fn reset_distinct_field(&mut self) {
        self.distinct_field = Setting::Reset;
    }

    pub fn set_distinct_field(&mut self, distinct_field: String) {
        self.distinct_field = Setting::Set(distinct_field);
    }

    pub fn reset_synonyms(&mut self) {
        self.synonyms = Setting::Reset;
    }

    pub fn set_synonyms(&mut self, synonyms: BTreeMap<String, Vec<String>>) {
        self.synonyms = if synonyms.is_empty() { Setting::Reset } else { Setting::Set(synonyms) }
    }

    pub fn reset_primary_key(&mut self) {
        self.primary_key = Setting::Reset;
    }

    pub fn set_primary_key(&mut self, primary_key: String) {
        self.primary_key = Setting::Set(primary_key);
    }

    pub fn set_autorize_typos(&mut self, val: bool) {
        self.authorize_typos = Setting::Set(val);
    }

    pub fn reset_authorize_typos(&mut self) {
        self.authorize_typos = Setting::Reset;
    }

    pub fn set_min_word_len_two_typos(&mut self, val: u8) {
        self.min_word_len_two_typos = Setting::Set(val);
    }

    pub fn reset_min_word_len_two_typos(&mut self) {
        self.min_word_len_two_typos = Setting::Reset;
    }

    pub fn set_min_word_len_one_typo(&mut self, val: u8) {
        self.min_word_len_one_typo = Setting::Set(val);
    }

    pub fn reset_min_word_len_one_typo(&mut self) {
        self.min_word_len_one_typo = Setting::Reset;
    }

    pub fn set_exact_words(&mut self, words: BTreeSet<String>) {
        self.exact_words = Setting::Set(words);
    }

    pub fn reset_exact_words(&mut self) {
        self.exact_words = Setting::Reset;
    }

    pub fn set_exact_attributes(&mut self, attrs: HashSet<String>) {
        self.exact_attributes = Setting::Set(attrs);
    }

    pub fn reset_exact_attributes(&mut self) {
        self.exact_attributes = Setting::Reset;
    }

    pub fn set_max_values_per_facet(&mut self, value: usize) {
        self.max_values_per_facet = Setting::Set(value);
    }

    pub fn reset_max_values_per_facet(&mut self) {
        self.max_values_per_facet = Setting::Reset;
    }

    pub fn set_sort_facet_values_by(&mut self, value: OrderByMap) {
        self.sort_facet_values_by = Setting::Set(value);
    }

    pub fn reset_sort_facet_values_by(&mut self) {
        self.sort_facet_values_by = Setting::Reset;
    }

    pub fn set_pagination_max_total_hits(&mut self, value: usize) {
        self.pagination_max_total_hits = Setting::Set(value);
    }

    pub fn reset_pagination_max_total_hits(&mut self) {
        self.pagination_max_total_hits = Setting::Reset;
    }

    pub fn set_proximity_precision(&mut self, value: ProximityPrecision) {
        self.proximity_precision = Setting::Set(value);
    }

    pub fn reset_proximity_precision(&mut self) {
        self.proximity_precision = Setting::Reset;
    }

    pub fn set_embedder_settings(&mut self, value: BTreeMap<String, Setting<EmbeddingSettings>>) {
        self.embedder_settings = Setting::Set(value);
    }

    pub fn reset_embedder_settings(&mut self) {
        self.embedder_settings = Setting::Reset;
    }

    pub fn set_search_cutoff(&mut self, value: u64) {
        self.search_cutoff = Setting::Set(value);
    }

    pub fn reset_search_cutoff(&mut self) {
        self.search_cutoff = Setting::Reset;
    }

    #[tracing::instrument(
        level = "trace",
        skip(self, progress_callback, should_abort, settings_diff),
        target = "indexing::documents"
    )]
    fn reindex<FP, FA>(
        &mut self,
        progress_callback: &FP,
        should_abort: &FA,
        settings_diff: InnerIndexSettingsDiff,
    ) -> Result<()>
    where
        FP: Fn(UpdateIndexingStep) + Sync,
        FA: Fn() -> bool + Sync,
    {
        // if the settings are set before any document update, we don't need to do anything, and
        // will set the primary key during the first document addition.
        if self.index.number_of_documents(self.wtxn)? == 0 {
            return Ok(());
        }

        let transform = Transform::new(
            self.wtxn,
            self.index,
            self.indexer_config,
            IndexDocumentsMethod::ReplaceDocuments,
            false,
        )?;

        // We clear the databases and remap the documents fields based on the new `FieldsIdsMap`.
        let output = transform.prepare_for_documents_reindexing(self.wtxn, settings_diff)?;

        // We index the generated `TransformOutput` which must contain
        // all the documents with fields in the newly defined searchable order.
        let indexing_builder = IndexDocuments::new(
            self.wtxn,
            self.index,
            self.indexer_config,
            IndexDocumentsConfig::default(),
            &progress_callback,
            &should_abort,
        )?;

        indexing_builder.execute_raw(output)?;

        Ok(())
    }

    fn update_displayed(&mut self) -> Result<bool> {
        match self.displayed_fields {
            Setting::Set(ref fields) => {
                // fields are deduplicated, only the first occurrence is taken into account
                let names: Vec<_> = fields.iter().unique().map(String::as_str).collect();
                self.index.put_displayed_fields(self.wtxn, &names)?;
            }
            Setting::Reset => {
                self.index.delete_displayed_fields(self.wtxn)?;
            }
            Setting::NotSet => return Ok(false),
        }
        Ok(true)
    }

    fn update_distinct_field(&mut self) -> Result<bool> {
        match self.distinct_field {
            Setting::Set(ref attr) => {
                self.index.put_distinct_field(self.wtxn, attr)?;
            }
            Setting::Reset => {
                self.index.delete_distinct_field(self.wtxn)?;
            }
            Setting::NotSet => return Ok(false),
        }
        Ok(true)
    }

    /// Updates the index's searchable attributes.
    fn update_searchable(&mut self) -> Result<bool> {
        match self.searchable_fields {
            Setting::Set(ref fields) => {
                // Check to see if the searchable fields changed before doing anything else
                let old_fields = self.index.searchable_fields(self.wtxn)?;
                let did_change = {
                    let new_fields = fields.iter().map(String::as_str).collect::<Vec<_>>();
                    new_fields != old_fields
                };
                if !did_change {
                    return Ok(false);
                }

                // Since we're updating the settings we can only add new fields at the end of the field id map
                let mut fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
                // fields are deduplicated, only the first occurrence is taken into account
                let names = fields.iter().unique().map(String::as_str).collect::<Vec<_>>();

                // Add all the searchable attributes to the field map, and then add the
                // remaining fields from the old field map to the new one
                for name in names.iter() {
                    // The fields ids map won't change the field id of already present elements thus only the
                    // new fields will be inserted.
                    fields_ids_map.insert(name).ok_or(UserError::AttributeLimitReached)?;
                }

                self.index.put_all_searchable_fields_from_fields_ids_map(
                    self.wtxn,
                    &names,
                    &fields_ids_map.nested_ids(RESERVED_VECTORS_FIELD_NAME),
                    &fields_ids_map,
                )?;
                self.index.put_fields_ids_map(self.wtxn, &fields_ids_map)?;
                Ok(true)
            }
            Setting::Reset => Ok(self.index.delete_all_searchable_fields(self.wtxn)?),
            Setting::NotSet => Ok(false),
        }
    }

    fn update_stop_words(&mut self) -> Result<bool> {
        match self.stop_words {
            Setting::Set(ref stop_words) => {
                let current = self.index.stop_words(self.wtxn)?;

                // Apply an unlossy normalization on stop_words
                let stop_words: BTreeSet<String> = stop_words
                    .iter()
                    .map(|w| w.as_str().normalize(&Default::default()).into_owned())
                    .collect();

                // since we can't compare a BTreeSet with an FST we are going to convert the
                // BTreeSet to an FST and then compare the two FSTs byte per byte.
                let fst = fst::Set::from_iter(stop_words.into_iter())?;

                // Does the new FST differ from the previous one?
                if current
                    .map_or(true, |current| current.as_fst().as_bytes() != fst.as_fst().as_bytes())
                {
                    // we want to re-create our FST.
                    self.index.put_stop_words(self.wtxn, &fst)?;
                    Ok(true)
                } else {
                    Ok(false)
                }
            }
            Setting::Reset => Ok(self.index.delete_stop_words(self.wtxn)?),
            Setting::NotSet => Ok(false),
        }
    }

    fn update_non_separator_tokens(&mut self) -> Result<bool> {
        let changes = match self.non_separator_tokens {
            Setting::Set(ref non_separator_tokens) => {
                let current = self.index.non_separator_tokens(self.wtxn)?;
                // Does the new list differ from the previous one?
                if current.map_or(true, |current| &current != non_separator_tokens) {
                    self.index.put_non_separator_tokens(self.wtxn, non_separator_tokens)?;
                    true
                } else {
                    false
                }
            }
            Setting::Reset => self.index.delete_non_separator_tokens(self.wtxn)?,
            Setting::NotSet => false,
        };

        // the synonyms must be updated if non separator tokens have been updated.
        if changes && self.synonyms == Setting::NotSet {
            self.synonyms = Setting::Set(self.index.user_defined_synonyms(self.wtxn)?);
        }

        Ok(changes)
    }

    fn update_separator_tokens(&mut self) -> Result<bool> {
        let changes = match self.separator_tokens {
            Setting::Set(ref separator_tokens) => {
                let current = self.index.separator_tokens(self.wtxn)?;
                // Does the new list differ from the previous one?
                if current.map_or(true, |current| &current != separator_tokens) {
                    self.index.put_separator_tokens(self.wtxn, separator_tokens)?;
                    true
                } else {
                    false
                }
            }
            Setting::Reset => self.index.delete_separator_tokens(self.wtxn)?,
            Setting::NotSet => false,
        };

        // the synonyms must be updated if separator tokens have been updated.
        if changes && self.synonyms == Setting::NotSet {
            self.synonyms = Setting::Set(self.index.user_defined_synonyms(self.wtxn)?);
        }

        Ok(changes)
    }

    fn update_dictionary(&mut self) -> Result<bool> {
        let changes = match self.dictionary {
            Setting::Set(ref dictionary) => {
                let current = self.index.dictionary(self.wtxn)?;
                // Does the new list differ from the previous one?
                if current.map_or(true, |current| &current != dictionary) {
                    self.index.put_dictionary(self.wtxn, dictionary)?;
                    true
                } else {
                    false
                }
            }
            Setting::Reset => self.index.delete_dictionary(self.wtxn)?,
            Setting::NotSet => false,
        };

        // the synonyms must be updated if dictionary has been updated.
        if changes && self.synonyms == Setting::NotSet {
            self.synonyms = Setting::Set(self.index.user_defined_synonyms(self.wtxn)?);
        }

        Ok(changes)
    }

    fn update_synonyms(&mut self) -> Result<bool> {
        match self.synonyms {
            Setting::Set(ref user_synonyms) => {
                fn normalize(tokenizer: &Tokenizer, text: &str) -> Vec<String> {
                    tokenizer
                        .tokenize(text)
                        .filter_map(|token| {
                            if token.is_word() && !token.lemma().is_empty() {
                                Some(token.lemma().to_string())
                            } else {
                                None
                            }
                        })
                        .collect::<Vec<_>>()
                }

                let mut builder = TokenizerBuilder::new();
                let stop_words = self.index.stop_words(self.wtxn)?;
                if let Some(ref stop_words) = stop_words {
                    builder.stop_words(stop_words);
                }
                let separators = self.index.allowed_separators(self.wtxn)?;
                let separators: Option<Vec<_>> =
                    separators.as_ref().map(|x| x.iter().map(String::as_str).collect());
                if let Some(ref separators) = separators {
                    builder.separators(separators);
                }
                let dictionary = self.index.dictionary(self.wtxn)?;
                let dictionary: Option<Vec<_>> =
                    dictionary.as_ref().map(|x| x.iter().map(String::as_str).collect());
                if let Some(ref dictionary) = dictionary {
                    builder.words_dict(dictionary);
                }
                let tokenizer = builder.build();

                let mut new_synonyms = HashMap::new();
                for (word, synonyms) in user_synonyms {
                    // Normalize both the word and associated synonyms.
                    let normalized_word = normalize(&tokenizer, word);
                    let normalized_synonyms: Vec<_> = synonyms
                        .iter()
                        .map(|synonym| normalize(&tokenizer, synonym))
                        .filter(|synonym| !synonym.is_empty())
                        .collect();

                    // Store the normalized synonyms under the normalized word,
                    // merging the possible duplicate words.
                    if !normalized_word.is_empty() && !normalized_synonyms.is_empty() {
                        let entry = new_synonyms.entry(normalized_word).or_insert_with(Vec::new);
                        entry.extend(normalized_synonyms.into_iter());
                    }
                }

                // Make sure that we don't have duplicate synonyms.
                new_synonyms.iter_mut().for_each(|(_, synonyms)| {
                    synonyms.sort_unstable();
                    synonyms.dedup();
                });

                let old_synonyms = self.index.synonyms(self.wtxn)?;

                if new_synonyms != old_synonyms {
                    self.index.put_synonyms(self.wtxn, &new_synonyms, user_synonyms)?;
                    Ok(true)
                } else {
                    Ok(false)
                }
            }
            Setting::Reset => Ok(self.index.delete_synonyms(self.wtxn)?),
            Setting::NotSet => Ok(false),
        }
    }

    fn update_exact_attributes(&mut self) -> Result<bool> {
        match self.exact_attributes {
            Setting::Set(ref attrs) => {
                let old_attrs = self.index.exact_attributes(self.wtxn)?;
                let old_attrs = old_attrs.into_iter().map(String::from).collect::<HashSet<_>>();

                if attrs != &old_attrs {
                    let attrs = attrs.iter().map(String::as_str).collect::<Vec<_>>();
                    self.index.put_exact_attributes(self.wtxn, &attrs)?;
                    Ok(true)
                } else {
                    Ok(false)
                }
            }
            Setting::Reset => Ok(self.index.delete_exact_attributes(self.wtxn)?),
            Setting::NotSet => Ok(false),
        }
    }

    fn update_filterable(&mut self) -> Result<()> {
        match self.filterable_fields {
            Setting::Set(ref fields) => {
                let mut new_facets = HashSet::new();
                for name in fields {
                    new_facets.insert(name.clone());
                }
                self.index.put_filterable_fields(self.wtxn, &new_facets)?;
            }
            Setting::Reset => {
                self.index.delete_filterable_fields(self.wtxn)?;
            }
            Setting::NotSet => (),
        }
        Ok(())
    }

    fn update_sortable(&mut self) -> Result<()> {
        match self.sortable_fields {
            Setting::Set(ref fields) => {
                let mut new_fields = HashSet::new();
                for name in fields {
                    new_fields.insert(name.clone());
                }
                self.index.put_sortable_fields(self.wtxn, &new_fields)?;
            }
            Setting::Reset => {
                self.index.delete_sortable_fields(self.wtxn)?;
            }
            Setting::NotSet => (),
        }
        Ok(())
    }

    fn update_criteria(&mut self) -> Result<()> {
        match &self.criteria {
            Setting::Set(criteria) => {
                self.index.put_criteria(self.wtxn, criteria)?;
            }
            Setting::Reset => {
                self.index.delete_criteria(self.wtxn)?;
            }
            Setting::NotSet => (),
        }
        Ok(())
    }

    fn update_primary_key(&mut self) -> Result<()> {
        match self.primary_key {
            Setting::Set(ref primary_key) => {
                if self.index.number_of_documents(self.wtxn)? == 0 {
                    let mut fields_ids_map = self.index.fields_ids_map(self.wtxn)?;
                    fields_ids_map.insert(primary_key).ok_or(UserError::AttributeLimitReached)?;
                    self.index.put_fields_ids_map(self.wtxn, &fields_ids_map)?;
                    self.index.put_primary_key(self.wtxn, primary_key)?;
                    Ok(())
                } else {
                    let curr_primary_key = self.index.primary_key(self.wtxn)?.unwrap().to_string();
                    if primary_key == &curr_primary_key {
                        Ok(())
                    } else {
                        Err(UserError::PrimaryKeyCannotBeChanged(curr_primary_key).into())
                    }
                }
            }
            Setting::Reset => {
                if self.index.number_of_documents(self.wtxn)? == 0 {
                    self.index.delete_primary_key(self.wtxn)?;
                    Ok(())
                } else {
                    let primary_key = self.index.primary_key(self.wtxn)?.unwrap();
                    Err(UserError::PrimaryKeyCannotBeChanged(primary_key.to_string()).into())
                }
            }
            Setting::NotSet => Ok(()),
        }
    }

    fn update_authorize_typos(&mut self) -> Result<()> {
        match self.authorize_typos {
            Setting::Set(flag) => {
                self.index.put_authorize_typos(self.wtxn, flag)?;
                Ok(())
            }
            Setting::Reset => {
                self.index.put_authorize_typos(self.wtxn, true)?;
                Ok(())
            }
            Setting::NotSet => Ok(()),
        }
    }

    fn update_min_typo_word_len(&mut self) -> Result<()> {
        let one = self.min_word_len_one_typo.or_reset(DEFAULT_MIN_WORD_LEN_ONE_TYPO);
        let two = self.min_word_len_two_typos.or_reset(DEFAULT_MIN_WORD_LEN_TWO_TYPOS);
        match (one, two) {
            (Setting::Set(one), Setting::Set(two)) => {
                if one > two {
                    return Err(UserError::InvalidMinTypoWordLenSetting(one, two).into());
                } else {
                    self.index.put_min_word_len_one_typo(self.wtxn, one)?;
                    self.index.put_min_word_len_two_typos(self.wtxn, two)?;
                }
            }
            (Setting::Set(one), _) => {
                let two = self.index.min_word_len_two_typos(self.wtxn)?;
                if one > two {
                    return Err(UserError::InvalidMinTypoWordLenSetting(one, two).into());
                } else {
                    self.index.put_min_word_len_one_typo(self.wtxn, one)?;
                }
            }
            (_, Setting::Set(two)) => {
                let one = self.index.min_word_len_one_typo(self.wtxn)?;
                if one > two {
                    return Err(UserError::InvalidMinTypoWordLenSetting(one, two).into());
                } else {
                    self.index.put_min_word_len_two_typos(self.wtxn, two)?;
                }
            }
            _ => (),
        }
        Ok(())
    }

    fn update_exact_words(&mut self) -> Result<()> {
        match self.exact_words {
            Setting::Set(ref mut words) => {
                fn normalize(tokenizer: &Tokenizer, text: &str) -> String {
                    tokenizer.tokenize(text).map(|token| token.lemma().to_string()).collect()
                }

                let mut builder = TokenizerBuilder::new();
                let stop_words = self.index.stop_words(self.wtxn)?;
                if let Some(ref stop_words) = stop_words {
                    builder.stop_words(stop_words);
                }
                let tokenizer = builder.build();

                let mut words: Vec<_> =
                    words.iter().map(|word| normalize(&tokenizer, word)).collect();

                // normalization could reorder words
                words.sort_unstable();

                let words = fst::Set::from_iter(words.iter())?;
                self.index.put_exact_words(self.wtxn, &words)?;
            }
            Setting::Reset => {
                self.index.put_exact_words(self.wtxn, &fst::Set::default())?;
            }
            Setting::NotSet => (),
        }
        Ok(())
    }

    fn update_max_values_per_facet(&mut self) -> Result<()> {
        match self.max_values_per_facet {
            Setting::Set(max) => {
                self.index.put_max_values_per_facet(self.wtxn, max as u64)?;
            }
            Setting::Reset => {
                self.index.delete_max_values_per_facet(self.wtxn)?;
            }
            Setting::NotSet => (),
        }
        Ok(())
    }

    fn update_sort_facet_values_by(&mut self) -> Result<()> {
        match self.sort_facet_values_by.as_ref() {
            Setting::Set(value) => {
                self.index.put_sort_facet_values_by(self.wtxn, value)?;
            }
            Setting::Reset => {
                self.index.delete_sort_facet_values_by(self.wtxn)?;
            }
            Setting::NotSet => (),
        }
        Ok(())
    }

    fn update_pagination_max_total_hits(&mut self) -> Result<()> {
        match self.pagination_max_total_hits {
            Setting::Set(max) => {
                self.index.put_pagination_max_total_hits(self.wtxn, max as u64)?;
            }
            Setting::Reset => {
                self.index.delete_pagination_max_total_hits(self.wtxn)?;
            }
            Setting::NotSet => (),
        }
        Ok(())
    }

    fn update_proximity_precision(&mut self) -> Result<bool> {
        let changed = match self.proximity_precision {
            Setting::Set(new) => {
                let old = self.index.proximity_precision(self.wtxn)?;
                if old == Some(new) {
                    false
                } else {
                    self.index.put_proximity_precision(self.wtxn, new)?;
                    true
                }
            }
            Setting::Reset => self.index.delete_proximity_precision(self.wtxn)?,
            Setting::NotSet => false,
        };
        Ok(changed)
    }

    fn update_embedding_configs(&mut self) -> Result<BTreeMap<String, EmbedderAction>> {
        match std::mem::take(&mut self.embedder_settings) {
            Setting::Set(configs) => self.update_embedding_configs_set(configs),
            Setting::Reset => {
                // all vectors should be written back to documents
                let old_configs = self.index.embedding_configs(self.wtxn)?;
                let remove_all: Result<BTreeMap<String, EmbedderAction>> = old_configs
                    .into_iter()
                    .map(|IndexEmbeddingConfig { name, config: _, user_provided }| -> Result<_> {
                        let embedder_id =
                            self.index.embedder_category_id.get(self.wtxn, &name)?.ok_or(
                                crate::InternalError::DatabaseMissingEntry {
                                    db_name: crate::index::db_name::VECTOR_EMBEDDER_CATEGORY_ID,
                                    key: None,
                                },
                            )?;
                        Ok((
                            name,
                            EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
                                embedder_id,
                                user_provided,
                            }),
                        ))
                    })
                    .collect();
                let remove_all = remove_all?;

                self.index.embedder_category_id.clear(self.wtxn)?;
                self.index.delete_embedding_configs(self.wtxn)?;
                Ok(remove_all)
            }
            Setting::NotSet => Ok(Default::default()),
        }
    }

    fn update_embedding_configs_set(
        &mut self,
        configs: BTreeMap<String, Setting<EmbeddingSettings>>,
    ) -> Result<BTreeMap<String, EmbedderAction>> {
        use crate::vector::settings::SettingsDiff;

        let old_configs = self.index.embedding_configs(self.wtxn)?;
        let old_configs: BTreeMap<String, (EmbeddingSettings, RoaringBitmap)> = old_configs
            .into_iter()
            .map(|IndexEmbeddingConfig { name, config, user_provided }| {
                (name, (config.into(), user_provided))
            })
            .collect();

        let mut updated_configs = BTreeMap::new();
        let mut embedder_actions = BTreeMap::new();
        for joined in old_configs
            .into_iter()
            .merge_join_by(configs.into_iter(), |(left, _), (right, _)| left.cmp(right))
        {
            match joined {
                // updated config
                EitherOrBoth::Both((name, (old, user_provided)), (_, new)) => {
                    let settings_diff = SettingsDiff::from_settings(old, new);
                    match settings_diff {
                        SettingsDiff::Remove => {
                            tracing::debug!(
                                embedder = name,
                                user_provided = user_provided.len(),
                                "removing embedder"
                            );
                            let embedder_id =
                                self.index.embedder_category_id.get(self.wtxn, &name)?.ok_or(
                                    crate::InternalError::DatabaseMissingEntry {
                                        db_name: crate::index::db_name::VECTOR_EMBEDDER_CATEGORY_ID,
                                        key: None,
                                    },
                                )?;
                            // free id immediately
                            self.index.embedder_category_id.delete(self.wtxn, &name)?;
                            embedder_actions.insert(
                                name,
                                EmbedderAction::WriteBackToDocuments(WriteBackToDocuments {
                                    embedder_id,
                                    user_provided,
                                }),
                            );
                        }
                        SettingsDiff::Reindex { action, updated_settings } => {
                            tracing::debug!(
                                embedder = name,
                                user_provided = user_provided.len(),
                                ?action,
                                "reindex embedder"
                            );
                            embedder_actions
                                .insert(name.clone(), EmbedderAction::Reindex(action));
                            let new =
                                validate_embedding_settings(Setting::Set(updated_settings), &name)?;
                            updated_configs.insert(name, (new, user_provided));
                        }
                        SettingsDiff::UpdateWithoutReindex { updated_settings } => {
                            tracing::debug!(
                                embedder = name,
                                user_provided = user_provided.len(),
                                "update without reindex embedder"
                            );
                            let new =
                                validate_embedding_settings(Setting::Set(updated_settings), &name)?;
                            updated_configs.insert(name, (new, user_provided));
                        }
                    }
                }
                // unchanged config
                EitherOrBoth::Left((name, (setting, user_provided))) => {
                    tracing::debug!(embedder = name, "unchanged embedder");
                    updated_configs.insert(name, (Setting::Set(setting), user_provided));
                }
                // new config
                EitherOrBoth::Right((name, mut setting)) => {
                    tracing::debug!(embedder = name, "new embedder");
                    // apply the default source in case the source was not set so that it gets validated
                    crate::vector::settings::EmbeddingSettings::apply_default_source(&mut setting);
                    crate::vector::settings::EmbeddingSettings::apply_default_openai_model(
                        &mut setting,
                    );
                    let setting = validate_embedding_settings(setting, &name)?;
                    embedder_actions
                        .insert(name.clone(), EmbedderAction::Reindex(ReindexAction::FullReindex));
                    updated_configs.insert(name, (setting, RoaringBitmap::new()));
                }
            }
        }

        let mut free_indices: [bool; u8::MAX as usize] = [true; u8::MAX as usize];
        for res in self.index.embedder_category_id.iter(self.wtxn)? {
            let (_name, id) = res?;
            free_indices[id as usize] = false;
        }
        let mut free_indices = free_indices.iter_mut().enumerate();
        let mut find_free_index =
            move || free_indices.find(|(_, free)| **free).map(|(index, _)| index as u8);
        for (name, action) in embedder_actions.iter() {
            match action {
                EmbedderAction::Reindex(ReindexAction::RegeneratePrompts) => {
                    /* cannot be a new embedder, so has to have an id already */
                }
                EmbedderAction::Reindex(ReindexAction::FullReindex) => {
                    if self.index.embedder_category_id.get(self.wtxn, name)?.is_none() {
                        let id = find_free_index()
                            .ok_or(UserError::TooManyEmbedders(updated_configs.len()))?;
                        tracing::debug!(embedder = name, id, "assigning free id to new embedder");
                        self.index.embedder_category_id.put(self.wtxn, name, &id)?;
                    }
                }
                EmbedderAction::WriteBackToDocuments(_) => { /* already removed */ }
            }
        }

        let updated_configs: Vec<IndexEmbeddingConfig> = updated_configs
            .into_iter()
            .filter_map(|(name, (config, user_provided))| match config {
                Setting::Set(config) => {
                    Some(IndexEmbeddingConfig { name, config: config.into(), user_provided })
                }
                Setting::Reset => None,
                Setting::NotSet => Some(IndexEmbeddingConfig {
                    name,
                    config: EmbeddingSettings::default().into(),
                    user_provided,
                }),
            })
            .collect();

        if updated_configs.is_empty() {
            self.index.delete_embedding_configs(self.wtxn)?;
        } else {
            self.index.put_embedding_configs(self.wtxn, updated_configs)?;
        }

        Ok(embedder_actions)
    }

    fn update_search_cutoff(&mut self) -> Result<bool> {
        let changed = match self.search_cutoff {
            Setting::Set(new) => {
                let old = self.index.search_cutoff(self.wtxn)?;
                if old == Some(new) {
                    false
                } else {
                    self.index.put_search_cutoff(self.wtxn, new)?;
                    true
                }
            }
            Setting::Reset => self.index.delete_search_cutoff(self.wtxn)?,
            Setting::NotSet => false,
        };
        Ok(changed)
    }

    pub fn execute<FP, FA>(mut self, progress_callback: FP, should_abort: FA) -> Result<()>
    where
        FP: Fn(UpdateIndexingStep) + Sync,
        FA: Fn() -> bool + Sync,
    {
        self.index.set_updated_at(self.wtxn, &OffsetDateTime::now_utc())?;

        let old_inner_settings = InnerIndexSettings::from_index(self.index, self.wtxn)?;

        // never trigger re-indexing
        self.update_displayed()?;
        self.update_distinct_field()?;
        self.update_criteria()?;
        self.update_primary_key()?;
        self.update_authorize_typos()?;
        self.update_min_typo_word_len()?;
        self.update_exact_words()?;
        self.update_max_values_per_facet()?;
        self.update_sort_facet_values_by()?;
        self.update_pagination_max_total_hits()?;
        self.update_search_cutoff()?;

        // could trigger re-indexing
        self.update_filterable()?;
        self.update_sortable()?;
        self.update_stop_words()?;
        self.update_non_separator_tokens()?;
        self.update_separator_tokens()?;
        self.update_dictionary()?;
        self.update_synonyms()?;
        self.update_searchable()?;
        self.update_exact_attributes()?;
        self.update_proximity_precision()?;

        let embedding_config_updates = self.update_embedding_configs()?;

        let mut new_inner_settings = InnerIndexSettings::from_index(self.index, self.wtxn)?;
        new_inner_settings.recompute_facets(self.wtxn, self.index)?;

        let primary_key_id = self
            .index
            .primary_key(self.wtxn)?
            .and_then(|name| new_inner_settings.fields_ids_map.id(name));
        let settings_update_only = true;
        let inner_settings_diff = InnerIndexSettingsDiff::new(
            old_inner_settings,
            new_inner_settings,
            primary_key_id,
            embedding_config_updates,
            settings_update_only,
        );

        if inner_settings_diff.any_reindexing_needed() {
            self.reindex(&progress_callback, &should_abort, inner_settings_diff)?;
        }

        Ok(())
    }
}
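
// Illustrative sketch, not part of the original sources: how a caller is expected to drive the
// builder above. The field names used in the setters are made-up examples; the callbacks passed
// to `execute` are a no-op progress callback and an abort check that never aborts.
#[cfg(test)]
#[allow(dead_code)]
fn apply_example_settings(mut settings: Settings<'_, '_, '_>) -> Result<()> {
    settings.set_searchable_fields(vec!["title".to_string(), "overview".to_string()]);
    settings.set_filterable_fields(["genres".to_string()].into_iter().collect());
    settings.set_stop_words(["the".to_string(), "a".to_string()].into_iter().collect());
    // `execute` persists the simple settings directly and only runs `reindex` when the computed
    // `InnerIndexSettingsDiff` reports that some change actually requires it.
    settings.execute(|_step| (), || false)
}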

pub struct InnerIndexSettingsDiff {
    pub(crate) old: InnerIndexSettings,
    pub(crate) new: InnerIndexSettings,
    pub(crate) primary_key_id: Option<FieldId>,
    pub(crate) embedding_config_updates: BTreeMap<String, EmbedderAction>,
    pub(crate) settings_update_only: bool,
    /// The set of only the additional searchable fields.
    /// If any other searchable field has been modified, this is set to `None`.
    pub(crate) only_additional_fields: Option<HashSet<String>>,

    // Cache the check to see if any of the stop_words, allowed_separators, dictionary,
    // or proximity_precision settings have changed.
    pub(crate) cache_reindex_searchable_without_user_defined: bool,
    // Cache the check to see if the user_defined_searchables are different.
    pub(crate) cache_user_defined_searchables: bool,
    // Cache the check to see if the exact_attributes are different.
    pub(crate) cache_exact_attributes: bool,
}

impl InnerIndexSettingsDiff {
    #[tracing::instrument(level = "trace", skip_all, target = "indexing::settings")]
    pub(crate) fn new(
        old_settings: InnerIndexSettings,
        new_settings: InnerIndexSettings,
        primary_key_id: Option<FieldId>,
        embedding_config_updates: BTreeMap<String, EmbedderAction>,
        settings_update_only: bool,
    ) -> Self {
        let only_additional_fields = match (
            &old_settings.user_defined_searchable_fields,
            &new_settings.user_defined_searchable_fields,
        ) {
            (None, None) | (Some(_), None) | (None, Some(_)) => None, // None means *
            (Some(old), Some(new)) => {
                let old: HashSet<_> = old.iter().cloned().collect();
                let new: HashSet<_> = new.iter().cloned().collect();
                if old.difference(&new).next().is_none() {
                    // if no field has been removed return only the additional ones
                    Some(&new - &old).filter(|x| !x.is_empty())
                } else {
                    None
                }
            }
        };

        let cache_reindex_searchable_without_user_defined = {
            old_settings.stop_words.as_ref().map(|set| set.as_fst().as_bytes())
                != new_settings.stop_words.as_ref().map(|set| set.as_fst().as_bytes())
                || old_settings.allowed_separators != new_settings.allowed_separators
                || old_settings.dictionary != new_settings.dictionary
                || old_settings.proximity_precision != new_settings.proximity_precision
        };

        let cache_exact_attributes = old_settings.exact_attributes != new_settings.exact_attributes;

        let cache_user_defined_searchables = old_settings.user_defined_searchable_fields
            != new_settings.user_defined_searchable_fields;

        InnerIndexSettingsDiff {
            old: old_settings,
            new: new_settings,
            primary_key_id,
            embedding_config_updates,
            settings_update_only,
            only_additional_fields,
            cache_reindex_searchable_without_user_defined,
            cache_user_defined_searchables,
            cache_exact_attributes,
        }
    }

    pub fn any_reindexing_needed(&self) -> bool {
        self.reindex_searchable() || self.reindex_facets() || self.reindex_vectors()
    }

    pub fn reindex_searchable(&self) -> bool {
        self.cache_reindex_searchable_without_user_defined
            || self.cache_exact_attributes
            || self.cache_user_defined_searchables
    }

    pub fn reindex_proximities(&self) -> bool {
        // if any searchable setting forces the reindexing
        (self.cache_reindex_searchable_without_user_defined || self.cache_user_defined_searchables)
        // and if any setting needs the proximity database to be created
            && (self.old.proximity_precision == ProximityPrecision::ByAttribute
                || self.new.proximity_precision == ProximityPrecision::ByAttribute)
    }

    pub fn reindex_searchable_id(&self, id: FieldId) -> Option<DelAddOperation> {
        if self.cache_reindex_searchable_without_user_defined || self.cache_exact_attributes {
            Some(DelAddOperation::DeletionAndAddition)
        } else if let Some(only_additional_fields) = &self.only_additional_fields {
            let additional_field = self.new.fields_ids_map.name(id).unwrap();
            if only_additional_fields.contains(additional_field) {
                Some(DelAddOperation::Addition)
            } else {
                None
            }
        } else if self.cache_user_defined_searchables {
            Some(DelAddOperation::DeletionAndAddition)
        } else {
            None
        }
    }

    pub fn reindex_facets(&self) -> bool {
        let existing_fields = &self.new.existing_fields;
        if existing_fields.iter().any(|field| field.contains('.')) {
            return true;
        }

        let old_faceted_fields = &self.old.user_defined_faceted_fields;
        if old_faceted_fields.iter().any(|field| field.contains('.')) {
            return true;
        }

        // If there are new faceted fields we indicate that we must reindex as we must
        // index the new fields as facets. It means that a distinct attribute,
        // an Asc/Desc criterion or a filterable attribute has been added or removed.
        let new_faceted_fields = &self.new.user_defined_faceted_fields;
        if new_faceted_fields.iter().any(|field| field.contains('.')) {
            return true;
        }

        (existing_fields - old_faceted_fields) != (existing_fields - new_faceted_fields)
    }

    pub fn reindex_vectors(&self) -> bool {
        !self.embedding_config_updates.is_empty()
    }

    pub fn settings_update_only(&self) -> bool {
        self.settings_update_only
    }

    pub fn run_geo_indexing(&self) -> bool {
        self.old.geo_fields_ids != self.new.geo_fields_ids
            || (!self.settings_update_only && self.new.geo_fields_ids.is_some())
    }

    pub fn modified_faceted_fields(&self) -> HashSet<String> {
        &self.old.user_defined_faceted_fields ^ &self.new.user_defined_faceted_fields
    }
}
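
// Illustrative sketch, not part of the original sources: a helper summarizing which reindexing
// steps a given diff would trigger, useful when tracing why a settings update caused a reindex.
#[cfg(test)]
#[allow(dead_code)]
fn describe_reindexing(diff: &InnerIndexSettingsDiff) -> String {
    format!(
        "searchable: {}, facets: {}, vectors: {}, geo: {}",
        diff.reindex_searchable(),
        diff.reindex_facets(),
        diff.reindex_vectors(),
        diff.run_geo_indexing(),
    )
}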
2024-03-26 13:27:43 +01:00
#[ derive(Clone) ]
2024-03-20 13:37:26 +01:00
pub ( crate ) struct InnerIndexSettings {
2024-03-26 13:27:43 +01:00
pub stop_words : Option < fst ::Set < Vec < u8 > > > ,
pub allowed_separators : Option < BTreeSet < String > > ,
pub dictionary : Option < BTreeSet < String > > ,
pub fields_ids_map : FieldsIdsMap ,
pub user_defined_faceted_fields : HashSet < String > ,
pub user_defined_searchable_fields : Option < Vec < String > > ,
pub faceted_fields_ids : HashSet < FieldId > ,
2024-05-06 14:49:45 +02:00
pub searchable_fields_ids : Vec < FieldId > ,
2024-03-26 13:27:43 +01:00
pub exact_attributes : HashSet < FieldId > ,
pub proximity_precision : ProximityPrecision ,
pub embedding_configs : EmbeddingConfigs ,
pub existing_fields : HashSet < String > ,
pub geo_fields_ids : Option < ( FieldId , FieldId ) > ,
pub non_searchable_fields_ids : Vec < FieldId > ,
pub non_faceted_fields_ids : Vec < FieldId > ,
}
impl InnerIndexSettings {
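/// Builds an `InnerIndexSettings` by reading the current state of the given index.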
pub fn from_index ( index : & Index , rtxn : & heed ::RoTxn ) -> Result < Self > {
let stop_words = index . stop_words ( rtxn ) ? ;
let stop_words = stop_words . map ( | sw | sw . map_data ( Vec ::from ) . unwrap ( ) ) ;
let allowed_separators = index . allowed_separators ( rtxn ) ? ;
let dictionary = index . dictionary ( rtxn ) ? ;
let mut fields_ids_map = index . fields_ids_map ( rtxn ) ? ;
let user_defined_searchable_fields = index . user_defined_searchable_fields ( rtxn ) ? ;
let user_defined_searchable_fields =
user_defined_searchable_fields . map ( | sf | sf . into_iter ( ) . map ( String ::from ) . collect ( ) ) ;
let user_defined_faceted_fields = index . user_defined_faceted_fields ( rtxn ) ? ;
let mut searchable_fields_ids = index . searchable_fields_ids ( rtxn ) ? ;
let mut faceted_fields_ids = index . faceted_fields_ids ( rtxn ) ? ;
let exact_attributes = index . exact_attributes_ids ( rtxn ) ? ;
let proximity_precision = index . proximity_precision ( rtxn ) ? . unwrap_or_default ( ) ;
let embedding_configs = embedders ( index . embedding_configs ( rtxn ) ? ) ? ;
let existing_fields : HashSet < _ > = index
. field_distribution ( rtxn ) ?
. into_iter ( )
. filter_map ( | ( field , count ) | ( count ! = 0 ) . then_some ( field ) )
. collect ( ) ;
// Look up the `_geo` field in the in-memory fields ids map.
let geo_fields_ids = match fields_ids_map . id ( " _geo " ) {
Some ( gfid ) = > {
let is_sortable = index . sortable_fields_ids ( rtxn ) ? . contains ( & gfid ) ;
let is_filterable = index . filterable_fields_ids ( rtxn ) ? . contains ( & gfid ) ;
// if `_geo` is faceted then we get the `lat` and `lng`
if is_sortable | | is_filterable {
let field_ids = fields_ids_map
. insert ( " _geo.lat " )
. zip ( fields_ids_map . insert ( " _geo.lng " ) )
. ok_or ( UserError ::AttributeLimitReached ) ? ;
Some ( field_ids )
} else {
None
}
}
None = > None ,
} ;
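// The reserved `_vectors` sub-fields must be neither searchable nor faceted.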
let vectors_fids = fields_ids_map . nested_ids ( RESERVED_VECTORS_FIELD_NAME ) ;
searchable_fields_ids . retain ( | id | ! vectors_fids . contains ( id ) ) ;
faceted_fields_ids . retain ( | id | ! vectors_fids . contains ( id ) ) ;
Ok ( Self {
stop_words ,
allowed_separators ,
dictionary ,
fields_ids_map ,
user_defined_faceted_fields ,
user_defined_searchable_fields ,
faceted_fields_ids ,
searchable_fields_ids ,
exact_attributes ,
proximity_precision ,
embedding_configs ,
existing_fields ,
geo_fields_ids ,
non_searchable_fields_ids : vectors_fids . clone ( ) ,
non_faceted_fields_ids : vectors_fids . clone ( ) ,
} )
}
// Recompute the faceted fields from the fields ids map and persist them to the index.
pub fn recompute_facets ( & mut self , wtxn : & mut heed ::RwTxn , index : & Index ) -> Result < ( ) > {
let new_facets = self
. fields_ids_map
. iter ( )
. filter ( | ( fid , _field ) | ! self . non_faceted_fields_ids . contains ( fid ) )
. filter ( | ( _fid , field ) | crate ::is_faceted ( field , & self . user_defined_faceted_fields ) )
. map ( | ( _fid , field ) | field . to_string ( ) )
. collect ( ) ;
index . put_faceted_fields ( wtxn , & new_facets ) ? ;
self . faceted_fields_ids = index . faceted_fields_ids ( wtxn ) ? ;
Ok ( ( ) )
}
// Recompute the searchable fields from the user-defined list and persist them to the index.
pub fn recompute_searchables ( & mut self , wtxn : & mut heed ::RwTxn , index : & Index ) -> Result < ( ) > {
let searchable_fields = self
. user_defined_searchable_fields
. as_ref ( )
. map ( | searchable | searchable . iter ( ) . map ( | s | s . as_str ( ) ) . collect ::< Vec < _ > > ( ) ) ;
// in case new fields were introduced we're going to recreate the searchable fields.
if let Some ( searchable_fields ) = searchable_fields {
index . put_all_searchable_fields_from_fields_ids_map (
wtxn ,
& searchable_fields ,
& self . non_searchable_fields_ids ,
& self . fields_ids_map ,
) ? ;
}
let searchable_fields_ids = index . searchable_fields_ids ( wtxn ) ? ;
self . searchable_fields_ids = searchable_fields_ids ;
Ok ( ( ) )
}
}
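/// Instantiates the embedders and prompts described by the stored embedding configurations.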
fn embedders ( embedding_configs : Vec < IndexEmbeddingConfig > ) -> Result < EmbeddingConfigs > {
let res : Result < _ > = embedding_configs
. into_iter ( )
. map (
| IndexEmbeddingConfig {
name ,
config : EmbeddingConfig { embedder_options , prompt } ,
..
} | {
let prompt = Arc ::new ( prompt . try_into ( ) . map_err ( crate ::Error ::from ) ? ) ;
let embedder = Arc ::new (
Embedder ::new ( embedder_options . clone ( ) )
. map_err ( crate ::vector ::Error ::from )
. map_err ( crate ::Error ::from ) ? ,
) ;
Ok ( ( name , ( embedder , prompt ) ) )
} ,
)
. collect ( ) ;
res . map ( EmbeddingConfigs ::new )
}
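/// Validates the `document_template` of an embedder by compiling it, replacing it with its
/// normalized form on success.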
fn validate_prompt (
name : & str ,
new : Setting < EmbeddingSettings > ,
) -> Result < Setting < EmbeddingSettings > > {
match new {
Setting ::Set ( EmbeddingSettings {
source ,
model ,
revision ,
api_key ,
dimensions ,
document_template : Setting ::Set ( template ) ,
url ,
query ,
input_field ,
path_to_embeddings ,
embedding_object ,
input_type ,
distribution ,
} ) = > {
// validate
let template = crate ::prompt ::Prompt ::new ( template )
. map ( | prompt | crate ::prompt ::PromptData ::from ( prompt ) . template )
. map_err ( | inner | UserError ::InvalidPromptForEmbeddings ( name . to_owned ( ) , inner ) ) ? ;
Ok ( Setting ::Set ( EmbeddingSettings {
source ,
model ,
revision ,
api_key ,
dimensions ,
document_template : Setting ::Set ( template ) ,
url ,
query ,
input_field ,
path_to_embeddings ,
embedding_object ,
input_type ,
distribution ,
} ) )
}
new = > Ok ( new ) ,
}
}
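/// Validates the settings of an embedder: checks the dimensions and URL, then verifies that
/// only the fields supported by the selected source are set.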
pub fn validate_embedding_settings (
settings : Setting < EmbeddingSettings > ,
name : & str ,
) -> Result < Setting < EmbeddingSettings > > {
let settings = validate_prompt ( name , settings ) ? ;
let Setting ::Set ( settings ) = settings else { return Ok ( settings ) } ;
let EmbeddingSettings {
source ,
model ,
revision ,
api_key ,
dimensions ,
document_template ,
url ,
query ,
input_field ,
path_to_embeddings ,
embedding_object ,
input_type ,
distribution ,
} = settings ;
if let Some ( 0 ) = dimensions . set ( ) {
return Err ( crate ::error ::UserError ::InvalidSettingsDimensions {
embedder_name : name . to_owned ( ) ,
}
. into ( ) ) ;
}
if let Some ( url ) = url . as_ref ( ) . set ( ) {
url ::Url ::parse ( url ) . map_err ( | error | crate ::error ::UserError ::InvalidUrl {
embedder_name : name . to_owned ( ) ,
inner_error : error ,
url : url . to_owned ( ) ,
} ) ? ;
}
let Some ( inferred_source ) = source . set ( ) else {
return Ok ( Setting ::Set ( EmbeddingSettings {
source ,
model ,
revision ,
api_key ,
dimensions ,
document_template ,
url ,
query ,
input_field ,
path_to_embeddings ,
embedding_object ,
input_type ,
distribution ,
} ) ) ;
} ;
match inferred_source {
EmbedderSource ::OpenAi = > {
check_unset ( & revision , EmbeddingSettings ::REVISION , inferred_source , name ) ? ;
check_unset ( & url , EmbeddingSettings ::URL , inferred_source , name ) ? ;
check_unset ( & query , EmbeddingSettings ::QUERY , inferred_source , name ) ? ;
check_unset ( & input_field , EmbeddingSettings ::INPUT_FIELD , inferred_source , name ) ? ;
check_unset (
& path_to_embeddings ,
EmbeddingSettings ::PATH_TO_EMBEDDINGS ,
inferred_source ,
name ,
) ? ;
check_unset (
& embedding_object ,
EmbeddingSettings ::EMBEDDING_OBJECT ,
inferred_source ,
name ,
) ? ;
check_unset ( & input_type , EmbeddingSettings ::INPUT_TYPE , inferred_source , name ) ? ;
if let Setting ::Set ( model ) = & model {
let model = crate ::vector ::openai ::EmbeddingModel ::from_name ( model . as_str ( ) )
. ok_or ( crate ::error ::UserError ::InvalidOpenAiModel {
embedder_name : name . to_owned ( ) ,
model : model . clone ( ) ,
} ) ? ;
if let Setting ::Set ( dimensions ) = dimensions {
if ! model . supports_overriding_dimensions ( )
& & dimensions ! = model . default_dimensions ( )
{
return Err ( crate ::error ::UserError ::InvalidOpenAiModelDimensions {
embedder_name : name . to_owned ( ) ,
model : model . name ( ) ,
dimensions ,
expected_dimensions : model . default_dimensions ( ) ,
}
. into ( ) ) ;
}
if dimensions > model . default_dimensions ( ) {
return Err ( crate ::error ::UserError ::InvalidOpenAiModelDimensionsMax {
embedder_name : name . to_owned ( ) ,
model : model . name ( ) ,
dimensions ,
max_dimensions : model . default_dimensions ( ) ,
}
. into ( ) ) ;
}
}
}
}
EmbedderSource ::Ollama = > {
// Dimensions get inferred, only model name is required
check_unset ( & dimensions , EmbeddingSettings ::DIMENSIONS , inferred_source , name ) ? ;
check_set ( & model , EmbeddingSettings ::MODEL , inferred_source , name ) ? ;
check_unset ( & revision , EmbeddingSettings ::REVISION , inferred_source , name ) ? ;
check_unset ( & query , EmbeddingSettings ::QUERY , inferred_source , name ) ? ;
check_unset ( & input_field , EmbeddingSettings ::INPUT_FIELD , inferred_source , name ) ? ;
check_unset (
& path_to_embeddings ,
EmbeddingSettings ::PATH_TO_EMBEDDINGS ,
inferred_source ,
name ,
) ? ;
check_unset (
& embedding_object ,
EmbeddingSettings ::EMBEDDING_OBJECT ,
inferred_source ,
name ,
) ? ;
check_unset ( & input_type , EmbeddingSettings ::INPUT_TYPE , inferred_source , name ) ? ;
}
EmbedderSource ::HuggingFace = > {
check_unset ( & api_key , EmbeddingSettings ::API_KEY , inferred_source , name ) ? ;
check_unset ( & dimensions , EmbeddingSettings ::DIMENSIONS , inferred_source , name ) ? ;
check_unset ( & url , EmbeddingSettings ::URL , inferred_source , name ) ? ;
check_unset ( & query , EmbeddingSettings ::QUERY , inferred_source , name ) ? ;
check_unset ( & input_field , EmbeddingSettings ::INPUT_FIELD , inferred_source , name ) ? ;
check_unset (
& path_to_embeddings ,
EmbeddingSettings ::PATH_TO_EMBEDDINGS ,
inferred_source ,
name ,
) ? ;
check_unset (
& embedding_object ,
EmbeddingSettings ::EMBEDDING_OBJECT ,
inferred_source ,
name ,
) ? ;
check_unset ( & input_type , EmbeddingSettings ::INPUT_TYPE , inferred_source , name ) ? ;
}
EmbedderSource ::UserProvided = > {
check_unset ( & model , EmbeddingSettings ::MODEL , inferred_source , name ) ? ;
check_unset ( & revision , EmbeddingSettings ::REVISION , inferred_source , name ) ? ;
check_unset ( & api_key , EmbeddingSettings ::API_KEY , inferred_source , name ) ? ;
check_unset (
& document_template ,
EmbeddingSettings ::DOCUMENT_TEMPLATE ,
inferred_source ,
name ,
) ? ;
check_set ( & dimensions , EmbeddingSettings ::DIMENSIONS , inferred_source , name ) ? ;
check_unset ( & url , EmbeddingSettings ::URL , inferred_source , name ) ? ;
check_unset ( & query , EmbeddingSettings ::QUERY , inferred_source , name ) ? ;
check_unset ( & input_field , EmbeddingSettings ::INPUT_FIELD , inferred_source , name ) ? ;
check_unset (
& path_to_embeddings ,
EmbeddingSettings ::PATH_TO_EMBEDDINGS ,
inferred_source ,
name ,
) ? ;
check_unset (
& embedding_object ,
EmbeddingSettings ::EMBEDDING_OBJECT ,
inferred_source ,
name ,
) ? ;
check_unset ( & input_type , EmbeddingSettings ::INPUT_TYPE , inferred_source , name ) ? ;
}
EmbedderSource ::Rest = > {
check_unset ( & model , EmbeddingSettings ::MODEL , inferred_source , name ) ? ;
check_unset ( & revision , EmbeddingSettings ::REVISION , inferred_source , name ) ? ;
check_set ( & url , EmbeddingSettings ::URL , inferred_source , name ) ? ;
}
}
Ok ( Setting ::Set ( EmbeddingSettings {
source ,
model ,
revision ,
api_key ,
dimensions ,
document_template ,
url ,
query ,
input_field ,
path_to_embeddings ,
embedding_object ,
input_type ,
distribution ,
} ) )
}
#[ cfg(test) ]
mod tests {
use big_s ::S ;
use heed ::types ::Bytes ;
use maplit ::{ btreemap , btreeset , hashset } ;
use meili_snap ::snapshot ;
use super ::* ;
use crate ::error ::Error ;
use crate ::index ::tests ::TempIndex ;
use crate ::update ::ClearDocuments ;
use crate ::{ db_snap , Criterion , Filter , SearchResult } ;
#[ test ]
fn set_and_reset_searchable_fields ( ) {
let index = TempIndex ::new ( ) ;
// First we send 3 documents with ids from 1 to 3.
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
index
. add_documents_using_wtxn (
& mut wtxn ,
documents! ( [
{ " id " : 1 , " name " : " kevin " , " age " : 23 } ,
{ " id " : 2 , " name " : " kevina " , " age " : 21 } ,
{ " id " : 3 , " name " : " benoit " , " age " : 34 }
] ) ,
)
. unwrap ( ) ;
// We change the searchable fields to be the "name" field only.
index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . set_searchable_fields ( vec! [ " name " . into ( ) ] ) ;
} )
. unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
db_snap! ( index , fields_ids_map , @ r ###"
0 id |
1 name |
2 age |
" ###);
db_snap! ( index , searchable_fields , @ r ### "["name"]"### ) ;
db_snap! ( index , fieldids_weights_map , @ r ###"
fid weight
1 0 |
" ###);
// Check that the searchable field is correctly set to "name" only.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
// When we search for something that is not in
// the searchable fields it must not return any document.
let result = index . search ( & rtxn ) . query ( " 23 " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids , Vec ::< u32 > ::new ( ) ) ;
// When we search for something that is in the searchable fields
// we must find the appropriate document.
let result = index . search ( & rtxn ) . query ( r # ""kevin""# ) . execute ( ) . unwrap ( ) ;
let documents = index . documents ( & rtxn , result . documents_ids ) . unwrap ( ) ;
let fid_map = index . fields_ids_map ( & rtxn ) . unwrap ( ) ;
assert_eq! ( documents . len ( ) , 1 ) ;
assert_eq! ( documents [ 0 ] . 1. get ( fid_map . id ( " name " ) . unwrap ( ) ) , Some ( & br # ""kevin""# [ .. ] ) ) ;
drop ( rtxn ) ;
// We now reset the searchable fields.
index
. update_settings ( | settings | {
settings . reset_searchable_fields ( ) ;
} )
. unwrap ( ) ;
db_snap! ( index , fields_ids_map , @ r ###"
0 id |
1 name |
2 age |
" ###);
db_snap! ( index , searchable_fields , @ r ### "["id", "name", "age"]"### ) ;
db_snap! ( index , fieldids_weights_map , @ r ###"
fid weight
0 0 |
1 0 |
2 0 |
" ###);
// Check that the searchable fields have been reset and documents are found now.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let fid_map = index . fields_ids_map ( & rtxn ) . unwrap ( ) ;
let user_defined_searchable_fields = index . user_defined_searchable_fields ( & rtxn ) . unwrap ( ) ;
snapshot! ( format! ( " {user_defined_searchable_fields:?} " ) , @ " None " ) ;
// the searchable fields should contain all the fields
let searchable_fields = index . searchable_fields ( & rtxn ) . unwrap ( ) ;
snapshot! ( format! ( " {searchable_fields:?} " ) , @ r ### "["id", "name", "age"]"### ) ;
let result = index . search ( & rtxn ) . query ( " 23 " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 1 ) ;
let documents = index . documents ( & rtxn , result . documents_ids ) . unwrap ( ) ;
assert_eq! ( documents [ 0 ] . 1. get ( fid_map . id ( " name " ) . unwrap ( ) ) , Some ( & br # ""kevin""# [ .. ] ) ) ;
}
#[ test ]
fn mixup_searchable_with_displayed_fields ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
2020-11-03 18:22:40 +01:00
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
// First we send 3 documents with ids from 1 to 3.
index
. add_documents_using_wtxn (
& mut wtxn ,
documents! ( [
{ " name " : " kevin " , " age " : 23 } ,
{ " name " : " kevina " , " age " : 21 } ,
{ " name " : " benoit " , " age " : 34 }
] ) ,
)
. unwrap ( ) ;
// In the same transaction we change the displayed fields to be only the "age".
// We also change the searchable fields to be the "name" field only.
index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . set_displayed_fields ( vec! [ " age " . into ( ) ] ) ;
settings . set_searchable_fields ( vec! [ " name " . into ( ) ] ) ;
} )
. unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
// Check that the displayed fields are correctly set to only the "age" field.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let fields_ids = index . displayed_fields ( & rtxn ) . unwrap ( ) ;
assert_eq! ( fields_ids . unwrap ( ) , ( & [ " age " ] [ .. ] ) ) ;
drop ( rtxn ) ;
// We reset the searchable fields.
index
. update_settings ( | settings | {
settings . reset_searchable_fields ( ) ;
} )
. unwrap ( ) ;
// Check that the displayed fields always contains only the "age" field.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let fields_ids = index . displayed_fields ( & rtxn ) . unwrap ( ) ;
assert_eq! ( fields_ids . unwrap ( ) , & [ " age " ] [ .. ] ) ;
}
#[ test ]
fn default_displayed_fields ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
// First we send 3 documents with ids from 1 to 3.
index
. add_documents ( documents! ( [
{ " name " : " kevin " , " age " : 23 } ,
{ " name " : " kevina " , " age " : 21 } ,
{ " name " : " benoit " , " age " : 34 }
] ) )
. unwrap ( ) ;
// Check that the displayed fields are correctly set to `None` (default value).
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let fields_ids = index . displayed_fields ( & rtxn ) . unwrap ( ) ;
assert_eq! ( fields_ids , None ) ;
}
#[ test ]
fn set_and_reset_displayed_field ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
index
. add_documents_using_wtxn (
& mut wtxn ,
documents! ( [
{ " name " : " kevin " , " age " : 23 } ,
{ " name " : " kevina " , " age " : 21 } ,
{ " name " : " benoit " , " age " : 34 }
] ) ,
)
. unwrap ( ) ;
index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . set_displayed_fields ( vec! [ " age " . into ( ) ] ) ;
} )
. unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
// Check that the displayed fields are correctly set to only the "age" field.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let fields_ids = index . displayed_fields ( & rtxn ) . unwrap ( ) ;
assert_eq! ( fields_ids . unwrap ( ) , & [ " age " ] [ .. ] ) ;
drop ( rtxn ) ;
// We reset the displayed fields to become `None`, the default value.
index
. update_settings ( | settings | {
settings . reset_displayed_fields ( ) ;
} )
. unwrap ( ) ;
// Check that the displayed fields are correctly set to `None` (default value).
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let fields_ids = index . displayed_fields ( & rtxn ) . unwrap ( ) ;
assert_eq! ( fields_ids , None ) ;
}
#[ test ]
fn set_filterable_fields ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
// Set the filterable fields to be the age.
index
. update_settings ( | settings | {
settings . set_filterable_fields ( hashset! { S ( " age " ) } ) ;
} )
. unwrap ( ) ;
// Then index some documents.
index
. add_documents ( documents! ( [
{ " name " : " kevin " , " age " : 23 } ,
{ " name " : " kevina " , " age " : 21 } ,
{ " name " : " benoit " , " age " : 34 }
] ) )
. unwrap ( ) ;
// Check that the filterable fields are correctly set.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let fields_ids = index . filterable_fields ( & rtxn ) . unwrap ( ) ;
assert_eq! ( fields_ids , hashset! { S ( " age " ) } ) ;
// Only count the field_id 0 and level 0 facet values.
// TODO we must support typed CSVs for numbers to be understood.
let fidmap = index . fields_ids_map ( & rtxn ) . unwrap ( ) ;
for document in index . all_documents ( & rtxn ) . unwrap ( ) {
let document = document . unwrap ( ) ;
let json = crate ::obkv_to_json ( & fidmap . ids ( ) . collect ::< Vec < _ > > ( ) , & fidmap , document . 1 )
. unwrap ( ) ;
println! ( " json: {:?} " , json ) ;
}
let count = index
. facet_id_f64_docids
. remap_key_type ::< Bytes > ( )
// The faceted field id is 1u16
. prefix_iter ( & rtxn , & [ 0 , 1 , 0 ] )
. unwrap ( )
. count ( ) ;
assert_eq! ( count , 3 ) ;
drop ( rtxn ) ;
// Index a few more documents with new and existing facet values.
index
. add_documents ( documents! ( [
{ " name " : " kevin2 " , " age " : 23 } ,
{ " name " : " kevina2 " , " age " : 21 } ,
{ " name " : " benoit " , " age " : 35 }
] ) )
. unwrap ( ) ;
let rtxn = index . read_txn ( ) . unwrap ( ) ;
// Only count the field_id 0 and level 0 facet values.
let count = index
. facet_id_f64_docids
. remap_key_type ::< Bytes > ( )
. prefix_iter ( & rtxn , & [ 0 , 1 , 0 ] )
. unwrap ( )
. count ( ) ;
assert_eq! ( count , 4 ) ;
// Set the filterable fields to be the age and the name.
index
. update_settings ( | settings | {
settings . set_filterable_fields ( hashset! { S ( " age " ) , S ( " name " ) } ) ;
} )
. unwrap ( ) ;
// Check that the filterable fields are correctly set.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let fields_ids = index . filterable_fields ( & rtxn ) . unwrap ( ) ;
assert_eq! ( fields_ids , hashset! { S ( " age " ) , S ( " name " ) } ) ;
let rtxn = index . read_txn ( ) . unwrap ( ) ;
// Only count the field_id 0 and level 0 facet values.
let count = index
. facet_id_f64_docids
. remap_key_type ::< Bytes > ( )
. prefix_iter ( & rtxn , & [ 0 , 1 , 0 ] )
. unwrap ( )
. count ( ) ;
assert_eq! ( count , 4 ) ;
let rtxn = index . read_txn ( ) . unwrap ( ) ;
// Only count the field_id 0 and level 0 facet values.
let count = index
. facet_id_string_docids
. remap_key_type ::< Bytes > ( )
. prefix_iter ( & rtxn , & [ 0 , 0 ] )
. unwrap ( )
. count ( ) ;
assert_eq! ( count , 5 ) ;
// Remove the age from the filterable fields.
index
. update_settings ( | settings | {
settings . set_filterable_fields ( hashset! { S ( " name " ) } ) ;
} )
. unwrap ( ) ;
// Check that the filterable fields are correctly set.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let fields_ids = index . filterable_fields ( & rtxn ) . unwrap ( ) ;
assert_eq! ( fields_ids , hashset! { S ( " name " ) } ) ;
let rtxn = index . read_txn ( ) . unwrap ( ) ;
// Only count the field_id 0 and level 0 facet values.
let count = index
. facet_id_f64_docids
. remap_key_type ::< Bytes > ( )
. prefix_iter ( & rtxn , & [ 0 , 1 , 0 ] )
. unwrap ( )
. count ( ) ;
assert_eq! ( count , 0 ) ;
let rtxn = index . read_txn ( ) . unwrap ( ) ;
// Only count the field_id 0 and level 0 facet values.
let count = index
. facet_id_string_docids
. remap_key_type ::< Bytes > ( )
. prefix_iter ( & rtxn , & [ 0 , 0 ] )
. unwrap ( )
. count ( ) ;
assert_eq! ( count , 5 ) ;
}
#[ test ]
fn set_asc_desc_field ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
// Display only the name and add an ascending ranking rule on the age.
index
. update_settings ( | settings | {
settings . set_displayed_fields ( vec! [ S ( " name " ) ] ) ;
settings . set_criteria ( vec! [ Criterion ::Asc ( " age " . to_owned ( ) ) ] ) ;
} )
. unwrap ( ) ;
// Then index some documents.
index
. add_documents ( documents! ( [
{ " name " : " kevin " , " age " : 23 } ,
{ " name " : " kevina " , " age " : 21 } ,
{ " name " : " benoit " , " age " : 34 }
] ) )
. unwrap ( ) ;
// Run an empty query just to ensure that the search results are ordered.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let SearchResult { documents_ids , .. } = index . search ( & rtxn ) . execute ( ) . unwrap ( ) ;
let documents = index . documents ( & rtxn , documents_ids ) . unwrap ( ) ;
// Fetch the documents "age" field in the order in which the documents appear.
let age_field_id = index . fields_ids_map ( & rtxn ) . unwrap ( ) . id ( " age " ) . unwrap ( ) ;
let iter = documents . into_iter ( ) . map ( | ( _ , doc ) | {
let bytes = doc . get ( age_field_id ) . unwrap ( ) ;
let string = std ::str ::from_utf8 ( bytes ) . unwrap ( ) ;
string . parse ::< u32 > ( ) . unwrap ( )
} ) ;
assert_eq! ( iter . collect ::< Vec < _ > > ( ) , vec! [ 21 , 23 , 34 ] ) ;
}
#[ test ]
fn set_distinct_field ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
// Display the name and the age, and make the age the distinct attribute.
index
. update_settings ( | settings | {
// Don't display the generated `id` field.
settings . set_displayed_fields ( vec! [ S ( " name " ) , S ( " age " ) ] ) ;
settings . set_distinct_field ( S ( " age " ) ) ;
} )
. unwrap ( ) ;
// Then index some documents.
index
. add_documents ( documents! ( [
{ " name " : " kevin " , " age " : 23 } ,
{ " name " : " kevina " , " age " : 21 } ,
{ " name " : " benoit " , " age " : 34 } ,
{ " name " : " bernard " , " age " : 34 } ,
{ " name " : " bertrand " , " age " : 34 } ,
{ " name " : " bernie " , " age " : 34 } ,
{ " name " : " ben " , " age " : 34 }
] ) )
. unwrap ( ) ;
// Run an empty query just to ensure that the search results are ordered.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let SearchResult { documents_ids , .. } = index . search ( & rtxn ) . execute ( ) . unwrap ( ) ;
// Only one of the documents with age 34 is kept by the distinct attribute, so 3 documents remain.
assert_eq! ( documents_ids . len ( ) , 3 ) ;
}
#[ test ]
fn set_nested_distinct_field ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
// Display the person and make the nested person.age the distinct attribute.
index
. update_settings ( | settings | {
// Don't display the generated `id` field.
settings . set_displayed_fields ( vec! [ S ( " person " ) ] ) ;
settings . set_distinct_field ( S ( " person.age " ) ) ;
} )
. unwrap ( ) ;
// Then index some documents.
index
. add_documents ( documents! ( [
{ " person " : { " name " : " kevin " , " age " : 23 } } ,
{ " person " : { " name " : " kevina " , " age " : 21 } } ,
{ " person " : { " name " : " benoit " , " age " : 34 } } ,
{ " person " : { " name " : " bernard " , " age " : 34 } } ,
{ " person " : { " name " : " bertrand " , " age " : 34 } } ,
{ " person " : { " name " : " bernie " , " age " : 34 } } ,
{ " person " : { " name " : " ben " , " age " : 34 } }
] ) )
. unwrap ( ) ;
// Run an empty query just to ensure that the search results are ordered.
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let SearchResult { documents_ids , .. } = index . search ( & rtxn ) . execute ( ) . unwrap ( ) ;
// Only one of the documents with age 34 is kept by the distinct attribute, so 3 documents remain.
assert_eq! ( documents_ids . len ( ) , 3 ) ;
}
#[ test ]
fn default_stop_words ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
// First we send 3 documents with ids from 1 to 3.
index
. add_documents ( documents! ( [
{ " name " : " kevin " , " age " : 23 } ,
{ " name " : " kevina " , " age " : 21 } ,
{ " name " : " benoit " , " age " : 34 }
] ) )
. unwrap ( ) ;
// Ensure there is no stop_words by default
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let stop_words = index . stop_words ( & rtxn ) . unwrap ( ) ;
assert! ( stop_words . is_none ( ) ) ;
}
#[ test ]
fn set_and_reset_stop_words ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
// First we send 3 documents with ids from 1 to 3.
index
. add_documents_using_wtxn (
& mut wtxn ,
documents! ( [
{ " name " : " kevin " , " age " : 23 , " maxim " : " I love dogs " } ,
{ " name " : " kevina " , " age " : 21 , " maxim " : " Doggos are the best " } ,
{ " name " : " benoit " , " age " : 34 , " maxim " : " The crepes are really good " } ,
] ) ,
)
. unwrap ( ) ;
// In the same transaction we provide some stop_words
let set = btreeset! { " i " . to_string ( ) , " the " . to_string ( ) , " are " . to_string ( ) } ;
index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . set_stop_words ( set . clone ( ) ) ;
} )
. unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
// Ensure stop_words are effectively stored
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let stop_words = index . stop_words ( & rtxn ) . unwrap ( ) ;
assert! ( stop_words . is_some ( ) ) ; // at this point the index should return something
let stop_words = stop_words . unwrap ( ) ;
let expected = fst ::Set ::from_iter ( & set ) . unwrap ( ) ;
assert_eq! ( stop_words . as_fst ( ) . as_bytes ( ) , expected . as_fst ( ) . as_bytes ( ) ) ;
// when we search for something that is a non-prefix stop word it should be ignored,
// thus we should get a placeholder search (all the results = 3)
let result = index . search ( & rtxn ) . query ( " the " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 3 ) ;
let result = index . search ( & rtxn ) . query ( " i " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 3 ) ;
let result = index . search ( & rtxn ) . query ( " are " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 3 ) ;
let result = index . search ( & rtxn ) . query ( " dog " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 2 ) ; // we have two maxims talking about doggos
let result = index . search ( & rtxn ) . query ( " benoît " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 1 ) ; // there is one benoit in our data
// now we'll reset the stop_words and ensure it's None
index
. update_settings ( | settings | {
settings . reset_stop_words ( ) ;
} )
. unwrap ( ) ;
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let stop_words = index . stop_words ( & rtxn ) . unwrap ( ) ;
assert! ( stop_words . is_none ( ) ) ;
// now we can search for the stop words
let result = index . search ( & rtxn ) . query ( " the " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 2 ) ;
let result = index . search ( & rtxn ) . query ( " i " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 1 ) ;
let result = index . search ( & rtxn ) . query ( " are " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 2 ) ;
// the rest of the search is still not impacted
let result = index . search ( & rtxn ) . query ( " dog " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 2 ) ; // we have two maxims talking about doggos
let result = index . search ( & rtxn ) . query ( " benoît " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 1 ) ; // there is one benoit in our data
}
#[ test ]
fn set_and_reset_synonyms ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
// Send 3 documents with ids from 1 to 3.
index
. add_documents_using_wtxn (
& mut wtxn ,
documents! ( [
{ " name " : " kevin " , " age " : 23 , " maxim " : " I love dogs " } ,
{ " name " : " kevina " , " age " : 21 , " maxim " : " Doggos are the best " } ,
{ " name " : " benoit " , " age " : 34 , " maxim " : " The crepes are really good " } ,
] ) ,
)
. unwrap ( ) ;
// In the same transaction provide some synonyms
index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . set_synonyms ( btreemap! {
" blini " . to_string ( ) = > vec! [ " crepes " . to_string ( ) ] ,
" super like " . to_string ( ) = > vec! [ " love " . to_string ( ) ] ,
" puppies " . to_string ( ) = > vec! [ " dogs " . to_string ( ) , " doggos " . to_string ( ) ]
} ) ;
} )
. unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
// Ensure synonyms are effectively stored
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let synonyms = index . synonyms ( & rtxn ) . unwrap ( ) ;
assert! ( ! synonyms . is_empty ( ) ) ; // at this point the index should return something
// Check that we can use synonyms
let result = index . search ( & rtxn ) . query ( " blini " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 1 ) ;
let result = index . search ( & rtxn ) . query ( " super like " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 1 ) ;
let result = index . search ( & rtxn ) . query ( " puppies " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 2 ) ;
// Reset the synonyms
index
. update_settings ( | settings | {
settings . reset_synonyms ( ) ;
} )
. unwrap ( ) ;
// Ensure synonyms are reset
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let synonyms = index . synonyms ( & rtxn ) . unwrap ( ) ;
assert! ( synonyms . is_empty ( ) ) ;
// Check that synonyms no longer work
let result = index . search ( & rtxn ) . query ( " blini " ) . execute ( ) . unwrap ( ) ;
assert! ( result . documents_ids . is_empty ( ) ) ;
let result = index . search ( & rtxn ) . query ( " super like " ) . execute ( ) . unwrap ( ) ;
assert! ( result . documents_ids . is_empty ( ) ) ;
let result = index . search ( & rtxn ) . query ( " puppies " ) . execute ( ) . unwrap ( ) ;
assert! ( result . documents_ids . is_empty ( ) ) ;
}
#[ test ]
fn thai_synonyms ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
// Send 3 documents with ids from 1 to 3.
index
. add_documents_using_wtxn (
& mut wtxn ,
documents! ( [
{ " name " : " ยี่ปุ่น " } ,
{ " name " : " ญี่ปุ่น " } ,
] ) ,
)
. unwrap ( ) ;
// In the same transaction provide some synonyms
index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . set_synonyms ( btreemap! {
" japanese " . to_string ( ) = > vec! [ S ( " ญี่ปุ่น " ) , S ( " ยี่ปุ่น " ) ] ,
} ) ;
} )
. unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
// Ensure synonyms are effectively stored
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let synonyms = index . synonyms ( & rtxn ) . unwrap ( ) ;
assert! ( ! synonyms . is_empty ( ) ) ; // at this point the index should return something
// Check that we can use synonyms
let result = index . search ( & rtxn ) . query ( " japanese " ) . execute ( ) . unwrap ( ) ;
assert_eq! ( result . documents_ids . len ( ) , 2 ) ;
}
#[ test ]
fn setting_searchable_recomputes_other_settings ( ) {
let index = TempIndex ::new ( ) ;
// Set all the settings except searchable
index
. update_settings ( | settings | {
settings . set_displayed_fields ( vec! [ " hello " . to_string ( ) ] ) ;
settings . set_filterable_fields ( hashset! { S ( " age " ) , S ( " toto " ) } ) ;
settings . set_criteria ( vec! [ Criterion ::Asc ( S ( " toto " ) ) ] ) ;
} )
. unwrap ( ) ;
// check the output
let rtxn = index . read_txn ( ) . unwrap ( ) ;
assert_eq! ( & [ " hello " ] [ .. ] , index . displayed_fields ( & rtxn ) . unwrap ( ) . unwrap ( ) ) ;
// since no documents have been pushed the primary key is still unset
assert! ( index . primary_key ( & rtxn ) . unwrap ( ) . is_none ( ) ) ;
assert_eq! ( vec! [ Criterion ::Asc ( " toto " . to_string ( ) ) ] , index . criteria ( & rtxn ) . unwrap ( ) ) ;
drop ( rtxn ) ;
// We set toto and age as searchable to force reordering of the fields
index
. update_settings ( | settings | {
settings . set_searchable_fields ( vec! [ " toto " . to_string ( ) , " age " . to_string ( ) ] ) ;
} )
. unwrap ( ) ;
let rtxn = index . read_txn ( ) . unwrap ( ) ;
assert_eq! ( & [ " hello " ] [ .. ] , index . displayed_fields ( & rtxn ) . unwrap ( ) . unwrap ( ) ) ;
assert! ( index . primary_key ( & rtxn ) . unwrap ( ) . is_none ( ) ) ;
assert_eq! ( vec! [ Criterion ::Asc ( " toto " . to_string ( ) ) ] , index . criteria ( & rtxn ) . unwrap ( ) ) ;
}
#[ test ]
fn setting_not_filterable_cant_filter ( ) {
let index = TempIndex ::new ( ) ;
// Set all the settings except searchable
index
. update_settings ( | settings | {
settings . set_displayed_fields ( vec! [ " hello " . to_string ( ) ] ) ;
// Only an Asc(toto) criterion is set: a facet database exists but filtering on toto is not allowed.
settings . set_criteria ( vec! [ Criterion ::Asc ( S ( " toto " ) ) ] ) ;
} )
. unwrap ( ) ;
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let filter = Filter ::from_str ( " toto = 32 " ) . unwrap ( ) . unwrap ( ) ;
let _ = filter . evaluate ( & rtxn , & index ) . unwrap_err ( ) ;
}
#[ test ]
fn setting_primary_key ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
// Set the primary key settings
index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . set_primary_key ( S ( " mykey " ) ) ;
} )
. unwrap ( ) ;
assert_eq! ( index . primary_key ( & wtxn ) . unwrap ( ) , Some ( " mykey " ) ) ;
// Then index some documents with the "mykey" primary key.
index
. add_documents_using_wtxn (
& mut wtxn ,
documents! ( [
{ " mykey " : 1 , " name " : " kevin " , " age " : 23 } ,
{ " mykey " : 2 , " name " : " kevina " , " age " : 21 } ,
{ " mykey " : 3 , " name " : " benoit " , " age " : 34 } ,
{ " mykey " : 4 , " name " : " bernard " , " age " : 34 } ,
{ " mykey " : 5 , " name " : " bertrand " , " age " : 34 } ,
{ " mykey " : 6 , " name " : " bernie " , " age " : 34 } ,
{ " mykey " : 7 , " name " : " ben " , " age " : 34 }
] ) ,
)
. unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
// Updating settings with the same primary key should do nothing
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . set_primary_key ( S ( " mykey " ) ) ;
} )
. unwrap ( ) ;
assert_eq! ( index . primary_key ( & wtxn ) . unwrap ( ) , Some ( " mykey " ) ) ;
wtxn . commit ( ) . unwrap ( ) ;
// Updating the settings with a different (or no) primary key causes an error
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
let error = index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . reset_primary_key ( ) ;
} )
. unwrap_err ( ) ;
assert! ( matches! ( error , Error ::UserError ( UserError ::PrimaryKeyCannotBeChanged ( _ ) ) ) ) ;
wtxn . abort ( ) ;
// But if we clear the database...
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
let builder = ClearDocuments ::new ( & mut wtxn , & index ) ;
builder . execute ( ) . unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
// ...we can change the primary key
index
. update_settings ( | settings | {
settings . set_primary_key ( S ( " myid " ) ) ;
} )
. unwrap ( ) ;
}
#[ test ]
fn setting_impact_relevancy ( ) {
let mut index = TempIndex ::new ( ) ;
index . index_documents_config . autogenerate_docids = true ;
// Make the genres field filterable.
index
. update_settings ( | settings | {
settings . set_filterable_fields ( hashset! { S ( " genres " ) } ) ;
} )
. unwrap ( ) ;
index . add_documents ( documents! ( [
{
" id " : 11 ,
" title " : " Star Wars " ,
" overview " :
" Princess Leia is captured and held hostage by the evil Imperial forces in their effort to take over the galactic Empire. Venturesome Luke Skywalker and dashing captain Han Solo team together with the loveable robot duo R2-D2 and C-3PO to rescue the beautiful princess and restore peace and justice in the Empire. " ,
" genres " : [ " Adventure " , " Action " , " Science Fiction " ] ,
" poster " : " https://image.tmdb.org/t/p/w500/6FfCtAuVAW8XJjZ7eWeLibRLWTw.jpg " ,
" release_date " : 233366400
} ,
{
" id " : 30 ,
" title " : " Magnetic Rose " ,
" overview " : " " ,
" genres " : [ " Animation " , " Science Fiction " ] ,
" poster " : " https://image.tmdb.org/t/p/w500/gSuHDeWemA1menrwfMRChnSmMVN.jpg " ,
" release_date " : 819676800
}
] ) ) . unwrap ( ) ;
let rtxn = index . read_txn ( ) . unwrap ( ) ;
let SearchResult { documents_ids , .. } = index . search ( & rtxn ) . query ( " S " ) . execute ( ) . unwrap ( ) ;
let first_id = documents_ids [ 0 ] ;
let documents = index . documents ( & rtxn , documents_ids ) . unwrap ( ) ;
let ( _ , content ) = documents . iter ( ) . find ( | ( id , _ ) | * id = = first_id ) . unwrap ( ) ;
let fid = index . fields_ids_map ( & rtxn ) . unwrap ( ) . id ( " title " ) . unwrap ( ) ;
let line = std ::str ::from_utf8 ( content . get ( fid ) . unwrap ( ) ) . unwrap ( ) ;
assert_eq! ( line , r # ""Star Wars""# ) ;
}
#[ test ]
fn test_disable_typo ( ) {
let index = TempIndex ::new ( ) ;
let mut txn = index . write_txn ( ) . unwrap ( ) ;
assert! ( index . authorize_typos ( & txn ) . unwrap ( ) ) ;
index
. update_settings_using_wtxn ( & mut txn , | settings | {
settings . set_autorize_typos ( false ) ;
} )
. unwrap ( ) ;
assert! ( ! index . authorize_typos ( & txn ) . unwrap ( ) ) ;
}
#[ test ]
fn update_min_word_len_for_typo ( ) {
let index = TempIndex ::new ( ) ;
// Set the minimum word lengths for one and two typos.
index
. update_settings ( | settings | {
settings . set_min_word_len_one_typo ( 8 ) ;
settings . set_min_word_len_two_typos ( 8 ) ;
} )
. unwrap ( ) ;
let txn = index . read_txn ( ) . unwrap ( ) ;
assert_eq! ( index . min_word_len_one_typo ( & txn ) . unwrap ( ) , 8 ) ;
assert_eq! ( index . min_word_len_two_typos ( & txn ) . unwrap ( ) , 8 ) ;
index
. update_settings ( | settings | {
settings . reset_min_word_len_one_typo ( ) ;
settings . reset_min_word_len_two_typos ( ) ;
} )
. unwrap ( ) ;
let txn = index . read_txn ( ) . unwrap ( ) ;
assert_eq! ( index . min_word_len_one_typo ( & txn ) . unwrap ( ) , DEFAULT_MIN_WORD_LEN_ONE_TYPO ) ;
assert_eq! ( index . min_word_len_two_typos ( & txn ) . unwrap ( ) , DEFAULT_MIN_WORD_LEN_TWO_TYPOS ) ;
}
#[ test ]
fn update_invalid_min_word_len_for_typo ( ) {
let index = TempIndex ::new ( ) ;
// Try to set invalid minimum word lengths for typos.
index
. update_settings ( | settings | {
settings . set_min_word_len_one_typo ( 10 ) ;
settings . set_min_word_len_two_typos ( 7 ) ;
} )
. unwrap_err ( ) ;
}
#[ test ]
fn update_exact_words_normalization ( ) {
let index = TempIndex ::new ( ) ;
let mut txn = index . write_txn ( ) . unwrap ( ) ;
// Set some exact words.
index
. update_settings_using_wtxn ( & mut txn , | settings | {
let words = btreeset! { S ( " Ab " ) , S ( " ac " ) } ;
settings . set_exact_words ( words ) ;
} )
. unwrap ( ) ;
let exact_words = index . exact_words ( & txn ) . unwrap ( ) . unwrap ( ) ;
for word in exact_words . into_fst ( ) . stream ( ) . into_str_vec ( ) . unwrap ( ) {
assert! ( word . 0 = = " ac " | | word . 0 = = " ab " ) ;
}
}
#[ test ]
fn test_correct_settings_init ( ) {
let index = TempIndex ::new ( ) ;
index
. update_settings ( | settings | {
// we don't actually update the settings, just check their content
let Settings {
wtxn : _ ,
index : _ ,
indexer_config : _ ,
searchable_fields ,
displayed_fields ,
filterable_fields ,
sortable_fields ,
criteria ,
stop_words ,
non_separator_tokens ,
separator_tokens ,
dictionary ,
distinct_field ,
synonyms ,
primary_key ,
authorize_typos ,
min_word_len_two_typos ,
min_word_len_one_typo ,
exact_words ,
exact_attributes ,
max_values_per_facet ,
sort_facet_values_by ,
pagination_max_total_hits ,
proximity_precision ,
embedder_settings ,
search_cutoff ,
} = settings ;
assert! ( matches! ( searchable_fields , Setting ::NotSet ) ) ;
assert! ( matches! ( displayed_fields , Setting ::NotSet ) ) ;
assert! ( matches! ( filterable_fields , Setting ::NotSet ) ) ;
assert! ( matches! ( sortable_fields , Setting ::NotSet ) ) ;
assert! ( matches! ( criteria , Setting ::NotSet ) ) ;
assert! ( matches! ( stop_words , Setting ::NotSet ) ) ;
assert! ( matches! ( non_separator_tokens , Setting ::NotSet ) ) ;
assert! ( matches! ( separator_tokens , Setting ::NotSet ) ) ;
assert! ( matches! ( dictionary , Setting ::NotSet ) ) ;
assert! ( matches! ( distinct_field , Setting ::NotSet ) ) ;
assert! ( matches! ( synonyms , Setting ::NotSet ) ) ;
assert! ( matches! ( primary_key , Setting ::NotSet ) ) ;
assert! ( matches! ( authorize_typos , Setting ::NotSet ) ) ;
assert! ( matches! ( min_word_len_two_typos , Setting ::NotSet ) ) ;
assert! ( matches! ( min_word_len_one_typo , Setting ::NotSet ) ) ;
assert! ( matches! ( exact_words , Setting ::NotSet ) ) ;
assert! ( matches! ( exact_attributes , Setting ::NotSet ) ) ;
assert! ( matches! ( max_values_per_facet , Setting ::NotSet ) ) ;
assert! ( matches! ( sort_facet_values_by , Setting ::NotSet ) ) ;
assert! ( matches! ( pagination_max_total_hits , Setting ::NotSet ) ) ;
assert! ( matches! ( proximity_precision , Setting ::NotSet ) ) ;
assert! ( matches! ( embedder_settings , Setting ::NotSet ) ) ;
assert! ( matches! ( search_cutoff , Setting ::NotSet ) ) ;
} )
. unwrap ( ) ;
}
#[ test ]
fn settings_must_ignore_soft_deleted ( ) {
use serde_json ::json ;
let index = TempIndex ::new ( ) ;
let mut docs = vec! [ ] ;
for i in 0 .. 10 {
docs . push ( json! ( { " id " : i , " title " : format ! ( " {:x} " , i ) } ) ) ;
}
index . add_documents ( documents! { docs } ) . unwrap ( ) ;
index . delete_documents ( ( 0 .. 5 ) . map ( | id | id . to_string ( ) ) . collect ( ) ) ;
let mut wtxn = index . write_txn ( ) . unwrap ( ) ;
index
. update_settings_using_wtxn ( & mut wtxn , | settings | {
settings . set_searchable_fields ( vec! [ " id " . to_string ( ) ] ) ;
} )
. unwrap ( ) ;
wtxn . commit ( ) . unwrap ( ) ;
let rtxn = index . write_txn ( ) . unwrap ( ) ;
let docs : StdResult < Vec < _ > , _ > = index . all_documents ( & rtxn ) . unwrap ( ) . collect ( ) ;
let docs = docs . unwrap ( ) ;
assert_eq! ( docs . len ( ) , 5 ) ;
}
}