Merge branch 'main' into default-key

Author: Mubelotix · 2025-07-08 12:21:46 +02:00
Commit: ff8d48d2f1
192 changed files with 8571 additions and 2120 deletions


@ -32,7 +32,7 @@ jobs:
- name: Build deb package
run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
- name: Upload debian pkg to release
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.11.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/debian/meilisearch.deb


@ -51,7 +51,7 @@ jobs:
# No need to upload binaries for dry run (cron)
- name: Upload binaries to release
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.11.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/release/meilisearch
@ -81,7 +81,7 @@ jobs:
# No need to upload binaries for dry run (cron)
- name: Upload binaries to release
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.11.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/release/${{ matrix.artifact_name }}
@ -113,7 +113,7 @@ jobs:
- name: Upload the binary to release
# No need to upload binaries for dry run (cron)
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.11.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/${{ matrix.target }}/release/meilisearch
@ -178,7 +178,7 @@ jobs:
- name: Upload the binary to release
# No need to upload binaries for dry run (cron)
if: github.event_name == 'release'
uses: svenstaro/upload-release-action@2.7.0
uses: svenstaro/upload-release-action@2.11.1
with:
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
file: target/${{ matrix.target }}/release/meilisearch


@ -29,7 +29,7 @@ jobs:
- name: Setup test with Rust stable
uses: dtolnay/rust-toolchain@1.85
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.8
uses: Swatinem/rust-cache@v2.8.0
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
with:
@ -51,7 +51,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.8
uses: Swatinem/rust-cache@v2.8.0
- uses: dtolnay/rust-toolchain@1.85
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
@ -155,7 +155,7 @@ jobs:
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.85
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.8
uses: Swatinem/rust-cache@v2.8.0
- name: Run tests in debug
uses: actions-rs/cargo@v1
with:
@ -172,7 +172,7 @@ jobs:
profile: minimal
components: clippy
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.8
uses: Swatinem/rust-cache@v2.8.0
- name: Run cargo clippy
uses: actions-rs/cargo@v1
with:
@ -191,7 +191,7 @@ jobs:
override: true
components: rustfmt
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.8
uses: Swatinem/rust-cache@v2.8.0
- name: Run cargo fmt
# Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
# Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate

.gitignore (vendored): 3 changes

@ -18,5 +18,8 @@
## ... unreviewed
*.snap.new
# Database snapshot
crates/meilisearch/db.snapshot
# Fuzzcheck data for the facet indexing fuzz test
crates/milli/fuzz/update::facet::incremental::fuzz::fuzz/

Cargo.lock (generated): 57 changes

@ -580,7 +580,7 @@ source = "git+https://github.com/meilisearch/bbqueue#cbb87cc707b5af415ef203bdaf2
[[package]]
name = "benchmarks"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"anyhow",
"bumpalo",
@ -770,7 +770,7 @@ dependencies = [
[[package]]
name = "build-info"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"anyhow",
"time",
@ -1774,7 +1774,7 @@ dependencies = [
[[package]]
name = "dump"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"anyhow",
"big_s",
@ -2006,7 +2006,7 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "file-store"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"tempfile",
"thiserror 2.0.12",
@ -2028,7 +2028,7 @@ dependencies = [
[[package]]
name = "filter-parser"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"insta",
"nom",
@ -2049,7 +2049,7 @@ dependencies = [
[[package]]
name = "flatten-serde-json"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"criterion",
"serde_json",
@ -2194,7 +2194,7 @@ dependencies = [
[[package]]
name = "fuzzers"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"arbitrary",
"bumpalo",
@ -2994,9 +2994,10 @@ dependencies = [
[[package]]
name = "index-scheduler"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"anyhow",
"backoff",
"big_s",
"bincode",
"bumpalo",
@ -3229,7 +3230,7 @@ dependencies = [
[[package]]
name = "json-depth-checker"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"criterion",
"serde_json",
@ -3723,7 +3724,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
[[package]]
name = "meili-snap"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"insta",
"md5",
@ -3734,7 +3735,7 @@ dependencies = [
[[package]]
name = "meilisearch"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"actix-cors",
"actix-http",
@ -3744,7 +3745,6 @@ dependencies = [
"actix-web-lab",
"anyhow",
"async-openai",
"async-trait",
"brotli",
"bstr",
"build-info",
@ -3830,7 +3830,7 @@ dependencies = [
[[package]]
name = "meilisearch-auth"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"base64 0.22.1",
"enum-iterator",
@ -3849,12 +3849,13 @@ dependencies = [
[[package]]
name = "meilisearch-types"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"actix-web",
"anyhow",
"bumpalo",
"bumparaw-collections",
"byte-unit",
"convert_case 0.8.0",
"csv",
"deserr",
@ -3883,7 +3884,7 @@ dependencies = [
[[package]]
name = "meilitool"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"anyhow",
"clap",
@ -3917,7 +3918,7 @@ dependencies = [
[[package]]
name = "milli"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"allocator-api2 0.3.0",
"arroy",
@ -3969,7 +3970,6 @@ dependencies = [
"ordered-float 5.0.0",
"rand 0.8.5",
"rayon",
"rayon-par-bridge",
"rhai",
"roaring",
"rstar",
@ -3987,7 +3987,6 @@ dependencies = [
"time",
"tokenizers",
"tracing",
"uell",
"ureq",
"url",
"utoipa",
@ -4471,7 +4470,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "permissive-json-pointer"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"big_s",
"serde_json",
@ -5002,15 +5001,6 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "rayon-par-bridge"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb6a14d8f65834aca6b0fe4cbbd7a27e639cd3efb1f2a32de9942368f1991de8"
dependencies = [
"rayon",
]
[[package]]
name = "reborrow"
version = "0.5.5"
@ -6457,15 +6447,6 @@ version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
[[package]]
name = "uell"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40de5982e28612e20330e77d81f1559b74f66caf3c7fc10b19ada4843f4b4fd7"
dependencies = [
"bumpalo",
]
[[package]]
name = "ug"
version = "0.4.0"
@ -7277,7 +7258,7 @@ dependencies = [
[[package]]
name = "xtask"
version = "1.15.2"
version = "1.16.0"
dependencies = [
"anyhow",
"build-info",


@ -22,7 +22,7 @@ members = [
]
[workspace.package]
version = "1.15.2"
version = "1.16.0"
authors = [
"Quentin de Quelen <quentin@dequelen.me>",
"Clément Renault <clement@meilisearch.com>",


@ -11,7 +11,7 @@ use milli::heed::{EnvOpenOptions, RwTxn};
use milli::progress::Progress;
use milli::update::new::indexer;
use milli::update::{IndexerConfig, Settings};
use milli::vector::EmbeddingConfigs;
use milli::vector::RuntimeEmbedders;
use milli::{FilterableAttributesRule, Index};
use rand::seq::SliceRandom;
use rand_chacha::rand_core::SeedableRng;
@ -65,7 +65,7 @@ fn setup_settings<'t>(
let sortable_fields = sortable_fields.iter().map(|s| s.to_string()).collect();
builder.set_sortable_fields(sortable_fields);
builder.execute(|_| (), || false).unwrap();
builder.execute(&|| false, &Progress::default(), Default::default()).unwrap();
}
fn setup_index_with_settings(
@ -166,9 +166,10 @@ fn indexing_songs_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -232,9 +233,10 @@ fn reindexing_songs_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -276,9 +278,10 @@ fn reindexing_songs_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -344,9 +347,10 @@ fn deleting_songs_in_batches_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -420,9 +424,10 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -464,9 +469,10 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -504,9 +510,10 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -571,9 +578,10 @@ fn indexing_songs_without_faceted_numbers(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -637,9 +645,10 @@ fn indexing_songs_without_faceted_fields(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -703,9 +712,10 @@ fn indexing_wiki(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -768,9 +778,10 @@ fn reindexing_wiki(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -812,9 +823,10 @@ fn reindexing_wiki(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -879,9 +891,10 @@ fn deleting_wiki_in_batches_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -955,9 +968,10 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1000,9 +1014,10 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1041,9 +1056,10 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1107,9 +1123,10 @@ fn indexing_movies_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1172,9 +1189,10 @@ fn reindexing_movies_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1216,9 +1234,10 @@ fn reindexing_movies_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1283,9 +1302,10 @@ fn deleting_movies_in_batches_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1331,9 +1351,10 @@ fn delete_documents_from_ids(index: Index, document_ids_to_delete: Vec<RoaringBi
new_fields_ids_map,
Some(primary_key),
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1395,9 +1416,10 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1439,9 +1461,10 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1479,9 +1502,10 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1568,9 +1592,10 @@ fn indexing_nested_movies_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1658,9 +1683,10 @@ fn deleting_nested_movies_in_batches_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1740,9 +1766,10 @@ fn indexing_nested_movies_without_faceted_fields(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1806,9 +1833,10 @@ fn indexing_geo(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1871,9 +1899,10 @@ fn reindexing_geo(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1915,9 +1944,10 @@ fn reindexing_geo(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();
@ -1982,9 +2012,10 @@ fn deleting_geo_in_batches_default(c: &mut Criterion) {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();


@ -13,7 +13,7 @@ use milli::heed::EnvOpenOptions;
use milli::progress::Progress;
use milli::update::new::indexer;
use milli::update::{IndexerConfig, Settings};
use milli::vector::EmbeddingConfigs;
use milli::vector::RuntimeEmbedders;
use milli::{Criterion, Filter, Index, Object, TermsMatchingStrategy};
use serde_json::Value;
@ -90,7 +90,7 @@ pub fn base_setup(conf: &Conf) -> Index {
(conf.configure)(&mut builder);
builder.execute(|_| (), || false).unwrap();
builder.execute(&|| false, &Progress::default(), Default::default()).unwrap();
wtxn.commit().unwrap();
let config = IndexerConfig::default();
@ -125,9 +125,10 @@ pub fn base_setup(conf: &Conf) -> Index {
new_fields_ids_map,
primary_key,
&document_changes,
EmbeddingConfigs::default(),
RuntimeEmbedders::default(),
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();


@ -1,12 +1,17 @@
#![allow(clippy::type_complexity)]
#![allow(clippy::wrong_self_convention)]
use std::collections::BTreeMap;
use meilisearch_types::batches::BatchId;
use meilisearch_types::byte_unit::Byte;
use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::Key;
use meilisearch_types::milli::update::IndexDocumentsMethod;
use meilisearch_types::settings::Unchecked;
use meilisearch_types::tasks::{Details, IndexSwap, KindWithContent, Status, Task, TaskId};
use meilisearch_types::tasks::{
Details, ExportIndexSettings, IndexSwap, KindWithContent, Status, Task, TaskId,
};
use meilisearch_types::InstanceUid;
use roaring::RoaringBitmap;
use serde::{Deserialize, Serialize};
@ -141,6 +146,12 @@ pub enum KindDump {
instance_uid: Option<InstanceUid>,
},
SnapshotCreation,
Export {
url: String,
api_key: Option<String>,
payload_size: Option<Byte>,
indexes: BTreeMap<String, ExportIndexSettings>,
},
UpgradeDatabase {
from: (u32, u32, u32),
},
@ -213,6 +224,15 @@ impl From<KindWithContent> for KindDump {
KindDump::DumpCreation { keys, instance_uid }
}
KindWithContent::SnapshotCreation => KindDump::SnapshotCreation,
KindWithContent::Export { url, api_key, payload_size, indexes } => KindDump::Export {
url,
api_key,
payload_size,
indexes: indexes
.into_iter()
.map(|(pattern, settings)| (pattern.to_string(), settings))
.collect(),
},
KindWithContent::UpgradeDatabase { from: version } => {
KindDump::UpgradeDatabase { from: version }
}
@ -329,6 +349,7 @@ pub(crate) mod test {
write_channel_congestion: None,
internal_database_sizes: Default::default(),
},
embedder_stats: Default::default(),
enqueued_at: Some(BatchEnqueuedAt {
earliest: datetime!(2022-11-11 0:00 UTC),
oldest: datetime!(2022-11-11 0:00 UTC),
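
Note on the `KindWithContent::Export` → `KindDump::Export` mapping above: `IndexUidPattern` keys are flattened to plain strings on the way out, and the import path re-parses them, treating a failed parse as a corrupted dump. A minimal standalone sketch of that round trip, using hypothetical stand-in types (the empty-string check stands in for the real `IndexUidPattern::try_from` validation):

use std::collections::BTreeMap;

// Hypothetical stand-ins, for illustration only.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct IndexUidPattern(String);
#[derive(Debug, Clone)]
struct ExportIndexSettings;

fn to_dump(
    indexes: BTreeMap<IndexUidPattern, ExportIndexSettings>,
) -> BTreeMap<String, ExportIndexSettings> {
    // Outgoing: patterns become plain strings so the dump format stays self-contained.
    indexes.into_iter().map(|(pattern, settings)| (pattern.0, settings)).collect()
}

fn from_dump(
    indexes: BTreeMap<String, ExportIndexSettings>,
) -> Result<BTreeMap<IndexUidPattern, ExportIndexSettings>, &'static str> {
    // Incoming: a string that no longer parses as a pattern means the dump is corrupted.
    indexes
        .into_iter()
        .map(|(s, settings)| {
            if s.is_empty() {
                Err("corrupted dump")
            } else {
                Ok((IndexUidPattern(s), settings))
            }
        })
        .collect()
}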


@ -116,6 +116,15 @@ impl DumpReader {
}
}
pub fn chat_completions_settings(
&mut self,
) -> Result<Box<dyn Iterator<Item = Result<(String, v6::ChatCompletionSettings)>> + '_>> {
match self {
DumpReader::Current(current) => current.chat_completions_settings(),
DumpReader::Compat(_compat) => Ok(Box::new(std::iter::empty())),
}
}
pub fn features(&self) -> Result<Option<v6::RuntimeTogglableFeatures>> {
match self {
DumpReader::Current(current) => Ok(current.features()),


@ -1,3 +1,4 @@
use std::ffi::OsStr;
use std::fs::{self, File};
use std::io::{BufRead, BufReader, ErrorKind};
use std::path::Path;
@ -21,6 +22,7 @@ pub type Unchecked = meilisearch_types::settings::Unchecked;
pub type Task = crate::TaskDump;
pub type Batch = meilisearch_types::batches::Batch;
pub type Key = meilisearch_types::keys::Key;
pub type ChatCompletionSettings = meilisearch_types::features::ChatCompletionSettings;
pub type RuntimeTogglableFeatures = meilisearch_types::features::RuntimeTogglableFeatures;
pub type Network = meilisearch_types::features::Network;
@ -192,6 +194,34 @@ impl V6Reader {
)
}
pub fn chat_completions_settings(
&mut self,
) -> Result<Box<dyn Iterator<Item = Result<(String, ChatCompletionSettings)>> + '_>> {
let entries = match fs::read_dir(self.dump.path().join("chat-completions-settings")) {
Ok(entries) => entries,
Err(e) if e.kind() == ErrorKind::NotFound => return Ok(Box::new(std::iter::empty())),
Err(e) => return Err(e.into()),
};
Ok(Box::new(
entries
.map(|entry| -> Result<Option<_>> {
let entry = entry?;
let file_name = entry.file_name();
let path = Path::new(&file_name);
if entry.file_type()?.is_file() && path.extension() == Some(OsStr::new("json"))
{
let name = path.file_stem().unwrap().to_str().unwrap().to_string();
let file = File::open(entry.path())?;
let settings = serde_json::from_reader(file)?;
Ok(Some((name, settings)))
} else {
Ok(None)
}
})
.filter_map(|entry| entry.transpose()),
))
}
pub fn features(&self) -> Option<RuntimeTogglableFeatures> {
self.features
}
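
For orientation, here is a minimal standalone sketch of the directory scan the new `chat_completions_settings` reader performs above: a missing directory is treated as "no settings" rather than an error, and only regular `*.json` files are parsed. The error type and the use of `serde_json::Value` are simplifications:

use std::error::Error;
use std::ffi::OsStr;
use std::fs::{self, File};
use std::io::ErrorKind;
use std::path::Path;

fn read_settings_dir(dir: &Path) -> Result<Vec<(String, serde_json::Value)>, Box<dyn Error>> {
    // A dump produced before this feature has no such directory: not an error.
    let entries = match fs::read_dir(dir) {
        Ok(entries) => entries,
        Err(e) if e.kind() == ErrorKind::NotFound => return Ok(Vec::new()),
        Err(e) => return Err(e.into()),
    };
    let mut out = Vec::new();
    for entry in entries {
        let entry = entry?;
        let path = entry.path();
        // Only regular `<name>.json` files count; anything else is skipped.
        if entry.file_type()?.is_file() && path.extension() == Some(OsStr::new("json")) {
            let name = path.file_stem().unwrap().to_string_lossy().into_owned();
            let settings = serde_json::from_reader(File::open(&path)?)?;
            out.push((name, settings));
        }
    }
    Ok(out)
}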


@ -5,7 +5,7 @@ use std::path::PathBuf;
use flate2::write::GzEncoder;
use flate2::Compression;
use meilisearch_types::batches::Batch;
use meilisearch_types::features::{Network, RuntimeTogglableFeatures};
use meilisearch_types::features::{ChatCompletionSettings, Network, RuntimeTogglableFeatures};
use meilisearch_types::keys::Key;
use meilisearch_types::settings::{Checked, Settings};
use serde_json::{Map, Value};
@ -51,6 +51,10 @@ impl DumpWriter {
KeyWriter::new(self.dir.path().to_path_buf())
}
pub fn create_chat_completions_settings(&self) -> Result<ChatCompletionsSettingsWriter> {
ChatCompletionsSettingsWriter::new(self.dir.path().join("chat-completions-settings"))
}
pub fn create_tasks_queue(&self) -> Result<TaskWriter> {
TaskWriter::new(self.dir.path().join("tasks"))
}
@ -104,6 +108,24 @@ impl KeyWriter {
}
}
pub struct ChatCompletionsSettingsWriter {
path: PathBuf,
}
impl ChatCompletionsSettingsWriter {
pub(crate) fn new(path: PathBuf) -> Result<Self> {
std::fs::create_dir(&path)?;
Ok(ChatCompletionsSettingsWriter { path })
}
pub fn push_settings(&mut self, name: &str, settings: &ChatCompletionSettings) -> Result<()> {
let mut settings_file = File::create(self.path.join(name).with_extension("json"))?;
serde_json::to_writer(&mut settings_file, &settings)?;
settings_file.flush()?;
Ok(())
}
}
pub struct TaskWriter {
queue: BufWriter<File>,
update_files: PathBuf,
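
And the matching write side, as a sketch. The settings type here is a simplified, hypothetical stand-in (the real `ChatCompletionSettings` lives in `meilisearch-types`):

use std::fs::{self, File};
use std::io::Write as _;
use std::path::Path;

// Hypothetical simplified settings type, for illustration only.
#[derive(serde::Serialize)]
struct ChatSettings {
    prompt: String,
}

fn write_settings(dir: &Path, name: &str, settings: &ChatSettings) -> std::io::Result<()> {
    // Layout: <dump>/chat-completions-settings/<name>.json, one file per workspace.
    fs::create_dir_all(dir)?;
    let mut file = File::create(dir.join(name).with_extension("json"))?;
    serde_json::to_writer(&mut file, settings)
        .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;
    file.flush()
}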


@ -13,7 +13,7 @@ use milli::heed::EnvOpenOptions;
use milli::progress::Progress;
use milli::update::new::indexer;
use milli::update::IndexerConfig;
use milli::vector::EmbeddingConfigs;
use milli::vector::RuntimeEmbedders;
use milli::Index;
use serde_json::Value;
use tempfile::TempDir;
@ -89,7 +89,7 @@ fn main() {
let mut new_fields_ids_map = db_fields_ids_map.clone();
let indexer_alloc = Bump::new();
let embedders = EmbeddingConfigs::default();
let embedders = RuntimeEmbedders::default();
let mut indexer = indexer::DocumentOperation::new();
let mut operations = Vec::new();
@ -144,6 +144,7 @@ fn main() {
embedders,
&|| false,
&Progress::default(),
&Default::default(),
)
.unwrap();


@ -44,6 +44,7 @@ time = { version = "0.3.41", features = [
tracing = "0.1.41"
ureq = "2.12.1"
uuid = { version = "1.17.0", features = ["serde", "v4"] }
backoff = "0.4.0"
[dev-dependencies]
big_s = "1.0.2"


@ -4,6 +4,7 @@ use std::io;
use dump::{KindDump, TaskDump, UpdateFile};
use meilisearch_types::batches::{Batch, BatchId};
use meilisearch_types::heed::RwTxn;
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::milli;
use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
use roaring::RoaringBitmap;
@ -211,6 +212,23 @@ impl<'a> Dump<'a> {
KindWithContent::DumpCreation { keys, instance_uid }
}
KindDump::SnapshotCreation => KindWithContent::SnapshotCreation,
KindDump::Export { url, api_key, payload_size, indexes } => {
KindWithContent::Export {
url,
api_key,
payload_size,
indexes: indexes
.into_iter()
.map(|(pattern, settings)| {
Ok((
IndexUidPattern::try_from(pattern)
.map_err(|_| Error::CorruptedDump)?,
settings,
))
})
.collect::<Result<_, Error>>()?,
}
}
KindDump::UpgradeDatabase { from } => KindWithContent::UpgradeDatabase { from },
},
};


@ -151,6 +151,10 @@ pub enum Error {
CorruptedTaskQueue,
#[error(transparent)]
DatabaseUpgrade(Box<Self>),
#[error(transparent)]
Export(Box<Self>),
#[error("Failed to export documents to remote server {code} ({type}): {message} <{link}>")]
FromRemoteWhenExporting { message: String, code: String, r#type: String, link: String },
#[error("Failed to rollback for index `{index}`: {rollback_outcome} ")]
RollbackFailed { index: String, rollback_outcome: RollbackOutcome },
#[error(transparent)]
@ -212,6 +216,7 @@ impl Error {
| Error::BatchNotFound(_)
| Error::TaskDeletionWithEmptyQuery
| Error::TaskCancelationWithEmptyQuery
| Error::FromRemoteWhenExporting { .. }
| Error::AbortedTask
| Error::Dump(_)
| Error::Heed(_)
@ -221,6 +226,7 @@ impl Error {
| Error::IoError(_)
| Error::Persist(_)
| Error::FeatureNotEnabled(_)
| Error::Export(_)
| Error::Anyhow(_) => true,
Error::CreateBatch(_)
| Error::CorruptedTaskQueue
@ -282,6 +288,7 @@ impl ErrorCode for Error {
Error::Dump(e) => e.error_code(),
Error::Milli { error, .. } => error.error_code(),
Error::ProcessBatchPanicked(_) => Code::Internal,
Error::FromRemoteWhenExporting { .. } => Code::Internal,
Error::Heed(e) => e.error_code(),
Error::HeedTransaction(e) => e.error_code(),
Error::FileStore(e) => e.error_code(),
@ -294,6 +301,7 @@ impl ErrorCode for Error {
Error::CorruptedTaskQueue => Code::Internal,
Error::CorruptedDump => Code::Internal,
Error::DatabaseUpgrade(_) => Code::Internal,
Error::Export(_) => Code::Internal,
Error::RollbackFailed { .. } => Code::Internal,
Error::UnrecoverableError(_) => Code::Internal,
Error::IndexSchedulerVersionMismatch { .. } => Code::Internal,


@ -144,6 +144,19 @@ impl RoFeatures {
.into())
}
}
pub fn check_multimodal(&self, disabled_action: &'static str) -> Result<()> {
if self.runtime.multimodal {
Ok(())
} else {
Err(FeatureNotEnabledError {
disabled_action,
feature: "multimodal",
issue_link: "https://github.com/orgs/meilisearch/discussions/846",
}
.into())
}
}
}
impl FeatureData {


@ -289,6 +289,9 @@ fn snapshot_details(d: &Details) -> String {
Details::IndexSwap { swaps } => {
format!("{{ swaps: {swaps:?} }}")
}
Details::Export { url, api_key, payload_size, indexes } => {
format!("{{ url: {url:?}, api_key: {api_key:?}, payload_size: {payload_size:?}, indexes: {indexes:?} }}")
}
Details::UpgradeDatabase { from, to } => {
format!("{{ from: {from:?}, to: {to:?} }}")
}
@ -343,6 +346,7 @@ pub fn snapshot_batch(batch: &Batch) -> String {
uid,
details,
stats,
embedder_stats,
started_at,
finished_at,
progress: _,
@ -366,6 +370,12 @@ pub fn snapshot_batch(batch: &Batch) -> String {
snap.push_str(&format!("uid: {uid}, "));
snap.push_str(&format!("details: {}, ", serde_json::to_string(details).unwrap()));
snap.push_str(&format!("stats: {}, ", serde_json::to_string(&stats).unwrap()));
if !embedder_stats.skip_serializing() {
snap.push_str(&format!(
"embedder stats: {}, ",
serde_json::to_string(&embedder_stats).unwrap()
));
}
snap.push_str(&format!("stop reason: {}, ", serde_json::to_string(&stop_reason).unwrap()));
snap.push('}');
snap


@ -57,12 +57,15 @@ use meilisearch_types::features::{
use meilisearch_types::heed::byteorder::BE;
use meilisearch_types::heed::types::{DecodeIgnore, SerdeJson, Str, I128};
use meilisearch_types::heed::{self, Database, Env, RoTxn, WithoutTls};
use meilisearch_types::milli::index::IndexEmbeddingConfig;
use meilisearch_types::milli::update::IndexerConfig;
use meilisearch_types::milli::vector::{Embedder, EmbedderOptions, EmbeddingConfigs};
use meilisearch_types::milli::vector::json_template::JsonTemplate;
use meilisearch_types::milli::vector::{
Embedder, EmbedderOptions, RuntimeEmbedder, RuntimeEmbedders, RuntimeFragment,
};
use meilisearch_types::milli::{self, Index};
use meilisearch_types::task_view::TaskView;
use meilisearch_types::tasks::{KindWithContent, Task};
use milli::vector::db::IndexEmbeddingConfig;
use processing::ProcessingTasks;
pub use queue::Query;
use queue::Queue;
@ -851,29 +854,42 @@ impl IndexScheduler {
&self,
index_uid: String,
embedding_configs: Vec<IndexEmbeddingConfig>,
) -> Result<EmbeddingConfigs> {
) -> Result<RuntimeEmbedders> {
let res: Result<_> = embedding_configs
.into_iter()
.map(
|IndexEmbeddingConfig {
name,
config: milli::vector::EmbeddingConfig { embedder_options, prompt, quantized },
..
}| {
let prompt = Arc::new(
prompt
.try_into()
.map_err(meilisearch_types::milli::Error::from)
.map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?,
);
fragments,
}|
-> Result<(String, Arc<RuntimeEmbedder>)> {
let document_template = prompt
.try_into()
.map_err(meilisearch_types::milli::Error::from)
.map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;
let fragments = fragments
.into_inner()
.into_iter()
.map(|fragment| {
let value = embedder_options.fragment(&fragment.name).unwrap();
let template = JsonTemplate::new(value.clone()).unwrap();
RuntimeFragment { name: fragment.name, id: fragment.id, template }
})
.collect();
// optimistically return existing embedder
{
let embedders = self.embedders.read().unwrap();
if let Some(embedder) = embedders.get(&embedder_options) {
return Ok((
name,
(embedder.clone(), prompt, quantized.unwrap_or_default()),
let runtime = Arc::new(RuntimeEmbedder::new(
embedder.clone(),
document_template,
fragments,
quantized.unwrap_or_default(),
));
return Ok((name, runtime));
}
}
@ -889,11 +905,19 @@ impl IndexScheduler {
let mut embedders = self.embedders.write().unwrap();
embedders.insert(embedder_options, embedder.clone());
}
Ok((name, (embedder, prompt, quantized.unwrap_or_default())))
let runtime = Arc::new(RuntimeEmbedder::new(
embedder.clone(),
document_template,
fragments,
quantized.unwrap_or_default(),
));
Ok((name, runtime))
},
)
.collect();
res.map(EmbeddingConfigs::new)
res.map(RuntimeEmbedders::new)
}
pub fn chat_settings(&self, uid: &str) -> Result<Option<ChatCompletionSettings>> {
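
The lock dance in `embedders` above (read lock first, write lock only on a miss) is a classic read-mostly cache shape. A detached sketch of just that pattern, with stand-in types:

use std::collections::HashMap;
use std::hash::Hash;
use std::sync::{Arc, RwLock};

struct EmbedderCache<K, V> {
    inner: RwLock<HashMap<K, Arc<V>>>,
}

impl<K: Hash + Eq, V> EmbedderCache<K, V> {
    fn get_or_build(&self, key: K, build: impl FnOnce() -> V) -> Arc<V> {
        // Optimistic fast path: shared lock only, no contention on hits.
        {
            let map = self.inner.read().unwrap();
            if let Some(v) = map.get(&key) {
                return Arc::clone(v);
            }
        }
        // Slow path: build outside the lock, then publish. Two threads may
        // race and build twice; the last insert wins, which is acceptable
        // for a cache of immutable values.
        let value = Arc::new(build());
        self.inner.write().unwrap().insert(key, Arc::clone(&value));
        value
    }
}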


@ -103,6 +103,7 @@ make_enum_progress! {
pub enum DumpCreationProgress {
StartTheDumpCreation,
DumpTheApiKeys,
DumpTheChatCompletionSettings,
DumpTheTasks,
DumpTheBatches,
DumpTheIndexes,
@ -175,8 +176,17 @@ make_enum_progress! {
}
}
make_enum_progress! {
pub enum Export {
EnsuringCorrectnessOfTheTarget,
ExportingTheSettings,
ExportingTheDocuments,
}
}
make_atomic_progress!(Task alias AtomicTaskStep => "task" );
make_atomic_progress!(Document alias AtomicDocumentStep => "document" );
make_atomic_progress!(Index alias AtomicIndexStep => "index" );
make_atomic_progress!(Batch alias AtomicBatchStep => "batch" );
make_atomic_progress!(UpdateFile alias AtomicUpdateFileStep => "update file" );
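
For readers unfamiliar with these macros: each `make_atomic_progress!` line boils down to a shared atomic counter bumped from worker threads and read by whatever renders progress. A rough standalone sketch of that idea (the real macros also generate the step name and unit):

use std::sync::atomic::{AtomicU32, Ordering};
use std::sync::Arc;

struct AtomicStep {
    done: AtomicU32,
    total: u32,
}

impl AtomicStep {
    fn new(total: u32) -> Arc<Self> {
        Arc::new(Self { done: AtomicU32::new(0), total })
    }
    // Called from worker threads, e.g. once per processed document.
    fn add(&self, n: u32) {
        self.done.fetch_add(n, Ordering::Relaxed);
    }
    // Called by the progress renderer.
    fn fraction(&self) -> f32 {
        self.done.load(Ordering::Relaxed) as f32 / self.total.max(1) as f32
    }
}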


@ -179,6 +179,7 @@ impl BatchQueue {
progress: None,
details: batch.details,
stats: batch.stats,
embedder_stats: batch.embedder_stats.as_ref().into(),
started_at: batch.started_at,
finished_at: batch.finished_at,
enqueued_at: batch.enqueued_at,


@ -71,6 +71,7 @@ impl From<KindWithContent> for AutobatchKind {
KindWithContent::TaskCancelation { .. }
| KindWithContent::TaskDeletion { .. }
| KindWithContent::DumpCreation { .. }
| KindWithContent::Export { .. }
| KindWithContent::UpgradeDatabase { .. }
| KindWithContent::SnapshotCreation => {
panic!("The autobatcher should never be called with tasks that don't apply to an index.")


@ -1,4 +1,5 @@
use std::fmt;
use std::io::ErrorKind;
use meilisearch_types::heed::RoTxn;
use meilisearch_types::milli::update::IndexDocumentsMethod;
@ -47,6 +48,9 @@ pub(crate) enum Batch {
IndexSwap {
task: Task,
},
Export {
task: Task,
},
UpgradeDatabase {
tasks: Vec<Task>,
},
@ -103,6 +107,7 @@ impl Batch {
Batch::TaskCancelation { task, .. }
| Batch::Dump(task)
| Batch::IndexCreation { task, .. }
| Batch::Export { task }
| Batch::IndexUpdate { task, .. } => {
RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
}
@ -142,6 +147,7 @@ impl Batch {
| TaskDeletions(_)
| SnapshotCreation(_)
| Dump(_)
| Export { .. }
| UpgradeDatabase { .. }
| IndexSwap { .. } => None,
IndexOperation { op, .. } => Some(op.index_uid()),
@ -167,6 +173,7 @@ impl fmt::Display for Batch {
Batch::IndexUpdate { .. } => f.write_str("IndexUpdate")?,
Batch::IndexDeletion { .. } => f.write_str("IndexDeletion")?,
Batch::IndexSwap { .. } => f.write_str("IndexSwap")?,
Batch::Export { .. } => f.write_str("Export")?,
Batch::UpgradeDatabase { .. } => f.write_str("UpgradeDatabase")?,
};
match index_uid {
@ -426,9 +433,10 @@ impl IndexScheduler {
/// 0. We get the *last* task to cancel.
/// 1. We get the tasks to upgrade.
/// 2. We get the *next* task to delete.
/// 3. We get the *next* snapshot to process.
/// 4. We get the *next* dump to process.
/// 5. We get the *next* tasks to process for a specific index.
/// 3. We get the *next* export to process.
/// 4. We get the *next* snapshot to process.
/// 5. We get the *next* dump to process.
/// 6. We get the *next* tasks to process for a specific index.
#[tracing::instrument(level = "trace", skip(self, rtxn), target = "indexing::scheduler")]
pub(crate) fn create_next_batch(
&self,
@ -500,7 +508,17 @@ impl IndexScheduler {
return Ok(Some((Batch::TaskDeletions(tasks), current_batch)));
}
// 3. we batch the snapshot.
// 3. we batch the export.
let to_export = self.queue.tasks.get_kind(rtxn, Kind::Export)? & enqueued;
if !to_export.is_empty() {
let task_id = to_export.iter().next().expect("There must be at least one export task");
let mut task = self.queue.tasks.get_task(rtxn, task_id)?.unwrap();
current_batch.processing([&mut task]);
current_batch.reason(BatchStopReason::TaskKindCannotBeBatched { kind: Kind::Export });
return Ok(Some((Batch::Export { task }, current_batch)));
}
// 4. we batch the snapshot.
let to_snapshot = self.queue.tasks.get_kind(rtxn, Kind::SnapshotCreation)? & enqueued;
if !to_snapshot.is_empty() {
let mut tasks = self.queue.tasks.get_existing_tasks(rtxn, to_snapshot)?;
@ -510,7 +528,7 @@ impl IndexScheduler {
return Ok(Some((Batch::SnapshotCreation(tasks), current_batch)));
}
// 4. we batch the dumps.
// 5. we batch the dumps.
let to_dump = self.queue.tasks.get_kind(rtxn, Kind::DumpCreation)? & enqueued;
if let Some(to_dump) = to_dump.min() {
let mut task =
@ -523,7 +541,7 @@ impl IndexScheduler {
return Ok(Some((Batch::Dump(task), current_batch)));
}
// 5. We make a batch from the unprioritised tasks. Start by taking the next enqueued task.
// 6. We make a batch from the unprioritised tasks. Start by taking the next enqueued task.
let task_id = if let Some(task_id) = enqueued.min() { task_id } else { return Ok(None) };
let mut task =
self.queue.tasks.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
@ -577,7 +595,11 @@ impl IndexScheduler {
.and_then(|task| task.ok_or(Error::CorruptedTaskQueue))?;
if let Some(uuid) = task.content_uuid() {
let content_size = self.queue.file_store.compute_size(uuid)?;
let content_size = match self.queue.file_store.compute_size(uuid) {
Ok(content_size) => content_size,
Err(file_store::Error::IoError(err)) if err.kind() == ErrorKind::NotFound => 0,
Err(otherwise) => return Err(otherwise.into()),
};
total_size = total_size.saturating_add(content_size);
}
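
The `compute_size` fallback above makes a missing update file count as zero bytes instead of failing batch-size estimation. The same tolerance, as a standalone sketch over `fs::metadata`:

use std::io::ErrorKind;
use std::path::Path;

// A task whose update file has already been removed contributes 0 bytes
// instead of aborting the whole batch-size computation.
fn content_size(path: &Path) -> std::io::Result<u64> {
    match std::fs::metadata(path) {
        Ok(meta) => Ok(meta.len()),
        Err(err) if err.kind() == ErrorKind::NotFound => Ok(0),
        Err(err) => Err(err),
    }
}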


@ -4,6 +4,7 @@ mod autobatcher_test;
mod create_batch;
mod process_batch;
mod process_dump_creation;
mod process_export;
mod process_index_operation;
mod process_snapshot_creation;
mod process_upgrade;


@ -162,8 +162,13 @@ impl IndexScheduler {
.set_currently_updating_index(Some((index_uid.clone(), index.clone())));
let pre_commit_dabases_sizes = index.database_sizes(&index_wtxn)?;
let (tasks, congestion) =
self.apply_index_operation(&mut index_wtxn, &index, op, &progress)?;
let (tasks, congestion) = self.apply_index_operation(
&mut index_wtxn,
&index,
op,
&progress,
current_batch.embedder_stats.clone(),
)?;
{
progress.update_progress(FinalizingIndexStep::Committing);
@ -238,10 +243,12 @@ impl IndexScheduler {
);
builder.set_primary_key(primary_key);
let must_stop_processing = self.scheduler.must_stop_processing.clone();
builder
.execute(
|indexing_step| tracing::debug!(update = ?indexing_step),
|| must_stop_processing.get(),
&|| must_stop_processing.get(),
&progress,
current_batch.embedder_stats.clone(),
)
.map_err(|e| Error::from_milli(e, Some(index_uid.to_string())))?;
index_wtxn.commit()?;
@ -361,6 +368,46 @@ impl IndexScheduler {
task.status = Status::Succeeded;
Ok((vec![task], ProcessBatchInfo::default()))
}
Batch::Export { mut task } => {
let KindWithContent::Export { url, api_key, payload_size, indexes } = &task.kind
else {
unreachable!()
};
let ret = catch_unwind(AssertUnwindSafe(|| {
self.process_export(
url,
api_key.as_deref(),
payload_size.as_ref(),
indexes,
progress,
)
}));
let stats = match ret {
Ok(Ok(stats)) => stats,
Ok(Err(Error::AbortedTask)) => return Err(Error::AbortedTask),
Ok(Err(e)) => return Err(Error::Export(Box::new(e))),
Err(e) => {
let msg = match e.downcast_ref::<&'static str>() {
Some(s) => *s,
None => match e.downcast_ref::<String>() {
Some(s) => &s[..],
None => "Box<dyn Any>",
},
};
return Err(Error::Export(Box::new(Error::ProcessBatchPanicked(
msg.to_string(),
))));
}
};
task.status = Status::Succeeded;
if let Some(Details::Export { indexes, .. }) = task.details.as_mut() {
*indexes = stats;
}
Ok((vec![task], ProcessBatchInfo::default()))
}
Batch::UpgradeDatabase { mut tasks } => {
let KindWithContent::UpgradeDatabase { from } = tasks.last().unwrap().kind else {
unreachable!();
@ -708,9 +755,11 @@ impl IndexScheduler {
from.1,
from.2
);
match std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
let ret = catch_unwind(std::panic::AssertUnwindSafe(|| {
self.process_rollback(from, progress)
})) {
}));
match ret {
Ok(Ok(())) => {}
Ok(Err(err)) => return Err(Error::DatabaseUpgrade(Box::new(err))),
Err(e) => {
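
The `downcast_ref` chain above is the standard way to recover a message from a panic payload: it is a `&'static str` for `panic!("literal")`, a `String` for formatted panics, and otherwise opaque. A self-contained sketch:

use std::any::Any;
use std::panic::{catch_unwind, AssertUnwindSafe};

// A panic payload is `Box<dyn Any + Send>`; try the two common cases and
// fall back to a generic label, exactly as in the match above.
fn panic_message(payload: Box<dyn Any + Send>) -> String {
    match payload.downcast_ref::<&'static str>() {
        Some(s) => (*s).to_string(),
        None => match payload.downcast_ref::<String>() {
            Some(s) => s.clone(),
            None => "Box<dyn Any>".to_string(),
        },
    }
}

fn main() {
    let err = catch_unwind(AssertUnwindSafe(|| {
        panic!("boom");
    }))
    .unwrap_err();
    assert_eq!(panic_message(err), "boom");
}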


@ -43,7 +43,16 @@ impl IndexScheduler {
let rtxn = self.env.read_txn()?;
// 2. dump the tasks
// 2. dump the chat completion settings
// TODO should I skip the export if the chat completion has been disabled?
progress.update_progress(DumpCreationProgress::DumpTheChatCompletionSettings);
let mut dump_chat_completion_settings = dump.create_chat_completions_settings()?;
for result in self.chat_settings.iter(&rtxn)? {
let (name, chat_settings) = result?;
dump_chat_completion_settings.push_settings(name, &chat_settings)?;
}
// 3. dump the tasks
progress.update_progress(DumpCreationProgress::DumpTheTasks);
let mut dump_tasks = dump.create_tasks_queue()?;
@ -81,7 +90,7 @@ impl IndexScheduler {
let mut dump_content_file = dump_tasks.push_task(&t.into())?;
// 2.1. Dump the `content_file` associated with the task if there is one and the task is not finished yet.
// 3.1. Dump the `content_file` associated with the task if there is one and the task is not finished yet.
if let Some(content_file) = content_file {
if self.scheduler.must_stop_processing.get() {
return Err(Error::AbortedTask);
@ -105,7 +114,7 @@ impl IndexScheduler {
}
dump_tasks.flush()?;
// 3. dump the batches
// 4. dump the batches
progress.update_progress(DumpCreationProgress::DumpTheBatches);
let mut dump_batches = dump.create_batches_queue()?;
@ -138,7 +147,7 @@ impl IndexScheduler {
}
dump_batches.flush()?;
// 4. Dump the indexes
// 5. Dump the indexes
progress.update_progress(DumpCreationProgress::DumpTheIndexes);
let nb_indexes = self.index_mapper.index_mapping.len(&rtxn)? as u32;
let mut count = 0;
@ -165,9 +174,6 @@ impl IndexScheduler {
let fields_ids_map = index.fields_ids_map(&rtxn)?;
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
let embedding_configs = index
.embedding_configs(&rtxn)
.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
let nb_documents = index
.number_of_documents(&rtxn)
@ -178,7 +184,7 @@ impl IndexScheduler {
let documents = index
.all_documents(&rtxn)
.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
// 4.1. Dump the documents
// 5.1. Dump the documents
for ret in documents {
if self.scheduler.must_stop_processing.get() {
return Err(Error::AbortedTask);
@ -221,16 +227,12 @@ impl IndexScheduler {
return Err(Error::from_milli(user_err, Some(uid.to_string())));
};
for (embedder_name, embeddings) in embeddings {
let user_provided = embedding_configs
.iter()
.find(|conf| conf.name == embedder_name)
.is_some_and(|conf| conf.user_provided.contains(id));
for (embedder_name, (embeddings, regenerate)) in embeddings {
let embeddings = ExplicitVectors {
embeddings: Some(VectorOrArrayOfVectors::from_array_of_vectors(
embeddings,
)),
regenerate: !user_provided,
regenerate,
};
vectors.insert(embedder_name, serde_json::to_value(embeddings).unwrap());
}
@ -240,7 +242,7 @@ impl IndexScheduler {
atomic.fetch_add(1, Ordering::Relaxed);
}
// 4.2. Dump the settings
// 5.2. Dump the settings
let settings = meilisearch_types::settings::settings(
index,
&rtxn,
@ -251,7 +253,7 @@ impl IndexScheduler {
Ok(())
})?;
// 5. Dump experimental feature settings
// 6. Dump experimental feature settings
progress.update_progress(DumpCreationProgress::DumpTheExperimentalFeatures);
let features = self.features().runtime_features();
dump.create_experimental_features(features)?;


@ -0,0 +1,365 @@
use std::collections::BTreeMap;
use std::io::{self, Write as _};
use std::sync::atomic;
use std::time::Duration;
use backoff::ExponentialBackoff;
use byte_unit::Byte;
use flate2::write::GzEncoder;
use flate2::Compression;
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::milli::constants::RESERVED_VECTORS_FIELD_NAME;
use meilisearch_types::milli::progress::{Progress, VariableNameStep};
use meilisearch_types::milli::update::{request_threads, Setting};
use meilisearch_types::milli::vector::parsed_vectors::{ExplicitVectors, VectorOrArrayOfVectors};
use meilisearch_types::milli::{self, obkv_to_json, Filter, InternalError};
use meilisearch_types::settings::{self, SecretPolicy};
use meilisearch_types::tasks::{DetailsExportIndexSettings, ExportIndexSettings};
use serde::Deserialize;
use ureq::{json, Response};
use super::MustStopProcessing;
use crate::processing::AtomicDocumentStep;
use crate::{Error, IndexScheduler, Result};
impl IndexScheduler {
pub(super) fn process_export(
&self,
base_url: &str,
api_key: Option<&str>,
payload_size: Option<&Byte>,
indexes: &BTreeMap<IndexUidPattern, ExportIndexSettings>,
progress: Progress,
) -> Result<BTreeMap<IndexUidPattern, DetailsExportIndexSettings>> {
#[cfg(test)]
self.maybe_fail(crate::test_utils::FailureLocation::ProcessExport)?;
let indexes: Vec<_> = self
.index_names()?
.into_iter()
.flat_map(|uid| {
indexes
.iter()
.find(|(pattern, _)| pattern.matches_str(&uid))
.map(|(pattern, settings)| (pattern, uid, settings))
})
.collect();
let mut output = BTreeMap::new();
let agent = ureq::AgentBuilder::new().timeout(Duration::from_secs(5)).build();
let must_stop_processing = self.scheduler.must_stop_processing.clone();
for (i, (_pattern, uid, export_settings)) in indexes.iter().enumerate() {
if must_stop_processing.get() {
return Err(Error::AbortedTask);
}
progress.update_progress(VariableNameStep::<ExportIndex>::new(
format!("Exporting index `{uid}`"),
i as u32,
indexes.len() as u32,
));
let ExportIndexSettings { filter, override_settings } = export_settings;
let index = self.index(uid)?;
let index_rtxn = index.read_txn()?;
// First, check if the index already exists
let url = format!("{base_url}/indexes/{uid}");
let response = retry(&must_stop_processing, || {
let mut request = agent.get(&url);
if let Some(api_key) = api_key {
request = request.set("Authorization", &format!("Bearer {api_key}"));
}
request.send_bytes(Default::default()).map_err(into_backoff_error)
});
let index_exists = match response {
Ok(response) => response.status() == 200,
Err(Error::FromRemoteWhenExporting { code, .. }) if code == "index_not_found" => {
false
}
Err(e) => return Err(e),
};
let primary_key = index
.primary_key(&index_rtxn)
.map_err(|e| Error::from_milli(e.into(), Some(uid.to_string())))?;
// Create the index
if !index_exists {
let url = format!("{base_url}/indexes");
retry(&must_stop_processing, || {
let mut request = agent.post(&url);
if let Some(api_key) = api_key {
request = request.set("Authorization", &format!("Bearer {api_key}"));
}
let index_param = json!({ "uid": uid, "primaryKey": primary_key });
request.send_json(&index_param).map_err(into_backoff_error)
})?;
}
// Patch the index primary key
if index_exists && *override_settings {
let url = format!("{base_url}/indexes/{uid}");
retry(&must_stop_processing, || {
let mut request = agent.patch(&url);
if let Some(api_key) = api_key {
request = request.set("Authorization", &format!("Bearer {api_key}"));
}
let index_param = json!({ "primaryKey": primary_key });
request.send_json(&index_param).map_err(into_backoff_error)
})?;
}
// Send the index settings
if !index_exists || *override_settings {
let mut settings =
settings::settings(&index, &index_rtxn, SecretPolicy::RevealSecrets)
.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
// Remove the experimental chat setting if not enabled
if self.features().check_chat_completions("exporting chat settings").is_err() {
settings.chat = Setting::NotSet;
}
// Retry logic for sending settings
let url = format!("{base_url}/indexes/{uid}/settings");
let bearer = api_key.map(|api_key| format!("Bearer {api_key}"));
retry(&must_stop_processing, || {
let mut request = agent.patch(&url);
if let Some(bearer) = bearer.as_ref() {
request = request.set("Authorization", bearer);
}
request.send_json(settings.clone()).map_err(into_backoff_error)
})?;
}
let filter = filter
.as_ref()
.map(Filter::from_json)
.transpose()
.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?
.flatten();
let filter_universe = filter
.map(|f| f.evaluate(&index_rtxn, &index))
.transpose()
.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
let whole_universe = index
.documents_ids(&index_rtxn)
.map_err(|e| Error::from_milli(e.into(), Some(uid.to_string())))?;
let universe = filter_universe.unwrap_or(whole_universe);
let fields_ids_map = index.fields_ids_map(&index_rtxn)?;
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
// We don't need to keep this one alive as we will
// spawn many threads to process the documents
drop(index_rtxn);
let total_documents = universe.len() as u32;
let (step, progress_step) = AtomicDocumentStep::new(total_documents);
progress.update_progress(progress_step);
output.insert(
IndexUidPattern::new_unchecked(uid.clone()),
DetailsExportIndexSettings {
settings: (*export_settings).clone(),
matched_documents: Some(total_documents as u64),
},
);
let limit = payload_size.map(|ps| ps.as_u64() as usize).unwrap_or(50 * 1024 * 1024); // defaults to 50 MiB
let documents_url = format!("{base_url}/indexes/{uid}/documents");
request_threads()
.broadcast(|ctx| {
let index_rtxn = index
.read_txn()
.map_err(|e| Error::from_milli(e.into(), Some(uid.to_string())))?;
let mut buffer = Vec::new();
let mut tmp_buffer = Vec::new();
let mut compressed_buffer = Vec::new();
for (i, docid) in universe.iter().enumerate() {
if i % ctx.num_threads() != ctx.index() {
continue;
}
let document = index
.document(&index_rtxn, docid)
.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
let mut document = obkv_to_json(&all_fields, &fields_ids_map, document)
.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
// TODO definitely factorize this code
'inject_vectors: {
let embeddings = index
.embeddings(&index_rtxn, docid)
.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
if embeddings.is_empty() {
break 'inject_vectors;
}
let vectors = document
.entry(RESERVED_VECTORS_FIELD_NAME)
.or_insert(serde_json::Value::Object(Default::default()));
let serde_json::Value::Object(vectors) = vectors else {
return Err(Error::from_milli(
milli::Error::UserError(
milli::UserError::InvalidVectorsMapType {
document_id: {
if let Ok(Some(Ok(index))) = index
.external_id_of(
&index_rtxn,
std::iter::once(docid),
)
.map(|it| it.into_iter().next())
{
index
} else {
format!("internal docid={docid}")
}
},
value: vectors.clone(),
},
),
Some(uid.to_string()),
));
};
for (embedder_name, (embeddings, regenerate)) in embeddings {
let embeddings = ExplicitVectors {
embeddings: Some(
VectorOrArrayOfVectors::from_array_of_vectors(embeddings),
),
regenerate,
};
vectors.insert(
embedder_name,
serde_json::to_value(embeddings).unwrap(),
);
}
}
tmp_buffer.clear();
serde_json::to_writer(&mut tmp_buffer, &document)
.map_err(milli::InternalError::from)
.map_err(|e| Error::from_milli(e.into(), Some(uid.to_string())))?;
// Make sure we put at least one document in the buffer even
// though we might go above the buffer limit before sending
if !buffer.is_empty() && buffer.len() + tmp_buffer.len() > limit {
// We compress the documents before sending them
let mut encoder =
GzEncoder::new(&mut compressed_buffer, Compression::default());
encoder
.write_all(&buffer)
.map_err(|e| Error::from_milli(e.into(), Some(uid.clone())))?;
encoder
.finish()
.map_err(|e| Error::from_milli(e.into(), Some(uid.clone())))?;
retry(&must_stop_processing, || {
let mut request = agent.post(&documents_url);
request = request.set("Content-Type", "application/x-ndjson");
request = request.set("Content-Encoding", "gzip");
if let Some(api_key) = api_key {
request = request
.set("Authorization", &(format!("Bearer {api_key}")));
}
request.send_bytes(&compressed_buffer).map_err(into_backoff_error)
})?;
buffer.clear();
compressed_buffer.clear();
}
buffer.extend_from_slice(&tmp_buffer);
if i % 100 == 0 {
step.fetch_add(100, atomic::Ordering::Relaxed);
}
}
retry(&must_stop_processing, || {
let mut request = agent.post(&documents_url);
request = request.set("Content-Type", "application/x-ndjson");
if let Some(api_key) = api_key {
request = request.set("Authorization", &(format!("Bearer {api_key}")));
}
request.send_bytes(&buffer).map_err(into_backoff_error)
})?;
Ok(())
})
.map_err(|e| {
Error::from_milli(
milli::Error::InternalError(InternalError::PanicInThreadPool(e)),
Some(uid.to_string()),
)
})?;
step.store(total_documents, atomic::Ordering::Relaxed);
}
Ok(output)
}
}
fn retry<F>(must_stop_processing: &MustStopProcessing, send_request: F) -> Result<ureq::Response>
where
F: Fn() -> Result<ureq::Response, backoff::Error<ureq::Error>>,
{
match backoff::retry(ExponentialBackoff::default(), || {
if must_stop_processing.get() {
return Err(backoff::Error::Permanent(ureq::Error::Status(
u16::MAX,
// 444: Connection Closed Without Response
Response::new(444, "Abort", "Aborted task").unwrap(),
)));
}
send_request()
}) {
Ok(response) => Ok(response),
Err(backoff::Error::Permanent(e)) => Err(ureq_error_into_error(e)),
Err(backoff::Error::Transient { err, retry_after: _ }) => Err(ureq_error_into_error(err)),
}
}
fn into_backoff_error(err: ureq::Error) -> backoff::Error<ureq::Error> {
match err {
// These status codes must trigger an automatic retry
// <https://www.restapitutorial.com/advanced/responses/retries>
ureq::Error::Status(408 | 429 | 500 | 502 | 503 | 504, _) => {
backoff::Error::Transient { err, retry_after: None }
}
ureq::Error::Status(_, _) => backoff::Error::Permanent(err),
ureq::Error::Transport(_) => backoff::Error::Transient { err, retry_after: None },
}
}
/// Converts a `ureq::Error` into an `Error`.
fn ureq_error_into_error(error: ureq::Error) -> Error {
#[derive(Deserialize)]
struct MeiliError {
message: String,
code: String,
r#type: String,
link: String,
}
match error {
// This is a workaround to handle task abortion - the error propagation path
// makes it difficult to cleanly surface the abortion at this level.
ureq::Error::Status(u16::MAX, _) => Error::AbortedTask,
ureq::Error::Status(_, response) => match response.into_json() {
Ok(MeiliError { message, code, r#type, link }) => {
Error::FromRemoteWhenExporting { message, code, r#type, link }
}
Err(e) => e.into(),
},
ureq::Error::Transport(transport) => io::Error::new(io::ErrorKind::Other, transport).into(),
}
}
enum ExportIndex {}
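
One detail of the export loop worth calling out: a payload is flushed before it would exceed the size limit, but a buffer always accepts at least one document, so a single document larger than the limit still ships on its own. A detached sketch of that batching rule, with plain strings standing in for serialized documents:

// `docs` stands in for already-serialized JSON documents (one NDJSON line each).
fn batch_ndjson(docs: &[&str], limit: usize) -> Vec<Vec<u8>> {
    let mut payloads = Vec::new();
    let mut buffer: Vec<u8> = Vec::new();
    for doc in docs {
        let line = format!("{doc}\n");
        // Flush before overflowing, but never refuse a document just
        // because it alone is bigger than `limit`.
        if !buffer.is_empty() && buffer.len() + line.len() > limit {
            payloads.push(std::mem::take(&mut buffer));
        }
        buffer.extend_from_slice(line.as_bytes());
    }
    if !buffer.is_empty() {
        payloads.push(buffer);
    }
    payloads
}

fn main() {
    // 9 + 9 bytes against a 12-byte limit: two separate payloads.
    let payloads = batch_ndjson(&[r#"{"id":1}"#, r#"{"id":2}"#], 12);
    assert_eq!(payloads.len(), 2);
}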


@ -1,8 +1,10 @@
use std::sync::Arc;
use bumpalo::collections::CollectIn;
use bumpalo::Bump;
use meilisearch_types::heed::RwTxn;
use meilisearch_types::milli::documents::PrimaryKey;
use meilisearch_types::milli::progress::Progress;
use meilisearch_types::milli::progress::{EmbedderStats, Progress};
use meilisearch_types::milli::update::new::indexer::{self, UpdateByFunction};
use meilisearch_types::milli::update::DocumentAdditionResult;
use meilisearch_types::milli::{self, ChannelCongestion, Filter};
@ -24,7 +26,7 @@ impl IndexScheduler {
/// The list of processed tasks.
#[tracing::instrument(
level = "trace",
skip(self, index_wtxn, index, progress),
skip(self, index_wtxn, index, progress, embedder_stats),
target = "indexing::scheduler"
)]
pub(crate) fn apply_index_operation<'i>(
@ -33,6 +35,7 @@ impl IndexScheduler {
index: &'i Index,
operation: IndexOperation,
progress: &Progress,
embedder_stats: Arc<EmbedderStats>,
) -> Result<(Vec<Task>, Option<ChannelCongestion>)> {
let indexer_alloc = Bump::new();
let started_processing_at = std::time::Instant::now();
@ -86,8 +89,9 @@ impl IndexScheduler {
let mut content_files_iter = content_files.iter();
let mut indexer = indexer::DocumentOperation::new();
let embedders = index
.embedding_configs()
.embedding_configs(index_wtxn)
.map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;
.map_err(|e| Error::from_milli(e.into(), Some(index_uid.clone())))?;
let embedders = self.embedders(index_uid.clone(), embedders)?;
for operation in operations {
match operation {
@ -177,6 +181,7 @@ impl IndexScheduler {
embedders,
&|| must_stop_processing.get(),
progress,
&embedder_stats,
)
.map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?,
);
@ -270,8 +275,9 @@ impl IndexScheduler {
})
.unwrap()?;
let embedders = index
.embedding_configs()
.embedding_configs(index_wtxn)
.map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;
.map_err(|err| Error::from_milli(err.into(), Some(index_uid.clone())))?;
let embedders = self.embedders(index_uid.clone(), embedders)?;
progress.update_progress(DocumentEditionProgress::Indexing);
@ -288,6 +294,7 @@ impl IndexScheduler {
embedders,
&|| must_stop_processing.get(),
progress,
&embedder_stats,
)
.map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?,
);
@ -418,8 +425,9 @@ impl IndexScheduler {
indexer.delete_documents_by_docids(to_delete);
let document_changes = indexer.into_changes(&indexer_alloc, primary_key);
let embedders = index
.embedding_configs()
.embedding_configs(index_wtxn)
.map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;
.map_err(|err| Error::from_milli(err.into(), Some(index_uid.clone())))?;
let embedders = self.embedders(index_uid.clone(), embedders)?;
progress.update_progress(DocumentDeletionProgress::Indexing);
@ -436,6 +444,7 @@ impl IndexScheduler {
embedders,
&|| must_stop_processing.get(),
progress,
&embedder_stats,
)
.map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?,
);
@ -468,14 +477,11 @@ impl IndexScheduler {
}
progress.update_progress(SettingsProgress::ApplyTheSettings);
builder
.execute(
|indexing_step| tracing::debug!(update = ?indexing_step),
|| must_stop_processing.get(),
)
let congestion = builder
.execute(&|| must_stop_processing.get(), progress, embedder_stats)
.map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;
Ok((tasks, None))
Ok((tasks, congestion))
}
IndexOperation::DocumentClearAndSetting {
index_uid,
@ -491,6 +497,7 @@ impl IndexScheduler {
tasks: cleared_tasks,
},
progress,
embedder_stats.clone(),
)?;
let (settings_tasks, _congestion) = self.apply_index_operation(
@ -498,6 +505,7 @@ impl IndexScheduler {
index,
IndexOperation::Settings { index_uid, settings, tasks: settings_tasks },
progress,
embedder_stats,
)?;
let mut tasks = settings_tasks;


@ -0,0 +1,17 @@
---
source: crates/index-scheduler/src/scheduler/test.rs
expression: config.embedder_options
---
{
"Rest": {
"api_key": "My super secret",
"distribution": null,
"dimensions": 4,
"url": "http://localhost:7777",
"request": "{{text}}",
"search_fragments": {},
"indexing_fragments": {},
"response": "{{embedding}}",
"headers": {}
}
}


@ -0,0 +1,12 @@
---
source: crates/index-scheduler/src/scheduler/test_embedders.rs
expression: simple_hf_config.embedder_options
---
{
"HuggingFace": {
"model": "sentence-transformers/all-MiniLM-L6-v2",
"revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
"distribution": null,
"pooling": "useModel"
}
}


@ -0,0 +1,15 @@
---
source: crates/index-scheduler/src/scheduler/test_embedders.rs
expression: doc
---
{
"doggo": "Intel",
"breed": "beagle",
"_vectors": {
"noise": [
0.1,
0.2,
0.3
]
}
}


@ -0,0 +1,15 @@
---
source: crates/index-scheduler/src/scheduler/test_embedders.rs
expression: doc
---
{
"doggo": "kefir",
"breed": "patou",
"_vectors": {
"noise": [
0.1,
0.2,
0.3
]
}
}

View file

@ -1,12 +1,17 @@
---
source: crates/index-scheduler/src/scheduler/test_embedders.rs
expression: simple_hf_config.embedder_options
expression: fakerest_config.embedder_options
---
{
"HuggingFace": {
"model": "sentence-transformers/all-MiniLM-L6-v2",
"revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
"Rest": {
"api_key": "My super secret",
"distribution": null,
"pooling": "useModel"
"dimensions": 384,
"url": "http://localhost:7777",
"request": "{{text}}",
"search_fragments": {},
"indexing_fragments": {},
"response": "{{embedding}}",
"headers": {}
}
}

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,]

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,]

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_embedders.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), pooling: NotSet, api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, indexing_fragments: NotSet, search_fragments: NotSet, request: NotSet, response: NotSet, headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
[timestamp] [4,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
----------------------------------------------------------------------
### Status:
enqueued [0,]

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:
@ -37,7 +37,7 @@ catto [1,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
----------------------------------------------------------------------
@ -40,7 +40,7 @@ doggo [2,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

View file

@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@ -43,7 +43,7 @@ doggo [2,3,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

View file

@ -3,11 +3,11 @@ use std::collections::BTreeMap;
use big_s::S;
use meili_snap::{json_string, snapshot};
use meilisearch_auth::AuthFilter;
use meilisearch_types::milli::index::IndexEmbeddingConfig;
use meilisearch_types::milli::update::IndexDocumentsMethod::*;
use meilisearch_types::milli::{self};
use meilisearch_types::settings::SettingEmbeddingSettings;
use meilisearch_types::tasks::{IndexSwap, KindWithContent};
use milli::vector::db::IndexEmbeddingConfig;
use roaring::RoaringBitmap;
use crate::insta_snapshot::snapshot_index_scheduler;
@ -690,11 +690,20 @@ fn test_settings_update() {
let index = index_scheduler.index("doggos").unwrap();
let rtxn = index.read_txn().unwrap();
let configs = index.embedding_configs(&rtxn).unwrap();
let IndexEmbeddingConfig { name, config, user_provided } = configs.first().unwrap();
let embedders = index.embedding_configs();
let configs = embedders.embedding_configs(&rtxn).unwrap();
let IndexEmbeddingConfig { name, config, fragments } = configs.first().unwrap();
let info = embedders.embedder_info(&rtxn, name).unwrap().unwrap();
insta::assert_snapshot!(info.embedder_id, @"0");
insta::assert_debug_snapshot!(info.embedding_status.user_provided_docids(), @"RoaringBitmap<[]>");
insta::assert_debug_snapshot!(info.embedding_status.skip_regenerate_docids(), @"RoaringBitmap<[]>");
insta::assert_snapshot!(name, @"default");
insta::assert_debug_snapshot!(user_provided, @"RoaringBitmap<[]>");
insta::assert_json_snapshot!(config.embedder_options);
insta::assert_debug_snapshot!(fragments, @r###"
FragmentConfigs(
[],
)
"###);
}
#[test]
@ -732,6 +741,7 @@ fn basic_get_stats() {
"documentDeletion": 0,
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@ -765,6 +775,7 @@ fn basic_get_stats() {
"documentDeletion": 0,
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@ -805,6 +816,7 @@ fn basic_get_stats() {
"documentDeletion": 0,
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,
@ -846,6 +858,7 @@ fn basic_get_stats() {
"documentDeletion": 0,
"documentEdition": 0,
"dumpCreation": 0,
"export": 0,
"indexCreation": 3,
"indexDeletion": 0,
"indexSwap": 0,

View file

@ -3,13 +3,14 @@ use std::collections::BTreeMap;
use big_s::S;
use insta::assert_json_snapshot;
use meili_snap::{json_string, snapshot};
use meilisearch_types::milli::index::IndexEmbeddingConfig;
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::vector::settings::EmbeddingSettings;
use meilisearch_types::milli::vector::SearchQuery;
use meilisearch_types::milli::{self, obkv_to_json};
use meilisearch_types::settings::{SettingEmbeddingSettings, Settings, Unchecked};
use meilisearch_types::tasks::KindWithContent;
use milli::update::IndexDocumentsMethod::*;
use milli::vector::db::IndexEmbeddingConfig;
use crate::insta_snapshot::snapshot_index_scheduler;
use crate::test_utils::read_json;
@ -85,28 +86,51 @@ fn import_vectors() {
let index = index_scheduler.index("doggos").unwrap();
let rtxn = index.read_txn().unwrap();
let configs = index.embedding_configs(&rtxn).unwrap();
let embedders = index.embedding_configs();
let configs = embedders.embedding_configs(&rtxn).unwrap();
// for consistency with the below
#[allow(clippy::get_first)]
let IndexEmbeddingConfig { name, config: fakerest_config, user_provided } =
let IndexEmbeddingConfig { name, config: fakerest_config, fragments } =
configs.get(0).unwrap();
let info = embedders.embedder_info(&rtxn, name).unwrap().unwrap();
insta::assert_snapshot!(info.embedder_id, @"0");
insta::assert_debug_snapshot!(info.embedding_status.user_provided_docids(), @"RoaringBitmap<[]>");
insta::assert_debug_snapshot!(info.embedding_status.skip_regenerate_docids(), @"RoaringBitmap<[]>");
insta::assert_snapshot!(name, @"A_fakerest");
insta::assert_debug_snapshot!(user_provided, @"RoaringBitmap<[]>");
insta::assert_debug_snapshot!(fragments, @r###"
FragmentConfigs(
[],
)
"###);
insta::assert_json_snapshot!(fakerest_config.embedder_options);
let fakerest_name = name.clone();
let IndexEmbeddingConfig { name, config: simple_hf_config, user_provided } =
let IndexEmbeddingConfig { name, config: simple_hf_config, fragments } =
configs.get(1).unwrap();
let info = embedders.embedder_info(&rtxn, name).unwrap().unwrap();
insta::assert_snapshot!(info.embedder_id, @"1");
insta::assert_debug_snapshot!(info.embedding_status.user_provided_docids(), @"RoaringBitmap<[]>");
insta::assert_debug_snapshot!(info.embedding_status.skip_regenerate_docids(), @"RoaringBitmap<[]>");
insta::assert_snapshot!(name, @"B_small_hf");
insta::assert_debug_snapshot!(user_provided, @"RoaringBitmap<[]>");
insta::assert_debug_snapshot!(fragments, @r###"
FragmentConfigs(
[],
)
"###);
insta::assert_json_snapshot!(simple_hf_config.embedder_options);
let simple_hf_name = name.clone();
let configs = index_scheduler.embedders("doggos".to_string(), configs).unwrap();
let (hf_embedder, _, _) = configs.get(&simple_hf_name).unwrap();
let beagle_embed = hf_embedder.embed_search("Intel the beagle best doggo", None).unwrap();
let lab_embed = hf_embedder.embed_search("Max the lab best doggo", None).unwrap();
let patou_embed = hf_embedder.embed_search("kefir the patou best doggo", None).unwrap();
let hf_runtime = configs.get(&simple_hf_name).unwrap();
let hf_embedder = &hf_runtime.embedder;
let beagle_embed = hf_embedder
.embed_search(SearchQuery::Text("Intel the beagle best doggo"), None)
.unwrap();
let lab_embed =
hf_embedder.embed_search(SearchQuery::Text("Max the lab best doggo"), None).unwrap();
let patou_embed = hf_embedder
.embed_search(SearchQuery::Text("kefir the patou best doggo"), None)
.unwrap();
(fakerest_name, simple_hf_name, beagle_embed, lab_embed, patou_embed)
};
@ -166,22 +190,38 @@ fn import_vectors() {
let rtxn = index.read_txn().unwrap();
// Ensure the document has been inserted into the relevant bitmap
let configs = index.embedding_configs(&rtxn).unwrap();
let embedders = index.embedding_configs();
let configs = embedders.embedding_configs(&rtxn).unwrap();
// for consistency with the below
#[allow(clippy::get_first)]
let IndexEmbeddingConfig { name, config: _, user_provided: user_defined } =
configs.get(0).unwrap();
let IndexEmbeddingConfig { name, config: _, fragments } = configs.get(0).unwrap();
let info = embedders.embedder_info(&rtxn, name).unwrap().unwrap();
insta::assert_snapshot!(info.embedder_id, @"0");
insta::assert_debug_snapshot!(info.embedding_status.user_provided_docids(), @"RoaringBitmap<[0]>");
insta::assert_debug_snapshot!(info.embedding_status.skip_regenerate_docids(), @"RoaringBitmap<[0]>");
insta::assert_snapshot!(name, @"A_fakerest");
insta::assert_debug_snapshot!(user_defined, @"RoaringBitmap<[0]>");
insta::assert_debug_snapshot!(fragments, @r###"
FragmentConfigs(
[],
)
"###);
let IndexEmbeddingConfig { name, config: _, user_provided } = configs.get(1).unwrap();
let IndexEmbeddingConfig { name, config: _, fragments } = configs.get(1).unwrap();
let info = embedders.embedder_info(&rtxn, name).unwrap().unwrap();
insta::assert_snapshot!(info.embedder_id, @"1");
insta::assert_debug_snapshot!(info.embedding_status.user_provided_docids(), @"RoaringBitmap<[0]>");
insta::assert_debug_snapshot!(info.embedding_status.skip_regenerate_docids(), @"RoaringBitmap<[]>");
insta::assert_snapshot!(name, @"B_small_hf");
insta::assert_debug_snapshot!(user_provided, @"RoaringBitmap<[]>");
insta::assert_debug_snapshot!(fragments, @r###"
FragmentConfigs(
[],
)
"###);
let embeddings = index.embeddings(&rtxn, 0).unwrap();
assert_json_snapshot!(embeddings[&simple_hf_name][0] == lab_embed, @"true");
assert_json_snapshot!(embeddings[&fakerest_name][0] == beagle_embed, @"true");
assert_json_snapshot!(embeddings[&simple_hf_name].0[0] == lab_embed, @"true");
assert_json_snapshot!(embeddings[&fakerest_name].0[0] == beagle_embed, @"true");
let doc = index.documents(&rtxn, std::iter::once(0)).unwrap()[0].1;
let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
@ -239,25 +279,41 @@ fn import_vectors() {
let index = index_scheduler.index("doggos").unwrap();
let rtxn = index.read_txn().unwrap();
let embedders = index.embedding_configs();
// Ensure the document has been inserted into the relevant bitmap
let configs = index.embedding_configs(&rtxn).unwrap();
let configs = embedders.embedding_configs(&rtxn).unwrap();
// for consistency with the below
#[allow(clippy::get_first)]
let IndexEmbeddingConfig { name, config: _, user_provided: user_defined } =
configs.get(0).unwrap();
let IndexEmbeddingConfig { name, config: _, fragments } = configs.get(0).unwrap();
let info = embedders.embedder_info(&rtxn, name).unwrap().unwrap();
insta::assert_snapshot!(info.embedder_id, @"0");
insta::assert_debug_snapshot!(info.embedding_status.user_provided_docids(), @"RoaringBitmap<[0]>");
insta::assert_debug_snapshot!(info.embedding_status.skip_regenerate_docids(), @"RoaringBitmap<[0]>");
insta::assert_snapshot!(name, @"A_fakerest");
insta::assert_debug_snapshot!(user_defined, @"RoaringBitmap<[0]>");
insta::assert_debug_snapshot!(fragments, @r###"
FragmentConfigs(
[],
)
"###);
let IndexEmbeddingConfig { name, config: _, user_provided } = configs.get(1).unwrap();
let IndexEmbeddingConfig { name, config: _, fragments } = configs.get(1).unwrap();
let info = embedders.embedder_info(&rtxn, name).unwrap().unwrap();
insta::assert_snapshot!(info.embedder_id, @"1");
insta::assert_debug_snapshot!(info.embedding_status.user_provided_docids(), @"RoaringBitmap<[]>");
insta::assert_debug_snapshot!(info.embedding_status.skip_regenerate_docids(), @"RoaringBitmap<[]>");
insta::assert_snapshot!(name, @"B_small_hf");
insta::assert_debug_snapshot!(user_provided, @"RoaringBitmap<[]>");
insta::assert_debug_snapshot!(fragments, @r###"
FragmentConfigs(
[],
)
"###);
let embeddings = index.embeddings(&rtxn, 0).unwrap();
// automatically changed to patou because it is set to regenerate
assert_json_snapshot!(embeddings[&simple_hf_name][0] == patou_embed, @"true");
assert_json_snapshot!(embeddings[&simple_hf_name].0[0] == patou_embed, @"true");
// remained beagle
assert_json_snapshot!(embeddings[&fakerest_name][0] == beagle_embed, @"true");
assert_json_snapshot!(embeddings[&fakerest_name].0[0] == beagle_embed, @"true");
let doc = index.documents(&rtxn, std::iter::once(0)).unwrap()[0].1;
let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
@ -399,8 +455,8 @@ fn import_vectors_first_and_embedder_later() {
.collect::<Vec<_>>();
// all the vectors linked to the newly specified embedder have been removed
// Only the unknown embedders stay in the document DB
snapshot!(serde_json::to_string(&documents).unwrap(), @r###"[{"id":0,"doggo":"kefir"},{"id":1,"doggo":"intel","_vectors":{"unknown embedder":[1.0,2.0,3.0]}},{"id":2,"doggo":"max","_vectors":{"unknown embedder":[4.0,5.0]}},{"id":3,"doggo":"marcel"},{"id":4,"doggo":"sora"}]"###);
let conf = index.embedding_configs(&rtxn).unwrap();
snapshot!(serde_json::to_string(&documents).unwrap(), @r###"[{"id":0,"doggo":"kefir"},{"id":1,"doggo":"intel","_vectors":{"unknown embedder":[1,2,3]}},{"id":2,"doggo":"max","_vectors":{"unknown embedder":[4,5]}},{"id":3,"doggo":"marcel"},{"id":4,"doggo":"sora"}]"###);
let conf = index.embedding_configs().embedding_configs(&rtxn).unwrap();
// even though we specified the vector for the ID 3, it shouldn't be marked
// as user provided since we explicitly marked it as NOT user provided.
snapshot!(format!("{conf:#?}"), @r###"
@ -426,19 +482,28 @@ fn import_vectors_first_and_embedder_later() {
},
quantized: None,
},
user_provided: RoaringBitmap<[1, 2]>,
fragments: FragmentConfigs(
[],
),
},
]
"###);
let info =
index.embedding_configs().embedder_info(&rtxn, "my_doggo_embedder").unwrap().unwrap();
insta::assert_snapshot!(info.embedder_id, @"0");
insta::assert_debug_snapshot!(info.embedding_status.user_provided_docids(), @"RoaringBitmap<[1, 2, 3]>");
insta::assert_debug_snapshot!(info.embedding_status.skip_regenerate_docids(), @"RoaringBitmap<[1, 2]>");
let docid = index.external_documents_ids.get(&rtxn, "0").unwrap().unwrap();
let embeddings = index.embeddings(&rtxn, docid).unwrap();
let embedding = &embeddings["my_doggo_embedder"];
let (embedding, _) = &embeddings["my_doggo_embedder"];
assert!(!embedding.is_empty(), "{embedding:?}");
// the document with the id 3 should keep its original embedding
let docid = index.external_documents_ids.get(&rtxn, "3").unwrap().unwrap();
let embeddings = index.embeddings(&rtxn, docid).unwrap();
let embeddings = &embeddings["my_doggo_embedder"];
let (embeddings, _) = &embeddings["my_doggo_embedder"];
snapshot!(embeddings.len(), @"1");
assert!(embeddings[0].iter().all(|i| *i == 3.0), "{:?}", embeddings[0]);
@ -493,7 +558,7 @@ fn import_vectors_first_and_embedder_later() {
"###);
let embeddings = index.embeddings(&rtxn, docid).unwrap();
let embedding = &embeddings["my_doggo_embedder"];
let (embedding, _) = &embeddings["my_doggo_embedder"];
assert!(!embedding.is_empty());
assert!(!embedding[0].iter().all(|i| *i == 3.0), "{:?}", embedding[0]);
@ -501,7 +566,7 @@ fn import_vectors_first_and_embedder_later() {
// the document with the id 4 should generate an embedding
let docid = index.external_documents_ids.get(&rtxn, "4").unwrap().unwrap();
let embeddings = index.embeddings(&rtxn, docid).unwrap();
let embedding = &embeddings["my_doggo_embedder"];
let (embedding, _) = &embeddings["my_doggo_embedder"];
assert!(!embedding.is_empty());
}
@ -603,33 +668,35 @@ fn delete_document_containing_vector() {
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string(&documents).unwrap(), @r###"[{"id":0,"doggo":"kefir"}]"###);
let conf = index.embedding_configs(&rtxn).unwrap();
let conf = index.embedding_configs().embedding_configs(&rtxn).unwrap();
snapshot!(format!("{conf:#?}"), @r###"
[
IndexEmbeddingConfig {
name: "manual",
config: EmbeddingConfig {
embedder_options: UserProvided(
EmbedderOptions {
dimensions: 3,
distribution: None,
},
),
prompt: PromptData {
template: "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
max_bytes: Some(
400,
),
[
IndexEmbeddingConfig {
name: "manual",
config: EmbeddingConfig {
embedder_options: UserProvided(
EmbedderOptions {
dimensions: 3,
distribution: None,
},
quantized: None,
),
prompt: PromptData {
template: "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
max_bytes: Some(
400,
),
},
user_provided: RoaringBitmap<[0]>,
quantized: None,
},
]
"###);
fragments: FragmentConfigs(
[],
),
},
]
"###);
let docid = index.external_documents_ids.get(&rtxn, "0").unwrap().unwrap();
let embeddings = index.embeddings(&rtxn, docid).unwrap();
let embedding = &embeddings["manual"];
let (embedding, _) = &embeddings["manual"];
assert!(!embedding.is_empty(), "{embedding:?}");
index_scheduler
@ -647,30 +714,32 @@ fn delete_document_containing_vector() {
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string(&documents).unwrap(), @"[]");
let conf = index.embedding_configs(&rtxn).unwrap();
let conf = index.embedding_configs().embedding_configs(&rtxn).unwrap();
snapshot!(format!("{conf:#?}"), @r###"
[
IndexEmbeddingConfig {
name: "manual",
config: EmbeddingConfig {
embedder_options: UserProvided(
EmbedderOptions {
dimensions: 3,
distribution: None,
},
),
prompt: PromptData {
template: "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
max_bytes: Some(
400,
),
[
IndexEmbeddingConfig {
name: "manual",
config: EmbeddingConfig {
embedder_options: UserProvided(
EmbedderOptions {
dimensions: 3,
distribution: None,
},
quantized: None,
),
prompt: PromptData {
template: "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
max_bytes: Some(
400,
),
},
user_provided: RoaringBitmap<[]>,
quantized: None,
},
]
"###);
fragments: FragmentConfigs(
[],
),
},
]
"###);
}
#[test]
@ -800,7 +869,7 @@ fn delete_embedder_with_user_provided_vectors() {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string(&documents).unwrap(), @r###"[{"id":0,"doggo":"kefir","_vectors":{"manual":{"embeddings":[[0.0,0.0,0.0]],"regenerate":false}}},{"id":1,"doggo":"intel","_vectors":{"manual":{"embeddings":[[1.0,1.0,1.0]],"regenerate":false}}}]"###);
snapshot!(serde_json::to_string(&documents).unwrap(), @r###"[{"id":0,"doggo":"kefir","_vectors":{"manual":{"regenerate":false,"embeddings":[[0.0,0.0,0.0]]}}},{"id":1,"doggo":"intel","_vectors":{"manual":{"regenerate":false,"embeddings":[[1.0,1.0,1.0]]}}}]"###);
}
{
@ -835,6 +904,6 @@ fn delete_embedder_with_user_provided_vectors() {
.collect::<Vec<_>>();
// FIXME: redaction
snapshot!(json_string!(serde_json::to_string(&documents).unwrap(), { "[]._vectors.doggo_embedder.embeddings" => "[vector]" }), @r###""[{\"id\":0,\"doggo\":\"kefir\",\"_vectors\":{\"manual\":{\"embeddings\":[[0.0,0.0,0.0]],\"regenerate\":false},\"my_doggo_embedder\":{\"embeddings\":[[1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0]],\"regenerate\":false}}},{\"id\":1,\"doggo\":\"intel\",\"_vectors\":{\"manual\":{\"embeddings\":[[1.0,1.0,1.0]],\"regenerate\":false}}}]""###);
snapshot!(json_string!(serde_json::to_string(&documents).unwrap(), { "[]._vectors.doggo_embedder.embeddings" => "[vector]" }), @r###""[{\"id\":0,\"doggo\":\"kefir\",\"_vectors\":{\"manual\":{\"regenerate\":false,\"embeddings\":[[0.0,0.0,0.0]]},\"my_doggo_embedder\":{\"regenerate\":false,\"embeddings\":[[1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0]]}}},{\"id\":1,\"doggo\":\"intel\",\"_vectors\":{\"manual\":{\"regenerate\":false,\"embeddings\":[[1.0,1.0,1.0]]}}}]""###);
}
}

View file

@ -37,6 +37,7 @@ pub(crate) enum FailureLocation {
InsideCreateBatch,
InsideProcessBatch,
PanicInsideProcessBatch,
ProcessExport,
ProcessUpgrade,
AcquiringWtxn,
UpdatingTaskAfterProcessBatchSuccess { task_uid: u32 },

View file

@ -1,7 +1,9 @@
//! Utility functions on the DBs. Mainly getter and setters.
use crate::milli::progress::EmbedderStats;
use std::collections::{BTreeSet, HashSet};
use std::ops::Bound;
use std::sync::Arc;
use meilisearch_types::batches::{Batch, BatchEnqueuedAt, BatchId, BatchStats};
use meilisearch_types::heed::{Database, RoTxn, RwTxn};
@ -27,6 +29,7 @@ pub struct ProcessingBatch {
pub uid: BatchId,
pub details: DetailsView,
pub stats: BatchStats,
pub embedder_stats: Arc<EmbedderStats>,
pub statuses: HashSet<Status>,
pub kinds: HashSet<Kind>,
@ -48,6 +51,7 @@ impl ProcessingBatch {
uid,
details: DetailsView::default(),
stats: BatchStats::default(),
embedder_stats: Default::default(),
statuses,
kinds: HashSet::default(),
@ -146,6 +150,7 @@ impl ProcessingBatch {
progress: None,
details: self.details.clone(),
stats: self.stats.clone(),
embedder_stats: self.embedder_stats.as_ref().into(),
started_at: self.started_at,
finished_at: self.finished_at,
enqueued_at: self.enqueued_at,
@ -273,6 +278,7 @@ pub fn swap_index_uid_in_task(task: &mut Task, swap: (&str, &str)) {
K::TaskCancelation { .. }
| K::TaskDeletion { .. }
| K::DumpCreation { .. }
| K::Export { .. }
| K::UpgradeDatabase { .. }
| K::SnapshotCreation => (),
};
@ -600,6 +606,9 @@ impl crate::IndexScheduler {
Details::Dump { dump_uid: _ } => {
assert_eq!(kind.as_kind(), Kind::DumpCreation);
}
Details::Export { url: _, api_key: _, payload_size: _, indexes: _ } => {
assert_eq!(kind.as_kind(), Kind::Export);
}
Details::UpgradeDatabase { from: _, to: _ } => {
assert_eq!(kind.as_kind(), Kind::UpgradeDatabase);
}
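
The batch-shared `Arc<EmbedderStats>` added above is cloned into whatever processes the tasks and read back once when the batch is stored. A stand-in sketch of that flow, with field names inferred from the `From<&EmbedderStats>` conversion that appears later in this change (not the crate's actual definition):

use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, RwLock};

#[derive(Default)]
struct EmbedderStats {
    // (last error message, failed request count)
    errors: RwLock<(Option<String>, u32)>,
    total_count: AtomicUsize,
}

fn main() {
    let stats: Arc<EmbedderStats> = Default::default();

    // A worker clones the handle; the counters are shared, not copied.
    let worker = Arc::clone(&stats);
    worker.total_count.fetch_add(1, Ordering::Relaxed);
    *worker.errors.write().unwrap() = (Some("timeout".into()), 1);

    // When the batch is persisted, the same handle is folded into the view.
    let errors = stats.errors.read().unwrap_or_else(|p| p.into_inner());
    println!(
        "total={} failed={} last_error={:?}",
        stats.total_count.load(Ordering::Relaxed),
        errors.1,
        errors.0
    );
}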

View file

@ -15,6 +15,7 @@ actix-web = { version = "4.11.0", default-features = false }
anyhow = "1.0.98"
bumpalo = "3.18.1"
bumparaw-collections = "0.1.4"
byte-unit = { version = "5.1.6", features = ["serde"] }
convert_case = "0.8.0"
csv = "1.3.1"
deserr = { version = "0.6.3", features = ["actix-web"] }

View file

@ -3,7 +3,7 @@ use serde::Serialize;
use time::{Duration, OffsetDateTime};
use utoipa::ToSchema;
use crate::batches::{Batch, BatchId, BatchStats};
use crate::batches::{Batch, BatchId, BatchStats, EmbedderStatsView};
use crate::task_view::DetailsView;
use crate::tasks::serialize_duration;
@ -14,7 +14,7 @@ pub struct BatchView {
pub uid: BatchId,
pub progress: Option<ProgressView>,
pub details: DetailsView,
pub stats: BatchStats,
pub stats: BatchStatsView,
#[serde(serialize_with = "serialize_duration", default)]
pub duration: Option<Duration>,
#[serde(with = "time::serde::rfc3339", default)]
@ -25,13 +25,26 @@ pub struct BatchView {
pub batch_strategy: String,
}
#[derive(Debug, Clone, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct BatchStatsView {
#[serde(flatten)]
pub stats: BatchStats,
#[serde(skip_serializing_if = "EmbedderStatsView::skip_serializing", default)]
pub embedder_requests: EmbedderStatsView,
}
impl BatchView {
pub fn from_batch(batch: &Batch) -> Self {
Self {
uid: batch.uid,
progress: batch.progress.clone(),
details: batch.details.clone(),
stats: batch.stats.clone(),
stats: BatchStatsView {
stats: batch.stats.clone(),
embedder_requests: batch.embedder_stats.clone(),
},
duration: batch.finished_at.map(|finished_at| finished_at - batch.started_at),
started_at: batch.started_at,
finished_at: batch.finished_at,

View file

@ -1,6 +1,6 @@
use std::collections::BTreeMap;
use milli::progress::ProgressView;
use milli::progress::{EmbedderStats, ProgressView};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use utoipa::ToSchema;
@ -19,6 +19,8 @@ pub struct Batch {
pub progress: Option<ProgressView>,
pub details: DetailsView,
pub stats: BatchStats,
#[serde(skip_serializing_if = "EmbedderStatsView::skip_serializing", default)]
pub embedder_stats: EmbedderStatsView,
#[serde(with = "time::serde::rfc3339")]
pub started_at: OffsetDateTime,
@ -43,6 +45,7 @@ impl PartialEq for Batch {
progress,
details,
stats,
embedder_stats,
started_at,
finished_at,
enqueued_at,
@ -53,6 +56,7 @@ impl PartialEq for Batch {
&& progress.is_none() == other.progress.is_none()
&& details == &other.details
&& stats == &other.stats
&& embedder_stats == &other.embedder_stats
&& started_at == &other.started_at
&& finished_at == &other.finished_at
&& enqueued_at == &other.enqueued_at
@ -83,3 +87,30 @@ pub struct BatchStats {
#[serde(default, skip_serializing_if = "serde_json::Map::is_empty")]
pub internal_database_sizes: serde_json::Map<String, serde_json::Value>,
}
#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct EmbedderStatsView {
pub total: usize,
pub failed: usize,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub last_error: Option<String>,
}
impl From<&EmbedderStats> for EmbedderStatsView {
fn from(stats: &EmbedderStats) -> Self {
let errors = stats.errors.read().unwrap_or_else(|p| p.into_inner());
Self {
total: stats.total_count.load(std::sync::atomic::Ordering::Relaxed),
failed: errors.1 as usize,
last_error: errors.0.clone(),
}
}
}
impl EmbedderStatsView {
pub fn skip_serializing(&self) -> bool {
self.total == 0 && self.failed == 0 && self.last_error.is_none()
}
}
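
A runnable sketch of why the `skip_serializing` predicate exists: an all-default `embedderRequests` object vanishes from the JSON entirely instead of appearing as zeros. Stand-in types with the same serde attributes (the real `BatchStatsView` also flattens `BatchStats`, elided here):

use serde::Serialize;

#[derive(Default, Serialize)]
#[serde(rename_all = "camelCase")]
struct EmbedderStatsView {
    total: usize,
    failed: usize,
    #[serde(skip_serializing_if = "Option::is_none")]
    last_error: Option<String>,
}

impl EmbedderStatsView {
    fn skip_serializing(&self) -> bool {
        self.total == 0 && self.failed == 0 && self.last_error.is_none()
    }
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct BatchStatsView {
    total_nb_tasks: usize,
    #[serde(skip_serializing_if = "EmbedderStatsView::skip_serializing")]
    embedder_requests: EmbedderStatsView,
}

fn main() {
    let quiet = BatchStatsView { total_nb_tasks: 1, embedder_requests: Default::default() };
    // {"totalNbTasks":1} -- no embedderRequests key at all
    println!("{}", serde_json::to_string(&quiet).unwrap());

    let busy = BatchStatsView {
        total_nb_tasks: 1,
        embedder_requests: EmbedderStatsView { total: 4, failed: 1, last_error: Some("timeout".into()) },
    };
    // {"totalNbTasks":1,"embedderRequests":{"total":4,"failed":1,"lastError":"timeout"}}
    println!("{}", serde_json::to_string(&busy).unwrap());
}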

View file

@ -301,6 +301,7 @@ InvalidFacetSearchQuery , InvalidRequest , BAD_REQU
InvalidFacetSearchName , InvalidRequest , BAD_REQUEST ;
FacetSearchDisabled , InvalidRequest , BAD_REQUEST ;
InvalidSearchVector , InvalidRequest , BAD_REQUEST ;
InvalidSearchMedia , InvalidRequest , BAD_REQUEST ;
InvalidSearchShowMatchesPosition , InvalidRequest , BAD_REQUEST ;
InvalidSearchShowRankingScore , InvalidRequest , BAD_REQUEST ;
InvalidSimilarShowRankingScore , InvalidRequest , BAD_REQUEST ;
@ -308,6 +309,7 @@ InvalidSearchShowRankingScoreDetails , InvalidRequest , BAD_REQU
InvalidSimilarShowRankingScoreDetails , InvalidRequest , BAD_REQUEST ;
InvalidSearchSort , InvalidRequest , BAD_REQUEST ;
InvalidSearchDistinct , InvalidRequest , BAD_REQUEST ;
InvalidSearchMediaAndVector , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDisplayedAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDistinctAttribute , InvalidRequest , BAD_REQUEST ;
InvalidSettingsProximityPrecision , InvalidRequest , BAD_REQUEST ;
@ -389,6 +391,13 @@ InvalidDocumentEditionContext , InvalidRequest , BAD_REQU
InvalidDocumentEditionFunctionFilter , InvalidRequest , BAD_REQUEST ;
EditDocumentsByFunctionError , InvalidRequest , BAD_REQUEST ;
InvalidSettingsIndexChat , InvalidRequest , BAD_REQUEST ;
// Export
InvalidExportUrl , InvalidRequest , BAD_REQUEST ;
InvalidExportApiKey , InvalidRequest , BAD_REQUEST ;
InvalidExportPayloadSize , InvalidRequest , BAD_REQUEST ;
InvalidExportIndexesPatterns , InvalidRequest , BAD_REQUEST ;
InvalidExportIndexFilter , InvalidRequest , BAD_REQUEST ;
InvalidExportIndexOverrideSettings , InvalidRequest , BAD_REQUEST ;
// Experimental features - Chat Completions
UnimplementedExternalFunctionCalling , InvalidRequest , NOT_IMPLEMENTED ;
UnimplementedNonStreamingChatCompletions , InvalidRequest , NOT_IMPLEMENTED ;
@ -457,6 +466,7 @@ impl ErrorCode for milli::Error {
| UserError::MissingSourceForNested { .. }
| UserError::InvalidSettingsEmbedder { .. } => Code::InvalidSettingsEmbedders,
UserError::TooManyEmbedders(_) => Code::InvalidSettingsEmbedders,
UserError::TooManyFragments(_) => Code::InvalidSettingsEmbedders,
UserError::InvalidPromptForEmbeddings(..) => Code::InvalidSettingsEmbedders,
UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,
UserError::MultiplePrimaryKeyCandidatesFound { .. } => {

View file

@ -21,6 +21,7 @@ pub struct RuntimeTogglableFeatures {
pub get_task_documents_route: bool,
pub composite_embedders: bool,
pub chat_completions: bool,
pub multimodal: bool,
}
#[derive(Default, Debug, Clone, Copy)]
@ -114,7 +115,6 @@ pub enum ChatCompletionSource {
OpenAi,
AzureOpenAi,
Mistral,
Gemini,
VLlm,
}
@ -134,7 +134,6 @@ impl ChatCompletionSource {
AzureOpenAi if Self::old_openai_model(model) => System,
AzureOpenAi => Developer,
Mistral => System,
Gemini => System,
VLlm => System,
}
}
@ -154,7 +153,6 @@ impl ChatCompletionSource {
match self {
OpenAi => Some("https://api.openai.com/v1/"),
Mistral => Some("https://api.mistral.ai/v1/"),
Gemini => Some("https://generativelanguage.googleapis.com/v1beta/openai/"),
AzureOpenAi | VLlm => None,
}
}

View file

@ -12,7 +12,7 @@ use crate::index_uid::{IndexUid, IndexUidFormatError};
/// An index uid pattern is composed only of ascii alphanumeric characters, - and _, is between 1 and 400
/// bytes long, and optionally ends with a *.
#[derive(Serialize, Deserialize, Deserr, Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Serialize, Deserialize, Deserr, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[deserr(try_from(&String) = FromStr::from_str -> IndexUidPatternFormatError)]
pub struct IndexUidPattern(String);
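
The new `PartialOrd`/`Ord` derives are what let patterns key a `BTreeMap`, as the export task's `indexes: BTreeMap<IndexUidPattern, ExportIndexSettings>` does later in this change. A tiny sketch with a stand-in newtype:

use std::collections::BTreeMap;

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct IndexUidPattern(String);

fn main() {
    let mut indexes: BTreeMap<IndexUidPattern, bool> = BTreeMap::new();
    indexes.insert(IndexUidPattern("movies*".into()), true);
    indexes.insert(IndexUidPattern("books".into()), false);
    // BTreeMap requires Ord; iteration follows the underlying string order.
    for (pattern, override_settings) in &indexes {
        println!("{pattern:?} -> {override_settings}");
    }
}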

View file

@ -335,6 +335,9 @@ pub enum Action {
#[serde(rename = "experimental.update")]
#[deserr(rename = "experimental.update")]
ExperimentalFeaturesUpdate,
#[serde(rename = "export")]
#[deserr(rename = "export")]
Export,
#[serde(rename = "network.get")]
#[deserr(rename = "network.get")]
NetworkGet,
@ -502,6 +505,8 @@ pub mod actions {
pub const EXPERIMENTAL_FEATURES_GET: u8 = ExperimentalFeaturesGet.repr();
pub const EXPERIMENTAL_FEATURES_UPDATE: u8 = ExperimentalFeaturesUpdate.repr();
pub const EXPORT: u8 = Export.repr();
pub const NETWORK_GET: u8 = NetworkGet.repr();
pub const NETWORK_UPDATE: u8 = NetworkUpdate.repr();

View file

@ -18,7 +18,7 @@ pub mod versioning;
pub use milli::{heed, Index};
use uuid::Uuid;
pub use versioning::VERSION_FILE_NAME;
pub use {milli, serde_cs};
pub use {byte_unit, milli, serde_cs};
pub type Document = serde_json::Map<String, serde_json::Value>;
pub type InstanceUid = Uuid;
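
Re-exporting `byte_unit` lets dependent crates handle the export task's `payloadSize` without declaring their own dependency, using the same rendering call the task view performs. A sketch, assuming the re-export path:

use meilisearch_types::byte_unit::{Byte, UnitType};

fn main() {
    let payload_size = Byte::from_u64(50 * 1024 * 1024);
    // Picks a human-readable unit, e.g. "50 MiB", as in the export details.
    println!("{}", payload_size.get_appropriate_unit(UnitType::Both));
}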

View file

@ -9,10 +9,11 @@ use std::str::FromStr;
use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
use milli::disabled_typos_terms::DisabledTyposTerms;
use milli::index::{IndexEmbeddingConfig, PrefixSearch};
use milli::index::PrefixSearch;
use milli::proximity::ProximityPrecision;
pub use milli::update::ChatSettings;
use milli::update::Setting;
use milli::vector::db::IndexEmbeddingConfig;
use milli::{Criterion, CriterionError, FilterableAttributesRule, Index, DEFAULT_VALUES_PER_FACET};
use serde::{Deserialize, Serialize, Serializer};
use utoipa::ToSchema;
@ -500,8 +501,11 @@ impl Settings<Unchecked> {
let Setting::Set(mut configs) = self.embedders else { return Ok(self) };
for (name, config) in configs.iter_mut() {
let config_to_check = std::mem::take(config);
let checked_config =
milli::update::validate_embedding_settings(config_to_check.inner, name)?;
let checked_config = milli::update::validate_embedding_settings(
config_to_check.inner,
name,
milli::vector::settings::EmbeddingValidationContext::SettingsPartialUpdate,
)?;
*config = SettingEmbeddingSettings { inner: checked_config };
}
self.embedders = Setting::Set(configs);
@ -751,6 +755,7 @@ pub fn apply_settings_to_builder(
builder.reset_min_word_len_two_typos();
builder.reset_exact_words();
builder.reset_exact_attributes();
builder.reset_disable_on_numbers();
}
Setting::NotSet => (),
}
@ -910,6 +915,7 @@ pub fn settings(
};
let embedders: BTreeMap<_, _> = index
.embedding_configs()
.embedding_configs(rtxn)?
.into_iter()
.map(|IndexEmbeddingConfig { name, config, .. }| {
@ -968,6 +974,7 @@ pub fn settings(
if let SecretPolicy::HideSecrets = secret_policy {
settings.hide_secrets()
}
Ok(settings)
}

View file

@ -1,3 +1,6 @@
use std::collections::BTreeMap;
use byte_unit::UnitType;
use milli::Object;
use serde::{Deserialize, Serialize};
use time::{Duration, OffsetDateTime};
@ -6,7 +9,9 @@ use utoipa::ToSchema;
use crate::batches::BatchId;
use crate::error::ResponseError;
use crate::settings::{Settings, Unchecked};
use crate::tasks::{serialize_duration, Details, IndexSwap, Kind, Status, Task, TaskId};
use crate::tasks::{
serialize_duration, Details, DetailsExportIndexSettings, IndexSwap, Kind, Status, Task, TaskId,
};
#[derive(Debug, Clone, PartialEq, Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
@ -118,6 +123,15 @@ pub struct DetailsView {
pub upgrade_from: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub upgrade_to: Option<String>,
// exporting
#[serde(skip_serializing_if = "Option::is_none")]
pub url: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub api_key: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub payload_size: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub indexes: Option<BTreeMap<String, DetailsExportIndexSettings>>,
}
impl DetailsView {
@ -238,6 +252,34 @@ impl DetailsView {
Some(left)
}
},
url: match (self.url.clone(), other.url.clone()) {
(None, None) => None,
(None, Some(url)) | (Some(url), None) => Some(url),
// We should never be able to batch multiple exports at the same time.
// So we return the first one we encounter but that shouldn't be an issue anyway.
(Some(left), Some(_right)) => Some(left),
},
api_key: match (self.api_key.clone(), other.api_key.clone()) {
(None, None) => None,
(None, Some(key)) | (Some(key), None) => Some(key),
// We should never be able to batch multiple exports at the same time.
// So we return the first one we encounter but that shouldn't be an issue anyway.
(Some(left), Some(_right)) => Some(left),
},
payload_size: match (self.payload_size.clone(), other.payload_size.clone()) {
(None, None) => None,
(None, Some(size)) | (Some(size), None) => Some(size),
// We should never be able to batch multiple exports at the same time.
// So we return the first one we encounter but that shouldn't be an issue anyway.
(Some(left), Some(_right)) => Some(left),
},
indexes: match (self.indexes.clone(), other.indexes.clone()) {
(None, None) => None,
(None, Some(indexes)) | (Some(indexes), None) => Some(indexes),
// We should never be able to batch multiple exports at the same time.
// So we return the first one we encounter but that shouldn't be an issue anyway.
(Some(left), Some(_right)) => Some(left),
},
// We want the earliest version
upgrade_from: match (self.upgrade_from.clone(), other.upgrade_from.clone()) {
(None, None) => None,
@ -327,6 +369,22 @@ impl From<Details> for DetailsView {
Details::IndexSwap { swaps } => {
DetailsView { swaps: Some(swaps), ..Default::default() }
}
Details::Export { url, api_key, payload_size, indexes } => DetailsView {
url: Some(url),
api_key: api_key.map(|mut api_key| {
hide_secret(&mut api_key);
api_key
}),
payload_size: payload_size
.map(|ps| ps.get_appropriate_unit(UnitType::Both).to_string()),
indexes: Some(
indexes
.into_iter()
.map(|(pattern, settings)| (pattern.to_string(), settings))
.collect(),
),
..Default::default()
},
Details::UpgradeDatabase { from, to } => DetailsView {
upgrade_from: Some(format!("v{}.{}.{}", from.0, from.1, from.2)),
upgrade_to: Some(format!("v{}.{}.{}", to.0, to.1, to.2)),
@ -335,3 +393,21 @@ impl From<Details> for DetailsView {
}
}
}
// We definitely need to factor out the code that hides the secret key
fn hide_secret(secret: &mut String) {
match secret.len() {
x if x < 10 => {
secret.replace_range(.., "XXX...");
}
x if x < 20 => {
secret.replace_range(2.., "XXXX...");
}
x if x < 30 => {
secret.replace_range(3.., "XXXXX...");
}
_x => {
secret.replace_range(5.., "XXXXXX...");
}
}
}
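
Assuming `hide_secret` above is in scope, its tiers behave as follows; note that `replace_range` is byte-indexed, so ASCII keys are assumed:

fn main() {
    let mut short = String::from("tiny"); // len < 10
    let mut medium = String::from("0123456789abc"); // len < 20
    let mut long = String::from("0123456789abcdefghij0123456789"); // len >= 30
    hide_secret(&mut short);
    hide_secret(&mut medium);
    hide_secret(&mut long);
    assert_eq!(short, "XXX..."); // fully masked
    assert_eq!(medium, "01XXXX..."); // keeps 2 leading characters
    assert_eq!(long, "01234XXXXXX..."); // keeps 5 leading characters
}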

View file

@ -1,19 +1,22 @@
use core::fmt;
use std::collections::HashSet;
use std::collections::{BTreeMap, HashSet};
use std::fmt::{Display, Write};
use std::str::FromStr;
use byte_unit::Byte;
use enum_iterator::Sequence;
use milli::update::IndexDocumentsMethod;
use milli::Object;
use roaring::RoaringBitmap;
use serde::{Deserialize, Serialize, Serializer};
use serde_json::Value;
use time::{Duration, OffsetDateTime};
use utoipa::ToSchema;
use utoipa::{schema, ToSchema};
use uuid::Uuid;
use crate::batches::BatchId;
use crate::error::ResponseError;
use crate::index_uid_pattern::IndexUidPattern;
use crate::keys::Key;
use crate::settings::{Settings, Unchecked};
use crate::{versioning, InstanceUid};
@ -50,6 +53,7 @@ impl Task {
| SnapshotCreation
| TaskCancelation { .. }
| TaskDeletion { .. }
| Export { .. }
| UpgradeDatabase { .. }
| IndexSwap { .. } => None,
DocumentAdditionOrUpdate { index_uid, .. }
@ -86,6 +90,7 @@ impl Task {
| KindWithContent::TaskDeletion { .. }
| KindWithContent::DumpCreation { .. }
| KindWithContent::SnapshotCreation
| KindWithContent::Export { .. }
| KindWithContent::UpgradeDatabase { .. } => None,
}
}
@ -108,11 +113,11 @@ pub enum KindWithContent {
},
DocumentDeletionByFilter {
index_uid: String,
filter_expr: serde_json::Value,
filter_expr: Value,
},
DocumentEdition {
index_uid: String,
filter_expr: Option<serde_json::Value>,
filter_expr: Option<Value>,
context: Option<milli::Object>,
function: String,
},
@ -152,6 +157,12 @@ pub enum KindWithContent {
instance_uid: Option<InstanceUid>,
},
SnapshotCreation,
Export {
url: String,
api_key: Option<String>,
payload_size: Option<Byte>,
indexes: BTreeMap<IndexUidPattern, ExportIndexSettings>,
},
UpgradeDatabase {
from: (u32, u32, u32),
},
@ -163,6 +174,13 @@ pub struct IndexSwap {
pub indexes: (String, String),
}
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ExportIndexSettings {
pub filter: Option<Value>,
pub override_settings: bool,
}
impl KindWithContent {
pub fn as_kind(&self) -> Kind {
match self {
@ -180,6 +198,7 @@ impl KindWithContent {
KindWithContent::TaskDeletion { .. } => Kind::TaskDeletion,
KindWithContent::DumpCreation { .. } => Kind::DumpCreation,
KindWithContent::SnapshotCreation => Kind::SnapshotCreation,
KindWithContent::Export { .. } => Kind::Export,
KindWithContent::UpgradeDatabase { .. } => Kind::UpgradeDatabase,
}
}
@ -192,6 +211,7 @@ impl KindWithContent {
| SnapshotCreation
| TaskCancelation { .. }
| TaskDeletion { .. }
| Export { .. }
| UpgradeDatabase { .. } => vec![],
DocumentAdditionOrUpdate { index_uid, .. }
| DocumentEdition { index_uid, .. }
@ -269,6 +289,14 @@ impl KindWithContent {
}),
KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }),
KindWithContent::SnapshotCreation => None,
KindWithContent::Export { url, api_key, payload_size, indexes } => {
Some(Details::Export {
url: url.clone(),
api_key: api_key.clone(),
payload_size: *payload_size,
indexes: indexes.iter().map(|(p, s)| (p.clone(), s.clone().into())).collect(),
})
}
KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
from: (from.0, from.1, from.2),
to: (
@ -335,6 +363,14 @@ impl KindWithContent {
}),
KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }),
KindWithContent::SnapshotCreation => None,
KindWithContent::Export { url, api_key, payload_size, indexes } => {
Some(Details::Export {
url: url.clone(),
api_key: api_key.clone(),
payload_size: *payload_size,
indexes: indexes.iter().map(|(p, s)| (p.clone(), s.clone().into())).collect(),
})
}
KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
from: *from,
to: (
@ -383,6 +419,14 @@ impl From<&KindWithContent> for Option<Details> {
}),
KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }),
KindWithContent::SnapshotCreation => None,
KindWithContent::Export { url, api_key, payload_size, indexes } => {
Some(Details::Export {
url: url.clone(),
api_key: api_key.clone(),
payload_size: *payload_size,
indexes: indexes.iter().map(|(p, s)| (p.clone(), s.clone().into())).collect(),
})
}
KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
from: *from,
to: (
@ -499,6 +543,7 @@ pub enum Kind {
TaskDeletion,
DumpCreation,
SnapshotCreation,
Export,
UpgradeDatabase,
}
@ -516,6 +561,7 @@ impl Kind {
| Kind::TaskCancelation
| Kind::TaskDeletion
| Kind::DumpCreation
| Kind::Export
| Kind::UpgradeDatabase
| Kind::SnapshotCreation => false,
}
@ -536,6 +582,7 @@ impl Display for Kind {
Kind::TaskDeletion => write!(f, "taskDeletion"),
Kind::DumpCreation => write!(f, "dumpCreation"),
Kind::SnapshotCreation => write!(f, "snapshotCreation"),
Kind::Export => write!(f, "export"),
Kind::UpgradeDatabase => write!(f, "upgradeDatabase"),
}
}
@ -568,6 +615,8 @@ impl FromStr for Kind {
Ok(Kind::DumpCreation)
} else if kind.eq_ignore_ascii_case("snapshotCreation") {
Ok(Kind::SnapshotCreation)
} else if kind.eq_ignore_ascii_case("export") {
Ok(Kind::Export)
} else if kind.eq_ignore_ascii_case("upgradeDatabase") {
Ok(Kind::UpgradeDatabase)
} else {
@ -643,12 +692,33 @@ pub enum Details {
IndexSwap {
swaps: Vec<IndexSwap>,
},
Export {
url: String,
api_key: Option<String>,
payload_size: Option<Byte>,
indexes: BTreeMap<IndexUidPattern, DetailsExportIndexSettings>,
},
UpgradeDatabase {
from: (u32, u32, u32),
to: (u32, u32, u32),
},
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, ToSchema)]
#[schema(rename_all = "camelCase")]
pub struct DetailsExportIndexSettings {
#[serde(flatten)]
pub settings: ExportIndexSettings,
#[serde(skip_serializing_if = "Option::is_none")]
pub matched_documents: Option<u64>,
}
impl From<ExportIndexSettings> for DetailsExportIndexSettings {
fn from(settings: ExportIndexSettings) -> Self {
DetailsExportIndexSettings { settings, matched_documents: None }
}
}
impl Details {
pub fn to_failed(&self) -> Self {
let mut details = self.clone();
@ -667,6 +737,7 @@ impl Details {
Self::SettingsUpdate { .. }
| Self::IndexInfo { .. }
| Self::Dump { .. }
| Self::Export { .. }
| Self::UpgradeDatabase { .. }
| Self::IndexSwap { .. } => (),
}
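
A quick round-trip check for the new task kind (assuming `Kind` derives `Debug` and `PartialEq` as elsewhere in the crate): `Display` emits the camelCase name and `FromStr` accepts it in any case:

use std::str::FromStr;

fn main() {
    assert_eq!(Kind::Export.to_string(), "export");
    assert_eq!(Kind::from_str("export").unwrap(), Kind::Export);
    // `eq_ignore_ascii_case` makes the parse case-insensitive.
    assert_eq!(Kind::from_str("EXPORT").unwrap(), Kind::Export);
}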

View file

@ -28,7 +28,6 @@ actix-web = { version = "4.11.0", default-features = false, features = [
"rustls-0_23",
] }
anyhow = { version = "1.0.98", features = ["backtrace"] }
async-trait = "0.1.88"
bstr = "1.12.0"
byte-unit = { version = "5.1.6", features = ["serde"] }
bytes = "1.10.1"
@ -170,5 +169,5 @@ german = ["meilisearch-types/german"]
turkish = ["meilisearch-types/turkish"]
[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.19/build.zip"
sha1 = "7974430d5277c97f67cf6e95eec6faaac2788834"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.20/build.zip"
sha1 = "82a7ddd7bf14bb5323c3d235d2b62892a98b6a59"

View file

@ -197,11 +197,13 @@ struct Infos {
experimental_max_number_of_batched_tasks: usize,
experimental_limit_batched_tasks_total_size: u64,
experimental_network: bool,
experimental_multimodal: bool,
experimental_chat_completions: bool,
experimental_get_task_documents_route: bool,
experimental_composite_embedders: bool,
experimental_embedding_cache_entries: usize,
experimental_no_snapshot_compaction: bool,
experimental_no_edition_2024_for_settings: bool,
gpu_enabled: bool,
db_path: bool,
import_dump: bool,
@ -286,8 +288,12 @@ impl Infos {
ScheduleSnapshot::Enabled(interval) => Some(interval),
};
let IndexerOpts { max_indexing_memory, max_indexing_threads, skip_index_budget: _ } =
indexer_options;
let IndexerOpts {
max_indexing_memory,
max_indexing_threads,
skip_index_budget: _,
experimental_no_edition_2024_for_settings,
} = indexer_options;
let RuntimeTogglableFeatures {
metrics,
@ -298,6 +304,7 @@ impl Infos {
get_task_documents_route,
composite_embedders,
chat_completions,
multimodal,
} = features;
// We're going to override every sensitive piece of information.
@ -317,6 +324,7 @@ impl Infos {
experimental_reduce_indexing_memory_usage,
experimental_network: network,
experimental_chat_completions: chat_completions,
experimental_multimodal: multimodal,
experimental_get_task_documents_route: get_task_documents_route,
experimental_composite_embedders: composite_embedders,
experimental_embedding_cache_entries,
@ -350,6 +358,7 @@ impl Infos {
ssl_require_auth,
ssl_resumption,
ssl_tickets,
experimental_no_edition_2024_for_settings,
}
}
}
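
The exhaustive destructuring of `IndexerOpts` above (every field named, no `..`) is deliberate: adding a field to the struct stops compiling here until analytics accounts for it. A standalone sketch of the pattern:

struct IndexerOpts {
    max_indexing_memory: Option<usize>,
    experimental_no_edition_2024_for_settings: bool,
}

fn collect(opts: &IndexerOpts) -> Vec<(&'static str, String)> {
    // A new field in `IndexerOpts` breaks this `let` until it is listed.
    let IndexerOpts { max_indexing_memory, experimental_no_edition_2024_for_settings } = opts;
    vec![
        ("max_indexing_memory", format!("{max_indexing_memory:?}")),
        (
            "experimental_no_edition_2024_for_settings",
            experimental_no_edition_2024_for_settings.to_string(),
        ),
    ]
}

fn main() {
    let opts =
        IndexerOpts { max_indexing_memory: None, experimental_no_edition_2024_for_settings: false };
    println!("{:?}", collect(&opts));
}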

View file

@ -76,8 +76,10 @@ pub enum MeilisearchHttpError {
DocumentFormat(#[from] DocumentFormatError),
#[error(transparent)]
Join(#[from] JoinError),
#[error("Invalid request: missing `hybrid` parameter when `vector` is present.")]
#[error("Invalid request: missing `hybrid` parameter when `vector` or `media` are present.")]
MissingSearchHybrid,
#[error("Invalid request: both `media` and `vector` parameters are present.")]
MediaAndVector,
}
impl MeilisearchHttpError {
@ -111,6 +113,7 @@ impl ErrorCode for MeilisearchHttpError {
MeilisearchHttpError::DocumentFormat(e) => e.error_code(),
MeilisearchHttpError::Join(_) => Code::Internal,
MeilisearchHttpError::MissingSearchHybrid => Code::MissingSearchHybrid,
MeilisearchHttpError::MediaAndVector => Code::InvalidSearchMediaAndVector,
MeilisearchHttpError::FederationOptionsInNonFederatedRequest(_) => {
Code::InvalidMultiSearchFederationOptions
}

View file

@ -37,6 +37,7 @@ use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
use meilisearch_auth::{open_auth_store_env, AuthController};
use meilisearch_types::milli::constants::VERSION_MAJOR;
use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
use meilisearch_types::milli::progress::{EmbedderStats, Progress};
use meilisearch_types::milli::update::{
default_thread_pool_and_threads, IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig,
};
@ -463,6 +464,7 @@ fn import_dump(
index_scheduler: &mut IndexScheduler,
auth: &mut AuthController,
) -> Result<(), anyhow::Error> {
let progress = Progress::default();
let reader = File::open(dump_path)?;
let mut dump_reader = dump::DumpReader::open(reader)?;
@ -496,14 +498,20 @@ fn import_dump(
keys.push(key);
}
// 3. Import the runtime features and network
// 3. Import the `ChatCompletionSettings`s.
for result in dump_reader.chat_completions_settings()? {
let (name, settings) = result?;
index_scheduler.put_chat_settings(&name, &settings)?;
}
// 4. Import the runtime features and network
let features = dump_reader.features()?.unwrap_or_default();
index_scheduler.put_runtime_features(features)?;
let network = dump_reader.network()?.cloned().unwrap_or_default();
index_scheduler.put_network(network)?;
// 3.1 Use all cpus to process dump if `max_indexing_threads` not configured
// 4.1 Use all cpus to process dump if `max_indexing_threads` not configured
let backup_config;
let base_config = index_scheduler.indexer_config();
@ -520,7 +528,7 @@ fn import_dump(
// /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
// try to process tasks while we're trying to import the indexes.
// 4. Import the indexes.
// 5. Import the indexes.
for index_reader in dump_reader.indexes()? {
let mut index_reader = index_reader?;
let metadata = index_reader.metadata();
@ -533,20 +541,20 @@ fn import_dump(
let mut wtxn = index.write_txn()?;
let mut builder = milli::update::Settings::new(&mut wtxn, &index, indexer_config);
// 4.1 Import the primary key if there is one.
// 5.1 Import the primary key if there is one.
if let Some(ref primary_key) = metadata.primary_key {
builder.set_primary_key(primary_key.to_string());
}
// 4.2 Import the settings.
// 5.2 Import the settings.
tracing::info!("Importing the settings.");
let settings = index_reader.settings()?;
apply_settings_to_builder(&settings, &mut builder);
builder
.execute(|indexing_step| tracing::debug!("update: {:?}", indexing_step), || false)?;
let embedder_stats: Arc<EmbedderStats> = Default::default();
builder.execute(&|| false, &progress, embedder_stats.clone())?;
// 4.3 Import the documents.
// 4.3.1 We need to recreate the grenad+obkv format accepted by the index.
// 5.3 Import the documents.
// 5.3.1 We need to recreate the grenad+obkv format accepted by the index.
tracing::info!("Importing the documents.");
let file = tempfile::tempfile()?;
let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
@ -557,11 +565,11 @@ fn import_dump(
// This flush the content of the batch builder.
let file = builder.into_inner()?.into_inner()?;
// 4.3.2 We feed it to the milli index.
// 5.3.2 We feed it to the milli index.
let reader = BufReader::new(file);
let reader = DocumentsBatchReader::from_reader(reader)?;
let embedder_configs = index.embedding_configs(&wtxn)?;
let embedder_configs = index.embedding_configs().embedding_configs(&wtxn)?;
let embedders = index_scheduler.embedders(uid.to_string(), embedder_configs)?;
let builder = milli::update::IndexDocuments::new(
@ -574,6 +582,7 @@ fn import_dump(
},
|indexing_step| tracing::trace!("update: {:?}", indexing_step),
|| false,
&embedder_stats,
)?;
let builder = builder.with_embedders(embedders);
@ -588,15 +597,15 @@ fn import_dump(
index_scheduler.refresh_index_stats(&uid)?;
}
// 5. Import the queue
// 6. Import the queue
let mut index_scheduler_dump = index_scheduler.register_dumped_task()?;
// 5.1. Import the batches
// 6.1. Import the batches
for ret in dump_reader.batches()? {
let batch = ret?;
index_scheduler_dump.register_dumped_batch(batch)?;
}
// 5.2. Import the tasks
// 6.2. Import the tasks
for ret in dump_reader.tasks()? {
let (task, file) = ret?;
index_scheduler_dump.register_dumped_task(task, file)?;

View file

@ -15,6 +15,33 @@ lazy_static! {
"Meilisearch number of degraded search requests"
))
.expect("Can't create a metric");
pub static ref MEILISEARCH_CHAT_SEARCH_REQUESTS: IntCounterVec = register_int_counter_vec!(
opts!(
"meilisearch_chat_search_requests",
"Meilisearch number of search requests performed by the chat route itself"
),
&["type"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
opts!("meilisearch_chat_prompt_tokens_usage", "Meilisearch Chat Prompt Tokens Usage"),
&["workspace", "model"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE: IntCounterVec =
register_int_counter_vec!(
opts!(
"meilisearch_chat_completion_tokens_usage",
"Meilisearch Chat Completion Tokens Usage"
),
&["workspace", "model"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
opts!("meilisearch_chat_total_tokens_usage", "Meilisearch Chat Total Tokens Usage"),
&["workspace", "model"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch DB Size In Bytes"))
.expect("Can't create a metric");

View file

@ -53,6 +53,8 @@ const MEILI_EXPERIMENTAL_DUMPLESS_UPGRADE: &str = "MEILI_EXPERIMENTAL_DUMPLESS_U
const MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS: &str = "MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS";
const MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE: &str = "MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE";
const MEILI_EXPERIMENTAL_CONTAINS_FILTER: &str = "MEILI_EXPERIMENTAL_CONTAINS_FILTER";
const MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS: &str =
"MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS";
const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS";
const MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE: &str = "MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE";
const MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER: &str = "MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER";
@ -62,7 +64,7 @@ const MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE: &str =
const MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS: &str =
"MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS";
const MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE: &str =
"MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_SIZE";
"MEILI_EXPERIMENTAL_LIMIT_BATCHED_TASKS_TOTAL_SIZE";
const MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES: &str =
"MEILI_EXPERIMENTAL_EMBEDDING_CACHE_ENTRIES";
const MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION: &str = "MEILI_EXPERIMENTAL_NO_SNAPSHOT_COMPACTION";
@ -749,12 +751,25 @@ pub struct IndexerOpts {
#[clap(skip)]
#[serde(skip)]
pub skip_index_budget: bool,
/// Experimental no edition 2024 for settings feature. For more information,
/// see: <https://github.com/orgs/meilisearch/discussions/847>
///
/// Enables the experimental no edition 2024 for settings feature.
#[clap(long, env = MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS)]
#[serde(default)]
pub experimental_no_edition_2024_for_settings: bool,
}
impl IndexerOpts {
/// Exports the values to their corresponding env vars if they are not set.
pub fn export_to_env(self) {
let IndexerOpts { max_indexing_memory, max_indexing_threads, skip_index_budget: _ } = self;
let IndexerOpts {
max_indexing_memory,
max_indexing_threads,
skip_index_budget: _,
experimental_no_edition_2024_for_settings,
} = self;
if let Some(max_indexing_memory) = max_indexing_memory.0 {
export_to_env_if_not_present(
MEILI_MAX_INDEXING_MEMORY,
@ -767,6 +782,12 @@ impl IndexerOpts {
max_indexing_threads.to_string(),
);
}
if experimental_no_edition_2024_for_settings {
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_NO_EDITION_2024_FOR_SETTINGS,
experimental_no_edition_2024_for_settings.to_string(),
);
}
}
}
@ -785,7 +806,12 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
max_threads: *other.max_indexing_threads,
max_positions_per_attributes: None,
skip_index_budget: other.skip_index_budget,
..Default::default()
experimental_no_edition_2024_for_settings: other
.experimental_no_edition_2024_for_settings,
chunk_compression_type: Default::default(),
chunk_compression_level: Default::default(),
documents_chunk_size: Default::default(),
max_nb_chunks: Default::default(),
})
}
}
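Replacing `..Default::default()` with an explicit field list is a deliberate exhaustiveness trick: the next field added to `IndexerConfig` becomes a compile error at this `TryFrom` instead of a silently defaulted value. A tiny sketch of the pattern, with a hypothetical struct:

struct Config {
    verbose: bool,
    threads: Option<usize>,
}

fn build(verbose: bool) -> Config {
    Config {
        verbose,
        // Spelled out on purpose: adding a field to `Config` breaks this
        // constructor at compile time rather than defaulting unnoticed.
        threads: Default::default(),
    }
}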

View file

@ -0,0 +1,135 @@
use std::collections::BinaryHeap;
use serde_json::{json, Value};
use crate::analytics::Aggregate;
#[derive(Default)]
pub struct ChatCompletionAggregator {
// requests
total_received: usize,
total_succeeded: usize,
time_spent: BinaryHeap<usize>,
// chat completion specific metrics
total_messages: usize,
total_streamed_requests: usize,
total_non_streamed_requests: usize,
// model usage tracking
models_used: std::collections::HashMap<String, usize>,
}
impl ChatCompletionAggregator {
pub fn from_request(model: &str, message_count: usize, is_stream: bool) -> Self {
let mut models_used = std::collections::HashMap::new();
models_used.insert(model.to_string(), 1);
Self {
total_received: 1,
total_succeeded: 0,
time_spent: BinaryHeap::new(),
total_messages: message_count,
total_streamed_requests: if is_stream { 1 } else { 0 },
total_non_streamed_requests: if is_stream { 0 } else { 1 },
models_used,
}
}
pub fn succeed(&mut self, time_spent: std::time::Duration) {
self.total_succeeded += 1;
self.time_spent.push(time_spent.as_millis() as usize);
}
}
impl Aggregate for ChatCompletionAggregator {
fn event_name(&self) -> &'static str {
"Chat Completion POST"
}
fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
let Self {
total_received,
total_succeeded,
mut time_spent,
total_messages,
total_streamed_requests,
total_non_streamed_requests,
models_used,
..
} = *new;
// Aggregate time spent
self.time_spent.append(&mut time_spent);
// Aggregate counters
self.total_received = self.total_received.saturating_add(total_received);
self.total_succeeded = self.total_succeeded.saturating_add(total_succeeded);
self.total_messages = self.total_messages.saturating_add(total_messages);
self.total_streamed_requests =
self.total_streamed_requests.saturating_add(total_streamed_requests);
self.total_non_streamed_requests =
self.total_non_streamed_requests.saturating_add(total_non_streamed_requests);
// Aggregate model usage
for (model, count) in models_used {
*self.models_used.entry(model).or_insert(0) += count;
}
self
}
fn into_event(self: Box<Self>) -> Value {
let Self {
total_received,
total_succeeded,
time_spent,
total_messages,
total_streamed_requests,
total_non_streamed_requests,
models_used,
..
} = *self;
// Compute time statistics
let time_spent: Vec<usize> = time_spent.into_sorted_vec();
let (max_time, min_time, avg_time) = if time_spent.is_empty() {
(0, 0, 0)
} else {
let max_time = time_spent.last().unwrap_or(&0);
let min_time = time_spent.first().unwrap_or(&0);
let sum: usize = time_spent.iter().sum();
let avg_time = sum / time_spent.len();
(*max_time, *min_time, avg_time)
};
// Compute average messages per request
let avg_messages_per_request =
if total_received > 0 { total_messages as f64 / total_received as f64 } else { 0.0 };
// Compute streaming vs non-streaming proportions
let streaming_ratio = if total_received > 0 {
total_streamed_requests as f64 / total_received as f64
} else {
0.0
};
json!({
"total_received": total_received,
"total_succeeded": total_succeeded,
"time_spent": {
"max": max_time,
"min": min_time,
"avg": avg_time
},
"total_messages": total_messages,
"avg_messages_per_request": avg_messages_per_request,
"total_streamed_requests": total_streamed_requests,
"total_non_streamed_requests": total_non_streamed_requests,
"streaming_ratio": streaming_ratio,
"models_used": models_used,
})
}
}
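A hypothetical round-trip of the aggregator above (model name and timings are made up), folding two requests the way the analytics batcher does before flushing:

use std::time::Duration;

fn demo() {
    let mut streamed = ChatCompletionAggregator::from_request("gpt-4o-mini", 3, true);
    streamed.succeed(Duration::from_millis(250));
    let non_streamed = ChatCompletionAggregator::from_request("gpt-4o-mini", 1, false);

    let merged = Box::new(streamed).aggregate(Box::new(non_streamed));
    let event = merged.into_event();
    assert_eq!(event["total_received"], 2);
    assert_eq!(event["streaming_ratio"], 0.5);
}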

View file

@ -13,9 +13,9 @@ use async_openai::types::{
ChatCompletionRequestDeveloperMessageContent, ChatCompletionRequestMessage,
ChatCompletionRequestSystemMessage, ChatCompletionRequestSystemMessageContent,
ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent,
ChatCompletionStreamResponseDelta, ChatCompletionToolArgs, ChatCompletionToolType,
CreateChatCompletionRequest, CreateChatCompletionStreamResponse, FinishReason, FunctionCall,
FunctionCallStream, FunctionObjectArgs,
ChatCompletionStreamOptions, ChatCompletionStreamResponseDelta, ChatCompletionToolArgs,
ChatCompletionToolType, CreateChatCompletionRequest, CreateChatCompletionStreamResponse,
FinishReason, FunctionCall, FunctionCallStream, FunctionObjectArgs,
};
use async_openai::Client;
use bumpalo::Bump;
@ -36,6 +36,7 @@ use serde_json::json;
use tokio::runtime::Handle;
use tokio::sync::mpsc::error::SendError;
use super::chat_completion_analytics::ChatCompletionAggregator;
use super::config::Config;
use super::errors::{MistralError, OpenAiOutsideError, StreamErrorEvent};
use super::utils::format_documents;
@ -43,10 +44,15 @@ use super::{
ChatsParam, MEILI_APPEND_CONVERSATION_MESSAGE_NAME, MEILI_SEARCH_IN_INDEX_FUNCTION_NAME,
MEILI_SEARCH_PROGRESS_NAME, MEILI_SEARCH_SOURCES_NAME,
};
use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
use crate::metrics::{
MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE, MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE,
MEILISEARCH_CHAT_SEARCH_REQUESTS, MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE,
MEILISEARCH_DEGRADED_SEARCH_REQUESTS,
};
use crate::routes::chats::utils::SseEventSender;
use crate::routes::indexes::search::search_kind;
use crate::search::{add_search_rules, prepare_search, search_from_kind, SearchQuery};
@ -64,6 +70,7 @@ async fn chat(
req: HttpRequest,
search_queue: web::Data<SearchQueue>,
web::Json(chat_completion): web::Json<CreateChatCompletionRequest>,
analytics: web::Data<Analytics>,
) -> impl Responder {
let ChatsParam { workspace_uid } = chats_param.into_inner();
@ -76,6 +83,7 @@ async fn chat(
&workspace_uid,
req,
chat_completion,
analytics,
)
.await,
)
@ -88,6 +96,7 @@ async fn chat(
&workspace_uid,
req,
chat_completion,
analytics,
)
.await,
)
@ -281,7 +290,7 @@ async fn process_search_request(
let output = output?;
let mut documents = Vec::new();
if let Ok((ref rtxn, ref search_result)) = output {
// aggregate.succeed(search_result);
MEILISEARCH_CHAT_SEARCH_REQUESTS.with_label_values(&["internal"]).inc();
if search_result.degraded {
MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
}
@ -315,9 +324,18 @@ async fn non_streamed_chat(
workspace_uid: &str,
req: HttpRequest,
chat_completion: CreateChatCompletionRequest,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
// Create analytics aggregator
let mut aggregate = ChatCompletionAggregator::from_request(
&chat_completion.model,
chat_completion.messages.len(),
false, // non_streamed_chat is not streaming
);
let start_time = std::time::Instant::now();
if let Some(n) = chat_completion.n.filter(|&n| n != 1) {
return Err(ResponseError::from_msg(
format!("You tried to specify n = {n} but only single choices are supported (n = 1)."),
@ -414,6 +432,11 @@ async fn non_streamed_chat(
}
}
// Record success in analytics
aggregate.succeed(start_time.elapsed());
analytics.publish(aggregate, &req);
Ok(HttpResponse::Ok().json(response))
}
@ -424,6 +447,7 @@ async fn streamed_chat(
workspace_uid: &str,
req: HttpRequest,
mut chat_completion: CreateChatCompletionRequest,
analytics: web::Data<Analytics>,
) -> Result<impl Responder, ResponseError> {
index_scheduler.features().check_chat_completions("using the /chats chat completions route")?;
let filters = index_scheduler.filters();
@ -445,6 +469,14 @@ async fn streamed_chat(
}
};
// Create analytics aggregator
let mut aggregate = ChatCompletionAggregator::from_request(
&chat_completion.model,
chat_completion.messages.len(),
true, // streamed_chat is always streaming
);
let start_time = std::time::Instant::now();
let config = Config::new(&chat_settings);
let auth_token = extract_token_from_request(&req)?.unwrap().to_string();
let system_role = chat_settings.source.system_role(&chat_completion.model);
@ -460,6 +492,7 @@ async fn streamed_chat(
let (tx, rx) = tokio::sync::mpsc::channel(10);
let tx = SseEventSender::new(tx);
let workspace_uid = workspace_uid.to_string();
let _join_handle = Handle::current().spawn(async move {
let client = Client::with_config(config.clone());
let mut global_tool_calls = HashMap::<u32, Call>::new();
@ -469,6 +502,7 @@ async fn streamed_chat(
let output = run_conversation(
&index_scheduler,
&auth_ctrl,
&workspace_uid,
&search_queue,
&auth_token,
&client,
@ -490,6 +524,10 @@ async fn streamed_chat(
let _ = tx.stop().await;
});
// Record success in analytics after the stream is set up
aggregate.succeed(start_time.elapsed());
analytics.publish(aggregate, &req);
Ok(Sse::from_infallible_receiver(rx).with_retry_duration(Duration::from_secs(10)))
}
@ -502,6 +540,7 @@ async fn run_conversation<C: async_openai::config::Config>(
Data<IndexScheduler>,
>,
auth_ctrl: &web::Data<AuthController>,
workspace_uid: &str,
search_queue: &web::Data<SearchQueue>,
auth_token: &str,
client: &Client<C>,
@ -511,13 +550,34 @@ async fn run_conversation<C: async_openai::config::Config>(
global_tool_calls: &mut HashMap<u32, Call>,
function_support: FunctionSupport,
) -> Result<ControlFlow<Option<FinishReason>, ()>, SendError<Event>> {
use DbChatCompletionSource::*;
let mut finish_reason = None;
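// Only ask the OpenAI-flavored backends for usage reporting; the other
// sources are left without `stream_options`.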
chat_completion.stream_options = match source {
OpenAi | AzureOpenAi => Some(ChatCompletionStreamOptions { include_usage: true }),
Mistral | VLlm => None,
};
// safety: the unwrap can only fail if `stream` was set to `false`
let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
while let Some(result) = response.next().await {
match result {
Ok(resp) => {
let choice = &resp.choices[0];
if let Some(usage) = resp.usage.as_ref() {
MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE
.with_label_values(&[workspace_uid, &chat_completion.model])
.inc_by(usage.prompt_tokens as u64);
MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE
.with_label_values(&[workspace_uid, &chat_completion.model])
.inc_by(usage.completion_tokens as u64);
MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE
.with_label_values(&[workspace_uid, &chat_completion.model])
.inc_by(usage.total_tokens as u64);
}
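// With `include_usage` set, the final chunk carries usage but an empty
// `choices` array, so stop cleanly instead of indexing into it.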
let choice = match resp.choices.first() {
Some(choice) => choice,
None => break,
};
finish_reason = choice.finish_reason;
let ChatCompletionStreamResponseDelta { ref tool_calls, .. } = &choice.delta;

View file

@ -13,7 +13,7 @@ impl Config {
pub fn new(chat_settings: &DbChatSettings) -> Self {
use meilisearch_types::features::ChatCompletionSource::*;
match chat_settings.source {
OpenAi | Mistral | Gemini | VLlm => {
OpenAi | Mistral | VLlm => {
let mut config = OpenAIConfig::default();
if let Some(org_id) = chat_settings.org_id.as_ref() {
config = config.with_org_id(org_id);

View file

@ -19,6 +19,7 @@ use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::routes::PAGINATION_DEFAULT_LIMIT;
mod chat_completion_analytics;
pub mod chat_completions;
mod config;
mod errors;

View file

@ -218,7 +218,6 @@ pub enum ChatCompletionSource {
#[default]
OpenAi,
Mistral,
Gemini,
AzureOpenAi,
VLlm,
}
@ -229,7 +228,6 @@ impl From<ChatCompletionSource> for DbChatCompletionSource {
match source {
OpenAi => DbChatCompletionSource::OpenAi,
Mistral => DbChatCompletionSource::Mistral,
Gemini => DbChatCompletionSource::Gemini,
AzureOpenAi => DbChatCompletionSource::AzureOpenAi,
VLlm => DbChatCompletionSource::VLlm,
}

View file

@ -0,0 +1,183 @@
use std::collections::BTreeMap;
use std::convert::Infallible;
use std::str::FromStr as _;
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use byte_unit::Byte;
use deserr::actix_web::AwebJson;
use deserr::Deserr;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::keys::actions;
use meilisearch_types::tasks::{ExportIndexSettings as DbExportIndexSettings, KindWithContent};
use serde::Serialize;
use serde_json::Value;
use tracing::debug;
use utoipa::{OpenApi, ToSchema};
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::routes::export_analytics::ExportAnalytics;
use crate::routes::{get_task_id, is_dry_run, SummarizedTaskView};
use crate::Opt;
#[derive(OpenApi)]
#[openapi(
paths(export),
tags((
name = "Export",
description = "The `/export` route allows you to trigger an export process to a remote Meilisearch instance.",
external_docs(url = "https://www.meilisearch.com/docs/reference/api/export"),
)),
)]
pub struct ExportApi;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(export)));
}
#[utoipa::path(
post,
path = "",
tag = "Export",
security(("Bearer" = ["export", "*"])),
responses(
(status = 202, description = "Export successfully enqueued", body = SummarizedTaskView, content_type = "application/json", example = json!(
{
"taskUid": 1,
"status": "enqueued",
"type": "export",
"enqueuedAt": "2021-08-11T09:25:53.000000Z"
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
"message": "The Authorization header is missing. It must use the bearer authorization method.",
"code": "missing_authorization_header",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#missing_authorization_header"
}
)),
)
)]
async fn export(
index_scheduler: GuardedData<ActionPolicy<{ actions::EXPORT }>, Data<IndexScheduler>>,
export: AwebJson<Export, DeserrJsonError>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let export = export.into_inner();
debug!(parameters = ?export, "Trigger export");
let analytics_aggregate = ExportAnalytics::from_export(&export);
let Export { url, api_key, payload_size, indexes } = export;
let indexes = match indexes {
Some(indexes) => indexes
.into_iter()
.map(|(pattern, ExportIndexSettings { filter, override_settings })| {
(pattern, DbExportIndexSettings { filter, override_settings })
})
.collect(),
None => BTreeMap::from([(
IndexUidPattern::new_unchecked("*"),
DbExportIndexSettings::default(),
)]),
};
let task = KindWithContent::Export {
url,
api_key,
payload_size: payload_size.map(|ByteWithDeserr(bytes)| bytes),
indexes,
};
let uid = get_task_id(&req, &opt)?;
let dry_run = is_dry_run(&req, &opt)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
.await??
.into();
analytics.publish(analytics_aggregate, &req);
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug, Deserr, ToSchema, Serialize)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct Export {
#[schema(value_type = Option<String>, example = json!("https://ms-1234.heaven.meilisearch.com"))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportUrl>)]
pub url: String,
#[schema(value_type = Option<String>, example = json!("1234abcd"))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportApiKey>)]
pub api_key: Option<String>,
#[schema(value_type = Option<String>, example = json!("24MiB"))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportPayloadSize>)]
pub payload_size: Option<ByteWithDeserr>,
#[schema(value_type = Option<BTreeMap<String, ExportIndexSettings>>, example = json!({ "*": { "filter": null } }))]
#[deserr(default)]
#[serde(default)]
pub indexes: Option<BTreeMap<IndexUidPattern, ExportIndexSettings>>,
}
/// A wrapper around the `Byte` type that implements `Deserr`.
#[derive(Debug, Serialize)]
#[serde(transparent)]
pub struct ByteWithDeserr(pub Byte);
impl<E> deserr::Deserr<E> for ByteWithDeserr
where
E: deserr::DeserializeError,
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
) -> Result<Self, E> {
use deserr::{ErrorKind, Value, ValueKind};
match value {
Value::Integer(integer) => Ok(ByteWithDeserr(Byte::from_u64(integer))),
Value::String(string) => Byte::from_str(&string).map(ByteWithDeserr).map_err(|e| {
deserr::take_cf_content(E::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: e.to_string() },
location,
))
}),
actual => Err(deserr::take_cf_content(E::error(
None,
ErrorKind::IncorrectValueKind {
actual,
accepted: &[ValueKind::Integer, ValueKind::String],
},
location,
))),
}
}
}
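The wrapper above accepts either a raw byte count or a human-readable string. A small sketch of the two accepted payload shapes, using the same `byte_unit` calls (values are illustrative):

use std::str::FromStr as _;
use byte_unit::Byte;

fn demo() {
    let from_int = Byte::from_u64(25_165_824); // "payloadSize": 25165824
    let from_str = Byte::from_str("24MiB").unwrap(); // "payloadSize": "24MiB"
    assert_eq!(from_int, from_str); // 24 MiB == 25_165_824 bytes
}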
#[derive(Debug, Deserr, ToSchema, Serialize)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[schema(rename_all = "camelCase")]
pub struct ExportIndexSettings {
#[schema(value_type = Option<String>, example = json!("genres = action"))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportIndexFilter>)]
pub filter: Option<Value>,
#[schema(value_type = Option<bool>, example = json!(true))]
#[serde(default)]
#[deserr(default, error = DeserrJsonError<InvalidExportIndexOverrideSettings>)]
pub override_settings: bool,
}

View file

@ -0,0 +1,111 @@
use url::Url;
use crate::analytics::Aggregate;
use crate::routes::export::Export;
#[derive(Default)]
pub struct ExportAnalytics {
total_received: usize,
has_api_key: bool,
sum_exports_meilisearch_cloud: usize,
sum_index_patterns: usize,
sum_patterns_with_filter: usize,
sum_patterns_with_override_settings: usize,
payload_sizes: Vec<u64>,
}
impl ExportAnalytics {
pub fn from_export(export: &Export) -> Self {
let Export { url, api_key, payload_size, indexes } = export;
let url = Url::parse(url).ok();
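// Heuristic only: any host under one of the official domains is counted
// as Meilisearch Cloud.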
let is_meilisearch_cloud = url.as_ref().and_then(Url::host_str).is_some_and(|host| {
host.ends_with("meilisearch.dev")
|| host.ends_with("meilisearch.com")
|| host.ends_with("meilisearch.io")
});
let has_api_key = api_key.is_some();
let index_patterns_count = indexes.as_ref().map_or(0, |indexes| indexes.len());
let patterns_with_filter_count = indexes.as_ref().map_or(0, |indexes| {
indexes.values().filter(|settings| settings.filter.is_some()).count()
});
let patterns_with_override_settings_count = indexes.as_ref().map_or(0, |indexes| {
indexes.values().filter(|settings| settings.override_settings).count()
});
let payload_sizes =
if let Some(crate::routes::export::ByteWithDeserr(byte_size)) = payload_size {
vec![byte_size.as_u64()]
} else {
vec![]
};
Self {
total_received: 1,
has_api_key,
sum_exports_meilisearch_cloud: is_meilisearch_cloud as usize,
sum_index_patterns: index_patterns_count,
sum_patterns_with_filter: patterns_with_filter_count,
sum_patterns_with_override_settings: patterns_with_override_settings_count,
payload_sizes,
}
}
}
impl Aggregate for ExportAnalytics {
fn event_name(&self) -> &'static str {
"Export Triggered"
}
fn aggregate(mut self: Box<Self>, other: Box<Self>) -> Box<Self> {
self.total_received += other.total_received;
self.has_api_key |= other.has_api_key;
self.sum_exports_meilisearch_cloud += other.sum_exports_meilisearch_cloud;
self.sum_index_patterns += other.sum_index_patterns;
self.sum_patterns_with_filter += other.sum_patterns_with_filter;
self.sum_patterns_with_override_settings += other.sum_patterns_with_override_settings;
self.payload_sizes.extend(other.payload_sizes);
self
}
fn into_event(self: Box<Self>) -> serde_json::Value {
let avg_payload_size = if self.payload_sizes.is_empty() {
None
} else {
Some(self.payload_sizes.iter().sum::<u64>() / self.payload_sizes.len() as u64)
};
let avg_exports_meilisearch_cloud = if self.total_received == 0 {
None
} else {
Some(self.sum_exports_meilisearch_cloud as f64 / self.total_received as f64)
};
let avg_index_patterns = if self.total_received == 0 {
None
} else {
Some(self.sum_index_patterns as f64 / self.total_received as f64)
};
let avg_patterns_with_filter = if self.total_received == 0 {
None
} else {
Some(self.sum_patterns_with_filter as f64 / self.total_received as f64)
};
let avg_patterns_with_override_settings = if self.total_received == 0 {
None
} else {
Some(self.sum_patterns_with_override_settings as f64 / self.total_received as f64)
};
serde_json::json!({
"total_received": self.total_received,
"has_api_key": self.has_api_key,
"avg_exports_meilisearch_cloud": avg_exports_meilisearch_cloud,
"avg_index_patterns": avg_index_patterns,
"avg_patterns_with_filter": avg_patterns_with_filter,
"avg_patterns_with_override_settings": avg_patterns_with_override_settings,
"avg_payload_size": avg_payload_size,
})
}
}
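A minimal sketch (hypothetical call site) of how two export events fold together before being flushed as a single JSON event:

fn fold(first: ExportAnalytics, second: ExportAnalytics) -> serde_json::Value {
    // `aggregate` sums the counters and concatenates payload sizes;
    // `into_event` then derives the averages over `total_received`.
    Box::new(first).aggregate(Box::new(second)).into_event()
}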

View file

@ -54,6 +54,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
get_task_documents_route: Some(false),
composite_embedders: Some(false),
chat_completions: Some(false),
multimodal: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
@ -100,6 +101,8 @@ pub struct RuntimeTogglableFeatures {
pub composite_embedders: Option<bool>,
#[deserr(default)]
pub chat_completions: Option<bool>,
#[deserr(default)]
pub multimodal: Option<bool>,
}
impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogglableFeatures {
@ -113,6 +116,7 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
get_task_documents_route,
composite_embedders,
chat_completions,
multimodal,
} = value;
Self {
@ -124,6 +128,7 @@ impl From<meilisearch_types::features::RuntimeTogglableFeatures> for RuntimeTogg
get_task_documents_route: Some(get_task_documents_route),
composite_embedders: Some(composite_embedders),
chat_completions: Some(chat_completions),
multimodal: Some(multimodal),
}
}
}
@ -138,6 +143,7 @@ pub struct PatchExperimentalFeatureAnalytics {
get_task_documents_route: bool,
composite_embedders: bool,
chat_completions: bool,
multimodal: bool,
}
impl Aggregate for PatchExperimentalFeatureAnalytics {
@ -155,6 +161,7 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
get_task_documents_route: new.get_task_documents_route,
composite_embedders: new.composite_embedders,
chat_completions: new.chat_completions,
multimodal: new.multimodal,
})
}
@ -181,6 +188,7 @@ impl Aggregate for PatchExperimentalFeatureAnalytics {
get_task_documents_route: Some(false),
composite_embedders: Some(false),
chat_completions: Some(false),
multimodal: Some(false),
})),
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
{
@ -223,6 +231,7 @@ async fn patch_features(
.composite_embedders
.unwrap_or(old_features.composite_embedders),
chat_completions: new_features.0.chat_completions.unwrap_or(old_features.chat_completions),
multimodal: new_features.0.multimodal.unwrap_or(old_features.multimodal),
};
// explicitly destructure for analytics rather than using the `Serialize` implementation, because
@ -237,6 +246,7 @@ async fn patch_features(
get_task_documents_route,
composite_embedders,
chat_completions,
multimodal,
} = new_features;
analytics.publish(
@ -249,6 +259,7 @@ async fn patch_features(
get_task_documents_route,
composite_embedders,
chat_completions,
multimodal,
},
&req,
);

View file

@ -1452,7 +1452,6 @@ fn some_documents<'a, 't: 'a>(
) -> Result<impl Iterator<Item = Result<Document, ResponseError>> + 'a, ResponseError> {
let fields_ids_map = index.fields_ids_map(rtxn)?;
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
let embedding_configs = index.embedding_configs(rtxn)?;
Ok(index.iter_documents(rtxn, doc_ids)?.map(move |ret| {
ret.map_err(ResponseError::from).and_then(|(key, document)| -> Result<_, ResponseError> {
@ -1468,15 +1467,9 @@ fn some_documents<'a, 't: 'a>(
Some(Value::Object(map)) => map,
_ => Default::default(),
};
for (name, vector) in index.embeddings(rtxn, key)? {
let user_provided = embedding_configs
.iter()
.find(|conf| conf.name == name)
.is_some_and(|conf| conf.user_provided.contains(key));
let embeddings = ExplicitVectors {
embeddings: Some(vector.into()),
regenerate: !user_provided,
};
for (name, (vector, regenerate)) in index.embeddings(rtxn, key)? {
let embeddings =
ExplicitVectors { embeddings: Some(vector.into()), regenerate };
vectors.insert(
name,
serde_json::to_value(embeddings).map_err(MeilisearchHttpError::from)?,

View file

@ -56,6 +56,8 @@ pub struct FacetSearchQuery {
pub q: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
pub vector: Option<Vec<f32>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchMedia>)]
pub media: Option<Value>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
pub hybrid: Option<HybridQuery>,
#[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
@ -94,6 +96,7 @@ impl FacetSearchAggregator {
facet_name,
vector,
q,
media,
filter,
matching_strategy,
attributes_to_search_on,
@ -108,6 +111,7 @@ impl FacetSearchAggregator {
facet_names: Some(facet_name.clone()).into_iter().collect(),
additional_search_parameters_provided: q.is_some()
|| vector.is_some()
|| media.is_some()
|| filter.is_some()
|| *matching_strategy != MatchingStrategy::default()
|| attributes_to_search_on.is_some()
@ -291,6 +295,7 @@ impl From<FacetSearchQuery> for SearchQuery {
facet_name: _,
q,
vector,
media,
filter,
matching_strategy,
attributes_to_search_on,
@ -312,6 +317,7 @@ impl From<FacetSearchQuery> for SearchQuery {
SearchQuery {
q,
media,
offset: DEFAULT_SEARCH_OFFSET(),
limit: DEFAULT_SEARCH_LIMIT(),
page,

View file

@ -205,6 +205,8 @@ impl TryFrom<SearchQueryGet> for SearchQuery {
Ok(Self {
q: other.q,
// `media` not supported for `GET`
media: None,
vector: other.vector.map(CS::into_inner),
offset: other.offset.0,
limit: other.limit.0,
@ -481,28 +483,30 @@ pub fn search_kind(
index_uid: String,
index: &milli::Index,
) -> Result<SearchKind, ResponseError> {
let is_placeholder_query =
if let Some(q) = query.q.as_deref() { q.trim().is_empty() } else { true };
let non_placeholder_query = !is_placeholder_query;
let is_media = query.media.is_some();
// handle with care, the order of cases matters, and the semantics are subtle
match (query.q.as_deref(), &query.hybrid, query.vector.as_deref()) {
// empty query, no vector => placeholder search
(Some(q), _, None) if q.trim().is_empty() => Ok(SearchKind::KeywordOnly),
// no query, no vector => placeholder search
(None, _, None) => Ok(SearchKind::KeywordOnly),
// hybrid.semantic_ratio == 1.0 => vector
(_, Some(HybridQuery { semantic_ratio, embedder }), v) if **semantic_ratio == 1.0 => {
SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len()))
}
// hybrid.semantic_ratio == 0.0 => keyword
(_, Some(HybridQuery { semantic_ratio, embedder: _ }), _) if **semantic_ratio == 0.0 => {
match (is_media, non_placeholder_query, &query.hybrid, query.vector.as_deref()) {
// media + vector => error
(true, _, _, Some(_)) => Err(MeilisearchHttpError::MediaAndVector.into()),
// media + !hybrid => error
(true, _, None, _) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
// vector + !hybrid => error
(_, _, None, Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
// hybrid S0 => keyword
(_, _, Some(HybridQuery { semantic_ratio, embedder: _ }), _) if **semantic_ratio == 0.0 => {
Ok(SearchKind::KeywordOnly)
}
// no query, hybrid, vector => semantic
(None, Some(HybridQuery { semantic_ratio: _, embedder }), Some(v)) => {
SearchKind::semantic(index_scheduler, index_uid, index, embedder, Some(v.len()))
// !q + !vector => placeholder search
(false, false, _, None) => Ok(SearchKind::KeywordOnly),
// hybrid S100 => semantic
(_, _, Some(HybridQuery { semantic_ratio, embedder }), v) if **semantic_ratio == 1.0 => {
SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len()))
}
// query, no hybrid, no vector => keyword
(Some(_), None, None) => Ok(SearchKind::KeywordOnly),
// query, hybrid, maybe vector => hybrid
(Some(_), Some(HybridQuery { semantic_ratio, embedder }), v) => SearchKind::hybrid(
// q + hybrid => hybrid
(_, true, Some(HybridQuery { semantic_ratio, embedder }), v) => SearchKind::hybrid(
index_scheduler,
index_uid,
index,
@ -510,7 +514,11 @@ pub fn search_kind(
**semantic_ratio,
v.map(|v| v.len()),
),
(_, None, Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
// !q + hybrid => semantic
(_, false, Some(HybridQuery { semantic_ratio: _, embedder }), v) => {
SearchKind::semantic(index_scheduler, index_uid, index, embedder, v.map(|v| v.len()))
}
// q => keyword
(false, true, None, None) => Ok(SearchKind::KeywordOnly),
}
}
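Note that folding placeholder detection into `non_placeholder_query` appears to slightly change routing for whitespace-only queries: previously a blank `q` was only special-cased when no vector was passed, whereas now a whitespace-only `q` combined with `hybrid` and an explicit `vector` resolves to a purely semantic search rather than a hybrid one.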

View file

@ -61,6 +61,8 @@ pub struct SearchAggregator<Method: AggregateMethod> {
semantic_ratio: bool,
hybrid: bool,
retrieve_vectors: bool,
// Number of requests containing `media`
total_media: usize,
// every time a search is done, we increment the counter linked to the used settings
matching_strategy: HashMap<String, usize>,
@ -101,6 +103,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
let SearchQuery {
q,
vector,
media,
offset,
limit,
page,
@ -175,6 +178,11 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
if let Some(ref vector) = vector {
ret.max_vector_size = vector.len();
}
if media.is_some() {
ret.total_media = 1;
}
ret.retrieve_vectors |= retrieve_vectors;
if query.is_finite_pagination() {
@ -277,6 +285,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
show_ranking_score_details,
semantic_ratio,
hybrid,
total_media,
total_degraded,
total_used_negative_operator,
ranking_score_threshold,
@ -327,6 +336,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
self.retrieve_vectors |= retrieve_vectors;
self.semantic_ratio |= semantic_ratio;
self.hybrid |= hybrid;
self.total_media += total_media;
// pagination
self.max_limit = self.max_limit.max(max_limit);
@ -403,6 +413,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
show_ranking_score_details,
semantic_ratio,
hybrid,
total_media,
total_degraded,
total_used_negative_operator,
ranking_score_threshold,
@ -450,6 +461,7 @@ impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
"hybrid": {
"enabled": hybrid,
"semantic_ratio": semantic_ratio,
"total_media": total_media,
},
"pagination": {
"max_limit": max_limit,

View file

@ -755,6 +755,14 @@ fn validate_settings(
if matches!(embedder.indexing_embedder, Setting::Set(_)) {
features.check_composite_embedders("setting `indexingEmbedder`")?;
}
if matches!(embedder.indexing_fragments, Setting::Set(_)) {
features.check_multimodal("setting `indexingFragments`")?;
}
if matches!(embedder.search_fragments, Setting::Set(_)) {
features.check_multimodal("setting `searchFragments`")?;
}
}
}

View file

@ -2,6 +2,7 @@ use std::collections::BTreeMap;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use export::Export;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::batch_view::BatchView;
@ -54,6 +55,8 @@ mod api_key;
pub mod batches;
pub mod chats;
mod dump;
mod export;
mod export_analytics;
pub mod features;
pub mod indexes;
mod logs;
@ -84,6 +87,7 @@ mod tasks_test;
(path = "/multi-search", api = multi_search::MultiSearchApi),
(path = "/swap-indexes", api = swap_indexes::SwapIndexesApi),
(path = "/experimental-features", api = features::ExperimentalFeaturesApi),
(path = "/export", api = export::ExportApi),
(path = "/network", api = network::NetworkApi),
),
paths(get_health, get_version, get_stats),
@ -95,7 +99,7 @@ mod tasks_test;
url = "/",
description = "Local server",
)),
components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures))
components(schemas(PaginationView<KeyView>, PaginationView<IndexView>, IndexView, DocumentDeletionByFilter, AllBatches, BatchStats, ProgressStepView, ProgressView, BatchView, RuntimeTogglableFeatures, SwapIndexesPayload, DocumentEditionByFunction, MergeFacets, FederationOptions, SearchQueryWithIndex, Federation, FederatedSearch, FederatedSearchResult, SearchResults, SearchResultWithIndex, SimilarQuery, SimilarResult, PaginationView<serde_json::Value>, BrowseQuery, UpdateIndexRequest, IndexUid, IndexCreateRequest, KeyView, Action, CreateApiKey, UpdateStderrLogs, LogMode, GetLogs, IndexStats, Stats, HealthStatus, HealthResponse, VersionResponse, Code, ErrorType, AllTasks, TaskView, Status, DetailsView, ResponseError, Settings<Unchecked>, Settings<Checked>, TypoSettings, MinWordSizeTyposSetting, FacetingSettings, PaginationSettings, SummarizedTaskView, Kind, Network, Remote, FilterableAttributesRule, FilterableAttributesPatterns, AttributePatterns, FilterableAttributesFeatures, FilterFeatures, Export))
)]
pub struct MeilisearchApi;
@ -115,6 +119,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/metrics").configure(metrics::configure))
.service(web::scope("/experimental-features").configure(features::configure))
.service(web::scope("/network").configure(network::configure))
.service(web::scope("/export").configure(export::configure))
.service(web::scope("/chats").configure(chats::configure));
#[cfg(feature = "swagger")]

View file

@ -42,6 +42,7 @@ impl MultiSearchAggregator {
federation_options,
q: _,
vector: _,
media: _,
offset: _,
limit: _,
page: _,

View file

@ -228,7 +228,7 @@ mod tests {
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r#"
{
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

View file

@ -64,6 +64,8 @@ pub struct SearchQuery {
pub q: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
pub vector: Option<Vec<f32>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchMedia>)]
pub media: Option<serde_json::Value>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
pub hybrid: Option<HybridQuery>,
#[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
@ -147,6 +149,7 @@ impl From<SearchParameters> for SearchQuery {
ranking_score_threshold: ranking_score_threshold.map(RankingScoreThreshold::from),
q: None,
vector: None,
media: None,
offset: DEFAULT_SEARCH_OFFSET(),
page: None,
hits_per_page: None,
@ -220,6 +223,7 @@ impl fmt::Debug for SearchQuery {
let Self {
q,
vector,
media,
hybrid,
offset,
limit,
@ -274,6 +278,9 @@ impl fmt::Debug for SearchQuery {
);
}
}
if let Some(media) = media {
debug.field("media", media);
}
if let Some(hybrid) = hybrid {
debug.field("hybrid", &hybrid);
}
@ -399,10 +406,10 @@ impl SearchKind {
route: Route,
) -> Result<(String, Arc<Embedder>, bool), ResponseError> {
let rtxn = index.read_txn()?;
let embedder_configs = index.embedding_configs(&rtxn)?;
let embedder_configs = index.embedding_configs().embedding_configs(&rtxn)?;
let embedders = index_scheduler.embedders(index_uid, embedder_configs)?;
let (embedder, _, quantized) = embedders
let (embedder, quantized) = embedders
.get(embedder_name)
.ok_or(match route {
Route::Search | Route::MultiSearch => {
@ -412,6 +419,7 @@ impl SearchKind {
milli::UserError::InvalidSimilarEmbedder(embedder_name.to_owned())
}
})
.map(|runtime| (runtime.embedder.clone(), runtime.is_quantized))
.map_err(milli::Error::from)?;
if let Some(vector_len) = vector_len {
@ -481,8 +489,10 @@ pub struct SearchQueryWithIndex {
pub index_uid: IndexUid,
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
pub q: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
#[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
pub vector: Option<Vec<f32>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchMedia>)]
pub media: Option<serde_json::Value>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHybridQuery>)]
pub hybrid: Option<HybridQuery>,
#[deserr(default, error = DeserrJsonError<InvalidSearchOffset>)]
@ -563,6 +573,7 @@ impl SearchQueryWithIndex {
let SearchQuery {
q,
vector,
media,
hybrid,
offset,
limit,
@ -593,6 +604,7 @@ impl SearchQueryWithIndex {
index_uid,
q,
vector,
media,
hybrid,
offset: if offset == DEFAULT_SEARCH_OFFSET() { None } else { Some(offset) },
limit: if limit == DEFAULT_SEARCH_LIMIT() { None } else { Some(limit) },
@ -627,6 +639,7 @@ impl SearchQueryWithIndex {
federation_options,
q,
vector,
media,
offset,
limit,
page,
@ -657,6 +670,7 @@ impl SearchQueryWithIndex {
SearchQuery {
q,
vector,
media,
offset: offset.unwrap_or(DEFAULT_SEARCH_OFFSET()),
limit: limit.unwrap_or(DEFAULT_SEARCH_LIMIT()),
page,
@ -958,6 +972,9 @@ pub fn prepare_search<'t>(
time_budget: TimeBudget,
features: RoFeatures,
) -> Result<(milli::Search<'t>, bool, usize, usize), ResponseError> {
if query.media.is_some() {
features.check_multimodal("passing `media` in a search query")?;
}
let mut search = index.search(rtxn);
search.time_budget(time_budget);
if let Some(ranking_score_threshold) = query.ranking_score_threshold {
@ -983,14 +1000,27 @@ pub fn prepare_search<'t>(
let deadline = std::time::Instant::now() + std::time::Duration::from_secs(10);
let q = query.q.as_deref();
let media = query.media.as_ref();
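// Only a pure text query takes the text-only embedding path; any other
// shape (media present, or neither field set) is forwarded as a media query.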
let search_query = match (q, media) {
(Some(text), None) => milli::vector::SearchQuery::Text(text),
(q, media) => milli::vector::SearchQuery::Media { q, media },
};
embedder
.embed_search(query.q.as_ref().unwrap(), Some(deadline))
.embed_search(search_query, Some(deadline))
.map_err(milli::vector::Error::from)
.map_err(milli::Error::from)?
}
};
search.semantic(embedder_name.clone(), embedder.clone(), *quantized, Some(vector));
search.semantic(
embedder_name.clone(),
embedder.clone(),
*quantized,
Some(vector),
query.media.clone(),
);
}
SearchKind::Hybrid { embedder_name, embedder, quantized, semantic_ratio: _ } => {
if let Some(q) = &query.q {
@ -1002,6 +1032,7 @@ pub fn prepare_search<'t>(
embedder.clone(),
*quantized,
query.vector.clone(),
query.media.clone(),
);
}
}
@ -1126,6 +1157,7 @@ pub fn perform_search(
locales,
// already used in prepare_search
vector: _,
media: _,
hybrid: _,
offset: _,
ranking_score_threshold: _,
@ -1328,7 +1360,6 @@ struct HitMaker<'a> {
vectors_fid: Option<FieldId>,
retrieve_vectors: RetrieveVectors,
to_retrieve_ids: BTreeSet<FieldId>,
embedding_configs: Vec<index::IndexEmbeddingConfig>,
formatter_builder: MatcherBuilder<'a>,
formatted_options: BTreeMap<FieldId, FormatOptions>,
show_ranking_score: bool,
@ -1443,8 +1474,6 @@ impl<'a> HitMaker<'a> {
&displayed_ids,
);
let embedding_configs = index.embedding_configs(rtxn)?;
Ok(Self {
index,
rtxn,
@ -1453,7 +1482,6 @@ impl<'a> HitMaker<'a> {
vectors_fid,
retrieve_vectors,
to_retrieve_ids,
embedding_configs,
formatter_builder,
formatted_options,
show_ranking_score: format.show_ranking_score,
@ -1499,14 +1527,8 @@ impl<'a> HitMaker<'a> {
Some(Value::Object(map)) => map,
_ => Default::default(),
};
for (name, vector) in self.index.embeddings(self.rtxn, id)? {
let user_provided = self
.embedding_configs
.iter()
.find(|conf| conf.name == name)
.is_some_and(|conf| conf.user_provided.contains(id));
let embeddings =
ExplicitVectors { embeddings: Some(vector.into()), regenerate: !user_provided };
for (name, (vector, regenerate)) in self.index.embeddings(self.rtxn, id)? {
let embeddings = ExplicitVectors { embeddings: Some(vector.into()), regenerate };
vectors.insert(
name,
serde_json::to_value(embeddings).map_err(InternalError::SerdeJson)?,

View file

@ -93,7 +93,7 @@ async fn create_api_key_bad_actions() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `*.get`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `export`, `network.get`, `network.update`, `chatCompletions`, `chats.*`, `chats.get`, `chats.delete`, `chatsSettings.*`, `chatsSettings.get`, `chatsSettings.update`",
"code": "invalid_api_key_actions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"

View file

@ -42,7 +42,7 @@ async fn batch_bad_types() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r#"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `export`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"

View file

@ -465,6 +465,7 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
skip_index_budget: true,
// Having 2 threads makes the tests way faster
max_indexing_threads: MaxThreads::from_str("2").unwrap(),
experimental_no_edition_2024_for_settings: false,
},
experimental_enable_metrics: false,
..Parser::parse_from(None as Option<&str>)

View file

@ -293,7 +293,7 @@ async fn add_csv_document() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -358,7 +358,7 @@ async fn add_csv_document_with_types() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -434,7 +434,7 @@ async fn add_csv_document_with_custom_delimiter() {
"enqueuedAt": "[date]"
}
"#);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@ -991,7 +991,7 @@ async fn add_documents_no_index_creation() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1068,7 +1068,7 @@ async fn document_addition_with_primary_key() {
}
"#);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
snapshot!(code, @"200 OK");
@ -1120,7 +1120,7 @@ async fn document_addition_with_huge_int_primary_key() {
let (response, code) = index.add_documents(documents, Some("primary")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(response,
@r###"
{
@ -1178,7 +1178,7 @@ async fn replace_document() {
}
"#);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@ -1190,7 +1190,7 @@ async fn replace_document() {
let (task, code) = index.add_documents(documents, None).await;
snapshot!(code,@"202 Accepted");
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
@ -1362,7 +1362,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1399,7 +1399,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1436,7 +1436,7 @@ async fn error_add_documents_bad_document_id() {
}
]);
let (value, _code) = index.add_documents(documents, None).await;
index.wait_task(value.uid()).await.failed();
server.wait_task(value.uid()).await.failed();
let (response, code) = index.get_task(value.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1478,7 +1478,7 @@ async fn error_add_documents_missing_document_id() {
}
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@ -1527,7 +1527,7 @@ async fn error_document_field_limit_reached_in_one_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@ -1576,7 +1576,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1611,7 +1611,7 @@ async fn error_document_field_limit_reached_over_multiple_documents() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1660,7 +1660,7 @@ async fn error_document_field_limit_reached_in_one_nested_document() {
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
// Documents without a primary key are not accepted.
snapshot!(response,
@ -1705,7 +1705,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1741,7 +1741,7 @@ async fn error_document_field_limit_reached_over_multiple_documents_with_nested_
let (response, code) = index.update_documents(documents, Some("id")).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(code, @"202 Accepted");
snapshot!(response,
@r###"
@ -1790,7 +1790,7 @@ async fn add_documents_with_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@@ -1914,7 +1914,7 @@ async fn update_documents_with_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r#"
{
@@ -1983,7 +1983,7 @@ async fn update_documents_with_geo_field() {
}
]);
let (task, _status_code) = index.update_documents(updated_documents, None).await;
let response = index.wait_task(task.uid()).await.succeeded();
let response = server.wait_task(task.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2097,7 +2097,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".indexUid" => "[uuid]" }),
@@ -2135,7 +2135,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2173,7 +2173,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2211,7 +2211,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2249,7 +2249,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2287,7 +2287,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2325,7 +2325,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2363,7 +2363,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2401,7 +2401,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2439,7 +2439,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2477,7 +2477,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2515,7 +2515,7 @@ async fn add_documents_invalid_geo_field() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@@ -2556,7 +2556,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2593,7 +2593,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2630,7 +2630,7 @@ async fn add_documents_invalid_geo_field() {
let (response, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let response = index.wait_task(response.uid()).await.failed();
let response = server.wait_task(response.uid()).await.failed();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
@r###"
{
@@ -2674,7 +2674,7 @@ async fn add_invalid_geo_and_then_settings() {
]);
let (ret, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.succeeded();
let ret = server.wait_task(ret.uid()).await.succeeded();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@@ -2697,7 +2697,7 @@ async fn add_invalid_geo_and_then_settings() {
let (ret, code) = index.update_settings(json!({ "sortableAttributes": ["_geo"] })).await;
snapshot!(code, @"202 Accepted");
let ret = index.wait_task(ret.uid()).await.failed();
let ret = server.wait_task(ret.uid()).await.failed();
snapshot!(ret, @r###"
{
"uid": "[uid]",
@@ -2765,7 +2765,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@@ -2806,7 +2806,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@@ -2845,7 +2845,7 @@ async fn error_primary_key_inference() {
]);
let (task, _status_code) = index.add_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
@@ -2884,12 +2884,12 @@ async fn add_documents_with_primary_key_twice() {
]);
let (task, _status_code) = index.add_documents(documents.clone(), Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
let (task, _status_code) = index.add_documents(documents, Some("title")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, _code) = index.get_task(task.uid()).await;
assert_eq!(response["status"], "succeeded");
}
@@ -2922,7 +2922,7 @@ async fn batch_several_documents_addition() {
// wait first batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}
// run a second completely failing batch
@@ -2936,7 +2936,7 @@ async fn batch_several_documents_addition() {
// wait second batch of documents to finish
let finished_tasks = futures::future::join_all(waiter).await;
for (task, _code) in finished_tasks {
index.wait_task(task.uid()).await;
server.wait_task(task.uid()).await;
}
let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await;

View file

@@ -5,11 +5,12 @@ use crate::json;
#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_document_by_filter_fail(json!({"filter": "a = b"})).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@@ -19,7 +20,7 @@ async fn delete_one_unexisting_document() {
index.create(None).await;
let (response, code) = index.delete_document(0).await;
assert_eq!(code, 202, "{response}");
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
}
#[actix_rt::test]
@@ -28,10 +29,10 @@ async fn delete_one_document() {
let index = server.unique_index();
let (task, _status_code) =
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, status_code) = index.delete_document(0).await;
assert_eq!(status_code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (_response, code) = index.get_document(0, None).await;
assert_eq!(code, 404);
@@ -44,7 +45,7 @@ async fn clear_all_documents_unexisting_index() {
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
}
#[actix_rt::test]
@@ -57,11 +58,11 @@ async fn clear_all_documents() {
None,
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@@ -72,11 +73,11 @@ async fn clear_all_documents_empty_index() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _status_code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.clear_all_documents().await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert!(response["results"].as_array().unwrap().is_empty());
@@ -95,7 +96,7 @@ async fn error_delete_batch_unexisting_index() {
});
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = server.wait_task(task.uid()).await.failed();
assert_eq!(response["error"], expected_response);
}
@@ -104,11 +105,11 @@ async fn delete_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, code) = index.delete_batch(vec![1, 0]).await;
assert_eq!(code, 202);
let _update = index.wait_task(task.uid()).await.succeeded();
let _update = server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@@ -120,11 +121,11 @@ async fn delete_no_document_batch() {
let server = Server::new_shared();
let index = server.unique_index();
let (task,_status_code) = index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }, { "id": 3, "content": "foobar" }]), Some("id")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.delete_batch(vec![]).await;
assert_eq!(code, 202, "{response}");
let _update = index.wait_task(response.uid()).await.succeeded();
let _update = server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
@@ -146,7 +147,7 @@ async fn delete_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (stats, _) = index.stats().await;
snapshot!(json_string!(stats, {
@@ -180,7 +181,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@@ -253,7 +254,7 @@ async fn delete_document_by_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@@ -328,7 +329,7 @@ async fn delete_document_by_complex_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.delete_document_by_filter(
json!({ "filter": ["color != red", "color != green", "color EXISTS"] }),
@@ -345,7 +346,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",
@@ -404,7 +405,7 @@ async fn delete_document_by_complex_filter() {
}
"###);
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
snapshot!(json_string!(response, { ".uid" => "[uid]", ".batchUid" => "[batch_uid]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" }), @r###"
{
"uid": "[uid]",

View file

@@ -23,7 +23,7 @@ async fn error_get_unexisting_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(1, None).await;
@@ -43,7 +43,7 @@ async fn get_document() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
"id": 0,
@@ -52,7 +52,7 @@ async fn get_document() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(
@@ -276,7 +276,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
let server = Server::new_shared();
let index = server.unique_index();
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let documents = json!([
{
@@ -293,7 +293,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
]);
let (task, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_document(0, Some(json!({ "fields": ["content"] }))).await;
assert_eq!(code, 200);
@@ -369,7 +369,7 @@ async fn get_document_by_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({})).await;
let (response2, code2) = index.get_all_documents_raw("").await;
@@ -525,7 +525,7 @@ async fn get_document_by_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index
.fetch_documents(json!({
@@ -651,7 +651,7 @@ async fn get_document_invalid_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({"ids": ["0", "illegal/docid"] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,illegal/docid").await;
@@ -683,7 +683,7 @@ async fn get_document_not_found_ids() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.fetch_documents(json!({"ids": ["0", 3, 42] })).await;
let (response2, code2) = index.get_all_documents_raw("?ids=0,3,42").await;
@@ -726,7 +726,7 @@ async fn get_document_by_ids_and_filter() {
Some("id"),
)
.await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) =
index.fetch_documents(json!({"ids": [2], "filter": "color = blue" })).await;
@@ -854,7 +854,7 @@ async fn get_document_with_vectors() {
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
server.wait_task(value.uid()).await.succeeded();
// by default you shouldn't see the `_vectors` object
let (documents, _code) = index.get_all_documents(Default::default()).await;

View file

@@ -34,7 +34,7 @@ async fn document_update_with_primary_key() {
let (response, code) = index.update_documents(documents, Some("primary")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@@ -63,7 +63,7 @@ async fn update_document() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@@ -75,7 +75,7 @@ async fn update_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@@ -107,7 +107,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@@ -119,7 +119,7 @@ async fn update_document_gzip_encoded() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get_task(response.uid()).await;
assert_eq!(code, 200);
@@ -142,7 +142,7 @@ async fn update_larger_dataset() {
let index = server.unique_index();
let documents = serde_json::from_str(include_str!("../assets/test_set.json")).unwrap();
let (task, _code) = index.update_documents(documents, None).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get_task(task.uid()).await;
assert_eq!(code, 200);
assert_eq!(response["type"], "documentAdditionOrUpdate");
@@ -166,7 +166,7 @@ async fn error_update_documents_bad_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], json!("failed"));
assert_eq!(
response["error"]["message"],
@@ -194,7 +194,7 @@ async fn error_update_documents_missing_document_id() {
}
]);
let (task, _code) = index.update_documents(documents, None).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(
response["error"]["message"],
@@ -219,7 +219,7 @@ async fn update_faceted_document() {
}))
.await;
assert_eq!("202", code.as_str(), "{:?}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents: Vec<_> = (0..1000)
.map(|id| {
@@ -233,7 +233,7 @@ async fn update_faceted_document() {
let (response, code) = index.add_documents(documents.into(), None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{
@@ -245,7 +245,7 @@ async fn update_faceted_document() {
let (response, code) = index.update_documents(documents, None).await;
assert_eq!(code, 202, "response: {}", response);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
index
.search(json!({"limit": 10}), |response, code| {

View file

@@ -2188,7 +2188,8 @@ async fn import_dump_v6_containing_experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@@ -2314,7 +2315,8 @@ async fn import_dump_v6_containing_batches_and_enqueued_tasks() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@@ -2420,7 +2422,8 @@ async fn generate_and_import_dump_containing_vectors() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);

View file

@@ -25,7 +25,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@@ -41,7 +42,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@@ -57,7 +59,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@@ -74,7 +77,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@@ -91,7 +95,8 @@ async fn experimental_features() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
}
@@ -115,7 +120,8 @@ async fn experimental_feature_metrics() {
"network": false,
"getTaskDocumentsRoute": false,
"compositeEmbedders": false,
"chatCompletions": false
"chatCompletions": false,
"multimodal": false
}
"###);
@@ -162,7 +168,7 @@ async fn errors() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`",
"message": "Unknown field `NotAFeature`: expected one of `metrics`, `logsRoute`, `editDocumentsByFunction`, `containsFilter`, `network`, `getTaskDocumentsRoute`, `compositeEmbedders`, `chatCompletions`, `multimodal`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"

View file

@@ -17,7 +17,7 @@ async fn create_index_no_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -34,7 +34,7 @@ async fn create_index_with_gzip_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -83,7 +83,7 @@ async fn create_index_with_zlib_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -100,7 +100,7 @@ async fn create_index_with_brotli_encoded_request() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await;
let response = server.wait_task(response.uid()).await;
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -117,7 +117,7 @@ async fn create_index_with_primary_key() {
assert_eq!(response["status"], "enqueued");
let response = index.wait_task(response.uid()).await.succeeded();
let response = server.wait_task(response.uid()).await.succeeded();
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "indexCreation");
@@ -132,7 +132,7 @@ async fn create_index_with_invalid_primary_key() {
let index = server.unique_index();
let (response, code) = index.add_documents(documents, Some("title")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.failed();
server.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@@ -142,7 +142,7 @@ async fn create_index_with_invalid_primary_key() {
let (response, code) = index.add_documents(documents, Some("id")).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.failed();
server.wait_task(response.uid()).await.failed();
let (response, code) = index.get().await;
assert_eq!(code, 200);
@@ -181,7 +181,7 @@ async fn error_create_existing_index() {
let (task, _) = index.create(Some("primary")).await;
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
let msg = format!(
"Index `{}` already exists.",
task["indexUid"].as_str().expect("indexUid should exist").trim_matches('"')

View file

@@ -9,7 +9,7 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 200);
@@ -17,18 +17,19 @@ async fn create_and_delete_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
assert_eq!(index.get().await.1, 404);
}
#[actix_rt::test]
async fn error_delete_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.delete_index_fail().await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.failed();
server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": "Index `DOES_NOT_EXISTS` not found.",
@@ -37,7 +38,7 @@ async fn error_delete_unexisting_index() {
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
let response = index.wait_task(task.uid()).await;
let response = server.wait_task(task.uid()).await;
assert_eq!(response["status"], "failed");
assert_eq!(response["error"], expected_response);
}
@@ -58,7 +59,7 @@ async fn loop_delete_add_documents() {
}
for task in tasks {
let response = index.wait_task(task).await.succeeded();
let response = server.wait_task(task).await.succeeded();
assert_eq!(response["status"], "succeeded", "{}", response);
}
}

View file

@@ -12,7 +12,7 @@ async fn create_and_get_index() {
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.get().await;

View file

@@ -9,7 +9,7 @@ async fn stats() {
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.stats().await;
@@ -32,7 +32,7 @@ async fn stats() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(response.uid()).await.succeeded();
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index.stats().await;

View file

@@ -12,10 +12,10 @@ async fn update_primary_key() {
let (task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
let (response, code) = index.get().await;
@@ -42,12 +42,12 @@ async fn create_and_update_with_different_encoding() {
let (create_task, code) = index.create(None).await;
assert_eq!(code, 202);
index.wait_task(create_task.uid()).await.succeeded();
server.wait_task(create_task.uid()).await.succeeded();
let index = index.with_encoder(Encoder::Brotli);
let (task, _status_code) = index.update(Some("primary")).await;
index.wait_task(task.uid()).await.succeeded();
server.wait_task(task.uid()).await.succeeded();
}
#[actix_rt::test]
@@ -58,23 +58,24 @@ async fn update_nothing() {
assert_eq!(code, 202);
index.wait_task(task1.uid()).await.succeeded();
server.wait_task(task1.uid()).await.succeeded();
let (task2, code) = index.update(None).await;
assert_eq!(code, 202);
index.wait_task(task2.uid()).await.succeeded();
server.wait_task(task2.uid()).await.succeeded();
}
#[actix_rt::test]
async fn error_update_existing_primary_key() {
let server = Server::new_shared();
let index = shared_index_with_documents().await;
let (update_task, code) = index.update_index_fail(Some("primary")).await;
assert_eq!(code, 202);
let response = index.wait_task(update_task.uid()).await.failed();
let response = server.wait_task(update_task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}`: Index already has a primary key: `id`.", index.uid),
@@ -88,12 +89,13 @@ async fn error_update_existing_primary_key() {
#[actix_rt::test]
async fn error_update_unexisting_index() {
let server = Server::new_shared();
let index = shared_does_not_exists_index().await;
let (task, code) = index.update_index_fail(Some("my-primary-key")).await;
assert_eq!(code, 202);
let response = index.wait_task(task.uid()).await.failed();
let response = server.wait_task(task.uid()).await.failed();
let expected_response = json!({
"message": format!("Index `{}` not found.", index.uid),

Some files were not shown because too many files have changed in this diff.