Mirror of https://github.com/meilisearch/MeiliSearch
Synced 2025-06-18 20:57:35 +02:00
Merge pull request #5674 from meilisearch/release-v1.15.2
Bring back v1.15.2 to main
This commit is contained in:
commit abb399b802

Cargo.lock (generated): 34 changed lines
@@ -580,7 +580,7 @@ source = "git+https://github.com/meilisearch/bbqueue#cbb87cc707b5af415ef203bdaf2

 [[package]]
 name = "benchmarks"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "anyhow",
  "bumpalo",
@@ -791,7 +791,7 @@ dependencies = [

 [[package]]
 name = "build-info"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "anyhow",
  "time",
@@ -1787,7 +1787,7 @@ dependencies = [

 [[package]]
 name = "dump"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "anyhow",
  "big_s",
@@ -2009,7 +2009,7 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"

 [[package]]
 name = "file-store"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "tempfile",
  "thiserror 2.0.12",
@@ -2031,7 +2031,7 @@ dependencies = [

 [[package]]
 name = "filter-parser"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "insta",
  "nom",
@@ -2051,7 +2051,7 @@ dependencies = [

 [[package]]
 name = "flatten-serde-json"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "criterion",
  "serde_json",
@@ -2196,7 +2196,7 @@ dependencies = [

 [[package]]
 name = "fuzzers"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "arbitrary",
  "bumpalo",
@@ -2996,7 +2996,7 @@ dependencies = [

 [[package]]
 name = "index-scheduler"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "anyhow",
  "big_s",
@@ -3231,7 +3231,7 @@ dependencies = [

 [[package]]
 name = "json-depth-checker"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "criterion",
  "serde_json",
@@ -3717,7 +3717,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"

 [[package]]
 name = "meili-snap"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "insta",
  "md5",
@@ -3728,7 +3728,7 @@ dependencies = [

 [[package]]
 name = "meilisearch"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "actix-cors",
  "actix-http",
@@ -3824,7 +3824,7 @@ dependencies = [

 [[package]]
 name = "meilisearch-auth"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "base64 0.22.1",
  "enum-iterator",
@@ -3843,7 +3843,7 @@ dependencies = [

 [[package]]
 name = "meilisearch-types"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "actix-web",
  "anyhow",
@@ -3877,7 +3877,7 @@ dependencies = [

 [[package]]
 name = "meilitool"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "anyhow",
  "clap",
@@ -3911,7 +3911,7 @@ dependencies = [

 [[package]]
 name = "milli"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "allocator-api2",
  "arroy",
@@ -4416,7 +4416,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"

 [[package]]
 name = "permissive-json-pointer"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "big_s",
  "serde_json",
@@ -7197,7 +7197,7 @@ dependencies = [

 [[package]]
 name = "xtask"
-version = "1.15.1"
+version = "1.15.2"
 dependencies = [
  "anyhow",
  "build-info",
@@ -22,7 +22,7 @@ members = [
 ]

 [workspace.package]
-version = "1.15.1"
+version = "1.15.2"
 authors = [
   "Quentin de Quelen <quentin@dequelen.me>",
   "Clément Renault <clement@meilisearch.com>",
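Every per-crate bump in the Cargo.lock hunks above follows from this single field: the member crates inherit the workspace version. A minimal sketch of the inheritance mechanism (the member manifests themselves are not part of this diff):

[workspace.package]
version = "1.15.2"

# In a member crate's Cargo.toml, e.g. crates/milli/Cargo.toml (illustrative path),
# the version is inherited rather than repeated:
[package]
name = "milli"
version.workspace = true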
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [4,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.1"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
 2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
 3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -37,7 +37,7 @@ catto [1,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.1"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 ----------------------------------------------------------------------
@@ -40,7 +40,7 @@ doggo [2,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.1"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 1) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 3 {uid: 3, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -43,7 +43,7 @@ doggo [2,3,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.1"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -107,7 +107,7 @@ impl ChatCompletionSettings {
     }
 }

-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
+#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Default)]
 #[serde(rename_all = "camelCase")]
 pub enum ChatCompletionSource {
     #[default]
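Adding Copy to the derive list is what allows a ChatCompletionSource value to be passed around by value (see chat_settings.source being handed to run_conversation further down) without cloning or borrowing. A minimal standalone sketch of the effect, with a hypothetical enum in place of the real one:

// A fieldless enum like this can be Copy; reading it out of a struct
// then copies the value instead of moving it.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
enum Source {
    #[default]
    OpenAi,
    Mistral,
}

struct Settings {
    source: Source,
}

fn dispatch(source: Source) -> &'static str {
    match source {
        Source::OpenAi => "openai",
        Source::Mistral => "mistral",
    }
}

fn main() {
    let settings = Settings { source: Source::Mistral };
    // Without Copy, the first call would move `settings.source` out of the
    // struct; with it, `settings` stays usable across repeated calls.
    assert_eq!(dispatch(settings.source), "mistral");
    assert_eq!(dispatch(settings.source), "mistral");
}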
@@ -23,7 +23,10 @@ use futures::StreamExt;
 use index_scheduler::IndexScheduler;
 use meilisearch_auth::AuthController;
 use meilisearch_types::error::{Code, ResponseError};
-use meilisearch_types::features::{ChatCompletionPrompts as DbChatCompletionPrompts, SystemRole};
+use meilisearch_types::features::{
+    ChatCompletionPrompts as DbChatCompletionPrompts,
+    ChatCompletionSource as DbChatCompletionSource, SystemRole,
+};
 use meilisearch_types::keys::actions;
 use meilisearch_types::milli::index::ChatConfig;
 use meilisearch_types::milli::{all_obkv_to_json, obkv_to_json, TimeBudget};
@@ -34,7 +37,7 @@ use tokio::runtime::Handle;
 use tokio::sync::mpsc::error::SendError;

 use super::config::Config;
-use super::errors::StreamErrorEvent;
+use super::errors::{MistralError, OpenAiOutsideError, StreamErrorEvent};
 use super::utils::format_documents;
 use super::{
     ChatsParam, MEILI_APPEND_CONVERSATION_MESSAGE_NAME, MEILI_SEARCH_IN_INDEX_FUNCTION_NAME,
@@ -469,6 +472,7 @@ async fn streamed_chat(
         &search_queue,
         &auth_token,
         &client,
+        chat_settings.source,
         &mut chat_completion,
         &tx,
         &mut global_tool_calls,
@@ -501,6 +505,7 @@ async fn run_conversation<C: async_openai::config::Config>(
     search_queue: &web::Data<SearchQueue>,
     auth_token: &str,
     client: &Client<C>,
+    source: DbChatCompletionSource,
     chat_completion: &mut CreateChatCompletionRequest,
     tx: &SseEventSender,
     global_tool_calls: &mut HashMap<u32, Call>,
@@ -595,7 +600,13 @@ async fn run_conversation<C: async_openai::config::Config>(
                 }
             }
             Err(error) => {
-                let error = StreamErrorEvent::from_openai_error(error).await.unwrap();
+                let result = match source {
+                    DbChatCompletionSource::Mistral => {
+                        StreamErrorEvent::from_openai_error::<MistralError>(error).await
+                    }
+                    _ => StreamErrorEvent::from_openai_error::<OpenAiOutsideError>(error).await,
+                };
+                let error = result.unwrap_or_else(StreamErrorEvent::from_reqwest_error);
                 tx.send_error(&error).await?;
                 return Ok(ControlFlow::Break(None));
             }
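The new source parameter drives the error path above: Mistral and OpenAI return differently shaped error payloads, so the decoder type is chosen per backend, and a decoding failure now degrades into a reqwest-style error event instead of panicking (the old code called .unwrap()). A minimal sketch of that select-then-fall-back shape, with toy stand-ins for the real decoders:

#[derive(Clone, Copy)]
enum Source {
    OpenAi,
    Mistral,
}

#[derive(Debug)]
struct ErrorEvent(String);

// Stand-ins for the provider-specific payload decoders.
fn decode_mistral(raw: &str) -> Result<ErrorEvent, String> {
    Ok(ErrorEvent(format!("mistral: {raw}")))
}

fn decode_openai(raw: &str) -> Result<ErrorEvent, String> {
    Ok(ErrorEvent(format!("openai: {raw}")))
}

fn decode(source: Source, raw: &str) -> ErrorEvent {
    // Pick the decoder matching the configured backend...
    let result = match source {
        Source::Mistral => decode_mistral(raw),
        Source::OpenAi => decode_openai(raw),
    };
    // ...and fall back to a generic event rather than panicking.
    result.unwrap_or_else(|e| ErrorEvent(format!("fallback: {e}")))
}

fn main() {
    println!("{:?}", decode(Source::Mistral, "capacity exceeded"));
}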
@@ -24,7 +24,8 @@ impl Config {
                 if let Some(api_key) = chat_settings.api_key.as_ref() {
                     config = config.with_api_key(api_key);
                 }
-                if let Some(base_url) = chat_settings.base_url.as_ref() {
+                let base_url = chat_settings.base_url.as_deref();
+                if let Some(base_url) = chat_settings.source.base_url().or(base_url) {
                     config = config.with_api_base(base_url);
                 }
                 Self::OpenAiCompatible(config)
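The rewritten condition gives a source-pinned base URL priority over the user-configured one, since Option::or keeps the first Some it sees. A minimal sketch of the precedence rule (URLs are illustrative, not taken from this diff):

// Hypothetical stand-in for `chat_settings.source.base_url()`.
fn source_base_url(pinned: bool) -> Option<&'static str> {
    pinned.then_some("https://api.example-provider.test/v1")
}

fn main() {
    let user_base_url = Some("http://localhost:8080/v1");

    // A source that pins its endpoint wins...
    assert_eq!(
        source_base_url(true).or(user_base_url),
        Some("https://api.example-provider.test/v1")
    );
    // ...otherwise the user-supplied base URL is used.
    assert_eq!(source_base_url(false).or(user_base_url), user_base_url);
}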
@@ -4,6 +4,39 @@ use meilisearch_types::error::ResponseError;
 use serde::{Deserialize, Serialize};
 use uuid::Uuid;

+/// The error type which is always `error`.
+const ERROR_TYPE: &str = "error";
+
+/// The error struct returned by the Mistral API.
+///
+/// ```json
+/// {
+///   "object": "error",
+///   "message": "Service tier capacity exceeded for this model.",
+///   "type": "invalid_request_error",
+///   "param": null,
+///   "code": null
+/// }
+/// ```
+#[derive(Debug, Clone, Deserialize)]
+pub struct MistralError {
+    message: String,
+    r#type: String,
+    param: Option<String>,
+    code: Option<String>,
+}
+
+impl From<MistralError> for StreamErrorEvent {
+    fn from(error: MistralError) -> Self {
+        let MistralError { message, r#type, param, code } = error;
+        StreamErrorEvent {
+            event_id: Uuid::new_v4().to_string(),
+            r#type: ERROR_TYPE.to_owned(),
+            error: StreamError { r#type, code, message, param, event_id: None },
+        }
+    }
+}
+
 #[derive(Debug, Clone, Deserialize)]
 pub struct OpenAiOutsideError {
     /// Emitted when an error occurs.
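The doc-comment above pins down the payload shape the new struct parses. As a quick sanity check, the documented example deserializes cleanly with serde (a standalone sketch assuming serde and serde_json as dependencies; serde ignores the extra "object" field by default):

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct MistralError {
    message: String,
    r#type: String,
    param: Option<String>,
    code: Option<String>,
}

fn main() {
    let payload = r#"{
        "object": "error",
        "message": "Service tier capacity exceeded for this model.",
        "type": "invalid_request_error",
        "param": null,
        "code": null
    }"#;
    let error: MistralError = serde_json::from_str(payload).expect("payload should parse");
    assert_eq!(error.message, "Service tier capacity exceeded for this model.");
    assert_eq!(error.r#type, "invalid_request_error");
    assert!(error.param.is_none() && error.code.is_none());
}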
@@ -23,6 +56,17 @@ pub struct OpenAiInnerError {
     r#type: String,
 }

+impl From<OpenAiOutsideError> for StreamErrorEvent {
+    fn from(error: OpenAiOutsideError) -> Self {
+        let OpenAiOutsideError { error: OpenAiInnerError { code, message, param, r#type } } = error;
+        StreamErrorEvent {
+            event_id: Uuid::new_v4().to_string(),
+            r#type: ERROR_TYPE.to_string(),
+            error: StreamError { r#type, code, message, param, event_id: None },
+        }
+    }
+}
+
 /// An error that occurs during the streaming process.
 ///
 /// It directly comes from the OpenAI API and you can
@@ -54,13 +98,15 @@ pub struct StreamError {
 }

 impl StreamErrorEvent {
-    const ERROR_TYPE: &str = "error";
-
-    pub async fn from_openai_error(error: OpenAIError) -> Result<Self, reqwest::Error> {
+    pub async fn from_openai_error<E>(error: OpenAIError) -> Result<Self, reqwest::Error>
+    where
+        E: serde::de::DeserializeOwned,
+        Self: From<E>,
+    {
         match error {
             OpenAIError::Reqwest(e) => Ok(StreamErrorEvent {
                 event_id: Uuid::new_v4().to_string(),
-                r#type: Self::ERROR_TYPE.to_string(),
+                r#type: ERROR_TYPE.to_string(),
                 error: StreamError {
                     r#type: "internal_reqwest_error".to_string(),
                     code: Some("internal".to_string()),
@@ -71,7 +117,7 @@ impl StreamErrorEvent {
             }),
             OpenAIError::ApiError(ApiError { message, r#type, param, code }) => {
                 Ok(StreamErrorEvent {
-                    r#type: Self::ERROR_TYPE.to_string(),
+                    r#type: ERROR_TYPE.to_string(),
                     event_id: Uuid::new_v4().to_string(),
                     error: StreamError {
                         r#type: r#type.unwrap_or_else(|| "unknown".to_string()),
@@ -84,7 +130,7 @@ impl StreamErrorEvent {
             }
             OpenAIError::JSONDeserialize(error) => Ok(StreamErrorEvent {
                 event_id: Uuid::new_v4().to_string(),
-                r#type: Self::ERROR_TYPE.to_string(),
+                r#type: ERROR_TYPE.to_string(),
                 error: StreamError {
                     r#type: "json_deserialize_error".to_string(),
                     code: Some("internal".to_string()),
@@ -96,30 +142,16 @@ impl StreamErrorEvent {
             OpenAIError::FileSaveError(_) | OpenAIError::FileReadError(_) => unreachable!(),
             OpenAIError::StreamError(error) => match error {
                 EventSourceError::InvalidStatusCode(_status_code, response) => {
-                    let OpenAiOutsideError {
-                        error: OpenAiInnerError { code, message, param, r#type },
-                    } = response.json().await?;
-
-                    Ok(StreamErrorEvent {
-                        event_id: Uuid::new_v4().to_string(),
-                        r#type: Self::ERROR_TYPE.to_string(),
-                        error: StreamError { r#type, code, message, param, event_id: None },
-                    })
+                    let error = response.json::<E>().await?;
+                    Ok(StreamErrorEvent::from(error))
                 }
                 EventSourceError::InvalidContentType(_header_value, response) => {
-                    let OpenAiOutsideError {
-                        error: OpenAiInnerError { code, message, param, r#type },
-                    } = response.json().await?;
-
-                    Ok(StreamErrorEvent {
-                        event_id: Uuid::new_v4().to_string(),
-                        r#type: Self::ERROR_TYPE.to_string(),
-                        error: StreamError { r#type, code, message, param, event_id: None },
-                    })
+                    let error = response.json::<E>().await?;
+                    Ok(StreamErrorEvent::from(error))
                 }
                 EventSourceError::Utf8(error) => Ok(StreamErrorEvent {
                     event_id: Uuid::new_v4().to_string(),
-                    r#type: Self::ERROR_TYPE.to_string(),
+                    r#type: ERROR_TYPE.to_string(),
                     error: StreamError {
                         r#type: "invalid_utf8_error".to_string(),
                         code: None,
@@ -130,7 +162,7 @@ impl StreamErrorEvent {
                 }),
                 EventSourceError::Parser(error) => Ok(StreamErrorEvent {
                     event_id: Uuid::new_v4().to_string(),
-                    r#type: Self::ERROR_TYPE.to_string(),
+                    r#type: ERROR_TYPE.to_string(),
                     error: StreamError {
                         r#type: "parser_error".to_string(),
                         code: None,
@@ -141,7 +173,7 @@ impl StreamErrorEvent {
                 }),
                 EventSourceError::Transport(error) => Ok(StreamErrorEvent {
                     event_id: Uuid::new_v4().to_string(),
-                    r#type: Self::ERROR_TYPE.to_string(),
+                    r#type: ERROR_TYPE.to_string(),
                     error: StreamError {
                         r#type: "transport_error".to_string(),
                         code: None,
@@ -152,7 +184,7 @@ impl StreamErrorEvent {
                 }),
                 EventSourceError::InvalidLastEventId(message) => Ok(StreamErrorEvent {
                     event_id: Uuid::new_v4().to_string(),
-                    r#type: Self::ERROR_TYPE.to_string(),
+                    r#type: ERROR_TYPE.to_string(),
                     error: StreamError {
                         r#type: "invalid_last_event_id".to_string(),
                         code: None,
@@ -163,7 +195,7 @@ impl StreamErrorEvent {
                 }),
                 EventSourceError::StreamEnded => Ok(StreamErrorEvent {
                     event_id: Uuid::new_v4().to_string(),
-                    r#type: Self::ERROR_TYPE.to_string(),
+                    r#type: ERROR_TYPE.to_string(),
                     error: StreamError {
                         r#type: "stream_ended".to_string(),
                         code: None,
@@ -175,7 +207,7 @@ impl StreamErrorEvent {
             },
             OpenAIError::InvalidArgument(message) => Ok(StreamErrorEvent {
                 event_id: Uuid::new_v4().to_string(),
-                r#type: Self::ERROR_TYPE.to_string(),
+                r#type: ERROR_TYPE.to_string(),
                 error: StreamError {
                     r#type: "invalid_argument".to_string(),
                     code: None,
@@ -191,7 +223,7 @@ impl StreamErrorEvent {
             let ResponseError { code, message, .. } = error;
             StreamErrorEvent {
                 event_id: Uuid::new_v4().to_string(),
-                r#type: Self::ERROR_TYPE.to_string(),
+                r#type: ERROR_TYPE.to_string(),
                 error: StreamError {
                     r#type: "response_error".to_string(),
                     code: Some(code.as_str().to_string()),
@@ -201,4 +233,18 @@ impl StreamErrorEvent {
             },
         }
     }
+
+    pub fn from_reqwest_error(error: reqwest::Error) -> Self {
+        StreamErrorEvent {
+            event_id: Uuid::new_v4().to_string(),
+            r#type: ERROR_TYPE.to_string(),
+            error: StreamError {
+                r#type: "reqwest_error".to_string(),
+                code: None,
+                message: error.to_string(),
+                param: None,
+                event_id: None,
+            },
+        }
+    }
 }
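The bound pair in the new signature (E: DeserializeOwned plus Self: From<E>) is what lets a single function body serve both providers: the caller picks the payload type, and the matching From impl does the mapping. A self-contained sketch of the same pattern with toy payloads (not the real API types):

use serde::de::DeserializeOwned;
use serde::Deserialize;

#[derive(Debug)]
struct Event {
    kind: &'static str,
    message: String,
}

// Flat payload, as in the Mistral-style error.
#[derive(Deserialize)]
struct MistralPayload {
    message: String,
}

// Nested payload, as in the OpenAI-style error.
#[derive(Deserialize)]
struct OpenAiPayload {
    error: Inner,
}

#[derive(Deserialize)]
struct Inner {
    message: String,
}

impl From<MistralPayload> for Event {
    fn from(p: MistralPayload) -> Self {
        Event { kind: "mistral", message: p.message }
    }
}

impl From<OpenAiPayload> for Event {
    fn from(p: OpenAiPayload) -> Self {
        Event { kind: "openai", message: p.error.message }
    }
}

// One decoding routine, parameterized over the provider payload type.
fn decode<E>(body: &str) -> Result<Event, serde_json::Error>
where
    E: DeserializeOwned,
    Event: From<E>,
{
    Ok(Event::from(serde_json::from_str::<E>(body)?))
}

fn main() {
    let m = decode::<MistralPayload>(r#"{"message":"capacity exceeded"}"#).unwrap();
    let o = decode::<OpenAiPayload>(r#"{"error":{"message":"bad request"}}"#).unwrap();
    println!("{m:?} / {o:?}");
}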
@@ -2054,3 +2054,76 @@ async fn test_exact_typos_terms() {
     )
     .await;
 }
+
+#[actix_rt::test]
+async fn simple_search_changing_unrelated_settings() {
+    let server = Server::new_shared();
+    let index = server.unique_index();
+
+    let documents = DOCUMENTS.clone();
+    let (task, _status_code) = index.add_documents(documents, None).await;
+    index.wait_task(task.uid()).await.succeeded();
+
+    index
+        .search(json!({"q": "Dragon"}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "title": "How to Train Your Dragon: The Hidden World",
+                "id": "166428",
+                "color": [
+                  "green",
+                  "red"
+                ]
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    let (task, _status_code) =
+        index.update_settings(json!({ "filterableAttributes": ["title"] })).await;
+    let r = index.wait_task(task.uid()).await.succeeded();
+    snapshot!(r["status"], @r###""succeeded""###);
+
+    index
+        .search(json!({"q": "Dragon"}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "title": "How to Train Your Dragon: The Hidden World",
+                "id": "166428",
+                "color": [
+                  "green",
+                  "red"
+                ]
+              }
+            ]
+            "###);
+        })
+        .await;
+
+    let (task, _status_code) = index.update_settings(json!({ "filterableAttributes": [] })).await;
+    let r = index.wait_task(task.uid()).await.succeeded();
+    snapshot!(r["status"], @r###""succeeded""###);
+
+    index
+        .search(json!({"q": "Dragon"}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"]), @r###"
+            [
+              {
+                "title": "How to Train Your Dragon: The Hidden World",
+                "id": "166428",
+                "color": [
+                  "green",
+                  "red"
+                ]
+              }
+            ]
+            "###);
+        })
+        .await;
+}
@@ -43,7 +43,7 @@ async fn version_too_old() {
     std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
     let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
     let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
-    snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.1");
+    snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.2");
 }

 #[actix_rt::test]
@@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
     std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
     let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
     let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
-    snapshot!(err, @"Database version 1.15.2 is higher than the Meilisearch version 1.15.1. Downgrade is not supported");
+    snapshot!(err, @"Database version 1.15.3 is higher than the Meilisearch version 1.15.2. Downgrade is not supported");
 }

 #[actix_rt::test]
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "progress": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.1"
+    "upgradeTo": "v1.15.2"
   },
   "stats": {
     "totalNbTasks": 1,
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "progress": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.1"
+    "upgradeTo": "v1.15.2"
   },
   "stats": {
     "totalNbTasks": 1,
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "progress": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.1"
+    "upgradeTo": "v1.15.2"
   },
   "stats": {
     "totalNbTasks": 1,
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "canceledBy": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.1"
+    "upgradeTo": "v1.15.2"
   },
   "error": null,
   "duration": "[duration]",
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "canceledBy": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.1"
+    "upgradeTo": "v1.15.2"
   },
   "error": null,
   "duration": "[duration]",
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "canceledBy": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.1"
+    "upgradeTo": "v1.15.2"
   },
   "error": null,
   "duration": "[duration]",
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "progress": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.1"
+    "upgradeTo": "v1.15.2"
   },
   "stats": {
     "totalNbTasks": 1,
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "canceledBy": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.1"
+    "upgradeTo": "v1.15.2"
   },
   "error": null,
   "duration": "[duration]",
@@ -29,7 +29,6 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
     let max_positions_per_attributes = max_positions_per_attributes
         .map_or(MAX_POSITION_PER_ATTRIBUTE, |max| max.min(MAX_POSITION_PER_ATTRIBUTE));
     let max_memory = indexer.max_memory_by_thread();
-    let force_reindexing = settings_diff.reindex_searchable();

     // initialize destination values.
     let mut documents_ids = RoaringBitmap::new();
@@ -43,6 +42,12 @@ pub fn extract_docid_word_positions<R: io::Read + io::Seek>(
         true,
     );

+    let force_reindexing = settings_diff.reindex_searchable();
+    let skip_indexing = !force_reindexing && settings_diff.settings_update_only();
+    if skip_indexing {
+        return sorter_into_reader(docid_word_positions_sorter, indexer);
+    }
+
     // initialize buffers.
     let mut del_buffers = Buffers::default();
     let mut add_buffers = Buffers::default();
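Moving the force_reindexing check below the sorter construction lets the function bail out before the per-document tokenization loop when a settings-only update leaves the searchable fields untouched, returning an empty reader instead. A minimal sketch of the guard shape (hypothetical types standing in for the real extractor state):

// Hypothetical stand-ins for SettingsDiff and the sorter/reader pair.
struct SettingsDiff {
    reindex_searchable: bool,
    settings_update_only: bool,
}

fn extract(diff: &SettingsDiff, sorter: Vec<String>) -> Vec<String> {
    let force_reindexing = diff.reindex_searchable;
    let skip_indexing = !force_reindexing && diff.settings_update_only;
    if skip_indexing {
        // Settings-only change that does not affect searchable fields:
        // flush the (empty) sorter without scanning any document.
        return sorter;
    }
    // ...the expensive per-document tokenization would run here...
    sorter
}

fn main() {
    let diff = SettingsDiff { reindex_searchable: false, settings_update_only: true };
    assert!(extract(&diff, Vec::new()).is_empty());
}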
@@ -111,6 +111,8 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
                     let prompt = chunks.prompt();

+                    let old_vectors = old_vectors.vectors_for_key(embedder_name)?.unwrap();
+
                     // case where we have a `_vectors` field in the updated document
                     if let Some(new_vectors) = new_vectors.as_ref().and_then(|new_vectors| {
                         new_vectors.vectors_for_key(embedder_name).transpose()
                     }) {
@@ -130,18 +132,9 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
                                 error: error.to_string(),
                             })?,
                         )?;
                     // regenerate if the new `_vectors` fields is set to.
                     } else if new_vectors.regenerate {
                         let new_rendered = prompt.render_document(
                             update.external_document_id(),
-                            update.current(
-                                &context.rtxn,
-                                context.index,
-                                context.db_fields_ids_map,
-                            )?,
-                            context.new_fields_ids_map,
-                            &context.doc_alloc,
-                        )?;
-                        let old_rendered = prompt.render_document(
-                            update.external_document_id(),
+                            update.merged(
                                 &context.rtxn,
@@ -151,7 +144,31 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
                             context.new_fields_ids_map,
                             &context.doc_alloc,
                         )?;
-                        if new_rendered != old_rendered {
+                        let must_regenerate = if !old_vectors.regenerate {
+                            // we just enabled `regenerate`
+                            true
+                        } else {
+                            let old_rendered = prompt.render_document(
+                                update.external_document_id(),
+                                update.current(
+                                    &context.rtxn,
+                                    context.index,
+                                    context.db_fields_ids_map,
+                                )?,
+                                context.new_fields_ids_map,
+                                &context.doc_alloc,
+                            );
+
+                            if let Ok(old_rendered) = old_rendered {
+                                // must regenerate if the rendered changed
+                                new_rendered != old_rendered
+                            } else {
+                                // cannot check previous rendered, better regenerate
+                                true
+                            }
+                        };
+
+                        if must_regenerate {
                             chunks.set_autogenerated(
                                 update.docid(),
                                 update.external_document_id(),
@@ -160,17 +177,8 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
                             )?;
                         }
                     }
                     // no `_vectors` field, so only regenerate if the document is already set to in the DB.
                     } else if old_vectors.regenerate {
-                        let old_rendered = prompt.render_document(
-                            update.external_document_id(),
-                            update.current(
-                                &context.rtxn,
-                                context.index,
-                                context.db_fields_ids_map,
-                            )?,
-                            context.new_fields_ids_map,
-                            &context.doc_alloc,
-                        )?;
                         let new_rendered = prompt.render_document(
                             update.external_document_id(),
                             update.merged(
@@ -181,7 +189,28 @@ impl<'extractor> Extractor<'extractor> for EmbeddingExtractor<'_, '_> {
                             context.new_fields_ids_map,
                             &context.doc_alloc,
                         )?;
-                        if new_rendered != old_rendered {
+
+                        let must_regenerate = {
+                            let old_rendered = prompt.render_document(
+                                update.external_document_id(),
+                                update.current(
+                                    &context.rtxn,
+                                    context.index,
+                                    context.db_fields_ids_map,
+                                )?,
+                                context.new_fields_ids_map,
+                                &context.doc_alloc,
+                            );
+                            if let Ok(old_rendered) = old_rendered {
+                                // regenerate if the rendered version changed
+                                new_rendered != old_rendered
+                            } else {
+                                // if we cannot render the previous version of the documents, let's regenerate
+                                true
+                            }
+                        };
+
+                        if must_regenerate {
                             chunks.set_autogenerated(
                                 update.docid(),
                                 update.external_document_id(),
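Both branches now converge on the same decision: re-embed only when the freshly rendered prompt differs from the previous one, and treat an unrenderable previous version as a reason to regenerate. A minimal sketch of that decision table, with a hypothetical render function in place of prompt.render_document:

// Hypothetical stand-in for `prompt.render_document(...)`, which can fail.
fn render(doc: Option<&str>) -> Result<String, &'static str> {
    doc.map(str::to_owned).ok_or("previous version unavailable")
}

fn must_regenerate(old_regenerate: bool, old_doc: Option<&str>, new_rendered: &str) -> bool {
    if !old_regenerate {
        // `regenerate` was just enabled: embeddings must be computed.
        return true;
    }
    match render(old_doc) {
        // Regenerate only if the rendered prompt actually changed.
        Ok(old_rendered) => new_rendered != old_rendered,
        // Cannot check the previous render: safer to regenerate.
        Err(_) => true,
    }
}

fn main() {
    assert!(must_regenerate(false, Some("a"), "a")); // regenerate just enabled
    assert!(!must_regenerate(true, Some("a"), "a")); // prompt unchanged: skip
    assert!(must_regenerate(true, Some("a"), "b")); // prompt changed
    assert!(must_regenerate(true, None, "a")); // previous render unavailable
}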