4941: Implement the binary quantization in meilisearch r=irevoire a=irevoire

# Pull Request

## Related issue
Fixes https://github.com/meilisearch/meilisearch/issues/4873

## What does this PR do?
- Add a new `binaryQuantized` setting to the embedder settings to enable binary quantization
- Once enabled, binary quantization cannot be disabled for that embedder (see the sketch below)
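
For illustration, here is a condensed sketch of how the new setting is exercised, based on the tests added in this PR. The test name `enable_binary_quantization_sketch` is made up for the example; `Server`, `set_features`, `update_settings`, `wait_task`, and `snapshot!` are the existing test-suite helpers used in the new test file below.

```rust
use meili_snap::snapshot;

use crate::common::Server;
use crate::json;

#[actix_rt::test]
async fn enable_binary_quantization_sketch() {
    let server = Server::new().await;
    let index = server.index("doggo");
    server.set_features(json!({"vectorStore": true})).await;

    // Enable binary quantization on a user-provided embedder.
    let (response, code) = index
        .update_settings(json!({
            "embedders": {
                "manual": { "source": "userProvided", "dimensions": 3, "binaryQuantized": true }
            },
        }))
        .await;
    snapshot!(code, @"202 Accepted");
    server.wait_task(response.uid()).await.succeeded();

    // Once enabled, trying to switch the option back to `false` makes the
    // settings task fail with the `invalid_settings_embedders` error.
    let (response, _code) = index
        .update_settings(json!({
            "embedders": {
                "manual": { "source": "userProvided", "dimensions": 3, "binaryQuantized": false }
            },
        }))
        .await;
    let task = server.wait_task(response.uid()).await;
    assert_eq!(task["status"], "failed");
}
```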

TODO:
- [ ] Missing a bunch of tests

Co-authored-by: Tamo <tamo@meilisearch.com>
Authored by meili-bors[bot] on 2024-09-19 15:50:24 +00:00, committed by GitHub (commit 462a2329f1)
38 changed files with 4107 additions and 3355 deletions

@@ -643,12 +643,19 @@ fn embedder_analytics(
             .max()
     });
 
+    let binary_quantization_used = setting.as_ref().map(|map| {
+        map.values()
+            .filter_map(|config| config.clone().set())
+            .any(|config| config.binary_quantized.set().is_some())
+    });
+
     json!(
         {
             "total": setting.as_ref().map(|s| s.len()),
             "sources": sources,
             "document_template_used": document_template_used,
-            "document_template_max_bytes": document_template_max_bytes
+            "document_template_max_bytes": document_template_max_bytes,
+            "binary_quantization_used": binary_quantization_used,
         }
     )
 }

@@ -102,7 +102,7 @@ async fn similar(
     let index = index_scheduler.index(&index_uid)?;
 
-    let (embedder_name, embedder) =
+    let (embedder_name, embedder, quantized) =
         SearchKind::embedder(&index_scheduler, &index, &query.embedder, None)?;
 
     tokio::task::spawn_blocking(move || {
@@ -111,6 +111,7 @@ async fn similar(
             query,
             embedder_name,
             embedder,
+            quantized,
             retrieve_vectors,
             index_scheduler.features(),
         )

@@ -274,8 +274,8 @@ pub struct HybridQuery {
 #[derive(Clone)]
 pub enum SearchKind {
     KeywordOnly,
-    SemanticOnly { embedder_name: String, embedder: Arc<Embedder> },
-    Hybrid { embedder_name: String, embedder: Arc<Embedder>, semantic_ratio: f32 },
+    SemanticOnly { embedder_name: String, embedder: Arc<Embedder>, quantized: bool },
+    Hybrid { embedder_name: String, embedder: Arc<Embedder>, quantized: bool, semantic_ratio: f32 },
 }
 
 impl SearchKind {
@@ -285,9 +285,9 @@ impl SearchKind {
         embedder_name: &str,
         vector_len: Option<usize>,
     ) -> Result<Self, ResponseError> {
-        let (embedder_name, embedder) =
+        let (embedder_name, embedder, quantized) =
             Self::embedder(index_scheduler, index, embedder_name, vector_len)?;
-        Ok(Self::SemanticOnly { embedder_name, embedder })
+        Ok(Self::SemanticOnly { embedder_name, embedder, quantized })
     }
 
     pub(crate) fn hybrid(
@@ -297,9 +297,9 @@ impl SearchKind {
         semantic_ratio: f32,
         vector_len: Option<usize>,
     ) -> Result<Self, ResponseError> {
-        let (embedder_name, embedder) =
+        let (embedder_name, embedder, quantized) =
             Self::embedder(index_scheduler, index, embedder_name, vector_len)?;
-        Ok(Self::Hybrid { embedder_name, embedder, semantic_ratio })
+        Ok(Self::Hybrid { embedder_name, embedder, quantized, semantic_ratio })
     }
 
     pub(crate) fn embedder(
@@ -307,16 +307,14 @@ impl SearchKind {
         index: &Index,
         embedder_name: &str,
         vector_len: Option<usize>,
-    ) -> Result<(String, Arc<Embedder>), ResponseError> {
+    ) -> Result<(String, Arc<Embedder>, bool), ResponseError> {
         let embedder_configs = index.embedding_configs(&index.read_txn()?)?;
         let embedders = index_scheduler.embedders(embedder_configs)?;
 
-        let embedder = embedders.get(embedder_name);
-
-        let embedder = embedder
+        let (embedder, _, quantized) = embedders
+            .get(embedder_name)
             .ok_or(milli::UserError::InvalidEmbedder(embedder_name.to_owned()))
-            .map_err(milli::Error::from)?
-            .0;
+            .map_err(milli::Error::from)?;
 
         if let Some(vector_len) = vector_len {
             if vector_len != embedder.dimensions() {
@@ -330,7 +328,7 @@ impl SearchKind {
             }
         }
 
-        Ok((embedder_name.to_owned(), embedder))
+        Ok((embedder_name.to_owned(), embedder, quantized))
     }
 }
@@ -791,7 +789,7 @@ fn prepare_search<'t>(
                 search.query(q);
             }
         }
-        SearchKind::SemanticOnly { embedder_name, embedder } => {
+        SearchKind::SemanticOnly { embedder_name, embedder, quantized } => {
            let vector = match query.vector.clone() {
                Some(vector) => vector,
                None => {
@@ -805,14 +803,19 @@ fn prepare_search<'t>(
                }
            };
 
-            search.semantic(embedder_name.clone(), embedder.clone(), Some(vector));
+            search.semantic(embedder_name.clone(), embedder.clone(), *quantized, Some(vector));
         }
-        SearchKind::Hybrid { embedder_name, embedder, semantic_ratio: _ } => {
+        SearchKind::Hybrid { embedder_name, embedder, quantized, semantic_ratio: _ } => {
             if let Some(q) = &query.q {
                 search.query(q);
             }
 
             // will be embedded in hybrid search if necessary
-            search.semantic(embedder_name.clone(), embedder.clone(), query.vector.clone());
+            search.semantic(
+                embedder_name.clone(),
+                embedder.clone(),
+                *quantized,
+                query.vector.clone(),
+            );
         }
     }
@@ -1441,6 +1444,7 @@ pub fn perform_similar(
     query: SimilarQuery,
     embedder_name: String,
     embedder: Arc<Embedder>,
+    quantized: bool,
     retrieve_vectors: RetrieveVectors,
     features: RoFeatures,
 ) -> Result<SimilarResult, ResponseError> {
@@ -1469,8 +1473,16 @@ pub fn perform_similar(
         ));
     };
 
-    let mut similar =
-        milli::Similar::new(internal_id, offset, limit, index, &rtxn, embedder_name, embedder);
+    let mut similar = milli::Similar::new(
+        internal_id,
+        offset,
+        limit,
+        index,
+        &rtxn,
+        embedder_name,
+        embedder,
+        quantized,
+    );
 
     if let Some(ref filter) = query.filter {
         if let Some(facets) = parse_filter(filter, Code::InvalidSimilarFilter, features)? {

@@ -0,0 +1,380 @@
use meili_snap::{json_string, snapshot};
use crate::common::{GetAllDocumentsOptions, Server};
use crate::json;
use crate::vector::generate_default_user_provided_documents;
#[actix_rt::test]
async fn retrieve_binary_quantize_status_in_the_settings() {
let server = Server::new().await;
let index = server.index("doggo");
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (settings, code) = index.settings().await;
snapshot!(code, @"200 OK");
snapshot!(settings["embedders"]["manual"], @r###"{"source":"userProvided","dimensions":3}"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": false,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (settings, code) = index.settings().await;
snapshot!(code, @"200 OK");
snapshot!(settings["embedders"]["manual"], @r###"{"source":"userProvided","dimensions":3,"binaryQuantized":false}"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (settings, code) = index.settings().await;
snapshot!(code, @"200 OK");
snapshot!(settings["embedders"]["manual"], @r###"{"source":"userProvided","dimensions":3,"binaryQuantized":true}"###);
}
#[actix_rt::test]
async fn binary_quantize_before_sending_documents() {
let server = Server::new().await;
let index = server.index("doggo");
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [-1.2, -2.3, 3.2] }},
{"id": 1, "name": "echo", "_vectors": { "manual": [2.5, 1.5, -130] }},
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
// Make sure the documents are binary quantized
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
{
"results": [
{
"id": 0,
"name": "kefir",
"_vectors": {
"manual": {
"embeddings": [
[
-1.0,
-1.0,
1.0
]
],
"regenerate": false
}
}
},
{
"id": 1,
"name": "echo",
"_vectors": {
"manual": {
"embeddings": [
[
1.0,
1.0,
-1.0
]
],
"regenerate": false
}
}
}
],
"offset": 0,
"limit": 20,
"total": 2
}
"###);
}
#[actix_rt::test]
async fn binary_quantize_after_sending_documents() {
let server = Server::new().await;
let index = server.index("doggo");
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [-1.2, -2.3, 3.2] }},
{"id": 1, "name": "echo", "_vectors": { "manual": [2.5, 1.5, -130] }},
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(value.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
// Make sure the documents are binary quantized
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
{
"results": [
{
"id": 0,
"name": "kefir",
"_vectors": {
"manual": {
"embeddings": [
[
-1.0,
-1.0,
1.0
]
],
"regenerate": false
}
}
},
{
"id": 1,
"name": "echo",
"_vectors": {
"manual": {
"embeddings": [
[
1.0,
1.0,
-1.0
]
],
"regenerate": false
}
}
}
],
"offset": 0,
"limit": 20,
"total": 2
}
"###);
}
#[actix_rt::test]
async fn try_to_disable_binary_quantization() {
let server = Server::new().await;
let index = server.index("doggo");
let (value, code) = server.set_features(json!({"vectorStore": true})).await;
snapshot!(code, @"200 OK");
snapshot!(value, @r###"
{
"vectorStore": true,
"metrics": false,
"logsRoute": false,
"editDocumentsByFunction": false,
"containsFilter": false
}
"###);
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": false,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
let ret = server.wait_task(response.uid()).await;
snapshot!(ret, @r###"
{
"uid": "[uid]",
"indexUid": "doggo",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
"details": {
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
"binaryQuantized": false
}
}
},
"error": {
"message": "`.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.",
"code": "invalid_settings_embedders",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_settings_embedders"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);
}
#[actix_rt::test]
async fn binary_quantize_clear_documents() {
let server = Server::new().await;
let index = generate_default_user_provided_documents(&server).await;
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"binaryQuantized": true,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let (value, _code) = index.clear_all_documents().await;
index.wait_task(value.uid()).await.succeeded();
// Make sure the documents DB has been cleared
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
{
"results": [],
"offset": 0,
"limit": 20,
"total": 0
}
"###);
// Make sure the arroy DB has been cleared
let (documents, _code) =
index.search_post(json!({ "hybrid": { "embedder": "manual" }, "vector": [1, 1, 1] })).await;
snapshot!(documents, @r###"
{
"hits": [],
"query": "",
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 0,
"semanticHitCount": 0
}
"###);
}

@@ -1,3 +1,4 @@
+mod binary_quantized;
 mod openai;
 mod rest;
 mod settings;