From 88d27949cd0aef343277393391c77d00409d5533 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 12 Mar 2024 10:56:16 +0100 Subject: [PATCH 01/19] Add documentation for benchmarks --- BENCHMARKS.md | 354 ++++++++++++++++++++++++++++++++++++++++++++++++ CONTRIBUTING.md | 24 ++++ 2 files changed, 378 insertions(+) create mode 100644 BENCHMARKS.md diff --git a/BENCHMARKS.md b/BENCHMARKS.md new file mode 100644 index 000000000..dd69864cc --- /dev/null +++ b/BENCHMARKS.md @@ -0,0 +1,354 @@ +# Benchmarks + +Currently this repository hosts two kinds of benchmarks: + +1. The older "milli benchmarks", that use [criterion](https://github.com/bheisler/criterion.rs) and live in the "benchmarks" directory. +2. The newer "bench" that are workload-based and so split between the [`workloads`](./workloads/) directory and the [`xtask::bench`](./xtask/src/bench/) module. + +This document describes the newer "bench" benchmarks. For more details on the "milli benchmarks", see [benchmarks/README.md](./benchmarks/README.md). + +## Design philosophy for the benchmarks + +The newer "bench" benchmarks are **integration** benchmarks, in the sense that they spawn an actual Meilisearch server and measure its performance end-to-end, including HTTP request overhead. + +Since this is prone to fluctuating, the benchmarks regain a bit of precision by measuring the runtime of the individual spans using the [logging machinery](./CONTRIBUTING.md#logging) of Meilisearch. + +A span roughly translates to a function call. The benchmark runner collects all the spans by name using the [logs route](https://github.com/orgs/meilisearch/discussions/721) and sums their runtime. The processed results are then sent to the [benchmark dashboard](https://bench.meilisearch.dev), which is in charge of storing and presenting the data. + +## Running the benchmarks + +Benchmarks can run locally or in CI. + +### Locally + +#### With a local benchmark dashboard + +The benchmarks dashboard lives in its [own repository](https://github.com/meilisearch/benchboard). We provide binaries for Ubuntu/Debian, but you can build from source for other platforms (MacOS should work as it was developed under that platform). + +Run the `benchboard` binary to create a fresh database of results. By default it will serve the results and the API to gather results on `http://localhost:9001`. + +From the Meilisearch repository, you can then run benchmarks with: + +```sh +cargo xtask bench -- workloads/my_workload_1.json .. +``` + +This command will build and run Meilisearch locally on port 7700, so make sure that this port is available. +To run benchmarks on a different commit, just use the usual git command to get back to the desired commit. + +#### Without a local benchmark dashboard + +To work with the raw results, you can also skip using a local benchmark dashboard. + +Run: + +```sh +cargo xtask bench --no-dashboard -- workloads/my_workload_1.json workloads/my_workload_2.json .. +``` + +For processing the results, look at [Looking at benchmark results/Without dashboard](#without-dashboard). + +### In CI + +We have dedicated runners to run workloads on CI. Currently, there are three ways of running the CI: + +1. Automatically, on every push to `main`. +2. Manually, by clicking the [`Run workflow`](https://github.com/meilisearch/meilisearch/actions/workflows/bench-manual.yml) button and specifying the target reference (tag, commit or branch) as well as one or multiple workloads to run. 
The workloads must exist in the Meilisearch repository (conventionally, in the [`workloads`](./workloads/) directory) on the target reference. Globbing (e.g., `workloads/*.json`) works.
+3. Manually on a PR, by posting a comment containing a `/bench` command, followed by one or multiple workloads to run. Globbing works. The workloads must exist in the Meilisearch repository in the branch of the PR.
+   ```
+   /bench workloads/movies*.json /hackernews_1M.json
+   ```
+
+## Looking at benchmark results
+
+### On the dashboard
+
+Results are available on the global dashboard used by CI at or on your [local dashboard](#with-a-local-benchmark-dashboard).
+
+The dashboard homepage presents three sections:
+
+1. The latest invocations (a call to `cargo xtask bench`, either local or by CI) with their reason (generally set to some helpful link in CI) and their status.
+2. The latest workloads ran on `main`.
+3. The latest workloads ran on other references.
+
+By default, the workload shows the total runtime delta with the latest applicable commit on `main`. The latest applicable commit is the latest commit for workload invocations that do not originate on `main`, and the latest previous commit for workload invocations that originate on `main`.
+
+You can explicitly request a detailed comparison by span with the `main` branch, the branch of origin, or any previous commit, by clicking the links at the bottom of the workload invocation.
+
+In the detailed comparison view, the spans are sorted by improvements, regressions, stable (no statistically significant change) and unstable (the span runtime is comparable to its standard deviation).
+
+You can click on the name of any span to get a box plot comparing the target commit with multiple commits of the selected branch.
+
+### Without dashboard
+
+After the workloads are done running, the reports will live in the Meilisearch repository, in the `bench/reports` directory (by default).
+
+You can then convert these reports into other formats.
+
+- To [Firefox profiler](https://profiler.firefox.com) format. Run:
+  ```sh
+  cd bench/reports
+  cargo run --release --bin trace-to-firefox -- my_workload_1-0-trace.json
+  ```
+  You can then upload the resulting `firefox-my_workload_1-0-trace.json` file to the online profiler.
+
+
+## Designing benchmark workloads
+
+Benchmark workloads conventionally live in the `workloads` directory of the Meilisearch repository.
+
+They are JSON files with the following structure (comments are not actually supported; to make your own, remove them or copy some existing workload file):
+
+```jsonc
+{
+    // Name of the workload. Must be unique to the workload, as it will be used to group results on the dashboard.
+    "name": "hackernews.ndjson_1M,no-threads",
+    // Number of consecutive runs of the commands that should be performed.
+    // Each run uses a fresh instance of Meilisearch and a fresh database.
+    // Each run produces its own report file.
+    "run_count": 3,
+    // List of arguments to add to the Meilisearch command line.
+    "extra_cli_args": ["--max-indexing-threads=1"],
+    // List of named assets that can be used in the commands.
+    "assets": {
+        // name of the asset.
+        // Must be unique at the workload level.
+        // For better results, the same asset (same sha256) should have the same name across workloads.
+        // Having multiple assets with the same name and distinct hashes is supported across workloads,
+        // but will lead to superfluous downloads.
+        //
+        // Assets are stored in the `bench/assets/` directory by default.
+ "hackernews-100_000.ndjson": { + // If the assets exists in the local filesystem (Meilisearch repository or for your local workloads) + // Its file path can be specified here. + // `null` if the asset should be downloaded from a remote location. + "local_location": null, + // URL of the remote location where the asset can be downloaded. + // Use the `--assets-key` of the runner to pass an API key in the `Authorization: Bearer` header of the download requests. + // `null` if the asset should be imported from a local location. + // if both local and remote locations are specified, then the local one is tried first, then the remote one + // if the file is locally missing or its hash differs. + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-100_000.ndjson", + // SHA256 of the asset. + // Optional, the `sha256` of the asset will be displayed during a run of the workload if it is missing. + // If present, the hash of the asset in the `bench/assets/` directory will be compared against this hash before + // running the workload. If the hashes differ, the asset will be downloaded anew. + "sha256": "60ecd23485d560edbd90d9ca31f0e6dba1455422f2a44e402600fbb5f7f1b213", + // Optional, one of "Auto", "Json", "NdJson" or "Raw". + // If missing, assumed to be "Auto". + // If "Auto", the format will be determined from the extension in the asset name. + "format": "NdJson" + }, + "hackernews-200_000.ndjson": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-200_000.ndjson", + "sha256": "785b0271fdb47cba574fab617d5d332276b835c05dd86e4a95251cf7892a1685" + }, + "hackernews-300_000.ndjson": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-300_000.ndjson", + "sha256": "de73c7154652eddfaf69cdc3b2f824d5c452f095f40a20a1c97bb1b5c4d80ab2" + }, + "hackernews-400_000.ndjson": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-400_000.ndjson", + "sha256": "c1b00a24689110f366447e434c201c086d6f456d54ed1c4995894102794d8fe7" + }, + "hackernews-500_000.ndjson": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-500_000.ndjson", + "sha256": "ae98f9dbef8193d750e3e2dbb6a91648941a1edca5f6e82c143e7996f4840083" + }, + "hackernews-600_000.ndjson": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-600_000.ndjson", + "sha256": "b495fdc72c4a944801f786400f22076ab99186bee9699f67cbab2f21f5b74dbe" + }, + "hackernews-700_000.ndjson": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-700_000.ndjson", + "sha256": "4b2c63974f3dabaa4954e3d4598b48324d03c522321ac05b0d583f36cb78a28b" + }, + "hackernews-800_000.ndjson": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-800_000.ndjson", + "sha256": "cb7b6afe0e6caa1be111be256821bc63b0771b2a0e1fad95af7aaeeffd7ba546" + }, + "hackernews-900_000.ndjson": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-900_000.ndjson", + "sha256": 
"e1154ddcd398f1c867758a93db5bcb21a07b9e55530c188a2917fdef332d3ba9" + }, + "hackernews-1_000_000.ndjson": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/hackernews/hackernews-1_000_000.ndjson", + "sha256": "27e25efd0b68b159b8b21350d9af76938710cb29ce0393fa71b41c4f3c630ffe" + } + }, + // Core of the workload. + // A list of commands to run sequentially. + // A command is a request to the Meilisearch instance that is executed while the profiling runs. + "commands": [ + { + // Meilisearch route to call. `http://localhost:7700/` will be prepended. + "route": "indexes/movies/settings", + // HTTP method to call. + "method": "PATCH", + // If applicable, body of the request. + // Optional, if missing, the body will be empty. + "body": { + // One of "empty", "inline" or "asset". + // If using "empty", you can skip the entire "body" key. + "inline": { + // when "inline" is used, the body is the JSON object that is the value of the `"inline"` key. + "displayedAttributes": [ + "title", + "by", + "score", + "time" + ], + "searchableAttributes": [ + "title" + ], + "filterableAttributes": [ + "by" + ], + "sortableAttributes": [ + "score", + "time" + ] + } + }, + // Whether to wait before running the next request. + // One of: + // - DontWait: run the next command without waiting the response to this one. + // - WaitForResponse: run the next command as soon as the response from the server is received. + // - WaitForTask: run the next command once **all** the Meilisearch tasks created up to now have finished processing. + "synchronous": "DontWait" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + // When using "asset", use the name of an asset as value to use the content of that asset as body. + // the content type is derived of the format of the asset: + // "NdJson" => "application/x-ndjson" + // "Json" => "application/json" + // "Raw" => "application/octet-stream" + // See [AssetFormat::to_content_type](https://github.com/meilisearch/meilisearch/blob/7b670a4afadb132ac4a01b6403108700501a391d/xtask/src/bench/assets.rs#L30) + // for details and up-to-date list. 
+ "asset": "hackernews-100_000.ndjson" + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + "asset": "hackernews-200_000.ndjson" + }, + "synchronous": "WaitForResponse" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + "asset": "hackernews-300_000.ndjson" + }, + "synchronous": "WaitForResponse" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + "asset": "hackernews-400_000.ndjson" + }, + "synchronous": "WaitForResponse" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + "asset": "hackernews-500_000.ndjson" + }, + "synchronous": "WaitForResponse" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + "asset": "hackernews-600_000.ndjson" + }, + "synchronous": "WaitForResponse" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + "asset": "hackernews-700_000.ndjson" + }, + "synchronous": "WaitForResponse" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + "asset": "hackernews-800_000.ndjson" + }, + "synchronous": "WaitForResponse" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + "asset": "hackernews-900_000.ndjson" + }, + "synchronous": "WaitForResponse" + }, + { + "route": "indexes/movies/documents", + "method": "POST", + "body": { + "asset": "hackernews-1_000_000.ndjson" + }, + "synchronous": "WaitForTask" + } + ] +} +``` + + +## Upgrading `https://bench.meilisearch.dev` + +The URL of the server is in our password manager (look for "benchboard"). + +1. Make the needed modifications on the [benchboard repository](https://github.com/meilisearch/benchboard) and merge them to main. +2. Publish a new release to produce the Ubuntu/Debian binary. +3. Download the binary locally, send it to the server: + ``` + scp -6 ~/Downloads/benchboard root@\[\]:/bench/new-benchboard + ``` + Note that the ipv6 must be between escaped square brackets for SCP. +4. SSH to the server: + ``` + ssh root@ + ``` + Note the the ipv6 must **NOT** be between escaped square brackets for SSH 🥲 +5. On the server, set the correct permissions for the new binary: + ``` + chown bench:bench /bench/new-benchboard + chmod 700 /bench/new-benchboard + ``` +6. On the server, move the new binary to the location of the running binary (if unsure, start by making a backup of the running binary): + ``` + mv /bench/{new-,}benchboard + ``` +7. Restart the benchboard service. + ``` + systemctl restart benchboard + ``` +8. Check that the service runs correctly. + ``` + systemctl status benchboard + ``` +9. Check the availability of the service by going to on your browser. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 073da7031..6d6e6076b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -81,6 +81,30 @@ Meilisearch follows the [cargo xtask](https://github.com/matklad/cargo-xtask) wo Run `cargo xtask --help` from the root of the repository to find out what is available. +### Logging + +Meilisearch uses [`tracing`](https://lib.rs/crates/tracing) for logging purposes. Tracing logs are structured and can be displayed as JSON to the end user, so prefer passing arguments as fields rather than interpolating them in the message. + +Refer to the [documentation](https://docs.rs/tracing/0.1.40/tracing/index.html#using-the-macros) for the syntax of the spans and events. + +Logging spans are used for 3 distinct purposes: + +1. Regular logging +2. Profiling +3. 
Benchmarking + +As a result, the spans should follow some rules: + +- They should not be put on functions that are called too often. That is because opening and closing a span causes some overhead. For regular logging, avoid putting spans on functions that are taking less than a few hundred nanoseconds. For profiling or benchmarking, avoid putting spans on functions that are taking less than a few microseconds. +- For profiling and benchmarking, use the `TRACE` level. +- For profiling and benchmarking, use the following `target` prefixes: + - `indexing::` for spans meant when profiling the indexing operations. + - `search::` for spans meant when profiling the search operations. + +### Benchmarking + +See [BENCHMARKS.md](./BENCHMARKS.md) + ## Git Guidelines ### Git Branches From 4a467739cdfd6c6eafb73ae57649199729bf8d7b Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 5 Mar 2024 11:21:46 +0100 Subject: [PATCH 02/19] implements a first version of the cutoff without settings --- .../src/analytics/segment_analytics.rs | 9 ++ meilisearch/src/search.rs | 39 ++++++--- milli/examples/search.rs | 3 +- milli/src/index.rs | 1 + milli/src/lib.rs | 35 ++++++++ milli/src/search/hybrid.rs | 2 + milli/src/search/mod.rs | 82 +++++++++++-------- milli/src/search/new/bucket_sort.rs | 27 +++++- milli/src/search/new/matches/mod.rs | 3 +- milli/src/search/new/mod.rs | 16 +++- milli/tests/search/mod.rs | 45 +++++++++- 11 files changed, 210 insertions(+), 52 deletions(-) diff --git a/meilisearch/src/analytics/segment_analytics.rs b/meilisearch/src/analytics/segment_analytics.rs index 7dfc52900..99298bd43 100644 --- a/meilisearch/src/analytics/segment_analytics.rs +++ b/meilisearch/src/analytics/segment_analytics.rs @@ -579,6 +579,7 @@ pub struct SearchAggregator { // requests total_received: usize, total_succeeded: usize, + total_degraded: usize, time_spent: BinaryHeap, // sort @@ -758,9 +759,13 @@ impl SearchAggregator { hits_info: _, facet_distribution: _, facet_stats: _, + degraded, } = result; self.total_succeeded = self.total_succeeded.saturating_add(1); + if *degraded { + self.total_degraded = self.total_degraded.saturating_add(1); + } self.time_spent.push(*processing_time_ms as usize); } @@ -802,6 +807,7 @@ impl SearchAggregator { semantic_ratio, embedder, hybrid, + total_degraded, } = other; if self.timestamp.is_none() { @@ -816,6 +822,7 @@ impl SearchAggregator { // request self.total_received = self.total_received.saturating_add(total_received); self.total_succeeded = self.total_succeeded.saturating_add(total_succeeded); + self.total_degraded = self.total_degraded.saturating_add(total_degraded); self.time_spent.append(time_spent); // sort @@ -921,6 +928,7 @@ impl SearchAggregator { semantic_ratio, embedder, hybrid, + total_degraded, } = self; if total_received == 0 { @@ -940,6 +948,7 @@ impl SearchAggregator { "total_succeeded": total_succeeded, "total_failed": total_received.saturating_sub(total_succeeded), // just to be sure we never panics "total_received": total_received, + "total_degraded": total_degraded, }, "sort": { "with_geoPoint": sort_with_geo_point, diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index e65192d16..9bc7b69fc 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -1,7 +1,7 @@ use std::cmp::min; use std::collections::{BTreeMap, BTreeSet, HashSet}; use std::str::FromStr; -use std::time::Instant; +use std::time::{Duration, Instant}; use deserr::Deserr; use either::Either; @@ -14,7 +14,7 @@ use meilisearch_types::heed::RoTxn; use 
meilisearch_types::index_uid::IndexUid; use meilisearch_types::milli::score_details::{self, ScoreDetails, ScoringStrategy}; use meilisearch_types::milli::vector::DistributionShift; -use meilisearch_types::milli::{FacetValueHit, OrderBy, SearchForFacetValues}; +use meilisearch_types::milli::{FacetValueHit, OrderBy, SearchForFacetValues, TimeBudget}; use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS; use meilisearch_types::{milli, Document}; use milli::tokenizer::TokenizerBuilder; @@ -323,6 +323,9 @@ pub struct SearchResult { pub facet_distribution: Option>>, #[serde(skip_serializing_if = "Option::is_none")] pub facet_stats: Option>, + + #[serde(skip_serializing_if = "std::ops::Not::not")] + pub degraded: bool, } #[derive(Serialize, Debug, Clone, PartialEq)] @@ -382,8 +385,10 @@ fn prepare_search<'t>( query: &'t SearchQuery, features: RoFeatures, distribution: Option, + time_budget: TimeBudget, ) -> Result<(milli::Search<'t>, bool, usize, usize), MeilisearchHttpError> { let mut search = index.search(rtxn); + search.time_budget(time_budget); if query.vector.is_some() { features.check_vector("Passing `vector` as a query parameter")?; @@ -491,19 +496,26 @@ pub fn perform_search( distribution: Option, ) -> Result { let before_search = Instant::now(); + let time_budget = TimeBudget::new(Duration::from_millis(150)); let rtxn = index.read_txn()?; let (search, is_finite_pagination, max_total_hits, offset) = - prepare_search(index, &rtxn, &query, features, distribution)?; + prepare_search(index, &rtxn, &query, features, distribution, time_budget)?; - let milli::SearchResult { documents_ids, matching_words, candidates, document_scores, .. } = - match &query.hybrid { - Some(hybrid) => match *hybrid.semantic_ratio { - ratio if ratio == 0.0 || ratio == 1.0 => search.execute()?, - ratio => search.execute_hybrid(ratio)?, - }, - None => search.execute()?, - }; + let milli::SearchResult { + documents_ids, + matching_words, + candidates, + document_scores, + degraded, + .. 
+ } = match &query.hybrid { + Some(hybrid) => match *hybrid.semantic_ratio { + ratio if ratio == 0.0 || ratio == 1.0 => search.execute()?, + ratio => search.execute_hybrid(ratio)?, + }, + None => search.execute()?, + }; let fields_ids_map = index.fields_ids_map(&rtxn).unwrap(); @@ -700,6 +712,7 @@ pub fn perform_search( processing_time_ms: before_search.elapsed().as_millis(), facet_distribution, facet_stats, + degraded, }; Ok(result) } @@ -712,9 +725,11 @@ pub fn perform_facet_search( features: RoFeatures, ) -> Result { let before_search = Instant::now(); + let time_budget = TimeBudget::new(Duration::from_millis(150)); let rtxn = index.read_txn()?; - let (search, _, _, _) = prepare_search(index, &rtxn, &search_query, features, None)?; + let (search, _, _, _) = + prepare_search(index, &rtxn, &search_query, features, None, time_budget)?; let mut facet_search = SearchForFacetValues::new(facet_name, search, search_query.hybrid.is_some()); if let Some(facet_query) = &facet_query { diff --git a/milli/examples/search.rs b/milli/examples/search.rs index a94677771..8640acf42 100644 --- a/milli/examples/search.rs +++ b/milli/examples/search.rs @@ -6,7 +6,7 @@ use std::time::Instant; use heed::EnvOpenOptions; use milli::{ execute_search, filtered_universe, DefaultSearchLogger, GeoSortStrategy, Index, SearchContext, - SearchLogger, TermsMatchingStrategy, + SearchLogger, TermsMatchingStrategy, TimeBudget, }; #[global_allocator] @@ -65,6 +65,7 @@ fn main() -> Result<(), Box> { None, &mut DefaultSearchLogger, logger, + TimeBudget::max(), )?; if let Some((logger, dir)) = detailed_logger { logger.finish(&mut ctx, Path::new(dir))?; diff --git a/milli/src/index.rs b/milli/src/index.rs index 2c3977403..e79c137e7 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -2421,6 +2421,7 @@ pub(crate) mod tests { candidates: _, document_scores: _, mut documents_ids, + degraded: _, } = search.execute().unwrap(); let primary_key_id = index.fields_ids_map(&rtxn).unwrap().id("primary_key").unwrap(); documents_ids.sort_unstable(); diff --git a/milli/src/lib.rs b/milli/src/lib.rs index 5effcea3d..eedd25f7e 100644 --- a/milli/src/lib.rs +++ b/milli/src/lib.rs @@ -30,6 +30,7 @@ pub mod snapshot_tests; use std::collections::{BTreeMap, HashMap}; use std::convert::{TryFrom, TryInto}; +use std::fmt; use std::hash::BuildHasherDefault; use charabia::normalizer::{CharNormalizer, CompatibilityDecompositionNormalizer}; @@ -104,6 +105,40 @@ pub const MAX_WORD_LENGTH: usize = MAX_LMDB_KEY_LENGTH / 2; pub const MAX_POSITION_PER_ATTRIBUTE: u32 = u16::MAX as u32 + 1; +#[derive(Clone, Copy)] +pub struct TimeBudget { + started_at: std::time::Instant, + budget: std::time::Duration, +} + +impl fmt::Debug for TimeBudget { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("TimeBudget") + .field("started_at", &self.started_at) + .field("budget", &self.budget) + .field("left", &(self.budget - self.started_at.elapsed())) + .finish() + } +} + +impl TimeBudget { + pub fn new(budget: std::time::Duration) -> Self { + Self { started_at: std::time::Instant::now(), budget } + } + + pub fn max() -> Self { + Self::new(std::time::Duration::from_secs(u64::MAX)) + } + + pub fn exceeded(&self) -> bool { + self.must_stop() + } + + pub fn must_stop(&self) -> bool { + self.started_at.elapsed() > self.budget + } +} + // Convert an absolute word position into a relative position. // Return the field id of the attribute related to the absolute position // and the relative position in the attribute. 
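
The `TimeBudget` type added to `milli/src/lib.rs` above exposes a deliberately small API: `new(Duration)`, `max()`, and `exceeded()`. The rest of this patch series threads a `TimeBudget` through the search pipeline so that the bucket sort can stop early and flag its output as degraded. A rough sketch of that calling pattern follows — it assumes only the methods shown above and a dependency on the `milli` workspace crate; `collect_within_budget` is an illustrative helper, not a function from this patch:

```rust
use std::time::Duration;

use milli::TimeBudget;

/// Illustrative only: drain an iterator of document ids until either the
/// requested `limit` is reached or the time budget runs out. The returned
/// boolean plays the role of the `degraded` flag this series adds to
/// `SearchResult`.
fn collect_within_budget(
    docids: impl Iterator<Item = u32>,
    limit: usize,
    time_budget: &TimeBudget,
) -> (Vec<u32>, bool) {
    let mut results = Vec::with_capacity(limit);
    for docid in docids {
        if time_budget.exceeded() {
            // Out of time: return the partial results and mark them degraded.
            return (results, true);
        }
        results.push(docid);
        if results.len() >= limit {
            break;
        }
    }
    (results, false)
}

fn main() {
    // 150 ms is the budget Meilisearch uses when no search cutoff is configured.
    let budget = TimeBudget::new(Duration::from_millis(150));
    let (ids, degraded) = collect_within_budget(0u32..1_000, 20, &budget);
    println!("kept {} documents, degraded: {degraded}", ids.len());
}
```
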
diff --git a/milli/src/search/hybrid.rs b/milli/src/search/hybrid.rs index b4c79f7f5..9d8b3860d 100644 --- a/milli/src/search/hybrid.rs +++ b/milli/src/search/hybrid.rs @@ -106,6 +106,7 @@ impl ScoreWithRatioResult { candidates: left.candidates | right.candidates, documents_ids, document_scores, + degraded: false, } } } @@ -131,6 +132,7 @@ impl<'a> Search<'a> { index: self.index, distribution_shift: self.distribution_shift, embedder_name: self.embedder_name.clone(), + time_budget: self.time_budget, }; let vector_query = search.vector.take(); diff --git a/milli/src/search/mod.rs b/milli/src/search/mod.rs index dc8354486..b14d88d03 100644 --- a/milli/src/search/mod.rs +++ b/milli/src/search/mod.rs @@ -11,7 +11,7 @@ use crate::score_details::{ScoreDetails, ScoringStrategy}; use crate::vector::DistributionShift; use crate::{ execute_search, filtered_universe, AscDesc, DefaultSearchLogger, DocumentId, Index, Result, - SearchContext, + SearchContext, TimeBudget, }; // Building these factories is not free. @@ -43,6 +43,8 @@ pub struct Search<'a> { index: &'a Index, distribution_shift: Option, embedder_name: Option, + + time_budget: TimeBudget, } impl<'a> Search<'a> { @@ -64,6 +66,7 @@ impl<'a> Search<'a> { index, distribution_shift: None, embedder_name: None, + time_budget: TimeBudget::max(), } } @@ -143,6 +146,11 @@ impl<'a> Search<'a> { self } + pub fn time_budget(&mut self, time_budget: TimeBudget) -> &mut Search<'a> { + self.time_budget = time_budget; + self + } + pub fn execute_for_candidates(&self, has_vector_search: bool) -> Result { if has_vector_search { let ctx = SearchContext::new(self.index, self.rtxn); @@ -169,36 +177,43 @@ impl<'a> Search<'a> { } let universe = filtered_universe(&ctx, &self.filter)?; - let PartialSearchResult { located_query_terms, candidates, documents_ids, document_scores } = - match self.vector.as_ref() { - Some(vector) => execute_vector_search( - &mut ctx, - vector, - self.scoring_strategy, - universe, - &self.sort_criteria, - self.geo_strategy, - self.offset, - self.limit, - self.distribution_shift, - embedder_name, - )?, - None => execute_search( - &mut ctx, - self.query.as_deref(), - self.terms_matching_strategy, - self.scoring_strategy, - self.exhaustive_number_hits, - universe, - &self.sort_criteria, - self.geo_strategy, - self.offset, - self.limit, - Some(self.words_limit), - &mut DefaultSearchLogger, - &mut DefaultSearchLogger, - )?, - }; + let PartialSearchResult { + located_query_terms, + candidates, + documents_ids, + document_scores, + degraded, + } = match self.vector.as_ref() { + Some(vector) => execute_vector_search( + &mut ctx, + vector, + self.scoring_strategy, + universe, + &self.sort_criteria, + self.geo_strategy, + self.offset, + self.limit, + self.distribution_shift, + embedder_name, + self.time_budget, + )?, + None => execute_search( + &mut ctx, + self.query.as_deref(), + self.terms_matching_strategy, + self.scoring_strategy, + self.exhaustive_number_hits, + universe, + &self.sort_criteria, + self.geo_strategy, + self.offset, + self.limit, + Some(self.words_limit), + &mut DefaultSearchLogger, + &mut DefaultSearchLogger, + self.time_budget, + )?, + }; // consume context and located_query_terms to build MatchingWords. 
let matching_words = match located_query_terms { @@ -206,7 +221,7 @@ impl<'a> Search<'a> { None => MatchingWords::default(), }; - Ok(SearchResult { matching_words, candidates, document_scores, documents_ids }) + Ok(SearchResult { matching_words, candidates, document_scores, documents_ids, degraded }) } } @@ -229,6 +244,7 @@ impl fmt::Debug for Search<'_> { index: _, distribution_shift, embedder_name, + time_budget, } = self; f.debug_struct("Search") .field("query", query) @@ -244,6 +260,7 @@ impl fmt::Debug for Search<'_> { .field("words_limit", words_limit) .field("distribution_shift", distribution_shift) .field("embedder_name", embedder_name) + .field("time_bduget", time_budget) .finish() } } @@ -254,6 +271,7 @@ pub struct SearchResult { pub candidates: RoaringBitmap, pub documents_ids: Vec, pub document_scores: Vec>, + pub degraded: bool, } #[derive(Debug, Clone, Copy, PartialEq, Eq)] diff --git a/milli/src/search/new/bucket_sort.rs b/milli/src/search/new/bucket_sort.rs index 02528e378..7fc830c1f 100644 --- a/milli/src/search/new/bucket_sort.rs +++ b/milli/src/search/new/bucket_sort.rs @@ -5,12 +5,14 @@ use super::ranking_rules::{BoxRankingRule, RankingRuleQueryTrait}; use super::SearchContext; use crate::score_details::{ScoreDetails, ScoringStrategy}; use crate::search::new::distinct::{apply_distinct_rule, distinct_single_docid, DistinctOutput}; -use crate::Result; +use crate::{Result, TimeBudget}; pub struct BucketSortOutput { pub docids: Vec, pub scores: Vec>, pub all_candidates: RoaringBitmap, + + pub degraded: bool, } // TODO: would probably be good to regroup some of these inside of a struct? @@ -25,6 +27,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>( length: usize, scoring_strategy: ScoringStrategy, logger: &mut dyn SearchLogger, + time_budget: TimeBudget, ) -> Result { logger.initial_query(query); logger.ranking_rules(&ranking_rules); @@ -41,6 +44,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>( docids: vec![], scores: vec![], all_candidates: universe.clone(), + degraded: false, }); } if ranking_rules.is_empty() { @@ -74,6 +78,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>( scores: vec![Default::default(); results.len()], docids: results, all_candidates, + degraded: false, }); } else { let docids: Vec = universe.iter().skip(from).take(length).collect(); @@ -81,6 +86,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>( scores: vec![Default::default(); docids.len()], docids, all_candidates: universe.clone(), + degraded: false, }); }; } @@ -154,6 +160,18 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>( } while valid_docids.len() < length { + if time_budget.exceeded() { + let bucket = std::mem::take(&mut ranking_rule_universes[cur_ranking_rule_index]); + maybe_add_to_results!(bucket); + + return Ok(BucketSortOutput { + scores: vec![Default::default(); valid_docids.len()], + docids: valid_docids, + all_candidates, + degraded: true, + }); + } + // The universe for this bucket is zero, so we don't need to sort // anything, just go back to the parent ranking rule. if ranking_rule_universes[cur_ranking_rule_index].is_empty() @@ -219,7 +237,12 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>( )?; } - Ok(BucketSortOutput { docids: valid_docids, scores: valid_scores, all_candidates }) + Ok(BucketSortOutput { + docids: valid_docids, + scores: valid_scores, + all_candidates, + degraded: false, + }) } /// Add the candidates to the results. 
Take `distinct`, `from`, `length`, and `cur_offset` diff --git a/milli/src/search/new/matches/mod.rs b/milli/src/search/new/matches/mod.rs index 8de1d9262..2913f206d 100644 --- a/milli/src/search/new/matches/mod.rs +++ b/milli/src/search/new/matches/mod.rs @@ -502,7 +502,7 @@ mod tests { use super::*; use crate::index::tests::TempIndex; - use crate::{execute_search, filtered_universe, SearchContext}; + use crate::{execute_search, filtered_universe, SearchContext, TimeBudget}; impl<'a> MatcherBuilder<'a> { fn new_test(rtxn: &'a heed::RoTxn, index: &'a TempIndex, query: &str) -> Self { @@ -522,6 +522,7 @@ mod tests { Some(10), &mut crate::DefaultSearchLogger, &mut crate::DefaultSearchLogger, + TimeBudget::max(), ) .unwrap(); diff --git a/milli/src/search/new/mod.rs b/milli/src/search/new/mod.rs index ae661e3f6..ad996f363 100644 --- a/milli/src/search/new/mod.rs +++ b/milli/src/search/new/mod.rs @@ -52,7 +52,8 @@ use crate::score_details::{ScoreDetails, ScoringStrategy}; use crate::search::new::distinct::apply_distinct_rule; use crate::vector::DistributionShift; use crate::{ - AscDesc, DocumentId, FieldId, Filter, Index, Member, Result, TermsMatchingStrategy, UserError, + AscDesc, DocumentId, FieldId, Filter, Index, Member, Result, TermsMatchingStrategy, TimeBudget, + UserError, }; /// A structure used throughout the execution of a search query. @@ -518,6 +519,7 @@ pub fn execute_vector_search( length: usize, distribution_shift: Option, embedder_name: &str, + time_budget: TimeBudget, ) -> Result { check_sort_criteria(ctx, sort_criteria.as_ref())?; @@ -537,7 +539,7 @@ pub fn execute_vector_search( let placeholder_search_logger: &mut dyn SearchLogger = &mut placeholder_search_logger; - let BucketSortOutput { docids, scores, all_candidates } = bucket_sort( + let BucketSortOutput { docids, scores, all_candidates, degraded } = bucket_sort( ctx, ranking_rules, &PlaceholderQuery, @@ -546,6 +548,7 @@ pub fn execute_vector_search( length, scoring_strategy, placeholder_search_logger, + time_budget, )?; Ok(PartialSearchResult { @@ -553,6 +556,7 @@ pub fn execute_vector_search( document_scores: scores, documents_ids: docids, located_query_terms: None, + degraded, }) } @@ -572,6 +576,7 @@ pub fn execute_search( words_limit: Option, placeholder_search_logger: &mut dyn SearchLogger, query_graph_logger: &mut dyn SearchLogger, + time_budget: TimeBudget, ) -> Result { check_sort_criteria(ctx, sort_criteria.as_ref())?; @@ -648,6 +653,7 @@ pub fn execute_search( length, scoring_strategy, query_graph_logger, + time_budget, )? } else { let ranking_rules = @@ -661,10 +667,11 @@ pub fn execute_search( length, scoring_strategy, placeholder_search_logger, + time_budget, )? 
}; - let BucketSortOutput { docids, scores, mut all_candidates } = bucket_sort_output; + let BucketSortOutput { docids, scores, mut all_candidates, degraded } = bucket_sort_output; let fields_ids_map = ctx.index.fields_ids_map(ctx.txn)?; // The candidates is the universe unless the exhaustive number of hits @@ -682,6 +689,7 @@ pub fn execute_search( document_scores: scores, documents_ids: docids, located_query_terms, + degraded, }) } @@ -742,4 +750,6 @@ pub struct PartialSearchResult { pub candidates: RoaringBitmap, pub documents_ids: Vec, pub document_scores: Vec>, + + pub degraded: bool, } diff --git a/milli/tests/search/mod.rs b/milli/tests/search/mod.rs index 9193ab762..ab6befa60 100644 --- a/milli/tests/search/mod.rs +++ b/milli/tests/search/mod.rs @@ -1,14 +1,19 @@ use std::cmp::Reverse; use std::collections::HashSet; use std::io::Cursor; +use std::time::Duration; use big_s::S; use either::{Either, Left, Right}; use heed::EnvOpenOptions; use maplit::{btreemap, hashset}; +use meili_snap::snapshot; use milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader}; use milli::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings}; -use milli::{AscDesc, Criterion, DocumentId, Index, Member, Object, TermsMatchingStrategy}; +use milli::{ + AscDesc, Criterion, DocumentId, Filter, Index, Member, Object, Search, TermsMatchingStrategy, + TimeBudget, +}; use serde::{Deserialize, Deserializer}; use slice_group_by::GroupBy; @@ -349,3 +354,41 @@ where let result = serde_json::Value::deserialize(deserializer)?; Ok(Some(result)) } + +#[test] +fn basic_degraded_search() { + use Criterion::*; + let criteria = vec![Words, Typo, Proximity, Attribute, Exactness]; + let index = setup_search_index_with_criteria(&criteria); + let rtxn = index.read_txn().unwrap(); + + let mut search = Search::new(&rtxn, &index); + search.query(TEST_QUERY); + search.limit(EXTERNAL_DOCUMENTS_IDS.len()); + search.time_budget(TimeBudget::new(Duration::from_millis(0))); + + let result = search.execute().unwrap(); + assert!(result.degraded); +} + +#[test] +fn degraded_search_cannot_skip_filter() { + use Criterion::*; + let criteria = vec![Words, Typo, Proximity, Attribute, Exactness]; + let index = setup_search_index_with_criteria(&criteria); + let rtxn = index.read_txn().unwrap(); + + let mut search = Search::new(&rtxn, &index); + search.query(TEST_QUERY); + search.limit(EXTERNAL_DOCUMENTS_IDS.len()); + search.time_budget(TimeBudget::new(Duration::from_millis(0))); + let filter_condition = Filter::from_str("tag = etiopia").unwrap().unwrap(); + search.filter(filter_condition); + + let result = search.execute().unwrap(); + assert!(result.degraded); + snapshot!(format!("{:?}\n{:?}", result.candidates, result.documents_ids), @r###" + RoaringBitmap<[0, 2, 5, 8, 11, 14]> + [0, 2, 5, 8, 11, 14] + "###); +} From d1db4951195bae1aaa13dd3481bc4ff0003122d7 Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 11 Mar 2024 18:24:21 +0100 Subject: [PATCH 03/19] add a settings for the search cutoff --- dump/src/lib.rs | 1 + dump/src/reader/compat/v5_to_v6.rs | 1 + meilisearch-types/src/error.rs | 1 + meilisearch-types/src/settings.rs | 76 +++++++++++++++++----- meilisearch/src/routes/indexes/settings.rs | 22 ++++++- meilisearch/src/search.rs | 5 +- meilisearch/tests/dumps/mod.rs | 39 +++++++---- meilisearch/tests/settings/get_settings.rs | 7 +- milli/src/index.rs | 13 ++++ milli/src/lib.rs | 6 ++ milli/src/update/settings.rs | 33 ++++++++++ 11 files changed, 169 insertions(+), 35 deletions(-) diff --git a/dump/src/lib.rs 
b/dump/src/lib.rs index be0053a7c..e7cadacbe 100644 --- a/dump/src/lib.rs +++ b/dump/src/lib.rs @@ -277,6 +277,7 @@ pub(crate) mod test { }), pagination: Setting::NotSet, embedders: Setting::NotSet, + search_cutoff: Setting::NotSet, _kind: std::marker::PhantomData, }; settings.check() diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs index e00d3a599..2b8997847 100644 --- a/dump/src/reader/compat/v5_to_v6.rs +++ b/dump/src/reader/compat/v5_to_v6.rs @@ -379,6 +379,7 @@ impl From> for v6::Settings { v5::Setting::NotSet => v6::Setting::NotSet, }, embedders: v6::Setting::NotSet, + search_cutoff: v6::Setting::NotSet, _kind: std::marker::PhantomData, } } diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index 965d2e672..bf9492ff6 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -259,6 +259,7 @@ InvalidSettingsProximityPrecision , InvalidRequest , BAD_REQUEST ; InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ; InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ; InvalidSettingsPagination , InvalidRequest , BAD_REQUEST ; +InvalidSettingsSearchCutoff , InvalidRequest , BAD_REQUEST ; InvalidSettingsEmbedders , InvalidRequest , BAD_REQUEST ; InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ; InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ; diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs index ca46abb0c..d05201943 100644 --- a/meilisearch-types/src/settings.rs +++ b/meilisearch-types/src/settings.rs @@ -202,6 +202,9 @@ pub struct Settings { #[serde(default, skip_serializing_if = "Setting::is_not_set")] #[deserr(default, error = DeserrJsonError)] pub embedders: Setting>>, + #[serde(default, skip_serializing_if = "Setting::is_not_set")] + #[deserr(default, error = DeserrJsonError)] + pub search_cutoff: Setting, #[serde(skip)] #[deserr(skip)] @@ -227,6 +230,7 @@ impl Settings { faceting: Setting::Reset, pagination: Setting::Reset, embedders: Setting::Reset, + search_cutoff: Setting::Reset, _kind: PhantomData, } } @@ -249,6 +253,7 @@ impl Settings { faceting, pagination, embedders, + search_cutoff, .. 
} = self; @@ -269,6 +274,7 @@ impl Settings { faceting, pagination, embedders, + search_cutoff, _kind: PhantomData, } } @@ -315,6 +321,7 @@ impl Settings { faceting: self.faceting, pagination: self.pagination, embedders: self.embedders, + search_cutoff: self.search_cutoff, _kind: PhantomData, } } @@ -347,19 +354,40 @@ pub fn apply_settings_to_builder( settings: &Settings, builder: &mut milli::update::Settings, ) { - match settings.searchable_attributes { + let Settings { + displayed_attributes, + searchable_attributes, + filterable_attributes, + sortable_attributes, + ranking_rules, + stop_words, + non_separator_tokens, + separator_tokens, + dictionary, + synonyms, + distinct_attribute, + proximity_precision, + typo_tolerance, + faceting, + pagination, + embedders, + search_cutoff, + _kind, + } = settings; + + match searchable_attributes { Setting::Set(ref names) => builder.set_searchable_fields(names.clone()), Setting::Reset => builder.reset_searchable_fields(), Setting::NotSet => (), } - match settings.displayed_attributes { + match displayed_attributes { Setting::Set(ref names) => builder.set_displayed_fields(names.clone()), Setting::Reset => builder.reset_displayed_fields(), Setting::NotSet => (), } - match settings.filterable_attributes { + match filterable_attributes { Setting::Set(ref facets) => { builder.set_filterable_fields(facets.clone().into_iter().collect()) } @@ -367,13 +395,13 @@ pub fn apply_settings_to_builder( Setting::NotSet => (), } - match settings.sortable_attributes { + match sortable_attributes { Setting::Set(ref fields) => builder.set_sortable_fields(fields.iter().cloned().collect()), Setting::Reset => builder.reset_sortable_fields(), Setting::NotSet => (), } - match settings.ranking_rules { + match ranking_rules { Setting::Set(ref criteria) => { builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect()) } @@ -381,13 +409,13 @@ pub fn apply_settings_to_builder( Setting::NotSet => (), } - match settings.stop_words { + match stop_words { Setting::Set(ref stop_words) => builder.set_stop_words(stop_words.clone()), Setting::Reset => builder.reset_stop_words(), Setting::NotSet => (), } - match settings.non_separator_tokens { + match non_separator_tokens { Setting::Set(ref non_separator_tokens) => { builder.set_non_separator_tokens(non_separator_tokens.clone()) } @@ -395,7 +423,7 @@ pub fn apply_settings_to_builder( Setting::NotSet => (), } - match settings.separator_tokens { + match separator_tokens { Setting::Set(ref separator_tokens) => { builder.set_separator_tokens(separator_tokens.clone()) } @@ -403,31 +431,31 @@ pub fn apply_settings_to_builder( Setting::NotSet => (), } - match settings.dictionary { + match dictionary { Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()), Setting::Reset => builder.reset_dictionary(), Setting::NotSet => (), } - match settings.synonyms { + match synonyms { Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()), Setting::Reset => builder.reset_synonyms(), Setting::NotSet => (), } - match settings.distinct_attribute { + match distinct_attribute { Setting::Set(ref attr) => builder.set_distinct_field(attr.clone()), Setting::Reset => builder.reset_distinct_field(), Setting::NotSet => (), } - match settings.proximity_precision { + match proximity_precision { Setting::Set(ref precision) => builder.set_proximity_precision((*precision).into()), Setting::Reset => builder.reset_proximity_precision(), Setting::NotSet => (), } - match settings.typo_tolerance { + match 
typo_tolerance { Setting::Set(ref value) => { match value.enabled { Setting::Set(val) => builder.set_autorize_typos(val), @@ -482,7 +510,7 @@ pub fn apply_settings_to_builder( Setting::NotSet => (), } - match &settings.faceting { + match faceting { Setting::Set(FacetingSettings { max_values_per_facet, sort_facet_values_by }) => { match max_values_per_facet { Setting::Set(val) => builder.set_max_values_per_facet(*val), @@ -504,7 +532,7 @@ pub fn apply_settings_to_builder( Setting::NotSet => (), } - match settings.pagination { + match pagination { Setting::Set(ref value) => match value.max_total_hits { Setting::Set(val) => builder.set_pagination_max_total_hits(val), Setting::Reset => builder.reset_pagination_max_total_hits(), @@ -514,11 +542,17 @@ pub fn apply_settings_to_builder( Setting::NotSet => (), } - match settings.embedders.clone() { - Setting::Set(value) => builder.set_embedder_settings(value), + match embedders { + Setting::Set(value) => builder.set_embedder_settings(value.clone()), Setting::Reset => builder.reset_embedder_settings(), Setting::NotSet => (), } + + match search_cutoff { + Setting::Set(cutoff) => builder.set_search_cutoff(*cutoff), + Setting::Reset => builder.reset_search_cutoff(), + Setting::NotSet => (), + } } pub fn settings( @@ -607,6 +641,8 @@ pub fn settings( .collect(); let embedders = if embedders.is_empty() { Setting::NotSet } else { Setting::Set(embedders) }; + let search_cutoff = index.search_cutoff(rtxn)?; + Ok(Settings { displayed_attributes: match displayed_attributes { Some(attrs) => Setting::Set(attrs), @@ -633,6 +669,10 @@ pub fn settings( faceting: Setting::Set(faceting), pagination: Setting::Set(pagination), embedders, + search_cutoff: match search_cutoff { + Some(cutoff) => Setting::Set(cutoff), + None => Setting::Reset, + }, _kind: PhantomData, }) } @@ -783,6 +823,7 @@ pub(crate) mod test { faceting: Setting::NotSet, pagination: Setting::NotSet, embedders: Setting::NotSet, + search_cutoff: Setting::NotSet, _kind: PhantomData::, }; @@ -809,6 +850,7 @@ pub(crate) mod test { faceting: Setting::NotSet, pagination: Setting::NotSet, embedders: Setting::NotSet, + search_cutoff: Setting::NotSet, _kind: PhantomData::, }; diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index c782e78cb..1d03c9a91 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -624,6 +624,25 @@ fn embedder_analytics( ) } +make_setting_route!( + "/search_cutoff", + patch, + u64, + meilisearch_types::deserr::DeserrJsonError< + meilisearch_types::error::deserr_codes::InvalidSettingsSearchCutoff, + >, + search_cutoff, + "search_cutoff", + analytics, + |setting: &Option, req: &HttpRequest| { + analytics.publish( + "Search Cutoff Updated".to_string(), + serde_json::json!({"search_cutoff": setting }), + Some(req), + ); + } +); + macro_rules! 
generate_configure { ($($mod:ident),*) => { pub fn configure(cfg: &mut web::ServiceConfig) { @@ -765,7 +784,8 @@ pub async fn update_all( "synonyms": { "total": new_settings.synonyms.as_ref().set().map(|synonyms| synonyms.len()), }, - "embedders": crate::routes::indexes::settings::embedder_analytics(new_settings.embedders.as_ref().set()) + "embedders": crate::routes::indexes::settings::embedder_analytics(new_settings.embedders.as_ref().set()), + "search_cutoff": new_settings.search_cutoff.as_ref().set(), }), Some(&req), ); diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index 9bc7b69fc..f83e14187 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -496,8 +496,11 @@ pub fn perform_search( distribution: Option, ) -> Result { let before_search = Instant::now(); - let time_budget = TimeBudget::new(Duration::from_millis(150)); let rtxn = index.read_txn()?; + let time_budget = match index.search_cutoff(&rtxn)? { + Some(cutoff) => TimeBudget::new(Duration::from_millis(cutoff)), + None => TimeBudget::default(), + }; let (search, is_finite_pagination, max_total_hits, offset) = prepare_search(index, &rtxn, &query, features, distribution, time_budget)?; diff --git a/meilisearch/tests/dumps/mod.rs b/meilisearch/tests/dumps/mod.rs index e8061ae4a..7bf97f8b2 100644 --- a/meilisearch/tests/dumps/mod.rs +++ b/meilisearch/tests/dumps/mod.rs @@ -77,7 +77,8 @@ async fn import_dump_v1_movie_raw() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -238,7 +239,8 @@ async fn import_dump_v1_movie_with_settings() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -385,7 +387,8 @@ async fn import_dump_v1_rubygems_with_settings() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -518,7 +521,8 @@ async fn import_dump_v2_movie_raw() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -663,7 +667,8 @@ async fn import_dump_v2_movie_with_settings() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -807,7 +812,8 @@ async fn import_dump_v2_rubygems_with_settings() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -940,7 +946,8 @@ async fn import_dump_v3_movie_raw() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -1085,7 +1092,8 @@ async fn import_dump_v3_movie_with_settings() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -1229,7 +1237,8 @@ async fn import_dump_v3_rubygems_with_settings() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -1362,7 +1371,8 @@ async fn import_dump_v4_movie_raw() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -1507,7 +1517,8 @@ async fn import_dump_v4_movie_with_settings() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -1651,7 +1662,8 @@ async fn import_dump_v4_rubygems_with_settings() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "### ); @@ -1895,7 +1907,8 @@ async fn import_dump_v6_containing_experimental_features() { }, "pagination": { "maxTotalHits": 1000 - } + }, + "searchCutoff": null } "###); diff --git a/meilisearch/tests/settings/get_settings.rs b/meilisearch/tests/settings/get_settings.rs index 5642e854f..000443f36 100644 --- a/meilisearch/tests/settings/get_settings.rs +++ 
b/meilisearch/tests/settings/get_settings.rs @@ -49,12 +49,12 @@ async fn get_settings_unexisting_index() { async fn get_settings() { let server = Server::new().await; let index = server.index("test"); - index.create(None).await; - index.wait_task(0).await; + let (response, _code) = index.create(None).await; + index.wait_task(response.uid()).await; let (response, code) = index.settings().await; assert_eq!(code, 200); let settings = response.as_object().unwrap(); - assert_eq!(settings.keys().len(), 15); + assert_eq!(settings.keys().len(), 16); assert_eq!(settings["displayedAttributes"], json!(["*"])); assert_eq!(settings["searchableAttributes"], json!(["*"])); assert_eq!(settings["filterableAttributes"], json!([])); @@ -84,6 +84,7 @@ async fn get_settings() { }) ); assert_eq!(settings["proximityPrecision"], json!("byWord")); + assert_eq!(settings["searchCutoff"], json!(null)); } #[actix_rt::test] diff --git a/milli/src/index.rs b/milli/src/index.rs index e79c137e7..d921de9e4 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -67,6 +67,7 @@ pub mod main_key { pub const PAGINATION_MAX_TOTAL_HITS: &str = "pagination-max-total-hits"; pub const PROXIMITY_PRECISION: &str = "proximity-precision"; pub const EMBEDDING_CONFIGS: &str = "embedding_configs"; + pub const SEARCH_CUTOFF: &str = "search_cutoff"; } pub mod db_name { @@ -1505,6 +1506,18 @@ impl Index { _ => "default".to_owned(), }) } + + pub(crate) fn put_search_cutoff(&self, wtxn: &mut RwTxn<'_>, cutoff: u64) -> heed::Result<()> { + self.main.remap_types::().put(wtxn, main_key::SEARCH_CUTOFF, &cutoff) + } + + pub fn search_cutoff(&self, rtxn: &RoTxn<'_>) -> Result> { + Ok(self.main.remap_types::().get(rtxn, main_key::SEARCH_CUTOFF)?) + } + + pub(crate) fn delete_search_cutoff(&self, wtxn: &mut RwTxn<'_>) -> heed::Result { + self.main.remap_key_type::().delete(wtxn, main_key::SEARCH_CUTOFF) + } } #[cfg(test)] diff --git a/milli/src/lib.rs b/milli/src/lib.rs index eedd25f7e..896aadb50 100644 --- a/milli/src/lib.rs +++ b/milli/src/lib.rs @@ -121,6 +121,12 @@ impl fmt::Debug for TimeBudget { } } +impl Default for TimeBudget { + fn default() -> Self { + Self::new(std::time::Duration::from_millis(150)) + } +} + impl TimeBudget { pub fn new(budget: std::time::Duration) -> Self { Self { started_at: std::time::Instant::now(), budget } diff --git a/milli/src/update/settings.rs b/milli/src/update/settings.rs index 63b45e3aa..1e720ba56 100644 --- a/milli/src/update/settings.rs +++ b/milli/src/update/settings.rs @@ -150,6 +150,7 @@ pub struct Settings<'a, 't, 'i> { pagination_max_total_hits: Setting, proximity_precision: Setting, embedder_settings: Setting>>, + search_cutoff: Setting, } impl<'a, 't, 'i> Settings<'a, 't, 'i> { @@ -183,6 +184,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> { pagination_max_total_hits: Setting::NotSet, proximity_precision: Setting::NotSet, embedder_settings: Setting::NotSet, + search_cutoff: Setting::NotSet, indexer_config, } } @@ -373,6 +375,14 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> { self.embedder_settings = Setting::Reset; } + pub fn set_search_cutoff(&mut self, value: u64) { + self.search_cutoff = Setting::Set(value); + } + + pub fn reset_search_cutoff(&mut self) { + self.search_cutoff = Setting::Reset; + } + #[tracing::instrument( level = "trace" skip(self, progress_callback, should_abort, old_fields_ids_map), @@ -1026,6 +1036,24 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> { Ok(update) } + fn update_search_cutoff(&mut self) -> Result { + let changed = match self.search_cutoff { + Setting::Set(new) => { + let old = 
self.index.search_cutoff(self.wtxn)?; + if old == Some(new) { + false + } else { + self.index.put_search_cutoff(self.wtxn, new)?; + true + } + } + Setting::Reset => self.index.delete_search_cutoff(self.wtxn)?, + Setting::NotSet => false, + }; + + Ok(changed) + } + pub fn execute(mut self, progress_callback: FP, should_abort: FA) -> Result<()> where FP: Fn(UpdateIndexingStep) + Sync, @@ -1071,6 +1099,9 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> { // 3. Keep the old vectors but reattempt indexing on a prompt change: only actually changed prompt will need embedding + storage let embedding_configs_updated = self.update_embedding_configs()?; + // never trigger re-indexing + self.update_search_cutoff()?; + if stop_words_updated || non_separator_tokens_updated || separator_tokens_updated @@ -2027,6 +2058,7 @@ mod tests { pagination_max_total_hits, proximity_precision, embedder_settings, + search_cutoff, } = settings; assert!(matches!(searchable_fields, Setting::NotSet)); assert!(matches!(displayed_fields, Setting::NotSet)); @@ -2050,6 +2082,7 @@ mod tests { assert!(matches!(pagination_max_total_hits, Setting::NotSet)); assert!(matches!(proximity_precision, Setting::NotSet)); assert!(matches!(embedder_settings, Setting::NotSet)); + assert!(matches!(search_cutoff, Setting::NotSet)); }) .unwrap(); } From b72495eb5892cf56d3b30b6d575491b1e80f6889 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 12 Mar 2024 18:19:02 +0100 Subject: [PATCH 04/19] fix the settings tests --- meilisearch/src/routes/indexes/settings.rs | 10 ++++++---- meilisearch/tests/settings/get_settings.rs | 4 +++- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index 1d03c9a91..41fc58a87 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -138,6 +138,7 @@ macro_rules! 
make_setting_route { debug!(returns = ?settings, "Update settings"); let mut json = serde_json::json!(&settings); + dbg!(&json); let val = json[$camelcase_attr].take(); Ok(HttpResponse::Ok().json(val)) @@ -625,14 +626,14 @@ fn embedder_analytics( } make_setting_route!( - "/search_cutoff", - patch, + "/search-cutoff", + put, u64, meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsSearchCutoff, >, search_cutoff, - "search_cutoff", + "searchCutoff", analytics, |setting: &Option, req: &HttpRequest| { analytics.publish( @@ -673,7 +674,8 @@ generate_configure!( typo_tolerance, pagination, faceting, - embedders + embedders, + search_cutoff ); pub async fn update_all( diff --git a/meilisearch/tests/settings/get_settings.rs b/meilisearch/tests/settings/get_settings.rs index 000443f36..d573f38e0 100644 --- a/meilisearch/tests/settings/get_settings.rs +++ b/meilisearch/tests/settings/get_settings.rs @@ -35,6 +35,7 @@ static DEFAULT_SETTINGS_VALUES: Lazy> = Lazy::new(| "maxTotalHits": json!(1000), }), ); + map.insert("search_cutoff", json!(null)); map }); @@ -286,7 +287,8 @@ test_setting_routes!( ranking_rules put, synonyms put, pagination patch, - faceting patch + faceting patch, + search_cutoff put ); #[actix_rt::test] From b8cda6c300f8ca351a739319b2fcfadcc80e327b Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 14 Mar 2024 17:34:46 +0100 Subject: [PATCH 05/19] fix the search cutoff and add a test --- meilisearch/tests/search/mod.rs | 109 +++++++ milli/src/lib.rs | 37 ++- milli/src/score_details.rs | 12 + milli/src/search/hybrid.rs | 2 +- milli/src/search/mod.rs | 4 +- milli/src/search/new/bucket_sort.rs | 16 +- milli/src/search/new/tests/cutoff.rs | 419 +++++++++++++++++++++++++++ milli/src/search/new/tests/mod.rs | 1 + milli/tests/search/mod.rs | 45 +-- 9 files changed, 590 insertions(+), 55 deletions(-) create mode 100644 milli/src/search/new/tests/cutoff.rs diff --git a/meilisearch/tests/search/mod.rs b/meilisearch/tests/search/mod.rs index 90098c5b6..62dd73c63 100644 --- a/meilisearch/tests/search/mod.rs +++ b/meilisearch/tests/search/mod.rs @@ -834,6 +834,115 @@ async fn test_score_details() { .await; } +#[actix_rt::test] +async fn test_degraded_score_details() { + let server = Server::new().await; + let index = server.index("test"); + + let documents = NESTED_DOCUMENTS.clone(); + + index.add_documents(json!(documents), None).await; + // We can't really use anything else than 0ms here; otherwise, the test will get flaky. 
+ let (res, _code) = index.update_settings(json!({ "searchCutoff": 0 })).await; + index.wait_task(res.uid()).await; + + index + .search( + json!({ + "q": "b", + "showRankingScoreDetails": true, + }), + |response, code| { + meili_snap::snapshot!(code, @"200 OK"); + meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" + [ + { + "id": 852, + "father": "jean", + "mother": "michelle", + "doggos": [ + { + "name": "bobby", + "age": 2 + }, + { + "name": "buddy", + "age": 4 + } + ], + "cattos": "pésti", + "_vectors": { + "manual": [ + 1, + 2, + 3 + ] + }, + "_rankingScoreDetails": { + "skipped": 0.0 + } + }, + { + "id": 654, + "father": "pierre", + "mother": "sabine", + "doggos": [ + { + "name": "gros bill", + "age": 8 + } + ], + "cattos": [ + "simba", + "pestiféré" + ], + "_vectors": { + "manual": [ + 1, + 2, + 54 + ] + }, + "_rankingScoreDetails": { + "skipped": 0.0 + } + }, + { + "id": 951, + "father": "jean-baptiste", + "mother": "sophie", + "doggos": [ + { + "name": "turbo", + "age": 5 + }, + { + "name": "fast", + "age": 6 + } + ], + "cattos": [ + "moumoute", + "gomez" + ], + "_vectors": { + "manual": [ + 10, + 23, + 32 + ] + }, + "_rankingScoreDetails": { + "skipped": 0.0 + } + } + ] + "###); + }, + ) + .await; +} + #[actix_rt::test] async fn experimental_feature_vector_store() { let server = Server::new().await; diff --git a/milli/src/lib.rs b/milli/src/lib.rs index 896aadb50..df44ca127 100644 --- a/milli/src/lib.rs +++ b/milli/src/lib.rs @@ -105,10 +105,15 @@ pub const MAX_WORD_LENGTH: usize = MAX_LMDB_KEY_LENGTH / 2; pub const MAX_POSITION_PER_ATTRIBUTE: u32 = u16::MAX as u32 + 1; -#[derive(Clone, Copy)] +#[derive(Clone)] pub struct TimeBudget { started_at: std::time::Instant, budget: std::time::Duration, + + /// When testing the time budget, ensuring we did more than iteration of the bucket sort can be useful. + /// But to avoid being flaky, the only option is to add the ability to stop after a specific number of calls instead of a `Duration`. 
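+ ///
+ /// For example, `TimeBudget::max().with_stop_after(2)` makes `exceeded()` return `false` for the
+ /// first two calls and `true` from the third call onwards, regardless of how much time has elapsed.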
+ #[cfg(test)] + stop_after: Option<(std::sync::Arc, usize)>, } impl fmt::Debug for TimeBudget { @@ -129,18 +134,40 @@ impl Default for TimeBudget { impl TimeBudget { pub fn new(budget: std::time::Duration) -> Self { - Self { started_at: std::time::Instant::now(), budget } + Self { + started_at: std::time::Instant::now(), + budget, + + #[cfg(test)] + stop_after: None, + } } pub fn max() -> Self { Self::new(std::time::Duration::from_secs(u64::MAX)) } - pub fn exceeded(&self) -> bool { - self.must_stop() + #[cfg(test)] + pub fn with_stop_after(mut self, stop_after: usize) -> Self { + use std::sync::atomic::AtomicUsize; + use std::sync::Arc; + + self.stop_after = Some((Arc::new(AtomicUsize::new(0)), stop_after)); + self } - pub fn must_stop(&self) -> bool { + pub fn exceeded(&self) -> bool { + #[cfg(test)] + if let Some((current, stop_after)) = &self.stop_after { + let current = current.fetch_add(1, std::sync::atomic::Ordering::Relaxed); + if current >= *stop_after { + return true; + } else { + // if a number has been specified then we ignore entirely the time budget + return false; + } + } + self.started_at.elapsed() > self.budget } } diff --git a/milli/src/score_details.rs b/milli/src/score_details.rs index f6b9db58c..f2c6fb58a 100644 --- a/milli/src/score_details.rs +++ b/milli/src/score_details.rs @@ -17,6 +17,9 @@ pub enum ScoreDetails { Sort(Sort), Vector(Vector), GeoSort(GeoSort), + + /// Returned when we don't have the time to finish applying all the subsequent ranking-rules + Skipped, } #[derive(Clone, Copy)] @@ -50,6 +53,7 @@ impl ScoreDetails { ScoreDetails::Sort(_) => None, ScoreDetails::GeoSort(_) => None, ScoreDetails::Vector(_) => None, + ScoreDetails::Skipped => Some(Rank { rank: 0, max_rank: 1 }), } } @@ -97,6 +101,7 @@ impl ScoreDetails { ScoreDetails::Vector(vector) => RankOrValue::Score( vector.value_similarity.as_ref().map(|(_, s)| *s as f64).unwrap_or(0.0f64), ), + ScoreDetails::Skipped => RankOrValue::Score(0.), } } @@ -256,6 +261,13 @@ impl ScoreDetails { details_map.insert(vector, details); order += 1; } + ScoreDetails::Skipped => { + details_map.insert( + "skipped".to_string(), + serde_json::Number::from_f64(0.).unwrap().into(), + ); + order += 1; + } } } details_map diff --git a/milli/src/search/hybrid.rs b/milli/src/search/hybrid.rs index 9d8b3860d..1e14f4430 100644 --- a/milli/src/search/hybrid.rs +++ b/milli/src/search/hybrid.rs @@ -132,7 +132,7 @@ impl<'a> Search<'a> { index: self.index, distribution_shift: self.distribution_shift, embedder_name: self.embedder_name.clone(), - time_budget: self.time_budget, + time_budget: self.time_budget.clone(), }; let vector_query = search.vector.take(); diff --git a/milli/src/search/mod.rs b/milli/src/search/mod.rs index b14d88d03..f6ab8a7de 100644 --- a/milli/src/search/mod.rs +++ b/milli/src/search/mod.rs @@ -195,7 +195,7 @@ impl<'a> Search<'a> { self.limit, self.distribution_shift, embedder_name, - self.time_budget, + self.time_budget.clone(), )?, None => execute_search( &mut ctx, @@ -211,7 +211,7 @@ impl<'a> Search<'a> { Some(self.words_limit), &mut DefaultSearchLogger, &mut DefaultSearchLogger, - self.time_budget, + self.time_budget.clone(), )?, }; diff --git a/milli/src/search/new/bucket_sort.rs b/milli/src/search/new/bucket_sort.rs index 7fc830c1f..521fcb983 100644 --- a/milli/src/search/new/bucket_sort.rs +++ b/milli/src/search/new/bucket_sort.rs @@ -161,11 +161,21 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>( while valid_docids.len() < length { if time_budget.exceeded() { - let bucket = std::mem::take(&mut 
ranking_rule_universes[cur_ranking_rule_index]); - maybe_add_to_results!(bucket); + loop { + let bucket = std::mem::take(&mut ranking_rule_universes[cur_ranking_rule_index]); + ranking_rule_scores.push(ScoreDetails::Skipped); + maybe_add_to_results!(bucket); + ranking_rule_scores.pop(); + + if cur_ranking_rule_index == 0 { + break; + } + + back!(); + } return Ok(BucketSortOutput { - scores: vec![Default::default(); valid_docids.len()], + scores: valid_scores, docids: valid_docids, all_candidates, degraded: true, diff --git a/milli/src/search/new/tests/cutoff.rs b/milli/src/search/new/tests/cutoff.rs new file mode 100644 index 000000000..4256abc2b --- /dev/null +++ b/milli/src/search/new/tests/cutoff.rs @@ -0,0 +1,419 @@ +//! This module test the search cutoff and ensure a few things: +//! 1. A basic test works and mark the search as degraded +//! 2. A test that ensure the filters are affectively applied even with a cutoff of 0 +//! 3. A test that ensure the cutoff works well with the ranking scores + +use std::time::Duration; + +use big_s::S; +use maplit::hashset; +use meili_snap::snapshot; + +use crate::index::tests::TempIndex; +use crate::{Criterion, Filter, Search, TimeBudget}; + +fn create_index() -> TempIndex { + let index = TempIndex::new(); + + index + .update_settings(|s| { + s.set_primary_key("id".to_owned()); + s.set_searchable_fields(vec!["text".to_owned()]); + s.set_filterable_fields(hashset! { S("id") }); + s.set_criteria(vec![Criterion::Words, Criterion::Typo]); + }) + .unwrap(); + + // reverse the ID / insertion order so we see better what was sorted from what got the insertion order ordering + index + .add_documents(documents!([ + { + "id": 4, + "text": "hella puppo kefir", + }, + { + "id": 3, + "text": "hella puppy kefir", + }, + { + "id": 2, + "text": "hello", + }, + { + "id": 1, + "text": "hello puppy", + }, + { + "id": 0, + "text": "hello puppy kefir", + }, + ])) + .unwrap(); + index +} + +#[test] +fn basic_degraded_search() { + let index = create_index(); + let rtxn = index.read_txn().unwrap(); + + let mut search = Search::new(&rtxn, &index); + search.query("hello puppy kefir"); + search.limit(3); + search.time_budget(TimeBudget::new(Duration::from_millis(0))); + + let result = search.execute().unwrap(); + assert!(result.degraded); +} + +#[test] +fn degraded_search_cannot_skip_filter() { + let index = create_index(); + let rtxn = index.read_txn().unwrap(); + + let mut search = Search::new(&rtxn, &index); + search.query("hello puppy kefir"); + search.limit(100); + search.time_budget(TimeBudget::new(Duration::from_millis(0))); + let filter_condition = Filter::from_str("id > 2").unwrap().unwrap(); + search.filter(filter_condition); + + let result = search.execute().unwrap(); + assert!(result.degraded); + snapshot!(format!("{:?}\n{:?}", result.candidates, result.documents_ids), @r###" + RoaringBitmap<[0, 1]> + [0, 1] + "###); +} + +#[test] +fn degraded_search_and_score_details() { + let index = create_index(); + let rtxn = index.read_txn().unwrap(); + + let mut search = Search::new(&rtxn, &index); + search.query("hello puppy kefir"); + search.limit(4); + search.time_budget(TimeBudget::max()); + + let result = search.execute().unwrap(); + snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" + [ + 4, + 1, + 0, + 3, + ] + [ + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Typo( + Typo { + typo_count: 0, + max_typo_count: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + 
}, + ), + Typo( + Typo { + typo_count: 1, + max_typo_count: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 2, + max_matching_words: 3, + }, + ), + ], + ] + "###); + + // Do ONE loop iteration. Not much can be deduced, almost everyone matched the words first bucket. + search.time_budget(TimeBudget::max().with_stop_after(1)); + + let result = search.execute().unwrap(); + snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" + [ + 0, + 1, + 4, + 2, + ] + [ + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Skipped, + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Skipped, + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Skipped, + ], + [ + Skipped, + ], + ] + "###); + + // Do TWO loop iterations. The first document should be entirely sorted + search.time_budget(TimeBudget::max().with_stop_after(2)); + + let result = search.execute().unwrap(); + snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" + [ + 4, + 0, + 1, + 2, + ] + [ + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Typo( + Typo { + typo_count: 0, + max_typo_count: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Skipped, + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Skipped, + ], + [ + Skipped, + ], + ] + "###); + + // Do THREE loop iterations. The second document should be entirely sorted as well + search.time_budget(TimeBudget::max().with_stop_after(3)); + + let result = search.execute().unwrap(); + snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" + [ + 4, + 1, + 0, + 2, + ] + [ + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Typo( + Typo { + typo_count: 0, + max_typo_count: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Typo( + Typo { + typo_count: 1, + max_typo_count: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Skipped, + ], + [ + Skipped, + ], + ] + "###); + + // Do FOUR loop iterations. The third document should be entirely sorted as well + // The words bucket have still not progressed thus the last document doesn't have any info yet. + search.time_budget(TimeBudget::max().with_stop_after(4)); + + let result = search.execute().unwrap(); + snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" + [ + 4, + 1, + 0, + 2, + ] + [ + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Typo( + Typo { + typo_count: 0, + max_typo_count: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Typo( + Typo { + typo_count: 1, + max_typo_count: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + ], + [ + Skipped, + ], + ] + "###); + + // After FIVE loop iteration. The words ranking rule gave us a new bucket. + // Since we reached the limit we were able to early exit without checking the typo ranking rule. 
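+ // (One "loop iteration" here is one call to `TimeBudget::exceeded()` at the top of the
+ // bucket-sort loop, which is exactly what `with_stop_after` counts.)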
+ search.time_budget(TimeBudget::max().with_stop_after(5)); + + let result = search.execute().unwrap(); + snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" + [ + 4, + 1, + 0, + 3, + ] + [ + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Typo( + Typo { + typo_count: 0, + max_typo_count: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + Typo( + Typo { + typo_count: 1, + max_typo_count: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 3, + max_matching_words: 3, + }, + ), + ], + [ + Words( + Words { + matching_words: 2, + max_matching_words: 3, + }, + ), + ], + ] + "###); +} diff --git a/milli/src/search/new/tests/mod.rs b/milli/src/search/new/tests/mod.rs index e500d16fb..26199b79b 100644 --- a/milli/src/search/new/tests/mod.rs +++ b/milli/src/search/new/tests/mod.rs @@ -1,5 +1,6 @@ pub mod attribute_fid; pub mod attribute_position; +pub mod cutoff; pub mod distinct; pub mod exactness; pub mod geo_sort; diff --git a/milli/tests/search/mod.rs b/milli/tests/search/mod.rs index ab6befa60..9193ab762 100644 --- a/milli/tests/search/mod.rs +++ b/milli/tests/search/mod.rs @@ -1,19 +1,14 @@ use std::cmp::Reverse; use std::collections::HashSet; use std::io::Cursor; -use std::time::Duration; use big_s::S; use either::{Either, Left, Right}; use heed::EnvOpenOptions; use maplit::{btreemap, hashset}; -use meili_snap::snapshot; use milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader}; use milli::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings}; -use milli::{ - AscDesc, Criterion, DocumentId, Filter, Index, Member, Object, Search, TermsMatchingStrategy, - TimeBudget, -}; +use milli::{AscDesc, Criterion, DocumentId, Index, Member, Object, TermsMatchingStrategy}; use serde::{Deserialize, Deserializer}; use slice_group_by::GroupBy; @@ -354,41 +349,3 @@ where let result = serde_json::Value::deserialize(deserializer)?; Ok(Some(result)) } - -#[test] -fn basic_degraded_search() { - use Criterion::*; - let criteria = vec![Words, Typo, Proximity, Attribute, Exactness]; - let index = setup_search_index_with_criteria(&criteria); - let rtxn = index.read_txn().unwrap(); - - let mut search = Search::new(&rtxn, &index); - search.query(TEST_QUERY); - search.limit(EXTERNAL_DOCUMENTS_IDS.len()); - search.time_budget(TimeBudget::new(Duration::from_millis(0))); - - let result = search.execute().unwrap(); - assert!(result.degraded); -} - -#[test] -fn degraded_search_cannot_skip_filter() { - use Criterion::*; - let criteria = vec![Words, Typo, Proximity, Attribute, Exactness]; - let index = setup_search_index_with_criteria(&criteria); - let rtxn = index.read_txn().unwrap(); - - let mut search = Search::new(&rtxn, &index); - search.query(TEST_QUERY); - search.limit(EXTERNAL_DOCUMENTS_IDS.len()); - search.time_budget(TimeBudget::new(Duration::from_millis(0))); - let filter_condition = Filter::from_str("tag = etiopia").unwrap().unwrap(); - search.filter(filter_condition); - - let result = search.execute().unwrap(); - assert!(result.degraded); - snapshot!(format!("{:?}\n{:?}", result.candidates, result.documents_ids), @r###" - RoaringBitmap<[0, 2, 5, 8, 11, 14]> - [0, 2, 5, 8, 11, 14] - "###); -} From ad9192fbbf38a29413b20cdf7678a522673b8ad7 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 14 Mar 2024 17:42:33 +0100 Subject: [PATCH 06/19] reduce the size of an integration test --- meilisearch/tests/search/mod.rs | 46 +++++---------------------------- 1 file changed, 6 
insertions(+), 40 deletions(-) diff --git a/meilisearch/tests/search/mod.rs b/meilisearch/tests/search/mod.rs index 62dd73c63..8c947a329 100644 --- a/meilisearch/tests/search/mod.rs +++ b/meilisearch/tests/search/mod.rs @@ -850,6 +850,7 @@ async fn test_degraded_score_details() { .search( json!({ "q": "b", + "attributesToRetrieve": ["doggos.name", "cattos"], "showRankingScoreDetails": true, }), |response, code| { @@ -857,81 +858,46 @@ async fn test_degraded_score_details() { meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" [ { - "id": 852, - "father": "jean", - "mother": "michelle", "doggos": [ { - "name": "bobby", - "age": 2 + "name": "bobby" }, { - "name": "buddy", - "age": 4 + "name": "buddy" } ], "cattos": "pésti", - "_vectors": { - "manual": [ - 1, - 2, - 3 - ] - }, "_rankingScoreDetails": { "skipped": 0.0 } }, { - "id": 654, - "father": "pierre", - "mother": "sabine", "doggos": [ { - "name": "gros bill", - "age": 8 + "name": "gros bill" } ], "cattos": [ "simba", "pestiféré" ], - "_vectors": { - "manual": [ - 1, - 2, - 54 - ] - }, "_rankingScoreDetails": { "skipped": 0.0 } }, { - "id": 951, - "father": "jean-baptiste", - "mother": "sophie", "doggos": [ { - "name": "turbo", - "age": 5 + "name": "turbo" }, { - "name": "fast", - "age": 6 + "name": "fast" } ], "cattos": [ "moumoute", "gomez" ], - "_vectors": { - "manual": [ - 10, - 23, - 32 - ] - }, "_rankingScoreDetails": { "skipped": 0.0 } From 038c26c118ef041e5843f29b5f4862c879e39979 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 14 Mar 2024 17:52:08 +0100 Subject: [PATCH 07/19] stop returning the degraded boolean when a search was cutoff --- meilisearch/src/search.rs | 3 +- meilisearch/tests/search/mod.rs | 95 ++++++++++++++++++--------------- 2 files changed, 53 insertions(+), 45 deletions(-) diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index f83e14187..0333eb0d5 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -324,7 +324,8 @@ pub struct SearchResult { #[serde(skip_serializing_if = "Option::is_none")] pub facet_stats: Option>, - #[serde(skip_serializing_if = "std::ops::Not::not")] + // This information is only used for analytics purposes + #[serde(skip)] pub degraded: bool, } diff --git a/meilisearch/tests/search/mod.rs b/meilisearch/tests/search/mod.rs index 8c947a329..3e5c4278a 100644 --- a/meilisearch/tests/search/mod.rs +++ b/meilisearch/tests/search/mod.rs @@ -855,54 +855,61 @@ async fn test_degraded_score_details() { }), |response, code| { meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response["hits"]), @r###" - [ - { - "doggos": [ - { - "name": "bobby" - }, - { - "name": "buddy" + meili_snap::snapshot!(meili_snap::json_string!(response), @r###" + { + "hits": [ + { + "doggos": [ + { + "name": "bobby" + }, + { + "name": "buddy" + } + ], + "cattos": "pésti", + "_rankingScoreDetails": { + "skipped": 0.0 } - ], - "cattos": "pésti", - "_rankingScoreDetails": { - "skipped": 0.0 - } - }, - { - "doggos": [ - { - "name": "gros bill" + }, + { + "doggos": [ + { + "name": "gros bill" + } + ], + "cattos": [ + "simba", + "pestiféré" + ], + "_rankingScoreDetails": { + "skipped": 0.0 } - ], - "cattos": [ - "simba", - "pestiféré" - ], - "_rankingScoreDetails": { - "skipped": 0.0 - } - }, - { - "doggos": [ - { - "name": "turbo" - }, - { - "name": "fast" + }, + { + "doggos": [ + { + "name": "turbo" + }, + { + "name": "fast" + } + ], + "cattos": [ + "moumoute", + "gomez" + ], + "_rankingScoreDetails": { + "skipped": 0.0 } - ], - 
"cattos": [ - "moumoute", - "gomez" - ], - "_rankingScoreDetails": { - "skipped": 0.0 } - } - ] + ], + "query": "b", + "processingTimeMs": 0, + "limit": 20, + "offset": 0, + "estimatedTotalHits": 3 + } "###); }, ) From 6a0c399c2f827a46600deda0b0d3695d1ed6af19 Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 18 Mar 2024 12:06:00 +0100 Subject: [PATCH 08/19] rename the search_cutoff parameter to search_cutoff_ms --- dump/src/lib.rs | 2 +- dump/src/reader/compat/v5_to_v6.rs | 2 +- meilisearch-types/src/settings.rs | 22 +++++++++--------- meilisearch/src/routes/indexes/settings.rs | 12 +++++----- meilisearch/tests/common/mod.rs | 1 + meilisearch/tests/dumps/mod.rs | 26 +++++++++++----------- meilisearch/tests/search/mod.rs | 6 ++--- meilisearch/tests/settings/get_settings.rs | 6 ++--- 8 files changed, 39 insertions(+), 38 deletions(-) diff --git a/dump/src/lib.rs b/dump/src/lib.rs index e7cadacbe..a7af2d5d0 100644 --- a/dump/src/lib.rs +++ b/dump/src/lib.rs @@ -277,7 +277,7 @@ pub(crate) mod test { }), pagination: Setting::NotSet, embedders: Setting::NotSet, - search_cutoff: Setting::NotSet, + search_cutoff_ms: Setting::NotSet, _kind: std::marker::PhantomData, }; settings.check() diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs index 2b8997847..a883f0ba0 100644 --- a/dump/src/reader/compat/v5_to_v6.rs +++ b/dump/src/reader/compat/v5_to_v6.rs @@ -379,7 +379,7 @@ impl From> for v6::Settings { v5::Setting::NotSet => v6::Setting::NotSet, }, embedders: v6::Setting::NotSet, - search_cutoff: v6::Setting::NotSet, + search_cutoff_ms: v6::Setting::NotSet, _kind: std::marker::PhantomData, } } diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs index d05201943..23fe98347 100644 --- a/meilisearch-types/src/settings.rs +++ b/meilisearch-types/src/settings.rs @@ -204,7 +204,7 @@ pub struct Settings { pub embedders: Setting>>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] #[deserr(default, error = DeserrJsonError)] - pub search_cutoff: Setting, + pub search_cutoff_ms: Setting, #[serde(skip)] #[deserr(skip)] @@ -230,7 +230,7 @@ impl Settings { faceting: Setting::Reset, pagination: Setting::Reset, embedders: Setting::Reset, - search_cutoff: Setting::Reset, + search_cutoff_ms: Setting::Reset, _kind: PhantomData, } } @@ -253,7 +253,7 @@ impl Settings { faceting, pagination, embedders, - search_cutoff, + search_cutoff_ms, .. 
} = self; @@ -274,7 +274,7 @@ impl Settings { faceting, pagination, embedders, - search_cutoff, + search_cutoff_ms, _kind: PhantomData, } } @@ -321,7 +321,7 @@ impl Settings { faceting: self.faceting, pagination: self.pagination, embedders: self.embedders, - search_cutoff: self.search_cutoff, + search_cutoff_ms: self.search_cutoff_ms, _kind: PhantomData, } } @@ -371,7 +371,7 @@ pub fn apply_settings_to_builder( faceting, pagination, embedders, - search_cutoff, + search_cutoff_ms, _kind, } = settings; @@ -548,7 +548,7 @@ pub fn apply_settings_to_builder( Setting::NotSet => (), } - match search_cutoff { + match search_cutoff_ms { Setting::Set(cutoff) => builder.set_search_cutoff(*cutoff), Setting::Reset => builder.reset_search_cutoff(), Setting::NotSet => (), @@ -641,7 +641,7 @@ pub fn settings( .collect(); let embedders = if embedders.is_empty() { Setting::NotSet } else { Setting::Set(embedders) }; - let search_cutoff = index.search_cutoff(rtxn)?; + let search_cutoff_ms = index.search_cutoff(rtxn)?; Ok(Settings { displayed_attributes: match displayed_attributes { @@ -669,7 +669,7 @@ pub fn settings( faceting: Setting::Set(faceting), pagination: Setting::Set(pagination), embedders, - search_cutoff: match search_cutoff { + search_cutoff_ms: match search_cutoff_ms { Some(cutoff) => Setting::Set(cutoff), None => Setting::Reset, }, @@ -823,7 +823,7 @@ pub(crate) mod test { faceting: Setting::NotSet, pagination: Setting::NotSet, embedders: Setting::NotSet, - search_cutoff: Setting::NotSet, + search_cutoff_ms: Setting::NotSet, _kind: PhantomData::, }; @@ -850,7 +850,7 @@ pub(crate) mod test { faceting: Setting::NotSet, pagination: Setting::NotSet, embedders: Setting::NotSet, - search_cutoff: Setting::NotSet, + search_cutoff_ms: Setting::NotSet, _kind: PhantomData::, }; diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index 41fc58a87..4c03eb1a1 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -626,19 +626,19 @@ fn embedder_analytics( } make_setting_route!( - "/search-cutoff", + "/search-cutoff-ms", put, u64, meilisearch_types::deserr::DeserrJsonError< meilisearch_types::error::deserr_codes::InvalidSettingsSearchCutoff, >, - search_cutoff, - "searchCutoff", + search_cutoff_ms, + "searchCutoffMs", analytics, |setting: &Option, req: &HttpRequest| { analytics.publish( "Search Cutoff Updated".to_string(), - serde_json::json!({"search_cutoff": setting }), + serde_json::json!({"search_cutoff_ms": setting }), Some(req), ); } @@ -675,7 +675,7 @@ generate_configure!( pagination, faceting, embedders, - search_cutoff + search_cutoff_ms ); pub async fn update_all( @@ -787,7 +787,7 @@ pub async fn update_all( "total": new_settings.synonyms.as_ref().set().map(|synonyms| synonyms.len()), }, "embedders": crate::routes::indexes::settings::embedder_analytics(new_settings.embedders.as_ref().set()), - "search_cutoff": new_settings.search_cutoff.as_ref().set(), + "search_cutoff_ms": new_settings.search_cutoff_ms.as_ref().set(), }), Some(&req), ); diff --git a/meilisearch/tests/common/mod.rs b/meilisearch/tests/common/mod.rs index 2b9e5e1d7..3117dd185 100644 --- a/meilisearch/tests/common/mod.rs +++ b/meilisearch/tests/common/mod.rs @@ -16,6 +16,7 @@ pub use server::{default_settings, Server}; pub struct Value(pub serde_json::Value); impl Value { + #[track_caller] pub fn uid(&self) -> u64 { if let Some(uid) = self["uid"].as_u64() { uid diff --git a/meilisearch/tests/dumps/mod.rs b/meilisearch/tests/dumps/mod.rs 
index 7bf97f8b2..1a31437f8 100644 --- a/meilisearch/tests/dumps/mod.rs +++ b/meilisearch/tests/dumps/mod.rs @@ -78,7 +78,7 @@ async fn import_dump_v1_movie_raw() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -240,7 +240,7 @@ async fn import_dump_v1_movie_with_settings() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -388,7 +388,7 @@ async fn import_dump_v1_rubygems_with_settings() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -522,7 +522,7 @@ async fn import_dump_v2_movie_raw() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -668,7 +668,7 @@ async fn import_dump_v2_movie_with_settings() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -813,7 +813,7 @@ async fn import_dump_v2_rubygems_with_settings() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -947,7 +947,7 @@ async fn import_dump_v3_movie_raw() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -1093,7 +1093,7 @@ async fn import_dump_v3_movie_with_settings() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -1238,7 +1238,7 @@ async fn import_dump_v3_rubygems_with_settings() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -1372,7 +1372,7 @@ async fn import_dump_v4_movie_raw() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -1518,7 +1518,7 @@ async fn import_dump_v4_movie_with_settings() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -1663,7 +1663,7 @@ async fn import_dump_v4_rubygems_with_settings() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "### ); @@ -1908,7 +1908,7 @@ async fn import_dump_v6_containing_experimental_features() { "pagination": { "maxTotalHits": 1000 }, - "searchCutoff": null + "searchCutoffMs": null } "###); diff --git a/meilisearch/tests/search/mod.rs b/meilisearch/tests/search/mod.rs index 3e5c4278a..971539a31 100644 --- a/meilisearch/tests/search/mod.rs +++ b/meilisearch/tests/search/mod.rs @@ -843,7 +843,7 @@ async fn test_degraded_score_details() { index.add_documents(json!(documents), None).await; // We can't really use anything else than 0ms here; otherwise, the test will get flaky. 
- let (res, _code) = index.update_settings(json!({ "searchCutoff": 0 })).await; + let (res, _code) = index.update_settings(json!({ "searchCutoffMs": 0 })).await; index.wait_task(res.uid()).await; index @@ -855,7 +855,7 @@ async fn test_degraded_score_details() { }), |response, code| { meili_snap::snapshot!(code, @"200 OK"); - meili_snap::snapshot!(meili_snap::json_string!(response), @r###" + meili_snap::snapshot!(meili_snap::json_string!(response, { ".processingTimeMs" => "[duration]" }), @r###" { "hits": [ { @@ -905,7 +905,7 @@ async fn test_degraded_score_details() { } ], "query": "b", - "processingTimeMs": 0, + "processingTimeMs": "[duration]", "limit": 20, "offset": 0, "estimatedTotalHits": 3 diff --git a/meilisearch/tests/settings/get_settings.rs b/meilisearch/tests/settings/get_settings.rs index d573f38e0..09e38e55a 100644 --- a/meilisearch/tests/settings/get_settings.rs +++ b/meilisearch/tests/settings/get_settings.rs @@ -35,7 +35,7 @@ static DEFAULT_SETTINGS_VALUES: Lazy> = Lazy::new(| "maxTotalHits": json!(1000), }), ); - map.insert("search_cutoff", json!(null)); + map.insert("search_cutoff_ms", json!(null)); map }); @@ -85,7 +85,7 @@ async fn get_settings() { }) ); assert_eq!(settings["proximityPrecision"], json!("byWord")); - assert_eq!(settings["searchCutoff"], json!(null)); + assert_eq!(settings["searchCutoffMs"], json!(null)); } #[actix_rt::test] @@ -288,7 +288,7 @@ test_setting_routes!( synonyms put, pagination patch, faceting patch, - search_cutoff put + search_cutoff_ms put ); #[actix_rt::test] From 7bd881b9bcdeb2e84b9ec4870584d11efa580897 Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 18 Mar 2024 18:39:05 +0100 Subject: [PATCH 09/19] adds the degraded searches to the prometheus dashboard --- assets/grafana-dashboard.json | 64 ++++++++++++++++++++++++ meilisearch/src/metrics.rs | 5 ++ meilisearch/src/routes/indexes/search.rs | 4 ++ 3 files changed, 73 insertions(+) diff --git a/assets/grafana-dashboard.json b/assets/grafana-dashboard.json index 37f7b1ca2..74a456b97 100644 --- a/assets/grafana-dashboard.json +++ b/assets/grafana-dashboard.json @@ -238,6 +238,70 @@ "title": "Total Searches (1h)", "type": "gauge" }, + { + "datasource": { + "type": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "thresholds" + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 8, + "y": 1 + }, + "id": 26, + "options": { + "orientation": "auto", + "reduceOptions": { + "calcs": [ + "lastNotNull" + ], + "fields": "", + "values": false + }, + "showThresholdLabels": false, + "showThresholdMarkers": true, + "text": {} + }, + "pluginVersion": "9.5.2", + "targets": [ + { + "datasource": { + "type": "prometheus" + }, + "editorMode": "builder", + "exemplar": true, + "expr": "round(increase(meilisearch_degraded_search_requests{job=\"$job\"}[1h]))", + "interval": "", + "legendFormat": "", + "range": true, + "refId": "A" + } + ], + "title": "Total Degraded Searches (1h)", + "type": "gauge" + }, { "datasource": { "type": "prometheus" diff --git a/meilisearch/src/metrics.rs b/meilisearch/src/metrics.rs index bfe704979..652e6c227 100644 --- a/meilisearch/src/metrics.rs +++ b/meilisearch/src/metrics.rs @@ -22,6 +22,11 @@ lazy_static! 
{ &["method", "path"] ) .expect("Can't create a metric"); + pub static ref MEILISEARCH_DEGRADED_SEARCH_REQUESTS: IntGauge = register_int_gauge!(opts!( + "meilisearch_degraded_search_requests", + "Meilisearch number of degraded search requests" + )) + .expect("Can't create a metric"); pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge = register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch DB Size In Bytes")) .expect("Can't create a metric"); diff --git a/meilisearch/src/routes/indexes/search.rs b/meilisearch/src/routes/indexes/search.rs index 3adfce970..6a430b6a3 100644 --- a/meilisearch/src/routes/indexes/search.rs +++ b/meilisearch/src/routes/indexes/search.rs @@ -17,6 +17,7 @@ use crate::analytics::{Analytics, SearchAggregator}; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::GuardedData; use crate::extractors::sequential_extractor::SeqHandler; +use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS; use crate::search::{ add_search_rules, perform_search, HybridQuery, MatchingStrategy, SearchQuery, SemanticRatio, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, @@ -247,6 +248,9 @@ pub async fn search_with_post( .await?; if let Ok(ref search_result) = search_result { aggregate.succeed(search_result); + if search_result.degraded { + MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc(); + } } analytics.post_search(aggregate); From 4369e9e97c47401066f4a2c076ebd717f0fccf5b Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 19 Mar 2024 11:14:28 +0100 Subject: [PATCH 10/19] add an error code test on the setting --- meilisearch-types/src/error.rs | 2 +- meilisearch-types/src/settings.rs | 2 +- meilisearch/src/routes/indexes/settings.rs | 3 +-- meilisearch/tests/common/index.rs | 5 ++++ meilisearch/tests/settings/errors.rs | 28 ++++++++++++++++++++++ 5 files changed, 36 insertions(+), 4 deletions(-) diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index bf9492ff6..aed77411a 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -259,7 +259,7 @@ InvalidSettingsProximityPrecision , InvalidRequest , BAD_REQUEST ; InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ; InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ; InvalidSettingsPagination , InvalidRequest , BAD_REQUEST ; -InvalidSettingsSearchCutoff , InvalidRequest , BAD_REQUEST ; +InvalidSettingsSearchCutoffMs , InvalidRequest , BAD_REQUEST ; InvalidSettingsEmbedders , InvalidRequest , BAD_REQUEST ; InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ; InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ; diff --git a/meilisearch-types/src/settings.rs b/meilisearch-types/src/settings.rs index 23fe98347..5480e72c6 100644 --- a/meilisearch-types/src/settings.rs +++ b/meilisearch-types/src/settings.rs @@ -203,7 +203,7 @@ pub struct Settings { #[deserr(default, error = DeserrJsonError)] pub embedders: Setting>>, #[serde(default, skip_serializing_if = "Setting::is_not_set")] - #[deserr(default, error = DeserrJsonError)] + #[deserr(default, error = DeserrJsonError)] pub search_cutoff_ms: Setting, #[serde(skip)] diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index 4c03eb1a1..5dabd7b0d 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -138,7 +138,6 @@ macro_rules! 
make_setting_route { debug!(returns = ?settings, "Update settings"); let mut json = serde_json::json!(&settings); - dbg!(&json); let val = json[$camelcase_attr].take(); Ok(HttpResponse::Ok().json(val)) @@ -630,7 +629,7 @@ make_setting_route!( put, u64, meilisearch_types::deserr::DeserrJsonError< - meilisearch_types::error::deserr_codes::InvalidSettingsSearchCutoff, + meilisearch_types::error::deserr_codes::InvalidSettingsSearchCutoffMs, >, search_cutoff_ms, "searchCutoffMs", diff --git a/meilisearch/tests/common/index.rs b/meilisearch/tests/common/index.rs index 16fc10e98..9ed6a6077 100644 --- a/meilisearch/tests/common/index.rs +++ b/meilisearch/tests/common/index.rs @@ -328,6 +328,11 @@ impl Index<'_> { self.service.patch_encoded(url, settings, self.encoder).await } + pub async fn update_settings_search_cutoff_ms(&self, settings: Value) -> (Value, StatusCode) { + let url = format!("/indexes/{}/settings/search-cutoff-ms", urlencode(self.uid.as_ref())); + self.service.put_encoded(url, settings, self.encoder).await + } + pub async fn delete_settings(&self) -> (Value, StatusCode) { let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref())); self.service.delete(url).await diff --git a/meilisearch/tests/settings/errors.rs b/meilisearch/tests/settings/errors.rs index 687cef1f8..2bd17d649 100644 --- a/meilisearch/tests/settings/errors.rs +++ b/meilisearch/tests/settings/errors.rs @@ -337,3 +337,31 @@ async fn settings_bad_pagination() { } "###); } + +#[actix_rt::test] +async fn settings_bad_search_cutoff_ms() { + let server = Server::new().await; + let index = server.index("test"); + + let (response, code) = index.update_settings(json!({ "searchCutoffMs": "doggo" })).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type at `.searchCutoffMs`: expected a positive integer, but found a string: `\"doggo\"`", + "code": "invalid_settings_search_cutoff_ms", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_settings_search_cutoff_ms" + } + "###); + + let (response, code) = index.update_settings_search_cutoff_ms(json!("doggo")).await; + snapshot!(code, @"400 Bad Request"); + snapshot!(json_string!(response), @r###" + { + "message": "Invalid value type: expected a positive integer, but found a string: `\"doggo\"`", + "code": "invalid_settings_search_cutoff_ms", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_settings_search_cutoff_ms" + } + "###); +} From 2a92c041006630e0ef573b159acfdd7bd6cfceac Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 19 Mar 2024 11:31:32 +0100 Subject: [PATCH 11/19] Adding new assets --- BENCHMARKS.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/BENCHMARKS.md b/BENCHMARKS.md index dd69864cc..b3d311c45 100644 --- a/BENCHMARKS.md +++ b/BENCHMARKS.md @@ -317,6 +317,14 @@ They are JSON files with the following structure (comments are not actually supp } ``` +### Adding new assets + +Assets reside in our DigitalOcean S3 space. Assuming you have team access to the DigitalOcean S3 space: + +1. go to +2. upload your dataset: + 1. if your dataset is a single file, upload that single file using the "upload" button, + 2. otherwise, create a folder using the "create folder" button, then inside that folder upload your individual files. 
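+
+You may also want to record the checksum of each uploaded file, so that the workload's asset entry can declare the expected `sha256` (the file name below is only an example):
+
+```sh
+sha256sum my_dataset.ndjson
+```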
## Upgrading `https://bench.meilisearch.dev` From bfec9468d47414e7f260261504ed46a83c291e65 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 19 Mar 2024 14:49:15 +0100 Subject: [PATCH 12/19] Update milli/src/search/mod.rs Co-authored-by: Louis Dureuil --- milli/src/search/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/milli/src/search/mod.rs b/milli/src/search/mod.rs index f6ab8a7de..b3dd0c091 100644 --- a/milli/src/search/mod.rs +++ b/milli/src/search/mod.rs @@ -260,7 +260,7 @@ impl fmt::Debug for Search<'_> { .field("words_limit", words_limit) .field("distribution_shift", distribution_shift) .field("embedder_name", embedder_name) - .field("time_bduget", time_budget) + .field("time_budget", time_budget) .finish() } } From 0ae39644f7d97e83c8edfb19f344cbb2eb24fc40 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 19 Mar 2024 15:07:06 +0100 Subject: [PATCH 13/19] fix the facet search --- meilisearch/src/search.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index 0333eb0d5..3c00ca802 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -729,8 +729,11 @@ pub fn perform_facet_search( features: RoFeatures, ) -> Result { let before_search = Instant::now(); - let time_budget = TimeBudget::new(Duration::from_millis(150)); let rtxn = index.read_txn()?; + let time_budget = match index.search_cutoff(&rtxn)? { + Some(cutoff) => TimeBudget::new(Duration::from_millis(cutoff)), + None => TimeBudget::default(), + }; let (search, _, _, _) = prepare_search(index, &rtxn, &search_query, features, None, time_budget)?; From 7b9e0d29442df352a99aee7eab839464cd5764c1 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 19 Mar 2024 15:11:21 +0100 Subject: [PATCH 14/19] forward the degraded parameter to the hybrid search --- milli/src/search/hybrid.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/milli/src/search/hybrid.rs b/milli/src/search/hybrid.rs index 1e14f4430..47ac5f46b 100644 --- a/milli/src/search/hybrid.rs +++ b/milli/src/search/hybrid.rs @@ -10,6 +10,7 @@ struct ScoreWithRatioResult { matching_words: MatchingWords, candidates: RoaringBitmap, document_scores: Vec<(u32, ScoreWithRatio)>, + degraded: bool, } type ScoreWithRatio = (Vec, f32); @@ -72,6 +73,7 @@ impl ScoreWithRatioResult { matching_words: results.matching_words, candidates: results.candidates, document_scores, + degraded: results.degraded, } } @@ -106,7 +108,7 @@ impl ScoreWithRatioResult { candidates: left.candidates | right.candidates, documents_ids, document_scores, - degraded: false, + degraded: left.degraded | right.degraded, } } } From d8fe4fe49d12b36bd9b82963c7e2fcc278d2f894 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 19 Mar 2024 15:45:04 +0100 Subject: [PATCH 15/19] return the order in the score details --- meilisearch/tests/search/mod.rs | 12 +++++++++--- milli/src/score_details.rs | 8 +++----- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/meilisearch/tests/search/mod.rs b/meilisearch/tests/search/mod.rs index 971539a31..88470187a 100644 --- a/meilisearch/tests/search/mod.rs +++ b/meilisearch/tests/search/mod.rs @@ -869,7 +869,9 @@ async fn test_degraded_score_details() { ], "cattos": "pésti", "_rankingScoreDetails": { - "skipped": 0.0 + "skipped": { + "order": 0 + } } }, { @@ -883,7 +885,9 @@ async fn test_degraded_score_details() { "pestiféré" ], "_rankingScoreDetails": { - "skipped": 0.0 + "skipped": { + "order": 0 + } } }, { @@ -900,7 +904,9 @@ async fn 
test_degraded_score_details() { "gomez" ], "_rankingScoreDetails": { - "skipped": 0.0 + "skipped": { + "order": 0 + } } } ], diff --git a/milli/src/score_details.rs b/milli/src/score_details.rs index f2c6fb58a..08dfcdbb6 100644 --- a/milli/src/score_details.rs +++ b/milli/src/score_details.rs @@ -101,7 +101,7 @@ impl ScoreDetails { ScoreDetails::Vector(vector) => RankOrValue::Score( vector.value_similarity.as_ref().map(|(_, s)| *s as f64).unwrap_or(0.0f64), ), - ScoreDetails::Skipped => RankOrValue::Score(0.), + ScoreDetails::Skipped => RankOrValue::Rank(Rank { rank: 0, max_rank: 1 }), } } @@ -262,10 +262,8 @@ impl ScoreDetails { order += 1; } ScoreDetails::Skipped => { - details_map.insert( - "skipped".to_string(), - serde_json::Number::from_f64(0.).unwrap().into(), - ); + details_map + .insert("skipped".to_string(), serde_json::json!({ "order": order })); order += 1; } } From 098ab594eb156f5ba34ee4db81893f4e2c146b1f Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 19 Mar 2024 17:32:32 +0100 Subject: [PATCH 16/19] A score of 0.0 is now lesser than a sort result MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit handles the niche case 🐩 in the hybrid search where: 1. a sort ranking rule is the first rule. 2. the keyword search is skipped at the first rule. 3. the semantic search is not skipped at the first rule. Previously, we would have the skipped search winning, whereas we want the non skipped one winning. --- milli/src/search/hybrid.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/milli/src/search/hybrid.rs b/milli/src/search/hybrid.rs index 47ac5f46b..a8b7f0fcf 100644 --- a/milli/src/search/hybrid.rs +++ b/milli/src/search/hybrid.rs @@ -50,8 +50,12 @@ fn compare_scores( order => return order, } } - (Some(ScoreValue::Score(_)), Some(_)) => return Ordering::Greater, - (Some(_), Some(ScoreValue::Score(_))) => return Ordering::Less, + (Some(ScoreValue::Score(x)), Some(_)) => { + return if x == 0. { Ordering::Less } else { Ordering::Greater } + } + (Some(_), Some(ScoreValue::Score(x))) => { + return if x == 0. 
{ Ordering::Greater } else { Ordering::Less } + } // if we have this, we're bad (Some(ScoreValue::GeoSort(_)), Some(ScoreValue::Sort(_))) | (Some(ScoreValue::Sort(_)), Some(ScoreValue::GeoSort(_))) => { From 2c3af8e51379b698276a23864c229cafc3984d77 Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 19 Mar 2024 18:07:11 +0100 Subject: [PATCH 17/19] query the detailed score detail in the test --- milli/src/search/new/tests/cutoff.rs | 31 ++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/milli/src/search/new/tests/cutoff.rs b/milli/src/search/new/tests/cutoff.rs index 4256abc2b..664b139f3 100644 --- a/milli/src/search/new/tests/cutoff.rs +++ b/milli/src/search/new/tests/cutoff.rs @@ -10,6 +10,7 @@ use maplit::hashset; use meili_snap::snapshot; use crate::index::tests::TempIndex; +use crate::score_details::ScoringStrategy; use crate::{Criterion, Filter, Search, TimeBudget}; fn create_index() -> TempIndex { @@ -94,6 +95,7 @@ fn degraded_search_and_score_details() { let mut search = Search::new(&rtxn, &index); search.query("hello puppy kefir"); search.limit(4); + search.scoring_strategy(ScoringStrategy::Detailed); search.time_budget(TimeBudget::max()); let result = search.execute().unwrap(); @@ -140,6 +142,12 @@ fn degraded_search_and_score_details() { max_matching_words: 3, }, ), + Typo( + Typo { + typo_count: 2, + max_typo_count: 3, + }, + ), ], [ Words( @@ -148,6 +156,12 @@ fn degraded_search_and_score_details() { max_matching_words: 3, }, ), + Typo( + Typo { + typo_count: 0, + max_typo_count: 2, + }, + ), ], ] "###); @@ -350,6 +364,12 @@ fn degraded_search_and_score_details() { max_matching_words: 3, }, ), + Typo( + Typo { + typo_count: 2, + max_typo_count: 3, + }, + ), ], [ Skipped, @@ -357,9 +377,9 @@ fn degraded_search_and_score_details() { ] "###); - // After FIVE loop iteration. The words ranking rule gave us a new bucket. + // After SIX loop iteration. The words ranking rule gave us a new bucket. // Since we reached the limit we were able to early exit without checking the typo ranking rule. 
- search.time_budget(TimeBudget::max().with_stop_after(5)); + search.time_budget(TimeBudget::max().with_stop_after(6)); let result = search.execute().unwrap(); snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" @@ -405,6 +425,12 @@ fn degraded_search_and_score_details() { max_matching_words: 3, }, ), + Typo( + Typo { + typo_count: 2, + max_typo_count: 3, + }, + ), ], [ Words( @@ -413,6 +439,7 @@ fn degraded_search_and_score_details() { max_matching_words: 3, }, ), + Skipped, ], ] "###); From 6079141ea6d77ac08a4b2c44ecc5f7fb07feb57f Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 19 Mar 2024 18:30:14 +0100 Subject: [PATCH 18/19] snapshot the scores side by side with the score details --- milli/src/search/new/tests/cutoff.rs | 69 +++++++++++----------------- 1 file changed, 26 insertions(+), 43 deletions(-) diff --git a/milli/src/search/new/tests/cutoff.rs b/milli/src/search/new/tests/cutoff.rs index 664b139f3..63b67f2e7 100644 --- a/milli/src/search/new/tests/cutoff.rs +++ b/milli/src/search/new/tests/cutoff.rs @@ -10,7 +10,7 @@ use maplit::hashset; use meili_snap::snapshot; use crate::index::tests::TempIndex; -use crate::score_details::ScoringStrategy; +use crate::score_details::{ScoreDetails, ScoringStrategy}; use crate::{Criterion, Filter, Search, TimeBudget}; fn create_index() -> TempIndex { @@ -88,6 +88,7 @@ fn degraded_search_cannot_skip_filter() { } #[test] +#[allow(clippy::format_collect)] // the test is already quite big fn degraded_search_and_score_details() { let index = create_index(); let rtxn = index.read_txn().unwrap(); @@ -99,13 +100,10 @@ fn degraded_search_and_score_details() { search.time_budget(TimeBudget::max()); let result = search.execute().unwrap(); - snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" - [ - 4, - 1, - 0, - 3, - ] + snapshot!(format!("IDs: {:?}\nScores: {}\nScore Details:\n{:#?}", result.documents_ids, result.document_scores.iter().map(|scores| format!("{:.4} ", ScoreDetails::global_score(scores.iter()))).collect::(), result.document_scores), @r###" + IDs: [4, 1, 0, 3] + Scores: 1.0000 0.9167 0.8333 0.6667 + Score Details: [ [ Words( @@ -170,13 +168,10 @@ fn degraded_search_and_score_details() { search.time_budget(TimeBudget::max().with_stop_after(1)); let result = search.execute().unwrap(); - snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" - [ - 0, - 1, - 4, - 2, - ] + snapshot!(format!("IDs: {:?}\nScores: {}\nScore Details:\n{:#?}", result.documents_ids, result.document_scores.iter().map(|scores| format!("{:.4} ", ScoreDetails::global_score(scores.iter()))).collect::(), result.document_scores), @r###" + IDs: [0, 1, 4, 2] + Scores: 0.6667 0.6667 0.6667 0.0000 + Score Details: [ [ Words( @@ -215,13 +210,10 @@ fn degraded_search_and_score_details() { search.time_budget(TimeBudget::max().with_stop_after(2)); let result = search.execute().unwrap(); - snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" - [ - 4, - 0, - 1, - 2, - ] + snapshot!(format!("IDs: {:?}\nScores: {}\nScore Details:\n{:#?}", result.documents_ids, result.document_scores.iter().map(|scores| format!("{:.4} ", ScoreDetails::global_score(scores.iter()))).collect::(), result.document_scores), @r###" + IDs: [4, 0, 1, 2] + Scores: 1.0000 0.6667 0.6667 0.0000 + Score Details: [ [ Words( @@ -265,13 +257,10 @@ fn degraded_search_and_score_details() { search.time_budget(TimeBudget::max().with_stop_after(3)); let result = search.execute().unwrap(); - 
snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" - [ - 4, - 1, - 0, - 2, - ] + snapshot!(format!("IDs: {:?}\nScores: {}\nScore Details:\n{:#?}", result.documents_ids, result.document_scores.iter().map(|scores| format!("{:.4} ", ScoreDetails::global_score(scores.iter()))).collect::(), result.document_scores), @r###" + IDs: [4, 1, 0, 2] + Scores: 1.0000 0.9167 0.6667 0.0000 + Score Details: [ [ Words( @@ -321,13 +310,10 @@ fn degraded_search_and_score_details() { search.time_budget(TimeBudget::max().with_stop_after(4)); let result = search.execute().unwrap(); - snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" - [ - 4, - 1, - 0, - 2, - ] + snapshot!(format!("IDs: {:?}\nScores: {}\nScore Details:\n{:#?}", result.documents_ids, result.document_scores.iter().map(|scores| format!("{:.4} ", ScoreDetails::global_score(scores.iter()))).collect::(), result.document_scores), @r###" + IDs: [4, 1, 0, 2] + Scores: 1.0000 0.9167 0.8333 0.0000 + Score Details: [ [ Words( @@ -382,13 +368,10 @@ fn degraded_search_and_score_details() { search.time_budget(TimeBudget::max().with_stop_after(6)); let result = search.execute().unwrap(); - snapshot!(format!("{:#?}\n{:#?}", result.documents_ids, result.document_scores), @r###" - [ - 4, - 1, - 0, - 3, - ] + snapshot!(format!("IDs: {:?}\nScores: {}\nScore Details:\n{:#?}", result.documents_ids, result.document_scores.iter().map(|scores| format!("{:.4} ", ScoreDetails::global_score(scores.iter()))).collect::(), result.document_scores), @r###" + IDs: [4, 1, 0, 3] + Scores: 1.0000 0.9167 0.8333 0.3333 + Score Details: [ [ Words( From c5322df519ba9bb7c1010e5b8cf14edef5b8d168 Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 20 Mar 2024 10:08:28 +0100 Subject: [PATCH 19/19] Revert "Revert "Merge remote-tracking branch 'origin/main' into release-v1.7.1"" --- .github/workflows/bench-pr.yml | 2 +- .github/workflows/milestone-workflow.yml | 19 + CONTRIBUTING.md | 2 +- Cargo.lock | 195 ++++------- Cargo.toml | 2 +- meilisearch-types/Cargo.toml | 2 +- meilisearch/Cargo.toml | 12 +- meilisearch/src/main.rs | 2 +- meilisearch/src/option.rs | 4 +- meilisearch/src/routes/indexes/settings.rs | 1 + meilisearch/src/search.rs | 21 +- meilisearch/tests/documents/add_documents.rs | 239 ++++++++++++- meilisearch/tests/search/facet_search.rs | 43 +++ milli/src/index.rs | 14 +- milli/src/lib.rs | 7 +- milli/src/order_by_map.rs | 57 +++ milli/src/search/facet/facet_range_search.rs | 4 +- milli/src/search/facet/mod.rs | 3 + milli/src/search/facet/search.rs | 326 ++++++++++++++++++ milli/src/search/mod.rs | 249 +------------ milli/src/search/new/tests/typo_proximity.rs | 2 +- milli/src/update/settings.rs | 14 +- milli/src/vector/error.rs | 39 +++ milli/src/vector/mod.rs | 18 + milli/src/vector/ollama.rs | 307 +++++++++++++++++ milli/src/vector/openai.rs | 20 +- milli/src/vector/settings.rs | 29 +- workloads/settings-add-remove-filters.json | 94 +++++ workloads/settings-proximity-precision.json | 86 +++++ .../settings-remove-add-swap-searchable.json | 114 ++++++ workloads/settings-typo.json | 115 ++++++ xtask/src/bench/dashboard.rs | 312 +++++++++-------- xtask/src/bench/mod.rs | 24 +- xtask/src/bench/workload.rs | 16 +- 34 files changed, 1784 insertions(+), 610 deletions(-) create mode 100644 milli/src/order_by_map.rs create mode 100644 milli/src/search/facet/search.rs create mode 100644 milli/src/vector/ollama.rs create mode 100644 workloads/settings-add-remove-filters.json create mode 100644 
workloads/settings-proximity-precision.json create mode 100644 workloads/settings-remove-add-swap-searchable.json create mode 100644 workloads/settings-typo.json diff --git a/.github/workflows/bench-pr.yml b/.github/workflows/bench-pr.yml index 6f4956542..418a23717 100644 --- a/.github/workflows/bench-pr.yml +++ b/.github/workflows/bench-pr.yml @@ -43,4 +43,4 @@ jobs: - name: Run benchmarks on PR ${{ github.event.issue.id }} run: | - cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "[Comment](${{ github.event.comment.url }}) on [#${{github.event.issue.id}}](${{ github.event.issue.url }})" -- ${{ steps.command.outputs.command-arguments }} \ No newline at end of file + cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "[Comment](${{ github.event.comment.html_url }}) on [#${{ github.event.issue.number }}](${{ github.event.issue.html_url }})" -- ${{ steps.command.outputs.command-arguments }} \ No newline at end of file diff --git a/.github/workflows/milestone-workflow.yml b/.github/workflows/milestone-workflow.yml index 2b8b7bf62..2ede3dc21 100644 --- a/.github/workflows/milestone-workflow.yml +++ b/.github/workflows/milestone-workflow.yml @@ -110,6 +110,25 @@ jobs: --milestone $MILESTONE_VERSION \ --assignee curquiza + create-update-version-issue: + needs: get-release-version + # Create the changelog issue if the release is not only a patch release + if: github.event.action == 'created' + runs-on: ubuntu-latest + env: + ISSUE_TEMPLATE: issue-template.md + steps: + - uses: actions/checkout@v3 + - name: Download the issue template + run: curl -s https://raw.githubusercontent.com/meilisearch/engine-team/main/issue-templates/update-version-issue.md > $ISSUE_TEMPLATE + - name: Create the issue + run: | + gh issue create \ + --title "Update version in Cargo.toml for $MILESTONE_VERSION" \ + --label 'maintenance' \ + --body-file $ISSUE_TEMPLATE \ + --milestone $MILESTONE_VERSION + # ---------------- # MILESTONE CLOSED # ---------------- diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6d6e6076b..f33416820 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,7 +4,7 @@ First, thank you for contributing to Meilisearch! The goal of this document is t Remember that there are many ways to contribute other than writing code: writing [tutorials or blog posts](https://github.com/meilisearch/awesome-meilisearch), improving [the documentation](https://github.com/meilisearch/documentation), submitting [bug reports](https://github.com/meilisearch/meilisearch/issues/new?assignees=&labels=&template=bug_report.md&title=) and [feature requests](https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal)... -The code in this repository is only concerned with managing multiple indexes, handling the update store, and exposing an HTTP API. Search and indexation are the domain of our core engine, [`milli`](https://github.com/meilisearch/milli), while tokenization is handled by [our `charabia` library](https://github.com/meilisearch/charabia/). +Meilisearch can manage multiple indexes, handle the update store, and expose an HTTP API. Search and indexation are the domain of our core engine, [`milli`](https://github.com/meilisearch/meilisearch/tree/main/milli), while tokenization is handled by [our `charabia` library](https://github.com/meilisearch/charabia/). 
If Meilisearch does not offer optimized support for your language, please consider contributing to `charabia` by following the [CONTRIBUTING.md file](https://github.com/meilisearch/charabia/blob/main/CONTRIBUTING.md) and integrating your intended normalizer/segmenter. diff --git a/Cargo.lock b/Cargo.lock index a1527c31c..bdca7e24c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -36,9 +36,9 @@ dependencies = [ [[package]] name = "actix-http" -version = "3.5.1" +version = "3.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "129d4c88e98860e1758c5de288d1632b07970a16d59bdf7b8d66053d582bb71f" +checksum = "d223b13fd481fc0d1f83bb12659ae774d9e3601814c68a0bc539731698cca743" dependencies = [ "actix-codec", "actix-rt", @@ -138,9 +138,9 @@ dependencies = [ [[package]] name = "actix-tls" -version = "3.1.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72616e7fbec0aa99c6f3164677fa48ff5a60036d0799c98cab894a44f3e0efc3" +checksum = "d4cce60a2f2b477bc72e5cde0af1812a6e82d8fd85b5570a5dcf2a5bf2c5be5f" dependencies = [ "actix-rt", "actix-service", @@ -148,13 +148,11 @@ dependencies = [ "futures-core", "impl-more", "pin-project-lite", - "rustls 0.21.6", - "rustls-webpki", "tokio", - "tokio-rustls 0.23.4", + "tokio-rustls", "tokio-util", "tracing", - "webpki-roots 0.22.6", + "webpki-roots", ] [[package]] @@ -169,9 +167,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.4.1" +version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e43428f3bf11dee6d166b00ec2df4e3aa8cc1606aaa0b7433c146852e2f4e03b" +checksum = "43a6556ddebb638c2358714d853257ed226ece6023ef9364f23f0c70737ea984" dependencies = [ "actix-codec", "actix-http", @@ -259,9 +257,9 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.8" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42cd52102d3df161c77a887b608d7a4897d7cc112886a9537b738a887a03aaff" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", "getrandom", @@ -496,7 +494,7 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "benchmarks" -version = "1.7.1" +version = "1.8.0" dependencies = [ "anyhow", "bytes", @@ -630,7 +628,7 @@ dependencies = [ [[package]] name = "build-info" -version = "1.7.1" +version = "1.8.0" dependencies = [ "anyhow", "time", @@ -835,9 +833,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.82" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "305fe645edc1442a0fa8b6726ba61d422798d37a52e12eaecf4b022ebbb88f01" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ "jobserver", "libc", @@ -1531,7 +1529,7 @@ dependencies = [ [[package]] name = "dump" -version = "1.7.1" +version = "1.8.0" dependencies = [ "anyhow", "big_s", @@ -1769,7 +1767,7 @@ dependencies = [ [[package]] name = "file-store" -version = "1.7.1" +version = "1.8.0" dependencies = [ "faux", "tempfile", @@ -1792,7 +1790,7 @@ dependencies = [ [[package]] name = "filter-parser" -version = "1.7.1" +version = "1.8.0" dependencies = [ "insta", "nom", @@ -1812,7 +1810,7 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "1.7.1" +version = "1.8.0" dependencies = [ "criterion", "serde_json", @@ -1930,7 +1928,7 @@ dependencies = [ [[package]] name = "fuzzers" 
-version = "1.7.1" +version = "1.8.0" dependencies = [ "arbitrary", "clap", @@ -2104,8 +2102,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", + "js-sys", "libc", "wasi", + "wasm-bindgen", ] [[package]] @@ -2224,7 +2224,7 @@ dependencies = [ "atomic-polyfill", "hash32", "rustc_version", - "spin 0.9.8", + "spin", "stable_deref_trait", ] @@ -2393,9 +2393,9 @@ dependencies = [ "futures-util", "http 0.2.11", "hyper", - "rustls 0.21.6", + "rustls", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls", ] [[package]] @@ -2422,7 +2422,7 @@ checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d" [[package]] name = "index-scheduler" -version = "1.7.1" +version = "1.8.0" dependencies = [ "anyhow", "big_s", @@ -2609,7 +2609,7 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "1.7.1" +version = "1.8.0" dependencies = [ "criterion", "serde_json", @@ -2617,13 +2617,14 @@ dependencies = [ [[package]] name = "jsonwebtoken" -version = "8.3.0" +version = "9.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" +checksum = "5c7ea04a7c5c055c175f189b6dc6ba036fd62306b58c66c9f6389036c503a3f4" dependencies = [ "base64 0.21.7", + "js-sys", "pem", - "ring 0.16.20", + "ring", "serde", "serde_json", "simple_asn1", @@ -3117,7 +3118,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "meili-snap" -version = "1.7.1" +version = "1.8.0" dependencies = [ "insta", "md5", @@ -3126,7 +3127,7 @@ dependencies = [ [[package]] name = "meilisearch" -version = "1.7.1" +version = "1.8.0" dependencies = [ "actix-cors", "actix-http", @@ -3184,7 +3185,7 @@ dependencies = [ "rayon", "regex", "reqwest", - "rustls 0.20.9", + "rustls", "rustls-pemfile", "segment", "serde", @@ -3219,7 +3220,7 @@ dependencies = [ [[package]] name = "meilisearch-auth" -version = "1.7.1" +version = "1.8.0" dependencies = [ "base64 0.21.7", "enum-iterator", @@ -3238,7 +3239,7 @@ dependencies = [ [[package]] name = "meilisearch-types" -version = "1.7.1" +version = "1.8.0" dependencies = [ "actix-web", "anyhow", @@ -3268,7 +3269,7 @@ dependencies = [ [[package]] name = "meilitool" -version = "1.7.1" +version = "1.8.0" dependencies = [ "anyhow", "clap", @@ -3307,7 +3308,7 @@ dependencies = [ [[package]] name = "milli" -version = "1.7.1" +version = "1.8.0" dependencies = [ "arroy", "big_s", @@ -3413,9 +3414,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", @@ -3733,11 +3734,12 @@ checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" [[package]] name = "pem" -version = "1.1.1" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" +checksum = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310" dependencies = [ - "base64 0.13.1", + "base64 0.21.7", + "serde", ] [[package]] @@ -3748,7 +3750,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "permissive-json-pointer" -version = 
"1.7.1" +version = "1.8.0" dependencies = [ "big_s", "serde_json", @@ -4239,14 +4241,14 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.6", + "rustls", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", "system-configuration", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls", "tokio-util", "tower-service", "url", @@ -4254,7 +4256,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 0.25.3", + "webpki-roots", "winreg", ] @@ -4272,30 +4274,15 @@ checksum = "b9b1a3d5f46d53f4a3478e2be4a5a5ce5108ea58b100dcd139830eae7f79a3a1" [[package]] name = "ring" -version = "0.16.20" +version = "0.17.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin 0.5.2", - "untrusted 0.7.1", - "web-sys", - "winapi", -] - -[[package]] -name = "ring" -version = "0.17.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9babe80d5c16becf6594aa32ad2be8fe08498e7ae60b77de8df700e67f191d7e" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" dependencies = [ "cc", "getrandom", "libc", - "spin 0.9.8", - "untrusted 0.9.0", + "spin", + "untrusted", "windows-sys 0.48.0", ] @@ -4373,24 +4360,12 @@ dependencies = [ [[package]] name = "rustls" -version = "0.20.9" +version = "0.21.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ "log", - "ring 0.16.20", - "sct", - "webpki", -] - -[[package]] -name = "rustls" -version = "0.21.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" -dependencies = [ - "log", - "ring 0.16.20", + "ring", "rustls-webpki", "sct", ] @@ -4410,8 +4385,8 @@ version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring 0.17.3", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -4453,12 +4428,12 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring 0.16.20", - "untrusted 0.7.1", + "ring", + "untrusted", ] [[package]] @@ -4721,12 +4696,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - [[package]] name = "spin" version = "0.9.8" @@ -5080,24 +5049,13 @@ dependencies = [ "syn 2.0.48", ] -[[package]] -name = "tokio-rustls" -version = "0.23.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" -dependencies = [ - "rustls 0.20.9", - "tokio", - "webpki", -] - [[package]] name = "tokio-rustls" version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.6", + "rustls", "tokio", ] @@ -5366,12 +5324,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - [[package]] name = "untrusted" version = "0.9.0" @@ -5388,13 +5340,13 @@ dependencies = [ "flate2", "log", "once_cell", - "rustls 0.21.6", + "rustls", "rustls-webpki", "serde", "serde_json", "socks", "url", - "webpki-roots 0.25.3", + "webpki-roots", ] [[package]] @@ -5630,25 +5582,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "webpki" -version = "0.22.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07ecc0cd7cac091bf682ec5efa18b1cff79d617b84181f38b3951dbe135f607f" -dependencies = [ - "ring 0.16.20", - "untrusted 0.7.1", -] - -[[package]] -name = "webpki-roots" -version = "0.22.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c71e40d7d2c34a5106301fb632274ca37242cd0c9d3e64dbece371a40a2d87" -dependencies = [ - "webpki", -] - [[package]] name = "webpki-roots" version = "0.25.3" @@ -5943,7 +5876,7 @@ dependencies = [ [[package]] name = "xtask" -version = "1.7.1" +version = "1.8.0" dependencies = [ "anyhow", "build-info", diff --git a/Cargo.toml b/Cargo.toml index 5337ec5c3..1d0e0ca0d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,7 @@ members = [ ] [workspace.package] -version = "1.7.1" +version = "1.8.0" authors = [ "Quentin de Quelen ", "Clément Renault ", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index b5460fb56..7709d33d7 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -11,7 +11,7 @@ edition.workspace = true license.workspace = true [dependencies] -actix-web = { version = "4.4.1", default-features = false } +actix-web = { version = "4.5.1", default-features = false } anyhow = "1.0.79" convert_case = "0.6.0" csv = "1.3.0" diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index b65c466ca..04b919904 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -14,18 +14,18 @@ default-run = "meilisearch" [dependencies] actix-cors = "0.7.0" -actix-http = { version = "3.5.1", default-features = false, features = [ +actix-http = { version = "3.6.0", default-features = false, features = [ "compress-brotli", "compress-gzip", - "rustls", + "rustls-0_21", ] } actix-utils = "3.0.1" -actix-web = { version = "4.4.1", default-features = false, features = [ +actix-web = { version = "4.5.1", default-features = false, features = [ "macros", "compress-brotli", "compress-gzip", "cookies", - "rustls", + "rustls-0_21", ] } actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true } anyhow = { version = "1.0.79", features = ["backtrace"] } @@ -52,7 +52,7 @@ index-scheduler = { path = "../index-scheduler" } indexmap = { version = "2.1.0", features = ["serde"] } is-terminal = "0.4.10" itertools = "0.11.0" -jsonwebtoken = "8.3.0" +jsonwebtoken = "9.2.0" lazy_static = "1.4.0" meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-types = { path = "../meilisearch-types" } @@ -75,7 +75,7 @@ reqwest = { version = "0.11.23", features = [ "rustls-tls", "json", ], default-features = 
false } -rustls = "0.20.8" +rustls = "0.21.6" rustls-pemfile = "1.0.2" segment = { version = "0.2.3", optional = true } serde = { version = "1.0.195", features = ["derive"] } diff --git a/meilisearch/src/main.rs b/meilisearch/src/main.rs index 3451325b2..af02f58e1 100644 --- a/meilisearch/src/main.rs +++ b/meilisearch/src/main.rs @@ -151,7 +151,7 @@ async fn run_http( .keep_alive(KeepAlive::Os); if let Some(config) = opt_clone.get_ssl_config()? { - http_server.bind_rustls(opt_clone.http_addr, config)?.run().await?; + http_server.bind_rustls_021(opt_clone.http_addr, config)?.run().await?; } else { http_server.bind(&opt_clone.http_addr)?.run().await?; } diff --git a/meilisearch/src/option.rs b/meilisearch/src/option.rs index 92d53fd32..43bf2c62c 100644 --- a/meilisearch/src/option.rs +++ b/meilisearch/src/option.rs @@ -564,11 +564,11 @@ impl Opt { } if self.ssl_require_auth { let verifier = AllowAnyAuthenticatedClient::new(client_auth_roots); - config.with_client_cert_verifier(verifier) + config.with_client_cert_verifier(Arc::from(verifier)) } else { let verifier = AllowAnyAnonymousOrAuthenticatedClient::new(client_auth_roots); - config.with_client_cert_verifier(verifier) + config.with_client_cert_verifier(Arc::from(verifier)) } } None => config.with_no_client_auth(), diff --git a/meilisearch/src/routes/indexes/settings.rs b/meilisearch/src/routes/indexes/settings.rs index c71d83279..c782e78cb 100644 --- a/meilisearch/src/routes/indexes/settings.rs +++ b/meilisearch/src/routes/indexes/settings.rs @@ -604,6 +604,7 @@ fn embedder_analytics( EmbedderSource::OpenAi => sources.insert("openAi"), EmbedderSource::HuggingFace => sources.insert("huggingFace"), EmbedderSource::UserProvided => sources.insert("userProvided"), + EmbedderSource::Ollama => sources.insert("ollama"), }; } }; diff --git a/meilisearch/src/search.rs b/meilisearch/src/search.rs index 27de36c6d..e65192d16 100644 --- a/meilisearch/src/search.rs +++ b/meilisearch/src/search.rs @@ -530,7 +530,7 @@ pub fn perform_search( // The attributes to retrieve are the ones explicitly marked as to retrieve (all by default), // but these attributes must be also be present // - in the fields_ids_map - // - in the the displayed attributes + // - in the displayed attributes let to_retrieve_ids: BTreeSet<_> = query .attributes_to_retrieve .as_ref() @@ -671,27 +671,16 @@ pub fn perform_search( let sort_facet_values_by = index.sort_facet_values_by(&rtxn).map_err(milli::Error::from)?; - let default_sort_facet_values_by = - sort_facet_values_by.get("*").copied().unwrap_or_default(); if fields.iter().all(|f| f != "*") { - let fields: Vec<_> = fields - .iter() - .map(|n| { - ( - n, - sort_facet_values_by - .get(n) - .copied() - .unwrap_or(default_sort_facet_values_by), - ) - }) - .collect(); + let fields: Vec<_> = + fields.iter().map(|n| (n, sort_facet_values_by.get(n))).collect(); facet_distribution.facets(fields); } + let distribution = facet_distribution .candidates(candidates) - .default_order_by(default_sort_facet_values_by) + .default_order_by(sort_facet_values_by.get("*")) .execute()?; let stats = facet_distribution.compute_stats()?; (Some(distribution), Some(stats)) diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index e6af85229..b1262fa2d 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -1237,8 +1237,8 @@ async fn error_add_documents_missing_document_id() { } #[actix_rt::test] -#[ignore] // // TODO: Fix in an other PR: this does 
not provoke any error. -async fn error_document_field_limit_reached() { +#[should_panic] +async fn error_document_field_limit_reached_in_one_document() { let server = Server::new().await; let index = server.index("test"); @@ -1246,22 +1246,241 @@ async fn error_document_field_limit_reached() { let mut big_object = std::collections::HashMap::new(); big_object.insert("id".to_owned(), "wow"); - for i in 0..65535 { + for i in 0..(u16::MAX as usize + 1) { let key = i.to_string(); big_object.insert(key, "I am a text!"); } let documents = json!([big_object]); - let (_response, code) = index.update_documents(documents, Some("id")).await; - snapshot!(code, @"202"); + let (response, code) = index.update_documents(documents, Some("id")).await; + snapshot!(code, @"500 Internal Server Error"); - index.wait_task(0).await; - let (response, code) = index.get_task(0).await; - snapshot!(code, @"200"); + let response = index.wait_task(response.uid()).await; + snapshot!(code, @"202 Accepted"); // Documents without a primary key are not accepted. - snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }), - @""); + snapshot!(response, + @r###" + { + "uid": 1, + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); +} + +#[actix_rt::test] +async fn error_document_field_limit_reached_over_multiple_documents() { + let server = Server::new().await; + let index = server.index("test"); + + index.create(Some("id")).await; + + let mut big_object = std::collections::HashMap::new(); + big_object.insert("id".to_owned(), "wow"); + for i in 0..(u16::MAX / 2) { + let key = i.to_string(); + big_object.insert(key, "I am a text!"); + } + + let documents = json!([big_object]); + + let (response, code) = index.update_documents(documents, Some("id")).await; + snapshot!(code, @"202 Accepted"); + + let response = index.wait_task(response.uid()).await; + snapshot!(code, @"202 Accepted"); + snapshot!(response, + @r###" + { + "uid": 1, + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + let mut big_object = std::collections::HashMap::new(); + big_object.insert("id".to_owned(), "waw"); + for i in (u16::MAX as usize / 2)..(u16::MAX as usize + 1) { + let key = i.to_string(); + big_object.insert(key, "I am a text!"); + } + + let documents = json!([big_object]); + + let (response, code) = index.update_documents(documents, Some("id")).await; + snapshot!(code, @"202 Accepted"); + + let response = index.wait_task(response.uid()).await; + snapshot!(code, @"202 Accepted"); + snapshot!(response, + @r###" + { + "uid": 2, + "indexUid": "test", + "status": "failed", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 0 + }, + "error": { + "message": "A document cannot contain more than 65,535 fields.", + "code": "max_fields_limit_exceeded", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#max_fields_limit_exceeded" + }, + "duration": "[duration]", + 
"enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); +} + +#[actix_rt::test] +async fn error_document_field_limit_reached_in_one_nested_document() { + let server = Server::new().await; + let index = server.index("test"); + + index.create(Some("id")).await; + + let mut nested = std::collections::HashMap::new(); + for i in 0..(u16::MAX as usize + 1) { + let key = i.to_string(); + nested.insert(key, "I am a text!"); + } + let mut big_object = std::collections::HashMap::new(); + big_object.insert("id".to_owned(), "wow"); + + let documents = json!([big_object]); + + let (response, code) = index.update_documents(documents, Some("id")).await; + snapshot!(code, @"202 Accepted"); + + let response = index.wait_task(response.uid()).await; + snapshot!(code, @"202 Accepted"); + // Documents without a primary key are not accepted. + snapshot!(response, + @r###" + { + "uid": 1, + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); +} + +#[actix_rt::test] +async fn error_document_field_limit_reached_over_multiple_documents_with_nested_fields() { + let server = Server::new().await; + let index = server.index("test"); + + index.create(Some("id")).await; + + let mut nested = std::collections::HashMap::new(); + for i in 0..(u16::MAX / 2) { + let key = i.to_string(); + nested.insert(key, "I am a text!"); + } + let mut big_object = std::collections::HashMap::new(); + big_object.insert("id".to_owned(), "wow"); + + let documents = json!([big_object]); + + let (response, code) = index.update_documents(documents, Some("id")).await; + snapshot!(code, @"202 Accepted"); + + let response = index.wait_task(response.uid()).await; + snapshot!(code, @"202 Accepted"); + snapshot!(response, + @r###" + { + "uid": 1, + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); + + let mut nested = std::collections::HashMap::new(); + for i in 0..(u16::MAX / 2) { + let key = i.to_string(); + nested.insert(key, "I am a text!"); + } + let mut big_object = std::collections::HashMap::new(); + big_object.insert("id".to_owned(), "wow"); + + let documents = json!([big_object]); + + let (response, code) = index.update_documents(documents, Some("id")).await; + snapshot!(code, @"202 Accepted"); + + let response = index.wait_task(response.uid()).await; + snapshot!(code, @"202 Accepted"); + snapshot!(response, + @r###" + { + "uid": 2, + "indexUid": "test", + "status": "succeeded", + "type": "documentAdditionOrUpdate", + "canceledBy": null, + "details": { + "receivedDocuments": 1, + "indexedDocuments": 1 + }, + "error": null, + "duration": "[duration]", + "enqueuedAt": "[date]", + "startedAt": "[date]", + "finishedAt": "[date]" + } + "###); } #[actix_rt::test] diff --git a/meilisearch/tests/search/facet_search.rs b/meilisearch/tests/search/facet_search.rs index 5f9f631f9..12d2226a9 100644 --- a/meilisearch/tests/search/facet_search.rs +++ b/meilisearch/tests/search/facet_search.rs @@ -123,6 +123,28 @@ async fn simple_facet_search_with_max_values() { 
assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1); } +#[actix_rt::test] +async fn simple_facet_search_by_count_with_max_values() { + let server = Server::new().await; + let index = server.index("test"); + + let documents = DOCUMENTS.clone(); + index + .update_settings_faceting( + json!({ "maxValuesPerFacet": 1, "sortFacetValuesBy": { "*": "count" } }), + ) + .await; + index.update_settings_filterable_attributes(json!(["genres"])).await; + index.add_documents(documents, None).await; + index.wait_task(2).await; + + let (response, code) = + index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; + + assert_eq!(code, 200, "{}", response); + assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1); +} + #[actix_rt::test] async fn non_filterable_facet_search_error() { let server = Server::new().await; @@ -157,3 +179,24 @@ async fn facet_search_dont_support_words() { assert_eq!(code, 200, "{}", response); assert_eq!(response["facetHits"].as_array().unwrap().len(), 0); } + +#[actix_rt::test] +async fn simple_facet_search_with_sort_by_count() { + let server = Server::new().await; + let index = server.index("test"); + + let documents = DOCUMENTS.clone(); + index.update_settings_faceting(json!({ "sortFacetValuesBy": { "*": "count" } })).await; + index.update_settings_filterable_attributes(json!(["genres"])).await; + index.add_documents(documents, None).await; + index.wait_task(2).await; + + let (response, code) = + index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await; + + assert_eq!(code, 200, "{}", response); + let hits = response["facetHits"].as_array().unwrap(); + assert_eq!(hits.len(), 2); + assert_eq!(hits[0], json!({ "value": "Action", "count": 3 })); + assert_eq!(hits[1], json!({ "value": "Adventure", "count": 2 })); +} diff --git a/milli/src/index.rs b/milli/src/index.rs index 6ad39dcb1..2c3977403 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -20,13 +20,13 @@ use crate::heed_codec::facet::{ use crate::heed_codec::{ BEU16StrCodec, FstSetCodec, ScriptLanguageCodec, StrBEU16Codec, StrRefCodec, }; +use crate::order_by_map::OrderByMap; use crate::proximity::ProximityPrecision; use crate::vector::EmbeddingConfig; use crate::{ default_criteria, CboRoaringBitmapCodec, Criterion, DocumentId, ExternalDocumentsIds, FacetDistribution, FieldDistribution, FieldId, FieldIdWordCountCodec, GeoPoint, ObkvCodec, - OrderBy, Result, RoaringBitmapCodec, RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16, - BEU32, BEU64, + Result, RoaringBitmapCodec, RoaringBitmapLenCodec, Search, U8StrStrCodec, BEU16, BEU32, BEU64, }; pub const DEFAULT_MIN_WORD_LEN_ONE_TYPO: u8 = 5; @@ -1373,21 +1373,19 @@ impl Index { self.main.remap_key_type::().delete(txn, main_key::MAX_VALUES_PER_FACET) } - pub fn sort_facet_values_by(&self, txn: &RoTxn) -> heed::Result> { - let mut orders = self + pub fn sort_facet_values_by(&self, txn: &RoTxn) -> heed::Result { + let orders = self .main - .remap_types::>>() + .remap_types::>() .get(txn, main_key::SORT_FACET_VALUES_BY)? .unwrap_or_default(); - // Insert the default ordering if it is not already overwritten by the user. 
- orders.entry("*".to_string()).or_insert(OrderBy::Lexicographic); Ok(orders) } pub(crate) fn put_sort_facet_values_by( &self, txn: &mut RwTxn, - val: &HashMap, + val: &OrderByMap, ) -> heed::Result<()> { self.main.remap_types::>().put(txn, main_key::SORT_FACET_VALUES_BY, &val) } diff --git a/milli/src/lib.rs b/milli/src/lib.rs index f6b398304..5effcea3d 100644 --- a/milli/src/lib.rs +++ b/milli/src/lib.rs @@ -16,6 +16,7 @@ pub mod facet; mod fields_ids_map; pub mod heed_codec; pub mod index; +pub mod order_by_map; pub mod prompt; pub mod proximity; pub mod score_details; @@ -56,10 +57,10 @@ pub use self::heed_codec::{ UncheckedU8StrStrCodec, }; pub use self::index::Index; +pub use self::search::facet::{FacetValueHit, SearchForFacetValues}; pub use self::search::{ - FacetDistribution, FacetValueHit, Filter, FormatOptions, MatchBounds, MatcherBuilder, - MatchingWords, OrderBy, Search, SearchForFacetValues, SearchResult, TermsMatchingStrategy, - DEFAULT_VALUES_PER_FACET, + FacetDistribution, Filter, FormatOptions, MatchBounds, MatcherBuilder, MatchingWords, OrderBy, + Search, SearchResult, TermsMatchingStrategy, DEFAULT_VALUES_PER_FACET, }; pub type Result = std::result::Result; diff --git a/milli/src/order_by_map.rs b/milli/src/order_by_map.rs new file mode 100644 index 000000000..287e62c3a --- /dev/null +++ b/milli/src/order_by_map.rs @@ -0,0 +1,57 @@ +use std::collections::{hash_map, HashMap}; +use std::iter::FromIterator; + +use serde::{Deserialize, Deserializer, Serialize}; + +use crate::OrderBy; + +#[derive(Serialize)] +pub struct OrderByMap(HashMap); + +impl OrderByMap { + pub fn get(&self, key: impl AsRef) -> OrderBy { + self.0 + .get(key.as_ref()) + .copied() + .unwrap_or_else(|| self.0.get("*").copied().unwrap_or_default()) + } + + pub fn insert(&mut self, key: String, value: OrderBy) -> Option { + self.0.insert(key, value) + } +} + +impl Default for OrderByMap { + fn default() -> Self { + let mut map = HashMap::new(); + map.insert("*".to_string(), OrderBy::Lexicographic); + OrderByMap(map) + } +} + +impl FromIterator<(String, OrderBy)> for OrderByMap { + fn from_iter>(iter: T) -> Self { + OrderByMap(iter.into_iter().collect()) + } +} + +impl IntoIterator for OrderByMap { + type Item = (String, OrderBy); + type IntoIter = hash_map::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl<'de> Deserialize<'de> for OrderByMap { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let mut map = Deserialize::deserialize(deserializer).map(OrderByMap)?; + // Insert the default ordering if it is not already overwritten by the user. + map.0.entry("*".to_string()).or_insert(OrderBy::default()); + Ok(map) + } +} diff --git a/milli/src/search/facet/facet_range_search.rs b/milli/src/search/facet/facet_range_search.rs index f1a26ded5..e340fbac5 100644 --- a/milli/src/search/facet/facet_range_search.rs +++ b/milli/src/search/facet/facet_range_search.rs @@ -168,7 +168,7 @@ impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> { } // should we stop? - // We should if the the search range doesn't include any + // We should if the search range doesn't include any // element from the previous key or its successors let should_stop = { match self.right { @@ -232,7 +232,7 @@ impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> { } // should we stop? 
- // We should if the the search range doesn't include any + // We should if the search range doesn't include any // element from the previous key or its successors let should_stop = { match self.right { diff --git a/milli/src/search/facet/mod.rs b/milli/src/search/facet/mod.rs index f676ee109..34a9cdcb8 100644 --- a/milli/src/search/facet/mod.rs +++ b/milli/src/search/facet/mod.rs @@ -6,15 +6,18 @@ use roaring::RoaringBitmap; pub use self::facet_distribution::{FacetDistribution, OrderBy, DEFAULT_VALUES_PER_FACET}; pub use self::filter::{BadGeoError, Filter}; +pub use self::search::{FacetValueHit, SearchForFacetValues}; use crate::heed_codec::facet::{FacetGroupKeyCodec, FacetGroupValueCodec, OrderedF64Codec}; use crate::heed_codec::BytesRefCodec; use crate::{Index, Result}; + mod facet_distribution; mod facet_distribution_iter; mod facet_range_search; mod facet_sort_ascending; mod facet_sort_descending; mod filter; +mod search; fn facet_extreme_value<'t>( mut extreme_it: impl Iterator> + 't, diff --git a/milli/src/search/facet/search.rs b/milli/src/search/facet/search.rs new file mode 100644 index 000000000..0251d6b8d --- /dev/null +++ b/milli/src/search/facet/search.rs @@ -0,0 +1,326 @@ +use std::cmp::{Ordering, Reverse}; +use std::collections::BinaryHeap; +use std::ops::ControlFlow; + +use charabia::normalizer::NormalizerOption; +use charabia::Normalize; +use fst::automaton::{Automaton, Str}; +use fst::{IntoStreamer, Streamer}; +use roaring::RoaringBitmap; +use tracing::error; + +use crate::error::UserError; +use crate::heed_codec::facet::{FacetGroupKey, FacetGroupValue}; +use crate::search::build_dfa; +use crate::{DocumentId, FieldId, OrderBy, Result, Search}; + +/// The maximum number of values per facet returned by the facet search route. +const DEFAULT_MAX_NUMBER_OF_VALUES_PER_FACET: usize = 100; + +pub struct SearchForFacetValues<'a> { + query: Option, + facet: String, + search_query: Search<'a>, + max_values: usize, + is_hybrid: bool, +} + +impl<'a> SearchForFacetValues<'a> { + pub fn new( + facet: String, + search_query: Search<'a>, + is_hybrid: bool, + ) -> SearchForFacetValues<'a> { + SearchForFacetValues { + query: None, + facet, + search_query, + max_values: DEFAULT_MAX_NUMBER_OF_VALUES_PER_FACET, + is_hybrid, + } + } + + pub fn query(&mut self, query: impl Into) -> &mut Self { + self.query = Some(query.into()); + self + } + + pub fn max_values(&mut self, max: usize) -> &mut Self { + self.max_values = max; + self + } + + fn one_original_value_of( + &self, + field_id: FieldId, + facet_str: &str, + any_docid: DocumentId, + ) -> Result> { + let index = self.search_query.index; + let rtxn = self.search_query.rtxn; + let key: (FieldId, _, &str) = (field_id, any_docid, facet_str); + Ok(index.field_id_docid_facet_strings.get(rtxn, &key)?.map(|v| v.to_owned())) + } + + pub fn execute(&self) -> Result> { + let index = self.search_query.index; + let rtxn = self.search_query.rtxn; + + let filterable_fields = index.filterable_fields(rtxn)?; + if !filterable_fields.contains(&self.facet) { + let (valid_fields, hidden_fields) = + index.remove_hidden_fields(rtxn, filterable_fields)?; + + return Err(UserError::InvalidFacetSearchFacetName { + field: self.facet.clone(), + valid_fields, + hidden_fields, + } + .into()); + } + + let fields_ids_map = index.fields_ids_map(rtxn)?; + let fid = match fields_ids_map.id(&self.facet) { + Some(fid) => fid, + // we return an empty list of results when the attribute has been + // set as filterable but no document contains this field (yet). 
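+        // (hypothetical clarification: the field name is absent from the fields ids map
+        // until at least one indexed document actually contains that field)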
+ None => return Ok(Vec::new()), + }; + + let fst = match self.search_query.index.facet_id_string_fst.get(rtxn, &fid)? { + Some(fst) => fst, + None => return Ok(Vec::new()), + }; + + let search_candidates = self + .search_query + .execute_for_candidates(self.is_hybrid || self.search_query.vector.is_some())?; + + let mut results = match index.sort_facet_values_by(rtxn)?.get(&self.facet) { + OrderBy::Lexicographic => ValuesCollection::by_lexicographic(self.max_values), + OrderBy::Count => ValuesCollection::by_count(self.max_values), + }; + + match self.query.as_ref() { + Some(query) => { + let options = NormalizerOption { lossy: true, ..Default::default() }; + let query = query.normalize(&options); + let query = query.as_ref(); + + let authorize_typos = self.search_query.index.authorize_typos(rtxn)?; + let field_authorizes_typos = + !self.search_query.index.exact_attributes_ids(rtxn)?.contains(&fid); + + if authorize_typos && field_authorizes_typos { + let exact_words_fst = self.search_query.index.exact_words(rtxn)?; + if exact_words_fst.map_or(false, |fst| fst.contains(query)) { + if fst.contains(query) { + self.fetch_original_facets_using_normalized( + fid, + query, + query, + &search_candidates, + &mut results, + )?; + } + } else { + let one_typo = self.search_query.index.min_word_len_one_typo(rtxn)?; + let two_typos = self.search_query.index.min_word_len_two_typos(rtxn)?; + + let is_prefix = true; + let automaton = if query.len() < one_typo as usize { + build_dfa(query, 0, is_prefix) + } else if query.len() < two_typos as usize { + build_dfa(query, 1, is_prefix) + } else { + build_dfa(query, 2, is_prefix) + }; + + let mut stream = fst.search(automaton).into_stream(); + while let Some(facet_value) = stream.next() { + let value = std::str::from_utf8(facet_value)?; + if self + .fetch_original_facets_using_normalized( + fid, + value, + query, + &search_candidates, + &mut results, + )? + .is_break() + { + break; + } + } + } + } else { + let automaton = Str::new(query).starts_with(); + let mut stream = fst.search(automaton).into_stream(); + while let Some(facet_value) = stream.next() { + let value = std::str::from_utf8(facet_value)?; + if self + .fetch_original_facets_using_normalized( + fid, + value, + query, + &search_candidates, + &mut results, + )? + .is_break() + { + break; + } + } + } + } + None => { + let prefix = FacetGroupKey { field_id: fid, level: 0, left_bound: "" }; + for result in index.facet_id_string_docids.prefix_iter(rtxn, &prefix)? { + let (FacetGroupKey { left_bound, .. }, FacetGroupValue { bitmap, .. }) = + result?; + let count = search_candidates.intersection_len(&bitmap); + if count != 0 { + let value = self + .one_original_value_of(fid, left_bound, bitmap.min().unwrap())? + .unwrap_or_else(|| left_bound.to_string()); + if results.insert(FacetValueHit { value, count }).is_break() { + break; + } + } + } + } + } + + Ok(results.into_sorted_vec()) + } + + fn fetch_original_facets_using_normalized( + &self, + fid: FieldId, + value: &str, + query: &str, + search_candidates: &RoaringBitmap, + results: &mut ValuesCollection, + ) -> Result> { + let index = self.search_query.index; + let rtxn = self.search_query.rtxn; + + let database = index.facet_id_normalized_string_strings; + let key = (fid, value); + let original_strings = match database.get(rtxn, &key)? 
{ + Some(original_strings) => original_strings, + None => { + error!("the facet value is missing from the facet database: {key:?}"); + return Ok(ControlFlow::Continue(())); + } + }; + for original in original_strings { + let key = FacetGroupKey { field_id: fid, level: 0, left_bound: original.as_str() }; + let docids = match index.facet_id_string_docids.get(rtxn, &key)? { + Some(FacetGroupValue { bitmap, .. }) => bitmap, + None => { + error!("the facet value is missing from the facet database: {key:?}"); + return Ok(ControlFlow::Continue(())); + } + }; + let count = search_candidates.intersection_len(&docids); + if count != 0 { + let value = self + .one_original_value_of(fid, &original, docids.min().unwrap())? + .unwrap_or_else(|| query.to_string()); + if results.insert(FacetValueHit { value, count }).is_break() { + break; + } + } + } + + Ok(ControlFlow::Continue(())) + } +} + +#[derive(Debug, Clone, serde::Serialize, PartialEq)] +pub struct FacetValueHit { + /// The original facet value + pub value: String, + /// The number of documents associated to this facet + pub count: u64, +} + +impl PartialOrd for FacetValueHit { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for FacetValueHit { + fn cmp(&self, other: &Self) -> Ordering { + self.count.cmp(&other.count).then_with(|| self.value.cmp(&other.value)) + } +} + +impl Eq for FacetValueHit {} + +/// A wrapper type that collects the best facet values by +/// lexicographic or number of associated values. +enum ValuesCollection { + /// Keeps the top values according to the lexicographic order. + Lexicographic { max: usize, content: Vec }, + /// Keeps the top values according to the number of values associated to them. + /// + /// Note that it is a max heap and we need to move the smallest counts + /// at the top to be able to pop them when we reach the max_values limit. + Count { max: usize, content: BinaryHeap> }, +} + +impl ValuesCollection { + pub fn by_lexicographic(max: usize) -> Self { + ValuesCollection::Lexicographic { max, content: Vec::new() } + } + + pub fn by_count(max: usize) -> Self { + ValuesCollection::Count { max, content: BinaryHeap::new() } + } + + pub fn insert(&mut self, value: FacetValueHit) -> ControlFlow<()> { + match self { + ValuesCollection::Lexicographic { max, content } => { + if content.len() < *max { + content.push(value); + if content.len() < *max { + return ControlFlow::Continue(()); + } + } + ControlFlow::Break(()) + } + ValuesCollection::Count { max, content } => { + if content.len() == *max { + // Peeking gives us the worst value in the list as + // this is a max-heap and we reversed it. + let Some(mut peek) = content.peek_mut() else { return ControlFlow::Break(()) }; + if peek.0.count <= value.count { + // Replace the current worst value in the heap + // with the new one we received that is better. + *peek = Reverse(value); + } + } else { + content.push(Reverse(value)); + } + ControlFlow::Continue(()) + } + } + } + + /// Returns the list of facet values in descending order of, either, + /// count or lexicographic order of the value depending on the type. + pub fn into_sorted_vec(self) -> Vec { + match self { + ValuesCollection::Lexicographic { content, .. } => content.into_iter().collect(), + ValuesCollection::Count { content, .. } => { + // Convert the heap into a vec of hits by removing the Reverse wrapper. + // Hits are already in the right order as they were reversed and there + // are output in ascending order. 
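+                // Note (assumption about the heap layout above): `into_sorted_vec` yields the
+                // `Reverse`-wrapped hits in ascending order, i.e. descending `count` once the
+                // `Reverse` wrapper is stripped on the next line.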
+ content.into_sorted_vec().into_iter().map(|Reverse(hit)| hit).collect() + } + } + } +} diff --git a/milli/src/search/mod.rs b/milli/src/search/mod.rs index e411bd032..dc8354486 100644 --- a/milli/src/search/mod.rs +++ b/milli/src/search/mod.rs @@ -1,25 +1,17 @@ use std::fmt; -use std::ops::ControlFlow; -use charabia::normalizer::NormalizerOption; -use charabia::Normalize; -use fst::automaton::{Automaton, Str}; -use fst::{IntoStreamer, Streamer}; use levenshtein_automata::{LevenshteinAutomatonBuilder as LevBuilder, DFA}; use once_cell::sync::Lazy; use roaring::bitmap::RoaringBitmap; -use tracing::error; pub use self::facet::{FacetDistribution, Filter, OrderBy, DEFAULT_VALUES_PER_FACET}; pub use self::new::matches::{FormatOptions, MatchBounds, MatcherBuilder, MatchingWords}; use self::new::{execute_vector_search, PartialSearchResult}; -use crate::error::UserError; -use crate::heed_codec::facet::{FacetGroupKey, FacetGroupValue}; use crate::score_details::{ScoreDetails, ScoringStrategy}; use crate::vector::DistributionShift; use crate::{ - execute_search, filtered_universe, AscDesc, DefaultSearchLogger, DocumentId, FieldId, Index, - Result, SearchContext, + execute_search, filtered_universe, AscDesc, DefaultSearchLogger, DocumentId, Index, Result, + SearchContext, }; // Building these factories is not free. @@ -27,9 +19,6 @@ static LEVDIST0: Lazy = Lazy::new(|| LevBuilder::new(0, true)); static LEVDIST1: Lazy = Lazy::new(|| LevBuilder::new(1, true)); static LEVDIST2: Lazy = Lazy::new(|| LevBuilder::new(2, true)); -/// The maximum number of values per facet returned by the facet search route. -const DEFAULT_MAX_NUMBER_OF_VALUES_PER_FACET: usize = 100; - pub mod facet; mod fst_utils; pub mod hybrid; @@ -302,240 +291,6 @@ pub fn build_dfa(word: &str, typos: u8, is_prefix: bool) -> DFA { } } -pub struct SearchForFacetValues<'a> { - query: Option, - facet: String, - search_query: Search<'a>, - max_values: usize, - is_hybrid: bool, -} - -impl<'a> SearchForFacetValues<'a> { - pub fn new( - facet: String, - search_query: Search<'a>, - is_hybrid: bool, - ) -> SearchForFacetValues<'a> { - SearchForFacetValues { - query: None, - facet, - search_query, - max_values: DEFAULT_MAX_NUMBER_OF_VALUES_PER_FACET, - is_hybrid, - } - } - - pub fn query(&mut self, query: impl Into) -> &mut Self { - self.query = Some(query.into()); - self - } - - pub fn max_values(&mut self, max: usize) -> &mut Self { - self.max_values = max; - self - } - - fn one_original_value_of( - &self, - field_id: FieldId, - facet_str: &str, - any_docid: DocumentId, - ) -> Result> { - let index = self.search_query.index; - let rtxn = self.search_query.rtxn; - let key: (FieldId, _, &str) = (field_id, any_docid, facet_str); - Ok(index.field_id_docid_facet_strings.get(rtxn, &key)?.map(|v| v.to_owned())) - } - - pub fn execute(&self) -> Result> { - let index = self.search_query.index; - let rtxn = self.search_query.rtxn; - - let filterable_fields = index.filterable_fields(rtxn)?; - if !filterable_fields.contains(&self.facet) { - let (valid_fields, hidden_fields) = - index.remove_hidden_fields(rtxn, filterable_fields)?; - - return Err(UserError::InvalidFacetSearchFacetName { - field: self.facet.clone(), - valid_fields, - hidden_fields, - } - .into()); - } - - let fields_ids_map = index.fields_ids_map(rtxn)?; - let fid = match fields_ids_map.id(&self.facet) { - Some(fid) => fid, - // we return an empty list of results when the attribute has been - // set as filterable but no document contains this field (yet). 
- None => return Ok(Vec::new()), - }; - - let fst = match self.search_query.index.facet_id_string_fst.get(rtxn, &fid)? { - Some(fst) => fst, - None => return Ok(vec![]), - }; - - let search_candidates = self - .search_query - .execute_for_candidates(self.is_hybrid || self.search_query.vector.is_some())?; - - match self.query.as_ref() { - Some(query) => { - let options = NormalizerOption { lossy: true, ..Default::default() }; - let query = query.normalize(&options); - let query = query.as_ref(); - - let authorize_typos = self.search_query.index.authorize_typos(rtxn)?; - let field_authorizes_typos = - !self.search_query.index.exact_attributes_ids(rtxn)?.contains(&fid); - - if authorize_typos && field_authorizes_typos { - let exact_words_fst = self.search_query.index.exact_words(rtxn)?; - if exact_words_fst.map_or(false, |fst| fst.contains(query)) { - let mut results = vec![]; - if fst.contains(query) { - self.fetch_original_facets_using_normalized( - fid, - query, - query, - &search_candidates, - &mut results, - )?; - } - Ok(results) - } else { - let one_typo = self.search_query.index.min_word_len_one_typo(rtxn)?; - let two_typos = self.search_query.index.min_word_len_two_typos(rtxn)?; - - let is_prefix = true; - let automaton = if query.len() < one_typo as usize { - build_dfa(query, 0, is_prefix) - } else if query.len() < two_typos as usize { - build_dfa(query, 1, is_prefix) - } else { - build_dfa(query, 2, is_prefix) - }; - - let mut stream = fst.search(automaton).into_stream(); - let mut results = vec![]; - while let Some(facet_value) = stream.next() { - let value = std::str::from_utf8(facet_value)?; - if self - .fetch_original_facets_using_normalized( - fid, - value, - query, - &search_candidates, - &mut results, - )? - .is_break() - { - break; - } - } - - Ok(results) - } - } else { - let automaton = Str::new(query).starts_with(); - let mut stream = fst.search(automaton).into_stream(); - let mut results = vec![]; - while let Some(facet_value) = stream.next() { - let value = std::str::from_utf8(facet_value)?; - if self - .fetch_original_facets_using_normalized( - fid, - value, - query, - &search_candidates, - &mut results, - )? - .is_break() - { - break; - } - } - - Ok(results) - } - } - None => { - let mut results = vec![]; - let prefix = FacetGroupKey { field_id: fid, level: 0, left_bound: "" }; - for result in index.facet_id_string_docids.prefix_iter(rtxn, &prefix)? { - let (FacetGroupKey { left_bound, .. }, FacetGroupValue { bitmap, .. }) = - result?; - let count = search_candidates.intersection_len(&bitmap); - if count != 0 { - let value = self - .one_original_value_of(fid, left_bound, bitmap.min().unwrap())? - .unwrap_or_else(|| left_bound.to_string()); - results.push(FacetValueHit { value, count }); - } - if results.len() >= self.max_values { - break; - } - } - Ok(results) - } - } - } - - fn fetch_original_facets_using_normalized( - &self, - fid: FieldId, - value: &str, - query: &str, - search_candidates: &RoaringBitmap, - results: &mut Vec, - ) -> Result> { - let index = self.search_query.index; - let rtxn = self.search_query.rtxn; - - let database = index.facet_id_normalized_string_strings; - let key = (fid, value); - let original_strings = match database.get(rtxn, &key)? 
{ - Some(original_strings) => original_strings, - None => { - error!("the facet value is missing from the facet database: {key:?}"); - return Ok(ControlFlow::Continue(())); - } - }; - for original in original_strings { - let key = FacetGroupKey { field_id: fid, level: 0, left_bound: original.as_str() }; - let docids = match index.facet_id_string_docids.get(rtxn, &key)? { - Some(FacetGroupValue { bitmap, .. }) => bitmap, - None => { - error!("the facet value is missing from the facet database: {key:?}"); - return Ok(ControlFlow::Continue(())); - } - }; - let count = search_candidates.intersection_len(&docids); - if count != 0 { - let value = self - .one_original_value_of(fid, &original, docids.min().unwrap())? - .unwrap_or_else(|| query.to_string()); - results.push(FacetValueHit { value, count }); - } - if results.len() >= self.max_values { - return Ok(ControlFlow::Break(())); - } - } - - Ok(ControlFlow::Continue(())) - } -} - -#[derive(Debug, Clone, serde::Serialize, PartialEq)] -pub struct FacetValueHit { - /// The original facet value - pub value: String, - /// The number of documents associated to this facet - pub count: u64, -} - #[cfg(test)] mod test { #[allow(unused_imports)] diff --git a/milli/src/search/new/tests/typo_proximity.rs b/milli/src/search/new/tests/typo_proximity.rs index 8dd110704..e71d32331 100644 --- a/milli/src/search/new/tests/typo_proximity.rs +++ b/milli/src/search/new/tests/typo_proximity.rs @@ -5,7 +5,7 @@ The typo ranking rule should transform the query graph such that it only contain the combinations of word derivations that it used to compute its bucket. The proximity ranking rule should then look for proximities only between those specific derivations. -For example, given the the search query `beautiful summer` and the dataset: +For example, given the search query `beautiful summer` and the dataset: ```text { "id": 0, "text": "beautigul summer...... beautiful day in the summer" } { "id": 1, "text": "beautiful summer" } diff --git a/milli/src/update/settings.rs b/milli/src/update/settings.rs index 2f53718ac..46014202b 100644 --- a/milli/src/update/settings.rs +++ b/milli/src/update/settings.rs @@ -14,12 +14,13 @@ use super::IndexerConfig; use crate::criterion::Criterion; use crate::error::UserError; use crate::index::{DEFAULT_MIN_WORD_LEN_ONE_TYPO, DEFAULT_MIN_WORD_LEN_TWO_TYPOS}; +use crate::order_by_map::OrderByMap; use crate::proximity::ProximityPrecision; use crate::update::index_documents::IndexDocumentsMethod; use crate::update::{IndexDocuments, UpdateIndexingStep}; use crate::vector::settings::{check_set, check_unset, EmbedderSource, EmbeddingSettings}; use crate::vector::{Embedder, EmbeddingConfig, EmbeddingConfigs}; -use crate::{FieldsIdsMap, Index, OrderBy, Result}; +use crate::{FieldsIdsMap, Index, Result}; #[derive(Debug, Clone, PartialEq, Eq, Copy)] pub enum Setting { @@ -145,7 +146,7 @@ pub struct Settings<'a, 't, 'i> { /// Attributes on which typo tolerance is disabled. 
exact_attributes: Setting>, max_values_per_facet: Setting, - sort_facet_values_by: Setting>, + sort_facet_values_by: Setting, pagination_max_total_hits: Setting, proximity_precision: Setting, embedder_settings: Setting>>, @@ -340,7 +341,7 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> { self.max_values_per_facet = Setting::Reset; } - pub fn set_sort_facet_values_by(&mut self, value: HashMap) { + pub fn set_sort_facet_values_by(&mut self, value: OrderByMap) { self.sort_facet_values_by = Setting::Set(value); } @@ -1186,6 +1187,13 @@ pub fn validate_embedding_settings( } } } + EmbedderSource::Ollama => { + // Dimensions get inferred, only model name is required + check_unset(&dimensions, "dimensions", inferred_source, name)?; + check_set(&model, "model", inferred_source, name)?; + check_unset(&api_key, "apiKey", inferred_source, name)?; + check_unset(&revision, "revision", inferred_source, name)?; + } EmbedderSource::HuggingFace => { check_unset(&api_key, "apiKey", inferred_source, name)?; check_unset(&dimensions, "dimensions", inferred_source, name)?; diff --git a/milli/src/vector/error.rs b/milli/src/vector/error.rs index fbe4ee878..9bbdeaa90 100644 --- a/milli/src/vector/error.rs +++ b/milli/src/vector/error.rs @@ -2,6 +2,7 @@ use std::path::PathBuf; use hf_hub::api::sync::ApiError; +use super::ollama::OllamaError; use crate::error::FaultSource; use crate::vector::openai::OpenAiError; @@ -71,6 +72,17 @@ pub enum EmbedErrorKind { OpenAiRuntimeInit(std::io::Error), #[error("initializing web client for sending embedding requests failed: {0}")] InitWebClient(reqwest::Error), + // Dedicated Ollama error kinds, might have to merge them into one cohesive error type for all backends. + #[error("unexpected response from Ollama: {0}")] + OllamaUnexpected(reqwest::Error), + #[error("sent too many requests to Ollama: {0}")] + OllamaTooManyRequests(OllamaError), + #[error("received internal error from Ollama: {0}")] + OllamaInternalServerError(OllamaError), + #[error("model not found. 
Meilisearch will not automatically download models from the Ollama library, please pull the model manually: {0}")] + OllamaModelNotFoundError(OllamaError), + #[error("received unhandled HTTP status code {0} from Ollama")] + OllamaUnhandledStatusCode(u16), } impl EmbedError { @@ -129,6 +141,26 @@ impl EmbedError { pub fn openai_initialize_web_client(inner: reqwest::Error) -> Self { Self { kind: EmbedErrorKind::InitWebClient(inner), fault: FaultSource::Runtime } } + + pub(crate) fn ollama_unexpected(inner: reqwest::Error) -> EmbedError { + Self { kind: EmbedErrorKind::OllamaUnexpected(inner), fault: FaultSource::Bug } + } + + pub(crate) fn ollama_model_not_found(inner: OllamaError) -> EmbedError { + Self { kind: EmbedErrorKind::OllamaModelNotFoundError(inner), fault: FaultSource::User } + } + + pub(crate) fn ollama_too_many_requests(inner: OllamaError) -> EmbedError { + Self { kind: EmbedErrorKind::OllamaTooManyRequests(inner), fault: FaultSource::Runtime } + } + + pub(crate) fn ollama_internal_server_error(inner: OllamaError) -> EmbedError { + Self { kind: EmbedErrorKind::OllamaInternalServerError(inner), fault: FaultSource::Runtime } + } + + pub(crate) fn ollama_unhandled_status_code(code: u16) -> EmbedError { + Self { kind: EmbedErrorKind::OllamaUnhandledStatusCode(code), fault: FaultSource::Bug } + } } #[derive(Debug, thiserror::Error)] @@ -195,6 +227,13 @@ impl NewEmbedderError { } } + pub fn ollama_could_not_determine_dimension(inner: EmbedError) -> NewEmbedderError { + Self { + kind: NewEmbedderErrorKind::CouldNotDetermineDimension(inner), + fault: FaultSource::User, + } + } + pub fn openai_invalid_api_key_format(inner: reqwest::header::InvalidHeaderValue) -> Self { Self { kind: NewEmbedderErrorKind::InvalidApiKeyFormat(inner), fault: FaultSource::User } } diff --git a/milli/src/vector/mod.rs b/milli/src/vector/mod.rs index 6aa324da9..035ac555e 100644 --- a/milli/src/vector/mod.rs +++ b/milli/src/vector/mod.rs @@ -10,6 +10,8 @@ pub mod manual; pub mod openai; pub mod settings; +pub mod ollama; + pub use self::error::Error; pub type Embedding = Vec; @@ -76,6 +78,7 @@ pub enum Embedder { HuggingFace(hf::Embedder), OpenAi(openai::Embedder), UserProvided(manual::Embedder), + Ollama(ollama::Embedder), } #[derive(Debug, Clone, Default, serde::Deserialize, serde::Serialize)] @@ -127,6 +130,7 @@ impl IntoIterator for EmbeddingConfigs { pub enum EmbedderOptions { HuggingFace(hf::EmbedderOptions), OpenAi(openai::EmbedderOptions), + Ollama(ollama::EmbedderOptions), UserProvided(manual::EmbedderOptions), } @@ -144,6 +148,10 @@ impl EmbedderOptions { pub fn openai(api_key: Option) -> Self { Self::OpenAi(openai::EmbedderOptions::with_default_model(api_key)) } + + pub fn ollama() -> Self { + Self::Ollama(ollama::EmbedderOptions::with_default_model()) + } } impl Embedder { @@ -151,6 +159,7 @@ impl Embedder { Ok(match options { EmbedderOptions::HuggingFace(options) => Self::HuggingFace(hf::Embedder::new(options)?), EmbedderOptions::OpenAi(options) => Self::OpenAi(openai::Embedder::new(options)?), + EmbedderOptions::Ollama(options) => Self::Ollama(ollama::Embedder::new(options)?), EmbedderOptions::UserProvided(options) => { Self::UserProvided(manual::Embedder::new(options)) } @@ -167,6 +176,10 @@ impl Embedder { let client = embedder.new_client()?; embedder.embed(texts, &client).await } + Embedder::Ollama(embedder) => { + let client = embedder.new_client()?; + embedder.embed(texts, &client).await + } Embedder::UserProvided(embedder) => embedder.embed(texts), } } @@ -181,6 +194,7 @@ impl Embedder { 
match self { Embedder::HuggingFace(embedder) => embedder.embed_chunks(text_chunks), Embedder::OpenAi(embedder) => embedder.embed_chunks(text_chunks), + Embedder::Ollama(embedder) => embedder.embed_chunks(text_chunks), Embedder::UserProvided(embedder) => embedder.embed_chunks(text_chunks), } } @@ -189,6 +203,7 @@ impl Embedder { match self { Embedder::HuggingFace(embedder) => embedder.chunk_count_hint(), Embedder::OpenAi(embedder) => embedder.chunk_count_hint(), + Embedder::Ollama(embedder) => embedder.chunk_count_hint(), Embedder::UserProvided(_) => 1, } } @@ -197,6 +212,7 @@ impl Embedder { match self { Embedder::HuggingFace(embedder) => embedder.prompt_count_in_chunk_hint(), Embedder::OpenAi(embedder) => embedder.prompt_count_in_chunk_hint(), + Embedder::Ollama(embedder) => embedder.prompt_count_in_chunk_hint(), Embedder::UserProvided(_) => 1, } } @@ -205,6 +221,7 @@ impl Embedder { match self { Embedder::HuggingFace(embedder) => embedder.dimensions(), Embedder::OpenAi(embedder) => embedder.dimensions(), + Embedder::Ollama(embedder) => embedder.dimensions(), Embedder::UserProvided(embedder) => embedder.dimensions(), } } @@ -213,6 +230,7 @@ impl Embedder { match self { Embedder::HuggingFace(embedder) => embedder.distribution(), Embedder::OpenAi(embedder) => embedder.distribution(), + Embedder::Ollama(embedder) => embedder.distribution(), Embedder::UserProvided(_embedder) => None, } } diff --git a/milli/src/vector/ollama.rs b/milli/src/vector/ollama.rs new file mode 100644 index 000000000..76988f70b --- /dev/null +++ b/milli/src/vector/ollama.rs @@ -0,0 +1,307 @@ +// Copied from "openai.rs" with the sections I actually understand changed for Ollama. +// The common components of the Ollama and OpenAI interfaces might need to be extracted. + +use std::fmt::Display; + +use reqwest::StatusCode; + +use super::error::{EmbedError, NewEmbedderError}; +use super::openai::Retry; +use super::{DistributionShift, Embedding, Embeddings}; + +#[derive(Debug)] +pub struct Embedder { + headers: reqwest::header::HeaderMap, + options: EmbedderOptions, +} + +#[derive(Debug, Clone, Hash, PartialEq, Eq, serde::Deserialize, serde::Serialize)] +pub struct EmbedderOptions { + pub embedding_model: EmbeddingModel, +} + +#[derive( + Debug, Clone, Hash, PartialEq, Eq, serde::Serialize, serde::Deserialize, deserr::Deserr, +)] +#[deserr(deny_unknown_fields)] +pub struct EmbeddingModel { + name: String, + dimensions: usize, +} + +#[derive(Debug, serde::Serialize)] +struct OllamaRequest<'a> { + model: &'a str, + prompt: &'a str, +} + +#[derive(Debug, serde::Deserialize)] +struct OllamaResponse { + embedding: Embedding, +} + +#[derive(Debug, serde::Deserialize)] +pub struct OllamaError { + error: String, +} + +impl EmbeddingModel { + pub fn max_token(&self) -> usize { + // this might not be the same for all models + 8192 + } + + pub fn default_dimensions(&self) -> usize { + // Dimensions for nomic-embed-text + 768 + } + + pub fn name(&self) -> String { + self.name.clone() + } + + pub fn from_name(name: &str) -> Self { + Self { name: name.to_string(), dimensions: 0 } + } + + pub fn supports_overriding_dimensions(&self) -> bool { + false + } +} + +impl Default for EmbeddingModel { + fn default() -> Self { + Self { name: "nomic-embed-text".to_string(), dimensions: 0 } + } +} + +impl EmbedderOptions { + pub fn with_default_model() -> Self { + Self { embedding_model: Default::default() } + } + + pub fn with_embedding_model(embedding_model: EmbeddingModel) -> Self { + Self { embedding_model } + } +} + +impl Embedder { + pub fn 
new_client(&self) -> Result<reqwest::Client, EmbedError> { + reqwest::ClientBuilder::new() + .default_headers(self.headers.clone()) + .build() + .map_err(EmbedError::openai_initialize_web_client) + } + + pub fn new(options: EmbedderOptions) -> Result<Self, NewEmbedderError> { + let mut headers = reqwest::header::HeaderMap::new(); + headers.insert( + reqwest::header::CONTENT_TYPE, + reqwest::header::HeaderValue::from_static("application/json"), + ); + + let mut embedder = Self { options, headers }; + + let rt = tokio::runtime::Builder::new_current_thread() + .enable_io() + .enable_time() + .build() + .map_err(EmbedError::openai_runtime_init) + .map_err(NewEmbedderError::ollama_could_not_determine_dimension)?; + + // Get dimensions from Ollama + let request = + OllamaRequest { model: &embedder.options.embedding_model.name(), prompt: "test" }; + // TODO: Refactor into shared error type + let client = embedder + .new_client() + .map_err(NewEmbedderError::ollama_could_not_determine_dimension)?; + + rt.block_on(async move { + let response = client + .post(get_ollama_path()) + .json(&request) + .send() + .await + .map_err(EmbedError::ollama_unexpected) + .map_err(NewEmbedderError::ollama_could_not_determine_dimension)?; + + // Any error at this point is reported as a missing model + let response = Self::check_response(response).await.map_err(|_err| { + let e = EmbedError::ollama_model_not_found(OllamaError { + error: format!("model: {}", embedder.options.embedding_model.name()), + }); + NewEmbedderError::ollama_could_not_determine_dimension(e) + })?; + + let response: OllamaResponse = response + .json() + .await + .map_err(EmbedError::ollama_unexpected) + .map_err(NewEmbedderError::ollama_could_not_determine_dimension)?; + + let embedding = Embeddings::from_single_embedding(response.embedding); + + embedder.options.embedding_model.dimensions = embedding.dimension(); + + tracing::info!( + "ollama model {} with dimensionality {} added", + embedder.options.embedding_model.name(), + embedding.dimension() + ); + + Ok(embedder) + }) + } + + async fn check_response(response: reqwest::Response) -> Result<reqwest::Response, Retry> { + if !response.status().is_success() { + // Fewer error cases are covered here than for OpenAI. + match response.status() { + StatusCode::TOO_MANY_REQUESTS => { + let error_response: OllamaError = response + .json() + .await + .map_err(EmbedError::ollama_unexpected) + .map_err(Retry::retry_later)?; + + return Err(Retry::rate_limited(EmbedError::ollama_too_many_requests( + OllamaError { error: error_response.error }, + ))); + } + StatusCode::SERVICE_UNAVAILABLE => { + let error_response: OllamaError = response + .json() + .await + .map_err(EmbedError::ollama_unexpected) + .map_err(Retry::retry_later)?; + return Err(Retry::retry_later(EmbedError::ollama_internal_server_error( + OllamaError { error: error_response.error }, + ))); + } + StatusCode::NOT_FOUND => { + let error_response: OllamaError = response + .json() + .await + .map_err(EmbedError::ollama_unexpected) + .map_err(Retry::give_up)?; + + return Err(Retry::give_up(EmbedError::ollama_model_not_found(OllamaError { + error: error_response.error, + }))); + } + code => { + return Err(Retry::give_up(EmbedError::ollama_unhandled_status_code( + code.as_u16(), + ))); + } + } + } + Ok(response) + } + + pub async fn embed( + &self, + texts: Vec<String>, + client: &reqwest::Client, + ) -> Result<Vec<Embeddings<f32>>, EmbedError> { + // Ollama only embeds one document at a time.
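+ // Each text is therefore sent as its own POST request to the Ollama embeddings endpoint (see `try_embed` below).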
+ let mut results = Vec::with_capacity(texts.len()); + + // Note: the retry loop is nested inside the texts loop, so each text is retried on its own; it may be worth inverting this later. + for text in texts { + // Retries copied from openai.rs + for attempt in 0..7 { + let retry_duration = match self.try_embed(&text, client).await { + Ok(result) => { + results.push(result); + break; + } + Err(retry) => { + tracing::warn!("Failed: {}", retry.error); + retry.into_duration(attempt) + } + }?; + tracing::warn!( + "Attempt #{}, retrying after {}ms.", + attempt, + retry_duration.as_millis() + ); + tokio::time::sleep(retry_duration).await; + } + } + + Ok(results) + } + + async fn try_embed( + &self, + text: &str, + client: &reqwest::Client, + ) -> Result<Embeddings<f32>, Retry> { + let request = OllamaRequest { model: &self.options.embedding_model.name(), prompt: text }; + let response = client + .post(get_ollama_path()) + .json(&request) + .send() + .await + .map_err(EmbedError::openai_network) + .map_err(Retry::retry_later)?; + + let response = Self::check_response(response).await?; + + let response: OllamaResponse = response + .json() + .await + .map_err(EmbedError::openai_unexpected) + .map_err(Retry::retry_later)?; + + tracing::trace!("response: {:?}", response.embedding); + + let embedding = Embeddings::from_single_embedding(response.embedding); + Ok(embedding) + } + + pub fn embed_chunks( + &self, + text_chunks: Vec<Vec<String>>, + ) -> Result<Vec<Vec<Embeddings<f32>>>, EmbedError> { + let rt = tokio::runtime::Builder::new_current_thread() + .enable_io() + .enable_time() + .build() + .map_err(EmbedError::openai_runtime_init)?; + let client = self.new_client()?; + rt.block_on(futures::future::try_join_all( + text_chunks.into_iter().map(|prompts| self.embed(prompts, &client)), + )) + } + + // Defaults copied from openai.rs + pub fn chunk_count_hint(&self) -> usize { + 10 + } + + pub fn prompt_count_in_chunk_hint(&self) -> usize { + 10 + } + + pub fn dimensions(&self) -> usize { + self.options.embedding_model.dimensions + } + + pub fn distribution(&self) -> Option<DistributionShift> { + None + } +} + +impl Display for OllamaError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.error) + } +} + +fn get_ollama_path() -> String { + // Important: the URL must point at the full embeddings endpoint, not just the hostname. + std::env::var("MEILI_OLLAMA_URL").unwrap_or("http://localhost:11434/api/embeddings".to_string()) +} diff --git a/milli/src/vector/openai.rs b/milli/src/vector/openai.rs index 33442dda4..dcf3f4c89 100644 --- a/milli/src/vector/openai.rs +++ b/milli/src/vector/openai.rs @@ -419,12 +419,12 @@ impl Embedder { // retrying in case of failure -struct Retry { - error: EmbedError, +pub struct Retry { + pub error: EmbedError, strategy: RetryStrategy, } -enum RetryStrategy { +pub enum RetryStrategy { GiveUp, Retry, RetryTokenized, @@ -432,23 +432,23 @@ enum RetryStrategy { } impl Retry { - fn give_up(error: EmbedError) -> Self { + pub fn give_up(error: EmbedError) -> Self { Self { error, strategy: RetryStrategy::GiveUp } } - fn retry_later(error: EmbedError) -> Self { + pub fn retry_later(error: EmbedError) -> Self { Self { error, strategy: RetryStrategy::Retry } } - fn retry_tokenized(error: EmbedError) -> Self { + pub fn retry_tokenized(error: EmbedError) -> Self { Self { error, strategy: RetryStrategy::RetryTokenized } } - fn rate_limited(error: EmbedError) -> Self { + pub fn rate_limited(error: EmbedError) -> Self { Self { error, strategy: RetryStrategy::RetryAfterRateLimit } } - fn into_duration(self, attempt: u32) -> Result { + pub fn into_duration(self, attempt: u32)
-> Result { match self.strategy { RetryStrategy::GiveUp => Err(self.error), RetryStrategy::Retry => Ok(tokio::time::Duration::from_millis((10u64).pow(attempt))), @@ -459,11 +459,11 @@ impl Retry { } } - fn must_tokenize(&self) -> bool { + pub fn must_tokenize(&self) -> bool { matches!(self.strategy, RetryStrategy::RetryTokenized) } - fn into_error(self) -> EmbedError { + pub fn into_error(self) -> EmbedError { self.error } } diff --git a/milli/src/vector/settings.rs b/milli/src/vector/settings.rs index 834a1c81d..89571e98a 100644 --- a/milli/src/vector/settings.rs +++ b/milli/src/vector/settings.rs @@ -1,7 +1,7 @@ use deserr::Deserr; use serde::{Deserialize, Serialize}; -use super::openai; +use super::{ollama, openai}; use crate::prompt::PromptData; use crate::update::Setting; use crate::vector::EmbeddingConfig; @@ -80,11 +80,15 @@ impl EmbeddingSettings { Self::SOURCE => { &[EmbedderSource::HuggingFace, EmbedderSource::OpenAi, EmbedderSource::UserProvided] } - Self::MODEL => &[EmbedderSource::HuggingFace, EmbedderSource::OpenAi], + Self::MODEL => { + &[EmbedderSource::HuggingFace, EmbedderSource::OpenAi, EmbedderSource::Ollama] + } Self::REVISION => &[EmbedderSource::HuggingFace], Self::API_KEY => &[EmbedderSource::OpenAi], Self::DIMENSIONS => &[EmbedderSource::OpenAi, EmbedderSource::UserProvided], - Self::DOCUMENT_TEMPLATE => &[EmbedderSource::HuggingFace, EmbedderSource::OpenAi], + Self::DOCUMENT_TEMPLATE => { + &[EmbedderSource::HuggingFace, EmbedderSource::OpenAi, EmbedderSource::Ollama] + } _other => unreachable!("unknown field"), } } @@ -101,6 +105,7 @@ impl EmbeddingSettings { EmbedderSource::HuggingFace => { &[Self::SOURCE, Self::MODEL, Self::REVISION, Self::DOCUMENT_TEMPLATE] } + EmbedderSource::Ollama => &[Self::SOURCE, Self::MODEL, Self::DOCUMENT_TEMPLATE], EmbedderSource::UserProvided => &[Self::SOURCE, Self::DIMENSIONS], } } @@ -134,6 +139,7 @@ pub enum EmbedderSource { #[default] OpenAi, HuggingFace, + Ollama, UserProvided, } @@ -143,6 +149,7 @@ impl std::fmt::Display for EmbedderSource { EmbedderSource::OpenAi => "openAi", EmbedderSource::HuggingFace => "huggingFace", EmbedderSource::UserProvided => "userProvided", + EmbedderSource::Ollama => "ollama", }; f.write_str(s) } @@ -195,6 +202,14 @@ impl From for EmbeddingSettings { dimensions: options.dimensions.map(Setting::Set).unwrap_or_default(), document_template: Setting::Set(prompt.template), }, + super::EmbedderOptions::Ollama(options) => Self { + source: Setting::Set(EmbedderSource::Ollama), + model: Setting::Set(options.embedding_model.name().to_owned()), + revision: Setting::NotSet, + api_key: Setting::NotSet, + dimensions: Setting::NotSet, + document_template: Setting::Set(prompt.template), + }, super::EmbedderOptions::UserProvided(options) => Self { source: Setting::Set(EmbedderSource::UserProvided), model: Setting::NotSet, @@ -229,6 +244,14 @@ impl From for EmbeddingConfig { } this.embedder_options = super::EmbedderOptions::OpenAi(options); } + EmbedderSource::Ollama => { + let mut options: ollama::EmbedderOptions = + super::ollama::EmbedderOptions::with_default_model(); + if let Some(model) = model.set() { + options.embedding_model = super::ollama::EmbeddingModel::from_name(&model); + } + this.embedder_options = super::EmbedderOptions::Ollama(options); + } EmbedderSource::HuggingFace => { let mut options = super::hf::EmbedderOptions::default(); if let Some(model) = model.set() { diff --git a/workloads/settings-add-remove-filters.json b/workloads/settings-add-remove-filters.json new file mode 100644 index 
000000000..04a57c707 --- /dev/null +++ b/workloads/settings-add-remove-filters.json @@ -0,0 +1,94 @@ +{ + "name": "settings-add-remove-filters.json", + "run_count": 2, + "extra_cli_args": [ + "--max-indexing-threads=4" + ], + "assets": { + "150k-people.json": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/150k-people.json", + "sha256": "28c359a0956958af0ba204ec11bad3045a0864a10b4838914fea25a01724f84b" + } + }, + "commands": [ + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "searchableAttributes": [ + "last_name", + "first_name", + "featured_job_organization_name", + "facebook_url", + "twitter_url", + "linkedin_url" + ], + "filterableAttributes": [ + "city", + "region", + "country_code" + ], + "dictionary": [ + "https://", + "http://", + "www.", + "crunchbase.com", + "facebook.com", + "twitter.com", + "linkedin.com" + ], + "stopWords": [ + "https://", + "http://", + "www.", + "crunchbase.com", + "facebook.com", + "twitter.com", + "linkedin.com" + ] + } + }, + "synchronous": "DontWait" + }, + { + "route": "indexes/peoples/documents", + "method": "POST", + "body": { + "asset": "150k-people.json" + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "filterableAttributes": [ + "city", + "region", + "country_code", + "featured_job_title", + "featured_job_organization_name" + ] + } + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "filterableAttributes": [ + "city", + "region", + "country_code" + ] + } + }, + "synchronous": "WaitForTask" + } + ] +} \ No newline at end of file diff --git a/workloads/settings-proximity-precision.json b/workloads/settings-proximity-precision.json new file mode 100644 index 000000000..48cfad49d --- /dev/null +++ b/workloads/settings-proximity-precision.json @@ -0,0 +1,86 @@ +{ + "name": "settings-proximity-precision.json", + "run_count": 2, + "extra_cli_args": [ + "--max-indexing-threads=4" + ], + "assets": { + "150k-people.json": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/150k-people.json", + "sha256": "28c359a0956958af0ba204ec11bad3045a0864a10b4838914fea25a01724f84b" + } + }, + "commands": [ + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "searchableAttributes": [ + "last_name", + "first_name", + "featured_job_organization_name", + "facebook_url", + "twitter_url", + "linkedin_url" + ], + "filterableAttributes": [ + "city", + "region", + "country_code", + "featured_job_title", + "featured_job_organization_name" + ], + "dictionary": [ + "https://", + "http://", + "www.", + "crunchbase.com", + "facebook.com", + "twitter.com", + "linkedin.com" + ], + "stopWords": [ + "https://", + "http://", + "www.", + "crunchbase.com", + "facebook.com", + "twitter.com", + "linkedin.com" + ] + } + }, + "synchronous": "DontWait" + }, + { + "route": "indexes/peoples/documents", + "method": "POST", + "body": { + "asset": "150k-people.json" + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "proximityPrecision": "byAttribute" + } + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "proximityPrecision": "byWord" + } + }, + "synchronous": 
"WaitForTask" + } + ] +} \ No newline at end of file diff --git a/workloads/settings-remove-add-swap-searchable.json b/workloads/settings-remove-add-swap-searchable.json new file mode 100644 index 000000000..ba315680f --- /dev/null +++ b/workloads/settings-remove-add-swap-searchable.json @@ -0,0 +1,114 @@ +{ + "name": "settings-remove-add-swap-searchable.json", + "run_count": 2, + "extra_cli_args": [ + "--max-indexing-threads=4" + ], + "assets": { + "150k-people.json": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/150k-people.json", + "sha256": "28c359a0956958af0ba204ec11bad3045a0864a10b4838914fea25a01724f84b" + } + }, + "commands": [ + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "searchableAttributes": [ + "last_name", + "first_name", + "featured_job_organization_name", + "facebook_url", + "twitter_url", + "linkedin_url" + ], + "filterableAttributes": [ + "city", + "region", + "country_code", + "featured_job_title", + "featured_job_organization_name" + ], + "dictionary": [ + "https://", + "http://", + "www.", + "crunchbase.com", + "facebook.com", + "twitter.com", + "linkedin.com" + ], + "stopWords": [ + "https://", + "http://", + "www.", + "crunchbase.com", + "facebook.com", + "twitter.com", + "linkedin.com" + ] + } + }, + "synchronous": "DontWait" + }, + { + "route": "indexes/peoples/documents", + "method": "POST", + "body": { + "asset": "150k-people.json" + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "searchableAttributes": [ + "last_name", + "first_name", + "featured_job_organization_name" + ] + } + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "searchableAttributes": [ + "last_name", + "first_name", + "featured_job_organization_name", + "facebook_url", + "twitter_url", + "linkedin_url" + ] + } + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "searchableAttributes": [ + "first_name", + "last_name", + "featured_job_organization_name", + "facebook_url", + "twitter_url", + "linkedin_url" + ] + } + }, + "synchronous": "WaitForTask" + } + ] +} \ No newline at end of file diff --git a/workloads/settings-typo.json b/workloads/settings-typo.json new file mode 100644 index 000000000..a272e6d1f --- /dev/null +++ b/workloads/settings-typo.json @@ -0,0 +1,115 @@ +{ + "name": "settings-typo.json", + "run_count": 2, + "extra_cli_args": [ + "--max-indexing-threads=4" + ], + "assets": { + "150k-people.json": { + "local_location": null, + "remote_location": "https://milli-benchmarks.fra1.digitaloceanspaces.com/bench/datasets/150k-people.json", + "sha256": "28c359a0956958af0ba204ec11bad3045a0864a10b4838914fea25a01724f84b" + } + }, + "commands": [ + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "searchableAttributes": [ + "last_name", + "first_name", + "featured_job_title", + "featured_job_organization_name", + "facebook_url", + "twitter_url", + "linkedin_url" + ], + "filterableAttributes": [ + "city", + "region", + "country_code", + "featured_job_title", + "featured_job_organization_name" + ], + "dictionary": [ + "https://", + "http://", + "www.", + "crunchbase.com", + "facebook.com", + "twitter.com", + "linkedin.com" + ], + "stopWords": [ + "https://", + "http://", + "www.", + 
"crunchbase.com", + "facebook.com", + "twitter.com", + "linkedin.com" + ] + } + }, + "synchronous": "DontWait" + }, + { + "route": "indexes/peoples/documents", + "method": "POST", + "body": { + "asset": "150k-people.json" + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "typoTolerance": { + "disableOnAttributes": ["featured_job_organization_name"] + } + } + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "typoTolerance": { + "disableOnAttributes": [] + } + } + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "typoTolerance": { + "disableOnWords": ["Ben","Elowitz","Kevin","Flaherty", "Ron", "Dustin", "Owen", "Chris", "Mark", "Matt", "Peter", "Van", "Head", "of"] + } + } + }, + "synchronous": "WaitForTask" + }, + { + "route": "indexes/peoples/settings", + "method": "PATCH", + "body": { + "inline": { + "typoTolerance": { + "disableOnWords": [] + } + } + }, + "synchronous": "WaitForTask" + } + ] +} \ No newline at end of file diff --git a/xtask/src/bench/dashboard.rs b/xtask/src/bench/dashboard.rs index 833426207..3ba0ca58b 100644 --- a/xtask/src/bench/dashboard.rs +++ b/xtask/src/bench/dashboard.rs @@ -11,157 +11,179 @@ use super::client::Client; use super::env_info; use super::workload::Workload; -pub async fn cancel_on_ctrl_c( - invocation_uuid: Uuid, - dashboard_client: Client, - abort_handle: AbortHandle, -) { - tracing::info!("press Ctrl-C to cancel the invocation"); - match ctrl_c().await { - Ok(()) => { - tracing::info!(%invocation_uuid, "received Ctrl-C, cancelling invocation"); - mark_as_failed(dashboard_client, invocation_uuid, None).await; - abort_handle.abort(); +#[derive(Debug, Clone)] +pub enum DashboardClient { + Client(Client), + Dry, +} + +impl DashboardClient { + pub fn new(dashboard_url: &str, api_key: Option<&str>) -> anyhow::Result { + let dashboard_client = Client::new( + Some(format!("{}/api/v1", dashboard_url)), + api_key, + Some(std::time::Duration::from_secs(60)), + )?; + + Ok(Self::Client(dashboard_client)) + } + + pub fn new_dry() -> Self { + Self::Dry + } + + pub async fn send_machine_info(&self, env: &env_info::Environment) -> anyhow::Result<()> { + let Self::Client(dashboard_client) = self else { return Ok(()) }; + + let response = dashboard_client + .put("machine") + .json(&json!({"hostname": env.hostname})) + .send() + .await + .context("sending machine information")?; + if !response.status().is_success() { + bail!( + "could not send machine information: {} {}", + response.status(), + response.text().await.unwrap_or_else(|_| "unknown".into()) + ); } - Err(error) => tracing::warn!( - error = &error as &dyn std::error::Error, - "failed to listen to Ctrl-C signal, invocation won't be canceled on Ctrl-C" - ), + Ok(()) } -} -pub async fn mark_as_failed( - dashboard_client: Client, - invocation_uuid: Uuid, - failure_reason: Option, -) { - let response = dashboard_client - .post("cancel-invocation") - .json(&json!({ - "invocation_uuid": invocation_uuid, - "failure_reason": failure_reason, - })) - .send() - .await; - let response = match response { - Ok(response) => response, - Err(response_error) => { - tracing::error!(error = &response_error as &dyn std::error::Error, %invocation_uuid, "could not mark invocation as failed"); - return; + pub async fn create_invocation( + &self, + build_info: build_info::BuildInfo, + 
commit_message: &str, + env: env_info::Environment, + max_workloads: usize, + reason: Option<&str>, + ) -> anyhow::Result { + let Self::Client(dashboard_client) = self else { return Ok(Uuid::now_v7()) }; + + let response = dashboard_client + .put("invocation") + .json(&json!({ + "commit": { + "sha1": build_info.commit_sha1, + "message": commit_message, + "commit_date": build_info.commit_timestamp, + "branch": build_info.branch, + "tag": build_info.describe.and_then(|describe| describe.as_tag()), + }, + "machine_hostname": env.hostname, + "max_workloads": max_workloads, + "reason": reason + })) + .send() + .await + .context("sending invocation")?; + if !response.status().is_success() { + bail!( + "could not send new invocation: {}", + response.text().await.unwrap_or_else(|_| "unknown".into()) + ); } - }; - - if !response.status().is_success() { - tracing::error!( - %invocation_uuid, - "could not mark invocation as failed: {}", - response.text().await.unwrap() - ); - return; - } - tracing::warn!(%invocation_uuid, "marked invocation as failed or canceled"); -} - -pub async fn send_machine_info( - dashboard_client: &Client, - env: &env_info::Environment, -) -> anyhow::Result<()> { - let response = dashboard_client - .put("machine") - .json(&json!({"hostname": env.hostname})) - .send() - .await - .context("sending machine information")?; - if !response.status().is_success() { - bail!( - "could not send machine information: {} {}", - response.status(), - response.text().await.unwrap_or_else(|_| "unknown".into()) - ); - } - Ok(()) -} - -pub async fn create_invocation( - dashboard_client: &Client, - build_info: build_info::BuildInfo, - commit_message: &str, - env: env_info::Environment, - max_workloads: usize, - reason: Option<&str>, -) -> anyhow::Result { - let response = dashboard_client - .put("invocation") - .json(&json!({ - "commit": { - "sha1": build_info.commit_sha1, - "message": commit_message, - "commit_date": build_info.commit_timestamp, - "branch": build_info.branch, - "tag": build_info.describe.and_then(|describe| describe.as_tag()), - }, - "machine_hostname": env.hostname, - "max_workloads": max_workloads, - "reason": reason - })) - .send() - .await - .context("sending invocation")?; - if !response.status().is_success() { - bail!( - "could not send new invocation: {}", - response.text().await.unwrap_or_else(|_| "unknown".into()) - ); - } - let invocation_uuid: Uuid = - response.json().await.context("could not deserialize invocation response as JSON")?; - Ok(invocation_uuid) -} - -pub async fn create_workload( - dashboard_client: &Client, - invocation_uuid: Uuid, - workload: &Workload, -) -> anyhow::Result { - let response = dashboard_client - .put("workload") - .json(&json!({ - "invocation_uuid": invocation_uuid, - "name": &workload.name, - "max_runs": workload.run_count, - })) - .send() - .await - .context("could not create new workload")?; - - if !response.status().is_success() { - bail!("creating new workload failed: {}", response.text().await.unwrap()) + let invocation_uuid: Uuid = + response.json().await.context("could not deserialize invocation response as JSON")?; + Ok(invocation_uuid) } - let workload_uuid: Uuid = - response.json().await.context("could not deserialize JSON as UUID")?; - Ok(workload_uuid) -} + pub async fn create_workload( + &self, + invocation_uuid: Uuid, + workload: &Workload, + ) -> anyhow::Result { + let Self::Client(dashboard_client) = self else { return Ok(Uuid::now_v7()) }; -pub async fn create_run( - dashboard_client: Client, - workload_uuid: Uuid, - 
report: &BTreeMap, -) -> anyhow::Result<()> { - let response = dashboard_client - .put("run") - .json(&json!({ - "workload_uuid": workload_uuid, - "data": report - })) - .send() - .await - .context("sending new run")?; - if !response.status().is_success() { - bail!( - "sending new run failed: {}", - response.text().await.unwrap_or_else(|_| "unknown".into()) - ) + let response = dashboard_client + .put("workload") + .json(&json!({ + "invocation_uuid": invocation_uuid, + "name": &workload.name, + "max_runs": workload.run_count, + })) + .send() + .await + .context("could not create new workload")?; + + if !response.status().is_success() { + bail!("creating new workload failed: {}", response.text().await.unwrap()) + } + + let workload_uuid: Uuid = + response.json().await.context("could not deserialize JSON as UUID")?; + Ok(workload_uuid) + } + + pub async fn create_run( + &self, + workload_uuid: Uuid, + report: &BTreeMap, + ) -> anyhow::Result<()> { + let Self::Client(dashboard_client) = self else { return Ok(()) }; + + let response = dashboard_client + .put("run") + .json(&json!({ + "workload_uuid": workload_uuid, + "data": report + })) + .send() + .await + .context("sending new run")?; + if !response.status().is_success() { + bail!( + "sending new run failed: {}", + response.text().await.unwrap_or_else(|_| "unknown".into()) + ) + } + Ok(()) + } + + pub async fn cancel_on_ctrl_c(self, invocation_uuid: Uuid, abort_handle: AbortHandle) { + tracing::info!("press Ctrl-C to cancel the invocation"); + match ctrl_c().await { + Ok(()) => { + tracing::info!(%invocation_uuid, "received Ctrl-C, cancelling invocation"); + self.mark_as_failed(invocation_uuid, None).await; + abort_handle.abort(); + } + Err(error) => tracing::warn!( + error = &error as &dyn std::error::Error, + "failed to listen to Ctrl-C signal, invocation won't be canceled on Ctrl-C" + ), + } + } + + pub async fn mark_as_failed(&self, invocation_uuid: Uuid, failure_reason: Option) { + if let DashboardClient::Client(client) = self { + let response = client + .post("cancel-invocation") + .json(&json!({ + "invocation_uuid": invocation_uuid, + "failure_reason": failure_reason, + })) + .send() + .await; + let response = match response { + Ok(response) => response, + Err(response_error) => { + tracing::error!(error = &response_error as &dyn std::error::Error, %invocation_uuid, "could not mark invocation as failed"); + return; + } + }; + + if !response.status().is_success() { + tracing::error!( + %invocation_uuid, + "could not mark invocation as failed: {}", + response.text().await.unwrap() + ); + return; + } + } + + tracing::warn!(%invocation_uuid, "marked invocation as failed or canceled"); } - Ok(()) } diff --git a/xtask/src/bench/mod.rs b/xtask/src/bench/mod.rs index 62c11b604..844b64f63 100644 --- a/xtask/src/bench/mod.rs +++ b/xtask/src/bench/mod.rs @@ -50,6 +50,10 @@ pub struct BenchDeriveArgs { #[arg(long, default_value_t = default_dashboard_url())] dashboard_url: String, + /// Don't actually send results to the dashboard + #[arg(long)] + no_dashboard: bool, + /// Directory to output reports. 
#[arg(long, default_value_t = default_report_folder())] report_folder: String, @@ -103,11 +107,11 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> { let assets_client = Client::new(None, args.assets_key.as_deref(), Some(std::time::Duration::from_secs(3600)))?; // 1h - let dashboard_client = Client::new( - Some(format!("{}/api/v1", args.dashboard_url)), - args.api_key.as_deref(), - Some(std::time::Duration::from_secs(60)), - )?; + let dashboard_client = if args.no_dashboard { + dashboard::DashboardClient::new_dry() + } else { + dashboard::DashboardClient::new(&args.dashboard_url, args.api_key.as_deref())? + }; // reporting uses its own client because keeping the stream open to wait for entries // blocks any other requests @@ -127,12 +131,12 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> { // enter runtime rt.block_on(async { - dashboard::send_machine_info(&dashboard_client, &env).await?; + dashboard_client.send_machine_info(&env).await?; let commit_message = build_info.commit_msg.context("missing commit message")?.split('\n').next().unwrap(); let max_workloads = args.workload_file.len(); let reason: Option<&str> = args.reason.as_deref(); - let invocation_uuid = dashboard::create_invocation(&dashboard_client, build_info, commit_message, env, max_workloads, reason).await?; + let invocation_uuid = dashboard_client.create_invocation( build_info, commit_message, env, max_workloads, reason).await?; tracing::info!(workload_count = args.workload_file.len(), "handling workload files"); @@ -167,7 +171,7 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> { let abort_handle = workload_runs.abort_handle(); tokio::spawn({ let dashboard_client = dashboard_client.clone(); - dashboard::cancel_on_ctrl_c(invocation_uuid, dashboard_client, abort_handle) + dashboard_client.cancel_on_ctrl_c(invocation_uuid, abort_handle) }); // wait for the end of the main task, handle result @@ -178,7 +182,7 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> { } Ok(Err(error)) => { tracing::error!(%invocation_uuid, error = %error, "invocation failed, attempting to report the failure to dashboard"); - dashboard::mark_as_failed(dashboard_client, invocation_uuid, Some(error.to_string())).await; + dashboard_client.mark_as_failed(invocation_uuid, Some(error.to_string())).await; tracing::warn!(%invocation_uuid, "invocation marked as failed following error"); Err(error) }, @@ -186,7 +190,7 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> { match join_error.try_into_panic() { Ok(panic) => { tracing::error!("invocation panicked, attempting to report the failure to dashboard"); - dashboard::mark_as_failed(dashboard_client, invocation_uuid, Some("Panicked".into())).await; + dashboard_client.mark_as_failed( invocation_uuid, Some("Panicked".into())).await; std::panic::resume_unwind(panic) } Err(_) => { diff --git a/xtask/src/bench/workload.rs b/xtask/src/bench/workload.rs index b3e952f29..d82c5ad19 100644 --- a/xtask/src/bench/workload.rs +++ b/xtask/src/bench/workload.rs @@ -12,8 +12,9 @@ use uuid::Uuid; use super::assets::Asset; use super::client::Client; use super::command::SyncMode; +use super::dashboard::DashboardClient; use super::BenchDeriveArgs; -use crate::bench::{assets, dashboard, meili_process}; +use crate::bench::{assets, meili_process}; #[derive(Deserialize)] pub struct Workload { @@ -25,7 +26,7 @@ pub struct Workload { } async fn run_commands( - dashboard_client: &Client, + dashboard_client: &DashboardClient, logs_client: &Client, meili_client: &Client, workload_uuid: Uuid, @@ 
-64,7 +65,7 @@ async fn run_commands( #[tracing::instrument(skip(assets_client, dashboard_client, logs_client, meili_client, workload, master_key, args), fields(workload = workload.name))] pub async fn execute( assets_client: &Client, - dashboard_client: &Client, + dashboard_client: &DashboardClient, logs_client: &Client, meili_client: &Client, invocation_uuid: Uuid, @@ -74,8 +75,7 @@ pub async fn execute( ) -> anyhow::Result<()> { assets::fetch_assets(assets_client, &workload.assets, &args.asset_folder).await?; - let workload_uuid = - dashboard::create_workload(dashboard_client, invocation_uuid, &workload).await?; + let workload_uuid = dashboard_client.create_workload(invocation_uuid, &workload).await?; let mut tasks = Vec::new(); @@ -113,7 +113,7 @@ pub async fn execute( #[allow(clippy::too_many_arguments)] // not best code quality, but this is a benchmark runner #[tracing::instrument(skip(dashboard_client, logs_client, meili_client, workload, master_key, args), fields(workload = %workload.name))] async fn execute_run( - dashboard_client: &Client, + dashboard_client: &DashboardClient, logs_client: &Client, meili_client: &Client, workload_uuid: Uuid, @@ -202,7 +202,7 @@ async fn start_report( } async fn stop_report( - dashboard_client: &Client, + dashboard_client: &DashboardClient, logs_client: &Client, workload_uuid: Uuid, filename: String, @@ -232,7 +232,7 @@ async fn stop_report( .context("could not convert trace to report")?; let context = || format!("writing report to {filename}"); - dashboard::create_run(dashboard_client, workload_uuid, &report).await?; + dashboard_client.create_run(workload_uuid, &report).await?; let mut output_file = std::io::BufWriter::new( std::fs::File::options()