From 9287858997b2a0392dd5637a093f190d381b0c02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 8 Mar 2023 16:14:00 +0100 Subject: [PATCH 01/56] Introduce a new facet_id_is_null_docids database in the index --- milli/src/index.rs | 8 +++++++- milli/src/update/clear_documents.rs | 2 ++ milli/src/update/delete_documents.rs | 8 ++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/milli/src/index.rs b/milli/src/index.rs index a4048dfb0..ae7bd211e 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -80,6 +80,7 @@ pub mod db_name { pub const FIELD_ID_WORD_COUNT_DOCIDS: &str = "field-id-word-count-docids"; pub const FACET_ID_F64_DOCIDS: &str = "facet-id-f64-docids"; pub const FACET_ID_EXISTS_DOCIDS: &str = "facet-id-exists-docids"; + pub const FACET_ID_IS_NULL_DOCIDS: &str = "facet-id-is-null-docids"; pub const FACET_ID_STRING_DOCIDS: &str = "facet-id-string-docids"; pub const FIELD_ID_DOCID_FACET_F64S: &str = "field-id-docid-facet-f64s"; pub const FIELD_ID_DOCID_FACET_STRINGS: &str = "field-id-docid-facet-strings"; @@ -130,6 +131,9 @@ pub struct Index { /// Maps the facet field id and the docids for which this field exists pub facet_id_exists_docids: Database, + /// Maps the facet field id and the docids for which this field is set as null + pub facet_id_is_null_docids: Database, + /// Maps the facet field id and ranges of numbers with the docids that corresponds to them. pub facet_id_f64_docids: Database, FacetGroupValueCodec>, /// Maps the facet field id and ranges of strings with the docids that corresponds to them. 
@@ -153,7 +157,7 @@ impl Index { ) -> Result { use db_name::*; - options.max_dbs(19); + options.max_dbs(20); unsafe { options.flag(Flags::MdbAlwaysFreePages) }; let env = options.open(path)?; @@ -175,6 +179,7 @@ impl Index { let facet_id_f64_docids = env.create_database(Some(FACET_ID_F64_DOCIDS))?; let facet_id_string_docids = env.create_database(Some(FACET_ID_STRING_DOCIDS))?; let facet_id_exists_docids = env.create_database(Some(FACET_ID_EXISTS_DOCIDS))?; + let facet_id_is_null_docids = env.create_database(Some(FACET_ID_IS_NULL_DOCIDS))?; let field_id_docid_facet_f64s = env.create_database(Some(FIELD_ID_DOCID_FACET_F64S))?; let field_id_docid_facet_strings = @@ -201,6 +206,7 @@ impl Index { facet_id_f64_docids, facet_id_string_docids, facet_id_exists_docids, + facet_id_is_null_docids, field_id_docid_facet_f64s, field_id_docid_facet_strings, documents, diff --git a/milli/src/update/clear_documents.rs b/milli/src/update/clear_documents.rs index 0296bc192..7ac09a785 100644 --- a/milli/src/update/clear_documents.rs +++ b/milli/src/update/clear_documents.rs @@ -34,6 +34,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> { facet_id_f64_docids, facet_id_string_docids, facet_id_exists_docids, + facet_id_is_null_docids, field_id_docid_facet_f64s, field_id_docid_facet_strings, documents, @@ -86,6 +87,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> { script_language_docids.clear(self.wtxn)?; facet_id_f64_docids.clear(self.wtxn)?; facet_id_exists_docids.clear(self.wtxn)?; + facet_id_is_null_docids.clear(self.wtxn)?; facet_id_string_docids.clear(self.wtxn)?; field_id_docid_facet_f64s.clear(self.wtxn)?; field_id_docid_facet_strings.clear(self.wtxn)?; diff --git a/milli/src/update/delete_documents.rs b/milli/src/update/delete_documents.rs index eeb67b829..7180d7d42 100644 --- a/milli/src/update/delete_documents.rs +++ b/milli/src/update/delete_documents.rs @@ -245,6 +245,7 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> { field_id_docid_facet_strings: _, script_language_docids, 
facet_id_exists_docids, + facet_id_is_null_docids, documents, } = self.index; @@ -523,6 +524,13 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> { &self.to_delete_docids, )?; + // We delete the documents ids that are under the facet field id values. + remove_docids_from_facet_id_exists_docids( + self.wtxn, + facet_id_is_null_docids, + &self.to_delete_docids, + )?; + self.index.put_soft_deleted_documents_ids(self.wtxn, &RoaringBitmap::new())?; Ok(DetailedDocumentDeletionResult { From 19ab4d1a159d3be2bda3bde05dcef244bdfd9393 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 8 Mar 2023 16:46:42 +0100 Subject: [PATCH 02/56] Classify the NULL fields values in the facet extractor --- .../extract/extract_fid_docid_facet_values.rs | 99 +++++++++++++------ .../src/update/index_documents/extract/mod.rs | 15 ++- 2 files changed, 78 insertions(+), 36 deletions(-) diff --git a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs index 71ac330e2..5fe6a7606 100644 --- a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs +++ b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs @@ -8,7 +8,7 @@ use charabia::normalizer::{CharNormalizer, CompatibilityDecompositionNormalizer} use heed::zerocopy::AsBytes; use heed::BytesEncode; use roaring::RoaringBitmap; -use serde_json::Value; +use serde_json::{from_slice, Value}; use super::helpers::{create_sorter, keep_first, sorter_into_reader, GrenadParameters}; use crate::error::InternalError; @@ -25,7 +25,8 @@ pub fn extract_fid_docid_facet_values( obkv_documents: grenad::Reader, indexer: GrenadParameters, faceted_fields: &HashSet, -) -> Result<(grenad::Reader, grenad::Reader, grenad::Reader)> { +) -> Result<(grenad::Reader, grenad::Reader, grenad::Reader, grenad::Reader)> +{ let max_memory = indexer.max_memory_by_thread(); let mut fid_docid_facet_numbers_sorter = 
create_sorter( @@ -47,6 +48,7 @@ pub fn extract_fid_docid_facet_values( ); let mut facet_exists_docids = BTreeMap::::new(); + let mut facet_is_null_docids = BTreeMap::::new(); let mut key_buffer = Vec::new(); let mut cursor = obkv_documents.into_cursor()?; @@ -70,33 +72,40 @@ pub fn extract_fid_docid_facet_values( // For the other extraction tasks, prefix the key with the field_id and the document_id key_buffer.extend_from_slice(docid_bytes); - let value = - serde_json::from_slice(field_bytes).map_err(InternalError::SerdeJson)?; - - let (numbers, strings) = extract_facet_values(&value); - - // insert facet numbers in sorter - for number in numbers { - key_buffer.truncate(size_of::() + size_of::()); - if let Some(value_bytes) = f64_into_bytes(number) { - key_buffer.extend_from_slice(&value_bytes); - key_buffer.extend_from_slice(&number.to_be_bytes()); - - fid_docid_facet_numbers_sorter.insert(&key_buffer, ().as_bytes())?; + let value = from_slice(field_bytes).map_err(InternalError::SerdeJson)?; + match extract_facet_values(&value) { + FilterableValues::Null => { + facet_is_null_docids.entry(field_id).or_default().insert(document); } - } + FilterableValues::Values { numbers, strings } => { + // insert facet numbers in sorter + for number in numbers { + key_buffer.truncate(size_of::() + size_of::()); + if let Some(value_bytes) = f64_into_bytes(number) { + key_buffer.extend_from_slice(&value_bytes); + key_buffer.extend_from_slice(&number.to_be_bytes()); - // insert normalized and original facet string in sorter - for (normalized, original) in strings.into_iter().filter(|(n, _)| !n.is_empty()) { - let normalised_truncated_value: String = normalized - .char_indices() - .take_while(|(idx, _)| idx + 4 < MAX_FACET_VALUE_LENGTH) - .map(|(_, c)| c) - .collect(); + fid_docid_facet_numbers_sorter + .insert(&key_buffer, ().as_bytes())?; + } + } - key_buffer.truncate(size_of::() + size_of::()); - key_buffer.extend_from_slice(normalised_truncated_value.as_bytes()); - 
fid_docid_facet_strings_sorter.insert(&key_buffer, original.as_bytes())?; + // insert normalized and original facet string in sorter + for (normalized, original) in + strings.into_iter().filter(|(n, _)| !n.is_empty()) + { + let normalised_truncated_value: String = normalized + .char_indices() + .take_while(|(idx, _)| idx + 4 < MAX_FACET_VALUE_LENGTH) + .map(|(_, c)| c) + .collect(); + + key_buffer.truncate(size_of::() + size_of::()); + key_buffer.extend_from_slice(normalised_truncated_value.as_bytes()); + fid_docid_facet_strings_sorter + .insert(&key_buffer, original.as_bytes())?; + } + } } } } @@ -113,14 +122,36 @@ pub fn extract_fid_docid_facet_values( } let facet_exists_docids_reader = writer_into_reader(facet_exists_docids_writer)?; + let mut facet_is_null_docids_writer = create_writer( + indexer.chunk_compression_type, + indexer.chunk_compression_level, + tempfile::tempfile()?, + ); + for (fid, bitmap) in facet_is_null_docids.into_iter() { + let bitmap_bytes = CboRoaringBitmapCodec::bytes_encode(&bitmap).unwrap(); + facet_is_null_docids_writer.insert(fid.to_be_bytes(), &bitmap_bytes)?; + } + let facet_is_null_docids_reader = writer_into_reader(facet_is_null_docids_writer)?; + Ok(( sorter_into_reader(fid_docid_facet_numbers_sorter, indexer)?, sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?, + facet_is_null_docids_reader, facet_exists_docids_reader, )) } -fn extract_facet_values(value: &Value) -> (Vec, Vec<(String, String)>) { +/// Represent what a document field contains. +enum FilterableValues { + Null, + /// Represents all the numbers and strings values found in this document field. 
+ Values { + numbers: Vec, + strings: Vec<(String, String)>, + }, +} + +fn extract_facet_values(value: &Value) -> FilterableValues { fn inner_extract_facet_values( value: &Value, can_recurse: bool, @@ -152,9 +183,13 @@ fn extract_facet_values(value: &Value) -> (Vec, Vec<(String, String)>) { } } - let mut facet_number_values = Vec::new(); - let mut facet_string_values = Vec::new(); - inner_extract_facet_values(value, true, &mut facet_number_values, &mut facet_string_values); - - (facet_number_values, facet_string_values) + match value { + Value::Null => FilterableValues::Null, + otherwise => { + let mut numbers = Vec::new(); + let mut strings = Vec::new(); + inner_extract_facet_values(otherwise, true, &mut numbers, &mut strings); + FilterableValues::Values { numbers, strings } + } + } } diff --git a/milli/src/update/index_documents/extract/mod.rs b/milli/src/update/index_documents/extract/mod.rs index c0f07cf79..9f9fc8f4f 100644 --- a/milli/src/update/index_documents/extract/mod.rs +++ b/milli/src/update/index_documents/extract/mod.rs @@ -55,7 +55,7 @@ pub(crate) fn data_from_obkv_documents( .collect::>()?; #[allow(clippy::type_complexity)] - let result: Result<(Vec<_>, (Vec<_>, (Vec<_>, Vec<_>)))> = flattened_obkv_chunks + let result: Result<(Vec<_>, (Vec<_>, (Vec<_>, (Vec<_>, Vec<_>))))> = flattened_obkv_chunks .par_bridge() .map(|flattened_obkv_chunks| { send_and_extract_flattened_documents_data( @@ -76,7 +76,10 @@ pub(crate) fn data_from_obkv_documents( docid_word_positions_chunks, ( docid_fid_facet_numbers_chunks, - (docid_fid_facet_strings_chunks, facet_exists_docids_chunks), + ( + docid_fid_facet_strings_chunks, + (facet_is_null_docids_chunks, facet_exists_docids_chunks), + ), ), ) = result?; @@ -235,7 +238,7 @@ fn send_and_extract_flattened_documents_data( grenad::Reader, ( grenad::Reader, - (grenad::Reader, grenad::Reader), + (grenad::Reader, (grenad::Reader, grenad::Reader)), ), )> { let flattened_documents_chunk = @@ -284,6 +287,7 @@ fn 
send_and_extract_flattened_documents_data( let ( docid_fid_facet_numbers_chunk, docid_fid_facet_strings_chunk, + fid_facet_is_null_docids_chunk, fid_facet_exists_docids_chunk, ) = extract_fid_docid_facet_values( flattened_documents_chunk.clone(), @@ -309,7 +313,10 @@ fn send_and_extract_flattened_documents_data( Ok(( docid_fid_facet_numbers_chunk, - (docid_fid_facet_strings_chunk, fid_facet_exists_docids_chunk), + ( + docid_fid_facet_strings_chunk, + (fid_facet_is_null_docids_chunk, fid_facet_exists_docids_chunk), + ), )) }, ); From 43ff236df8b7ad693c4cfe26d8a16dc09dc72359 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 8 Mar 2023 16:49:53 +0100 Subject: [PATCH 03/56] Write the NULL facet values in the database --- milli/src/update/index_documents/extract/mod.rs | 16 ++++++++++++++++ milli/src/update/index_documents/typed_chunk.rs | 12 ++++++++++++ 2 files changed, 28 insertions(+) diff --git a/milli/src/update/index_documents/extract/mod.rs b/milli/src/update/index_documents/extract/mod.rs index 9f9fc8f4f..c11d08405 100644 --- a/milli/src/update/index_documents/extract/mod.rs +++ b/milli/src/update/index_documents/extract/mod.rs @@ -99,6 +99,22 @@ pub(crate) fn data_from_obkv_documents( }); } + // merge facet_is_null_docids and send them as a typed chunk + { + let lmdb_writer_sx = lmdb_writer_sx.clone(); + rayon::spawn(move || { + debug!("merge {} database", "facet-id-is-null-docids"); + match facet_is_null_docids_chunks.merge(merge_cbo_roaring_bitmaps, &indexer) { + Ok(reader) => { + let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsNullDocids(reader))); + } + Err(e) => { + let _ = lmdb_writer_sx.send(Err(e)); + } + } + }); + } + spawn_extraction_task::<_, _, Vec>>( docid_word_positions_chunks.clone(), indexer, diff --git a/milli/src/update/index_documents/typed_chunk.rs b/milli/src/update/index_documents/typed_chunk.rs index b9b11cfa8..79f2e2c55 100644 --- a/milli/src/update/index_documents/typed_chunk.rs +++ 
b/milli/src/update/index_documents/typed_chunk.rs @@ -39,6 +39,7 @@ pub(crate) enum TypedChunk { FieldIdFacetStringDocids(grenad::Reader), FieldIdFacetNumberDocids(grenad::Reader), FieldIdFacetExistsDocids(grenad::Reader), + FieldIdFacetIsNullDocids(grenad::Reader), GeoPoints(grenad::Reader), ScriptLanguageDocids(HashMap<(Script, Language), RoaringBitmap>), } @@ -161,6 +162,17 @@ pub(crate) fn write_typed_chunk_into_index( )?; is_merged_database = true; } + TypedChunk::FieldIdFacetIsNullDocids(facet_id_is_null_docids) => { + append_entries_into_database( + facet_id_is_null_docids, + &index.facet_id_is_null_docids, + wtxn, + index_is_empty, + |value, _buffer| Ok(value), + merge_cbo_roaring_bitmaps, + )?; + is_merged_database = true; + } TypedChunk::WordPairProximityDocids(word_pair_proximity_docids_iter) => { append_entries_into_database( word_pair_proximity_docids_iter, From 7c0cd7172d02664af8f5f59e2520741185d5dca1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 8 Mar 2023 16:57:42 +0100 Subject: [PATCH 04/56] Introduce the NULL and NOT value NULL operator --- filter-parser/src/condition.rs | 16 ++++++++++++++++ filter-parser/src/lib.rs | 20 ++++++++++++++++---- milli/src/index.rs | 12 ++++++++++++ milli/src/search/facet/filter.rs | 4 ++++ 4 files changed, 48 insertions(+), 4 deletions(-) diff --git a/filter-parser/src/condition.rs b/filter-parser/src/condition.rs index 735ffec0e..834fac8b8 100644 --- a/filter-parser/src/condition.rs +++ b/filter-parser/src/condition.rs @@ -20,6 +20,7 @@ pub enum Condition<'a> { GreaterThanOrEqual(Token<'a>), Equal(Token<'a>), NotEqual(Token<'a>), + Null, Exists, LowerThan(Token<'a>), LowerThanOrEqual(Token<'a>), @@ -44,6 +45,21 @@ pub fn parse_condition(input: Span) -> IResult { Ok((input, condition)) } +/// null = value "NULL" +pub fn parse_null(input: Span) -> IResult { + let (input, key) = terminated(parse_value, tag("NULL"))(input)?; + + Ok((input, FilterCondition::Condition { fid: key, op: Null 
})) +} + +/// null = value "NOT" WS+ "NULL" +pub fn parse_not_null(input: Span) -> IResult { + let (input, key) = parse_value(input)?; + + let (input, _) = tuple((tag("NOT"), multispace1, tag("NULL")))(input)?; + Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Null })))) +} + /// exist = value "EXISTS" pub fn parse_exists(input: Span) -> IResult { let (input, key) = terminated(parse_value, tag("EXISTS"))(input)?; diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 8e21ff6be..36657587f 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -47,7 +47,7 @@ mod value; use std::fmt::Debug; pub use condition::{parse_condition, parse_to, Condition}; -use condition::{parse_exists, parse_not_exists}; +use condition::{parse_exists, parse_not_exists, parse_not_null, parse_null}; use error::{cut_with_err, ExpectedValueKind, NomErrorExt}; pub use error::{Error, ErrorKind}; use nom::branch::alt; @@ -414,6 +414,8 @@ fn parse_primary(input: Span, depth: usize) -> IResult { parse_in, parse_not_in, parse_condition, + parse_null, + parse_not_null, parse_exists, parse_not_exists, parse_to, @@ -496,14 +498,23 @@ pub mod tests { insta::assert_display_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}"); insta::assert_display_snapshot!(p("subscribers 100 TO 1000"), @"{subscribers} {100} TO {1000}"); - // Test NOT + EXISTS - insta::assert_display_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS"); + // Test NOT insta::assert_display_snapshot!(p("NOT subscribers < 1000"), @"NOT ({subscribers} < {1000})"); + insta::assert_display_snapshot!(p("NOT subscribers 100 TO 1000"), @"NOT ({subscribers} {100} TO {1000})"); + + // Test NULL + NOT NULL + insta::assert_display_snapshot!(p("subscribers NULL"), @"{subscribers} NULL"); + insta::assert_display_snapshot!(p("NOT subscribers NULL"), @"NOT ({subscribers} NULL)"); + insta::assert_display_snapshot!(p("subscribers NOT NULL"), @"NOT ({subscribers} NULL)"); + 
insta::assert_display_snapshot!(p("NOT subscribers NOT NULL"), @"{subscribers} NULL"); + insta::assert_display_snapshot!(p("subscribers NOT NULL"), @"NOT ({subscribers} NULL)"); + + // Test EXISTS + NOT EXITS + insta::assert_display_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS"); insta::assert_display_snapshot!(p("NOT subscribers EXISTS"), @"NOT ({subscribers} EXISTS)"); insta::assert_display_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)"); insta::assert_display_snapshot!(p("NOT subscribers NOT EXISTS"), @"{subscribers} EXISTS"); insta::assert_display_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)"); - insta::assert_display_snapshot!(p("NOT subscribers 100 TO 1000"), @"NOT ({subscribers} {100} TO {1000})"); // Test nested NOT insta::assert_display_snapshot!(p("NOT NOT NOT NOT x = 5"), @"{x} = {5}"); @@ -800,6 +811,7 @@ impl<'a> std::fmt::Display for Condition<'a> { Condition::GreaterThanOrEqual(token) => write!(f, ">= {token}"), Condition::Equal(token) => write!(f, "= {token}"), Condition::NotEqual(token) => write!(f, "!= {token}"), + Condition::Null => write!(f, "NULL"), Condition::Exists => write!(f, "EXISTS"), Condition::LowerThan(token) => write!(f, "< {token}"), Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"), diff --git a/milli/src/index.rs b/milli/src/index.rs index ae7bd211e..3316028df 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -839,6 +839,18 @@ impl Index { } } + /// Retrieve all the documents which contain this field id set as null + pub fn null_faceted_documents_ids( + &self, + rtxn: &RoTxn, + field_id: FieldId, + ) -> heed::Result { + match self.facet_id_is_null_docids.get(rtxn, &BEU16::new(field_id))? 
{ + Some(docids) => Ok(docids), + None => Ok(RoaringBitmap::new()), + } + } + /// Retrieve all the documents which contain this field id pub fn exists_faceted_documents_ids( &self, diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index a4ac53950..df42725c5 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -219,6 +219,10 @@ impl<'a> Filter<'a> { Condition::Between { from, to } => { (Included(from.parse_finite_float()?), Included(to.parse_finite_float()?)) } + Condition::Null => { + let is_null = index.null_faceted_documents_ids(rtxn, field_id)?; + return Ok(is_null); + } Condition::Exists => { let exist = index.exists_faceted_documents_ids(rtxn, field_id)?; return Ok(exist); From 7dc04747fd94e8b43d15584e351ecb1489ab2fed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 8 Mar 2023 17:37:08 +0100 Subject: [PATCH 05/56] Make clippy happy --- .../index_documents/extract/extract_fid_docid_facet_values.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs index 5fe6a7606..0589dc773 100644 --- a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs +++ b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs @@ -21,6 +21,7 @@ use crate::{CboRoaringBitmapCodec, DocumentId, FieldId, Result, BEU32, MAX_FACET /// Returns the generated grenad reader containing the docid the fid and the orginal value as key /// and the normalized value as value extracted from the given chunk of documents. 
#[logging_timer::time] +#[allow(clippy::type_complexity)] pub fn extract_fid_docid_facet_values( obkv_documents: grenad::Reader, indexer: GrenadParameters, From b1d61f5a02f50cdb9f34d23f192f953e155d30ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 9 Mar 2023 10:04:27 +0100 Subject: [PATCH 06/56] Add more tests for the NULL filter --- milli/tests/search/filters.rs | 7 +++++++ milli/tests/search/mod.rs | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) diff --git a/milli/tests/search/filters.rs b/milli/tests/search/filters.rs index 18de24ac3..0b5296b82 100644 --- a/milli/tests/search/filters.rs +++ b/milli/tests/search/filters.rs @@ -82,10 +82,17 @@ test_filter!( vec![Left(vec!["tag=red", "tag=green"]), Left(vec!["asc_desc_rank<3", "asc_desc_rank<1"])] ); test_filter!(exists_filter_1, vec![Right("opt1 EXISTS")]); +test_filter!(exists_filter_2, vec![Right("opt1.opt2 EXISTS")]); test_filter!(exists_filter_1_not, vec![Right("opt1 NOT EXISTS")]); test_filter!(exists_filter_1_not_alt, vec![Right("NOT opt1 EXISTS")]); test_filter!(exists_filter_1_double_not, vec![Right("NOT opt1 NOT EXISTS")]); +test_filter!(null_filter_1, vec![Right("opt1 NULL")]); +test_filter!(null_filter_2, vec![Right("opt1.opt2 NULL")]); +test_filter!(null_filter_1_not, vec![Right("opt1 NOT NULL")]); +test_filter!(null_filter_1_not_alt, vec![Right("NOT opt1 NULL")]); +test_filter!(null_filter_1_double_not, vec![Right("NOT opt1 NOT NULL")]); + test_filter!(in_filter, vec![Right("tag_in IN[1, 2, 3, four, five]")]); test_filter!(not_in_filter, vec![Right("tag_in NOT IN[1, 2, 3, four, five]")]); test_filter!(not_not_in_filter, vec![Right("NOT tag_in NOT IN[1, 2, 3, four, five]")]); diff --git a/milli/tests/search/mod.rs b/milli/tests/search/mod.rs index 18c74e344..e67c1bc64 100644 --- a/milli/tests/search/mod.rs +++ b/milli/tests/search/mod.rs @@ -205,6 +205,18 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option { } else if let 
Some(opt1) = &document.opt1 { id = contains_key_rec(opt1, "opt2").then(|| document.id.clone()); } + } else if matches!(filter, "opt1 NULL" | "NOT opt1 NOT NULL") { + id = document.opt1.as_ref().map_or(false, |v| v.is_null()).then(|| document.id.clone()); + } else if matches!(filter, "NOT opt1 NULL" | "opt1 NOT NULL") { + id = document.opt1.as_ref().map_or(true, |v| !v.is_null()).then(|| document.id.clone()); + } else if matches!(filter, "opt1.opt2 NULL") { + if document.opt1opt2.as_ref().map_or(false, |v| v.is_null()) { + id = Some(document.id.clone()); + } else if let Some(opt1) = &document.opt1 { + if !opt1.is_null() { + id = contains_null_rec(opt1, "opt2").then(|| document.id.clone()); + } + } } else if matches!( filter, "tag_in IN[1, 2, 3, four, five]" | "NOT tag_in NOT IN[1, 2, 3, four, five]" @@ -240,6 +252,28 @@ pub fn contains_key_rec(v: &serde_json::Value, key: &str) -> bool { } } +pub fn contains_null_rec(v: &serde_json::Value, key: &str) -> bool { + match v { + serde_json::Value::Object(v) => { + for (k, v) in v.iter() { + if k == key && v.is_null() || contains_null_rec(v, key) { + return true; + } + } + false + } + serde_json::Value::Array(v) => { + for v in v.iter() { + if contains_null_rec(v, key) { + return true; + } + } + false + } + _ => false, + } +} + pub fn expected_filtered_ids(filters: Vec, &str>>) -> HashSet { let dataset: Vec = serde_json::Deserializer::from_str(CONTENT).into_iter().map(|r| r.unwrap()).collect(); From e106b16148ba7231af3045be5e8384813373b509 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 9 Mar 2023 13:01:43 +0100 Subject: [PATCH 07/56] Fix a typo in a variable Co-authored-by: Louis Dureuil aaa --- .../index_documents/extract/extract_fid_docid_facet_values.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs index 
0589dc773..be7b44eee 100644 --- a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs +++ b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs @@ -95,14 +95,14 @@ pub fn extract_fid_docid_facet_values( for (normalized, original) in strings.into_iter().filter(|(n, _)| !n.is_empty()) { - let normalised_truncated_value: String = normalized + let normalized_truncated_value: String = normalized .char_indices() .take_while(|(idx, _)| idx + 4 < MAX_FACET_VALUE_LENGTH) .map(|(_, c)| c) .collect(); key_buffer.truncate(size_of::() + size_of::()); - key_buffer.extend_from_slice(normalised_truncated_value.as_bytes()); + key_buffer.extend_from_slice(normalized_truncated_value.as_bytes()); fid_docid_facet_strings_sorter .insert(&key_buffer, original.as_bytes())?; } From e064c52544e97dbad7bc43732fc5301db6ea1988 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 9 Mar 2023 13:05:54 +0100 Subject: [PATCH 08/56] Rename an internal facet deletion method --- milli/src/update/delete_documents.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/milli/src/update/delete_documents.rs b/milli/src/update/delete_documents.rs index 7180d7d42..bb232d7cc 100644 --- a/milli/src/update/delete_documents.rs +++ b/milli/src/update/delete_documents.rs @@ -518,14 +518,14 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> { drop(iter); // We delete the documents ids that are under the facet field id values. - remove_docids_from_facet_id_exists_docids( + remove_docids_from_facet_id_docids( self.wtxn, facet_id_exists_docids, &self.to_delete_docids, )?; // We delete the documents ids that are under the facet field id values. 
- remove_docids_from_facet_id_exists_docids( + remove_docids_from_facet_id_docids( self.wtxn, facet_id_is_null_docids, &self.to_delete_docids, @@ -633,7 +633,7 @@ fn remove_docids_from_field_id_docid_facet_value( Ok(all_affected_facet_values) } -fn remove_docids_from_facet_id_exists_docids<'a, C>( +fn remove_docids_from_facet_id_docids<'a, C>( wtxn: &'a mut heed::RwTxn, db: &heed::Database, to_remove: &RoaringBitmap, From 0ad53784e73b6c2da743d9cafee999002a191f1d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 9 Mar 2023 13:21:21 +0100 Subject: [PATCH 09/56] Create a new struct to reduce the type complexity --- .../extract/extract_fid_docid_facet_values.rs | 24 ++++++++++++------- .../src/update/index_documents/extract/mod.rs | 6 ++--- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs index be7b44eee..6460af812 100644 --- a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs +++ b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs @@ -16,18 +16,24 @@ use crate::facet::value_encoding::f64_into_bytes; use crate::update::index_documents::{create_writer, writer_into_reader}; use crate::{CboRoaringBitmapCodec, DocumentId, FieldId, Result, BEU32, MAX_FACET_VALUE_LENGTH}; +/// The extracted facet values stored in grenad files by type. +pub struct ExtractedFacetValues { + pub docid_fid_facet_numbers_chunk: grenad::Reader, + pub docid_fid_facet_strings_chunk: grenad::Reader, + pub fid_facet_is_null_docids_chunk: grenad::Reader, + pub fid_facet_exists_docids_chunk: grenad::Reader, +} + /// Extracts the facet values of each faceted field of each document. /// /// Returns the generated grenad reader containing the docid the fid and the orginal value as key /// and the normalized value as value extracted from the given chunk of documents. 
#[logging_timer::time] -#[allow(clippy::type_complexity)] pub fn extract_fid_docid_facet_values( obkv_documents: grenad::Reader, indexer: GrenadParameters, faceted_fields: &HashSet, -) -> Result<(grenad::Reader, grenad::Reader, grenad::Reader, grenad::Reader)> -{ +) -> Result { let max_memory = indexer.max_memory_by_thread(); let mut fid_docid_facet_numbers_sorter = create_sorter( @@ -134,12 +140,12 @@ pub fn extract_fid_docid_facet_values( } let facet_is_null_docids_reader = writer_into_reader(facet_is_null_docids_writer)?; - Ok(( - sorter_into_reader(fid_docid_facet_numbers_sorter, indexer)?, - sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?, - facet_is_null_docids_reader, - facet_exists_docids_reader, - )) + Ok(ExtractedFacetValues { + docid_fid_facet_numbers_chunk: sorter_into_reader(fid_docid_facet_numbers_sorter, indexer)?, + docid_fid_facet_strings_chunk: sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?, + fid_facet_is_null_docids_chunk: facet_is_null_docids_reader, + fid_facet_exists_docids_chunk: facet_exists_docids_reader, + }) } /// Represent what a document field contains. 
diff --git a/milli/src/update/index_documents/extract/mod.rs b/milli/src/update/index_documents/extract/mod.rs index c11d08405..4a5c9b64c 100644 --- a/milli/src/update/index_documents/extract/mod.rs +++ b/milli/src/update/index_documents/extract/mod.rs @@ -18,7 +18,7 @@ use rayon::prelude::*; use self::extract_docid_word_positions::extract_docid_word_positions; use self::extract_facet_number_docids::extract_facet_number_docids; use self::extract_facet_string_docids::extract_facet_string_docids; -use self::extract_fid_docid_facet_values::extract_fid_docid_facet_values; +use self::extract_fid_docid_facet_values::{extract_fid_docid_facet_values, ExtractedFacetValues}; use self::extract_fid_word_count_docids::extract_fid_word_count_docids; use self::extract_geo_points::extract_geo_points; use self::extract_word_docids::extract_word_docids; @@ -300,12 +300,12 @@ fn send_and_extract_flattened_documents_data( Ok(docid_word_positions_chunk) }, || { - let ( + let ExtractedFacetValues { docid_fid_facet_numbers_chunk, docid_fid_facet_strings_chunk, fid_facet_is_null_docids_chunk, fid_facet_exists_docids_chunk, - ) = extract_fid_docid_facet_values( + } = extract_fid_docid_facet_values( flattened_documents_chunk.clone(), indexer, faceted_fields, From ff86073288ce407e25710af8d0c9cad4ae80bacf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 9 Mar 2023 13:32:27 +0100 Subject: [PATCH 10/56] Add a snapshot for the NULL facet database --- milli/src/snapshot_tests.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/milli/src/snapshot_tests.rs b/milli/src/snapshot_tests.rs index f7f1a97e6..c6ea8f3dd 100644 --- a/milli/src/snapshot_tests.rs +++ b/milli/src/snapshot_tests.rs @@ -271,6 +271,11 @@ pub fn snap_facet_id_exists_docids(index: &Index) -> String { &format!("{facet_id:<3} {}", display_bitmap(&docids)) }) } +pub fn snap_facet_id_is_null_docids(index: &Index) -> String { + make_db_snap_from_iter!(index, facet_id_is_null_docids, |(facet_id, 
docids)| { + &format!("{facet_id:<3} {}", display_bitmap(&docids)) + }) +} pub fn snap_facet_id_string_docids(index: &Index) -> String { make_db_snap_from_iter!(index, facet_id_string_docids, |( FacetGroupKey { field_id, level, left_bound }, @@ -495,6 +500,9 @@ macro_rules! full_snap_of_db { ($index:ident, facet_id_exists_docids) => {{ $crate::snapshot_tests::snap_facet_id_exists_docids(&$index) }}; + ($index:ident, facet_id_is_null_docids) => {{ + $crate::snapshot_tests::snap_facet_id_is_null_docids(&$index) + }}; ($index:ident, documents_ids) => {{ $crate::snapshot_tests::snap_documents_ids(&$index) }}; From df48ac8803b908c9f080b5481930983103f63a07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 9 Mar 2023 13:53:37 +0100 Subject: [PATCH 11/56] Add one more test for the NULL operator --- milli/src/update/index_documents/mod.rs | 108 ++++++++++++++++++++++++ 1 file changed, 108 insertions(+) diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs index 2a7930f84..7b9bd7834 100644 --- a/milli/src/update/index_documents/mod.rs +++ b/milli/src/update/index_documents/mod.rs @@ -1758,6 +1758,114 @@ mod tests { check_ok(&index); } + #[test] + fn index_documents_check_is_null_database() { + let content = || { + documents!([ + { + "id": 0, + "colour": null, + }, + { + "id": 6, + "colour": { + "green": null + } + }, + { + "id": 7, + "colour": { + "green": { + "blue": null + } + } + }, + { + "id": 8, + "colour": 0, + }, + { + "id": 9, + "colour": [] + }, + { + "id": 10, + "colour": {} + }, + { + "id": 12, + "colour": [1] + }, + { + "id": 13 + }, + { + "id": 14, + "colour": { + "green": 1 + } + }, + { + "id": 15, + "colour": { + "green": { + "blue": [] + } + } + } + ]) + }; + + let check_ok = |index: &Index| { + let rtxn = index.read_txn().unwrap(); + let facets = index.faceted_fields(&rtxn).unwrap(); + assert_eq!(facets, hashset!(S("colour"), S("colour.green"), S("colour.green.blue"))); + + let colour_id = 
index.fields_ids_map(&rtxn).unwrap().id("colour").unwrap(); + let colour_green_id = index.fields_ids_map(&rtxn).unwrap().id("colour.green").unwrap(); + let colour_blue_id = + index.fields_ids_map(&rtxn).unwrap().id("colour.green.blue").unwrap(); + + let bitmap_null_colour = + index.facet_id_is_null_docids.get(&rtxn, &BEU16::new(colour_id)).unwrap().unwrap(); + assert_eq!(bitmap_null_colour.into_iter().collect::>(), vec![0]); + + let bitmap_colour_green = index + .facet_id_is_null_docids + .get(&rtxn, &BEU16::new(colour_green_id)) + .unwrap() + .unwrap(); + assert_eq!(bitmap_colour_green.into_iter().collect::>(), vec![1]); + + let bitmap_colour_blue = index + .facet_id_is_null_docids + .get(&rtxn, &BEU16::new(colour_blue_id)) + .unwrap() + .unwrap(); + assert_eq!(bitmap_colour_blue.into_iter().collect::>(), vec![2]); + }; + + let faceted_fields = hashset!(S("colour")); + + let index = TempIndex::new(); + index.add_documents(content()).unwrap(); + index + .update_settings(|settings| { + settings.set_filterable_fields(faceted_fields.clone()); + }) + .unwrap(); + check_ok(&index); + + let index = TempIndex::new(); + index + .update_settings(|settings| { + settings.set_filterable_fields(faceted_fields.clone()); + }) + .unwrap(); + index.add_documents(content()).unwrap(); + check_ok(&index); + } + #[test] fn primary_key_must_not_contain_floats() { let index = TempIndex::new_with_map_size(4096 * 100); From c25779afba1e7fadd9365422db87f2e0b5ec3635 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Mon, 13 Mar 2023 17:40:34 +0100 Subject: [PATCH 12/56] Specify that the NULL keyword is a keyword too --- filter-parser/src/value.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/filter-parser/src/value.rs b/filter-parser/src/value.rs index 2296c0769..9a0a5e710 100644 --- a/filter-parser/src/value.rs +++ b/filter-parser/src/value.rs @@ -178,7 +178,10 @@ fn is_syntax_component(c: char) -> bool { } fn is_keyword(s: &str) -> bool { - matches!(s, "AND" | "OR" | 
"IN" | "NOT" | "TO" | "EXISTS" | "_geoRadius" | "_geoBoundingBox") + matches!( + s, + "AND" | "OR" | "IN" | "NOT" | "TO" | "EXISTS" | "NULL" | "_geoRadius" | "_geoBoundingBox" + ) } #[cfg(test)] From 030263caa36c73e4d9a178621483231718f0e2e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Tue, 14 Mar 2023 10:31:04 +0100 Subject: [PATCH 13/56] Change the IS NULL filter syntax to use the IS keyword --- filter-parser/src/condition.rs | 13 +++++++------ filter-parser/src/lib.rs | 8 ++++---- filter-parser/src/value.rs | 11 ++++++++++- 3 files changed, 21 insertions(+), 11 deletions(-) diff --git a/filter-parser/src/condition.rs b/filter-parser/src/condition.rs index 834fac8b8..fe424539f 100644 --- a/filter-parser/src/condition.rs +++ b/filter-parser/src/condition.rs @@ -45,18 +45,19 @@ pub fn parse_condition(input: Span) -> IResult { Ok((input, condition)) } -/// null = value "NULL" -pub fn parse_null(input: Span) -> IResult { - let (input, key) = terminated(parse_value, tag("NULL"))(input)?; +/// null = value "IS" WS+ "NULL" +pub fn parse_is_null(input: Span) -> IResult { + let (input, key) = parse_value(input)?; + let (input, _) = tuple((tag("IS"), multispace1, tag("NULL")))(input)?; Ok((input, FilterCondition::Condition { fid: key, op: Null })) } -/// null = value "NOT" WS+ "NULL" -pub fn parse_not_null(input: Span) -> IResult { +/// null = value "IS" WS+ "NOT" WS+ "NULL" +pub fn parse_is_not_null(input: Span) -> IResult { let (input, key) = parse_value(input)?; - let (input, _) = tuple((tag("NOT"), multispace1, tag("NULL")))(input)?; + let (input, _) = tuple((tag("IS"), multispace1, tag("NOT"), multispace1, tag("NULL")))(input)?; Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Null })))) } diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 36657587f..513da07c5 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -47,7 +47,7 @@ mod value; use std::fmt::Debug; pub use 
condition::{parse_condition, parse_to, Condition}; -use condition::{parse_exists, parse_not_exists, parse_not_null, parse_null}; +use condition::{parse_exists, parse_is_not_null, parse_is_null, parse_not_exists}; use error::{cut_with_err, ExpectedValueKind, NomErrorExt}; pub use error::{Error, ErrorKind}; use nom::branch::alt; @@ -414,8 +414,8 @@ fn parse_primary(input: Span, depth: usize) -> IResult { parse_in, parse_not_in, parse_condition, - parse_null, - parse_not_null, + parse_is_null, + parse_is_not_null, parse_exists, parse_not_exists, parse_to, @@ -811,7 +811,7 @@ impl<'a> std::fmt::Display for Condition<'a> { Condition::GreaterThanOrEqual(token) => write!(f, ">= {token}"), Condition::Equal(token) => write!(f, "= {token}"), Condition::NotEqual(token) => write!(f, "!= {token}"), - Condition::Null => write!(f, "NULL"), + Condition::Null => write!(f, "IS NULL"), Condition::Exists => write!(f, "EXISTS"), Condition::LowerThan(token) => write!(f, "< {token}"), Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"), diff --git a/filter-parser/src/value.rs b/filter-parser/src/value.rs index 9a0a5e710..f8f1c43bc 100644 --- a/filter-parser/src/value.rs +++ b/filter-parser/src/value.rs @@ -180,7 +180,16 @@ fn is_syntax_component(c: char) -> bool { fn is_keyword(s: &str) -> bool { matches!( s, - "AND" | "OR" | "IN" | "NOT" | "TO" | "EXISTS" | "NULL" | "_geoRadius" | "_geoBoundingBox" + "AND" + | "OR" + | "IN" + | "NOT" + | "TO" + | "EXISTS" + | "IS" + | "NULL" + | "_geoRadius" + | "_geoBoundingBox" ) } From fa2ea4a3793ce1cd78d4d4f1b38f94f7273fb5c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Tue, 14 Mar 2023 10:31:27 +0100 Subject: [PATCH 14/56] Update the test to accept the new IS syntax --- filter-parser/src/error.rs | 4 ++-- filter-parser/src/lib.rs | 20 ++++++++++---------- meilisearch/tests/search/errors.rs | 4 ++-- milli/tests/search/filters.rs | 10 +++++----- milli/tests/search/mod.rs | 6 +++--- 5 files changed, 22 
insertions(+), 22 deletions(-) diff --git a/filter-parser/src/error.rs b/filter-parser/src/error.rs index 4d9d89859..fc6ad8f6d 100644 --- a/filter-parser/src/error.rs +++ b/filter-parser/src/error.rs @@ -144,10 +144,10 @@ impl<'a> Display for Error<'a> { writeln!(f, "Expression `{}` is missing the following closing delimiter: `{}`.", escaped_input, c)? } ErrorKind::InvalidPrimary if input.trim().is_empty() => { - writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.")? + writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.")? } ErrorKind::InvalidPrimary => { - writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `{}`.", escaped_input)? + writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `{}`.", escaped_input)? } ErrorKind::ExpectedEof => { writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)? 
diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 513da07c5..c75ada205 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -503,11 +503,11 @@ pub mod tests { insta::assert_display_snapshot!(p("NOT subscribers 100 TO 1000"), @"NOT ({subscribers} {100} TO {1000})"); // Test NULL + NOT NULL - insta::assert_display_snapshot!(p("subscribers NULL"), @"{subscribers} NULL"); - insta::assert_display_snapshot!(p("NOT subscribers NULL"), @"NOT ({subscribers} NULL)"); - insta::assert_display_snapshot!(p("subscribers NOT NULL"), @"NOT ({subscribers} NULL)"); - insta::assert_display_snapshot!(p("NOT subscribers NOT NULL"), @"{subscribers} NULL"); - insta::assert_display_snapshot!(p("subscribers NOT NULL"), @"NOT ({subscribers} NULL)"); + insta::assert_display_snapshot!(p("subscribers IS NULL"), @"{subscribers} IS NULL"); + insta::assert_display_snapshot!(p("NOT subscribers IS NULL"), @"NOT ({subscribers} IS NULL)"); + insta::assert_display_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)"); + insta::assert_display_snapshot!(p("NOT subscribers IS NOT NULL"), @"{subscribers} IS NULL"); + insta::assert_display_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)"); // Test EXISTS + NOT EXITS insta::assert_display_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS"); @@ -587,7 +587,7 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("'OR'"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`. 
1:5 'OR' "###); @@ -597,12 +597,12 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("channel Ponce"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`. 1:14 channel Ponce "###); insta::assert_display_snapshot!(p("channel = Ponce OR"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing. 19:19 channel = Ponce OR "###); @@ -667,12 +667,12 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("colour NOT EXIST"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`. 1:17 colour NOT EXIST "###); insta::assert_display_snapshot!(p("subscribers 100 TO1000"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`. 
1:23 subscribers 100 TO1000 "###); diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index 9e4dbdcf5..ab42700f3 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -547,7 +547,7 @@ async fn filter_invalid_syntax_object() { index.wait_task(1).await; let expected_response = json!({ - "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", + "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -572,7 +572,7 @@ async fn filter_invalid_syntax_array() { index.wait_task(1).await; let expected_response = json!({ - "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", + "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" diff --git a/milli/tests/search/filters.rs b/milli/tests/search/filters.rs index 0b5296b82..57ad6a40b 100644 --- a/milli/tests/search/filters.rs +++ b/milli/tests/search/filters.rs @@ -87,11 +87,11 @@ test_filter!(exists_filter_1_not, vec![Right("opt1 NOT EXISTS")]); test_filter!(exists_filter_1_not_alt, vec![Right("NOT opt1 EXISTS")]); test_filter!(exists_filter_1_double_not, vec![Right("NOT opt1 NOT EXISTS")]); 
-test_filter!(null_filter_1, vec![Right("opt1 NULL")]); -test_filter!(null_filter_2, vec![Right("opt1.opt2 NULL")]); -test_filter!(null_filter_1_not, vec![Right("opt1 NOT NULL")]); -test_filter!(null_filter_1_not_alt, vec![Right("NOT opt1 NULL")]); -test_filter!(null_filter_1_double_not, vec![Right("NOT opt1 NOT NULL")]); +test_filter!(null_filter_1, vec![Right("opt1 IS NULL")]); +test_filter!(null_filter_2, vec![Right("opt1.opt2 IS NULL")]); +test_filter!(null_filter_1_not, vec![Right("opt1 IS NOT NULL")]); +test_filter!(null_filter_1_not_alt, vec![Right("NOT opt1 IS NULL")]); +test_filter!(null_filter_1_double_not, vec![Right("NOT opt1 IS NOT NULL")]); test_filter!(in_filter, vec![Right("tag_in IN[1, 2, 3, four, five]")]); test_filter!(not_in_filter, vec![Right("tag_in NOT IN[1, 2, 3, four, five]")]); diff --git a/milli/tests/search/mod.rs b/milli/tests/search/mod.rs index e67c1bc64..51852cced 100644 --- a/milli/tests/search/mod.rs +++ b/milli/tests/search/mod.rs @@ -205,11 +205,11 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option { } else if let Some(opt1) = &document.opt1 { id = contains_key_rec(opt1, "opt2").then(|| document.id.clone()); } - } else if matches!(filter, "opt1 NULL" | "NOT opt1 NOT NULL") { + } else if matches!(filter, "opt1 IS NULL" | "NOT opt1 IS NOT NULL") { id = document.opt1.as_ref().map_or(false, |v| v.is_null()).then(|| document.id.clone()); - } else if matches!(filter, "NOT opt1 NULL" | "opt1 NOT NULL") { + } else if matches!(filter, "NOT opt1 IS NULL" | "opt1 IS NOT NULL") { id = document.opt1.as_ref().map_or(true, |v| !v.is_null()).then(|| document.id.clone()); - } else if matches!(filter, "opt1.opt2 NULL") { + } else if matches!(filter, "opt1.opt2 IS NULL") { if document.opt1opt2.as_ref().map_or(false, |v| v.is_null()) { id = Some(document.id.clone()); } else if let Some(opt1) = &document.opt1 { From ea016d97afd2dfdae2fe15a12a7bfd6554d3a097 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Tue, 
14 Mar 2023 18:08:12 +0100 Subject: [PATCH 15/56] Implementing an IS EMPTY filter --- filter-parser/src/condition.rs | 17 +++++ filter-parser/src/error.rs | 6 +- filter-parser/src/lib.rs | 25 +++++-- filter-parser/src/value.rs | 1 + meilisearch/tests/search/errors.rs | 4 +- milli/src/index.rs | 20 +++++- milli/src/search/facet/filter.rs | 4 ++ milli/src/update/clear_documents.rs | 2 + milli/src/update/delete_documents.rs | 8 +++ .../extract/extract_fid_docid_facet_values.rs | 29 +++++++-- .../src/update/index_documents/extract/mod.rs | 65 +++++++++++++------ .../src/update/index_documents/typed_chunk.rs | 12 ++++ 12 files changed, 156 insertions(+), 37 deletions(-) diff --git a/filter-parser/src/condition.rs b/filter-parser/src/condition.rs index fe424539f..9abe1c6ea 100644 --- a/filter-parser/src/condition.rs +++ b/filter-parser/src/condition.rs @@ -21,6 +21,7 @@ pub enum Condition<'a> { Equal(Token<'a>), NotEqual(Token<'a>), Null, + Empty, Exists, LowerThan(Token<'a>), LowerThanOrEqual(Token<'a>), @@ -61,6 +62,22 @@ pub fn parse_is_not_null(input: Span) -> IResult { Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Null })))) } +/// empty = value "IS" WS+ "EMPTY" +pub fn parse_is_empty(input: Span) -> IResult { + let (input, key) = parse_value(input)?; + + let (input, _) = tuple((tag("IS"), multispace1, tag("EMPTY")))(input)?; + Ok((input, FilterCondition::Condition { fid: key, op: Empty })) +} + +/// empty = value "IS" WS+ "NOT" WS+ "EMPTY" +pub fn parse_is_not_empty(input: Span) -> IResult { + let (input, key) = parse_value(input)?; + + let (input, _) = tuple((tag("IS"), multispace1, tag("NOT"), multispace1, tag("EMPTY")))(input)?; + Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Empty })))) +} + /// exist = value "EXISTS" pub fn parse_exists(input: Span) -> IResult { let (input, key) = terminated(parse_value, tag("EXISTS"))(input)?; diff --git a/filter-parser/src/error.rs 
b/filter-parser/src/error.rs index fc6ad8f6d..cbb83c841 100644 --- a/filter-parser/src/error.rs +++ b/filter-parser/src/error.rs @@ -143,11 +143,9 @@ impl<'a> Display for Error<'a> { ErrorKind::MissingClosingDelimiter(c) => { writeln!(f, "Expression `{}` is missing the following closing delimiter: `{}`.", escaped_input, c)? } - ErrorKind::InvalidPrimary if input.trim().is_empty() => { - writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.")? - } ErrorKind::InvalidPrimary => { - writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `{}`.", escaped_input)? + let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) }; + writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` {}", text)? } ErrorKind::ExpectedEof => { writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)? 
diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index c75ada205..69eb6700f 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -47,7 +47,10 @@ mod value; use std::fmt::Debug; pub use condition::{parse_condition, parse_to, Condition}; -use condition::{parse_exists, parse_is_not_null, parse_is_null, parse_not_exists}; +use condition::{ + parse_exists, parse_is_empty, parse_is_not_empty, parse_is_not_null, parse_is_null, + parse_not_exists, +}; use error::{cut_with_err, ExpectedValueKind, NomErrorExt}; pub use error::{Error, ErrorKind}; use nom::branch::alt; @@ -416,6 +419,8 @@ fn parse_primary(input: Span, depth: usize) -> IResult { parse_condition, parse_is_null, parse_is_not_null, + parse_is_empty, + parse_is_not_empty, parse_exists, parse_not_exists, parse_to, @@ -509,6 +514,13 @@ pub mod tests { insta::assert_display_snapshot!(p("NOT subscribers IS NOT NULL"), @"{subscribers} IS NULL"); insta::assert_display_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)"); + // Test EMPTY + NOT EMPTY + insta::assert_display_snapshot!(p("subscribers IS EMPTY"), @"{subscribers} IS EMPTY"); + insta::assert_display_snapshot!(p("NOT subscribers IS EMPTY"), @"NOT ({subscribers} IS EMPTY)"); + insta::assert_display_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)"); + insta::assert_display_snapshot!(p("NOT subscribers IS NOT EMPTY"), @"{subscribers} IS EMPTY"); + insta::assert_display_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)"); + // Test EXISTS + NOT EXITS insta::assert_display_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS"); insta::assert_display_snapshot!(p("NOT subscribers EXISTS"), @"NOT ({subscribers} EXISTS)"); @@ -587,7 +599,7 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("'OR'"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or 
`_geoBoundingBox` at `\'OR\'`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`. 1:5 'OR' "###); @@ -597,12 +609,12 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("channel Ponce"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`. 1:14 channel Ponce "###); insta::assert_display_snapshot!(p("channel = Ponce OR"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing. 19:19 channel = Ponce OR "###); @@ -667,12 +679,12 @@ pub mod tests { "###); insta::assert_display_snapshot!(p("colour NOT EXIST"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`. 
1:17 colour NOT EXIST "###); insta::assert_display_snapshot!(p("subscribers 100 TO1000"), @r###" - Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`. + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`. 1:23 subscribers 100 TO1000 "###); @@ -812,6 +824,7 @@ impl<'a> std::fmt::Display for Condition<'a> { Condition::Equal(token) => write!(f, "= {token}"), Condition::NotEqual(token) => write!(f, "!= {token}"), Condition::Null => write!(f, "IS NULL"), + Condition::Empty => write!(f, "IS EMPTY"), Condition::Exists => write!(f, "EXISTS"), Condition::LowerThan(token) => write!(f, "< {token}"), Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"), diff --git a/filter-parser/src/value.rs b/filter-parser/src/value.rs index f8f1c43bc..518c0a25a 100644 --- a/filter-parser/src/value.rs +++ b/filter-parser/src/value.rs @@ -188,6 +188,7 @@ fn is_keyword(s: &str) -> bool { | "EXISTS" | "IS" | "NULL" + | "EMPTY" | "_geoRadius" | "_geoBoundingBox" ) diff --git a/meilisearch/tests/search/errors.rs b/meilisearch/tests/search/errors.rs index ab42700f3..2a0e4a39d 100644 --- a/meilisearch/tests/search/errors.rs +++ b/meilisearch/tests/search/errors.rs @@ -547,7 +547,7 @@ async fn filter_invalid_syntax_object() { index.wait_task(1).await; let expected_response = json!({ - "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", + "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, 
`_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" @@ -572,7 +572,7 @@ async fn filter_invalid_syntax_array() { index.wait_task(1).await; let expected_response = json!({ - "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", + "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_search_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_search_filter" diff --git a/milli/src/index.rs b/milli/src/index.rs index 3316028df..c60857bd0 100644 --- a/milli/src/index.rs +++ b/milli/src/index.rs @@ -81,6 +81,7 @@ pub mod db_name { pub const FACET_ID_F64_DOCIDS: &str = "facet-id-f64-docids"; pub const FACET_ID_EXISTS_DOCIDS: &str = "facet-id-exists-docids"; pub const FACET_ID_IS_NULL_DOCIDS: &str = "facet-id-is-null-docids"; + pub const FACET_ID_IS_EMPTY_DOCIDS: &str = "facet-id-is-empty-docids"; pub const FACET_ID_STRING_DOCIDS: &str = "facet-id-string-docids"; pub const FIELD_ID_DOCID_FACET_F64S: &str = "field-id-docid-facet-f64s"; pub const FIELD_ID_DOCID_FACET_STRINGS: &str = "field-id-docid-facet-strings"; @@ -130,9 +131,10 @@ pub struct Index { /// Maps the facet field id and the docids for which this field exists pub facet_id_exists_docids: Database, - /// Maps the facet field id and the docids for which this field is set as null pub facet_id_is_null_docids: Database, + /// Maps the facet field id and the docids for which this field is considered empty + pub facet_id_is_empty_docids: Database, /// Maps the 
facet field id and ranges of numbers with the docids that corresponds to them. pub facet_id_f64_docids: Database, FacetGroupValueCodec>, @@ -157,7 +159,7 @@ impl Index { ) -> Result { use db_name::*; - options.max_dbs(20); + options.max_dbs(21); unsafe { options.flag(Flags::MdbAlwaysFreePages) }; let env = options.open(path)?; @@ -180,6 +182,7 @@ impl Index { let facet_id_string_docids = env.create_database(Some(FACET_ID_STRING_DOCIDS))?; let facet_id_exists_docids = env.create_database(Some(FACET_ID_EXISTS_DOCIDS))?; let facet_id_is_null_docids = env.create_database(Some(FACET_ID_IS_NULL_DOCIDS))?; + let facet_id_is_empty_docids = env.create_database(Some(FACET_ID_IS_EMPTY_DOCIDS))?; let field_id_docid_facet_f64s = env.create_database(Some(FIELD_ID_DOCID_FACET_F64S))?; let field_id_docid_facet_strings = @@ -207,6 +210,7 @@ impl Index { facet_id_string_docids, facet_id_exists_docids, facet_id_is_null_docids, + facet_id_is_empty_docids, field_id_docid_facet_f64s, field_id_docid_facet_strings, documents, @@ -851,6 +855,18 @@ impl Index { } } + /// Retrieve all the documents which contain this field id and that is considered empty + pub fn empty_faceted_documents_ids( + &self, + rtxn: &RoTxn, + field_id: FieldId, + ) -> heed::Result { + match self.facet_id_is_empty_docids.get(rtxn, &BEU16::new(field_id))? 
{ + Some(docids) => Ok(docids), + None => Ok(RoaringBitmap::new()), + } + } + /// Retrieve all the documents which contain this field id pub fn exists_faceted_documents_ids( &self, diff --git a/milli/src/search/facet/filter.rs b/milli/src/search/facet/filter.rs index df42725c5..0c11b737e 100644 --- a/milli/src/search/facet/filter.rs +++ b/milli/src/search/facet/filter.rs @@ -223,6 +223,10 @@ impl<'a> Filter<'a> { let is_null = index.null_faceted_documents_ids(rtxn, field_id)?; return Ok(is_null); } + Condition::Empty => { + let is_empty = index.empty_faceted_documents_ids(rtxn, field_id)?; + return Ok(is_empty); + } Condition::Exists => { let exist = index.exists_faceted_documents_ids(rtxn, field_id)?; return Ok(exist); diff --git a/milli/src/update/clear_documents.rs b/milli/src/update/clear_documents.rs index 7ac09a785..326e0825d 100644 --- a/milli/src/update/clear_documents.rs +++ b/milli/src/update/clear_documents.rs @@ -35,6 +35,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> { facet_id_string_docids, facet_id_exists_docids, facet_id_is_null_docids, + facet_id_is_empty_docids, field_id_docid_facet_f64s, field_id_docid_facet_strings, documents, @@ -88,6 +89,7 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> { facet_id_f64_docids.clear(self.wtxn)?; facet_id_exists_docids.clear(self.wtxn)?; facet_id_is_null_docids.clear(self.wtxn)?; + facet_id_is_empty_docids.clear(self.wtxn)?; facet_id_string_docids.clear(self.wtxn)?; field_id_docid_facet_f64s.clear(self.wtxn)?; field_id_docid_facet_strings.clear(self.wtxn)?; diff --git a/milli/src/update/delete_documents.rs b/milli/src/update/delete_documents.rs index bb232d7cc..6f2fa5e5a 100644 --- a/milli/src/update/delete_documents.rs +++ b/milli/src/update/delete_documents.rs @@ -246,6 +246,7 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> { script_language_docids, facet_id_exists_docids, facet_id_is_null_docids, + facet_id_is_empty_docids, documents, } = self.index; @@ -531,6 +532,13 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 
'u, 'i> { &self.to_delete_docids, )?; + // We delete the documents ids that are under the facet field id values. + remove_docids_from_facet_id_docids( + self.wtxn, + facet_id_is_empty_docids, + &self.to_delete_docids, + )?; + self.index.put_soft_deleted_documents_ids(self.wtxn, &RoaringBitmap::new())?; Ok(DetailedDocumentDeletionResult { diff --git a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs index 6460af812..8f3d9408d 100644 --- a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs +++ b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs @@ -21,6 +21,7 @@ pub struct ExtractedFacetValues { pub docid_fid_facet_numbers_chunk: grenad::Reader, pub docid_fid_facet_strings_chunk: grenad::Reader, pub fid_facet_is_null_docids_chunk: grenad::Reader, + pub fid_facet_is_empty_docids_chunk: grenad::Reader, pub fid_facet_exists_docids_chunk: grenad::Reader, } @@ -56,6 +57,7 @@ pub fn extract_fid_docid_facet_values( let mut facet_exists_docids = BTreeMap::::new(); let mut facet_is_null_docids = BTreeMap::::new(); + let mut facet_is_empty_docids = BTreeMap::::new(); let mut key_buffer = Vec::new(); let mut cursor = obkv_documents.into_cursor()?; @@ -80,10 +82,14 @@ pub fn extract_fid_docid_facet_values( key_buffer.extend_from_slice(docid_bytes); let value = from_slice(field_bytes).map_err(InternalError::SerdeJson)?; + match extract_facet_values(&value) { FilterableValues::Null => { facet_is_null_docids.entry(field_id).or_default().insert(document); } + FilterableValues::Empty => { + facet_is_empty_docids.entry(field_id).or_default().insert(document); + } FilterableValues::Values { numbers, strings } => { // insert facet numbers in sorter for number in numbers { @@ -140,22 +146,34 @@ pub fn extract_fid_docid_facet_values( } let facet_is_null_docids_reader = writer_into_reader(facet_is_null_docids_writer)?; + let mut 
facet_is_empty_docids_writer = create_writer( + indexer.chunk_compression_type, + indexer.chunk_compression_level, + tempfile::tempfile()?, + ); + for (fid, bitmap) in facet_is_empty_docids.into_iter() { + let bitmap_bytes = CboRoaringBitmapCodec::bytes_encode(&bitmap).unwrap(); + facet_is_empty_docids_writer.insert(fid.to_be_bytes(), &bitmap_bytes)?; + } + let facet_is_empty_docids_reader = writer_into_reader(facet_is_empty_docids_writer)?; + Ok(ExtractedFacetValues { docid_fid_facet_numbers_chunk: sorter_into_reader(fid_docid_facet_numbers_sorter, indexer)?, docid_fid_facet_strings_chunk: sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?, fid_facet_is_null_docids_chunk: facet_is_null_docids_reader, + fid_facet_is_empty_docids_chunk: facet_is_empty_docids_reader, fid_facet_exists_docids_chunk: facet_exists_docids_reader, }) } /// Represent what a document field contains. enum FilterableValues { + /// Corresponds to the JSON `null` value. Null, + /// Corresponds to either an empty string `""`, an empty array `[]`, or an empty object `{}`. + Empty, /// Represents all the numbers and strings values found in this document field.
- Values { - numbers: Vec, - strings: Vec<(String, String)>, - }, + Values { numbers: Vec, strings: Vec<(String, String)> }, } fn extract_facet_values(value: &Value) -> FilterableValues { @@ -192,6 +210,9 @@ fn extract_facet_values(value: &Value) -> FilterableValues { match value { Value::Null => FilterableValues::Null, + Value::String(s) if s.is_empty() => FilterableValues::Empty, + Value::Array(a) if a.is_empty() => FilterableValues::Empty, + Value::Object(o) if o.is_empty() => FilterableValues::Empty, otherwise => { let mut numbers = Vec::new(); let mut strings = Vec::new(); diff --git a/milli/src/update/index_documents/extract/mod.rs b/milli/src/update/index_documents/extract/mod.rs index 4a5c9b64c..641a8a210 100644 --- a/milli/src/update/index_documents/extract/mod.rs +++ b/milli/src/update/index_documents/extract/mod.rs @@ -55,22 +55,23 @@ pub(crate) fn data_from_obkv_documents( .collect::>()?; #[allow(clippy::type_complexity)] - let result: Result<(Vec<_>, (Vec<_>, (Vec<_>, (Vec<_>, Vec<_>))))> = flattened_obkv_chunks - .par_bridge() - .map(|flattened_obkv_chunks| { - send_and_extract_flattened_documents_data( - flattened_obkv_chunks, - indexer, - lmdb_writer_sx.clone(), - &searchable_fields, - &faceted_fields, - primary_key_id, - geo_fields_ids, - &stop_words, - max_positions_per_attributes, - ) - }) - .collect(); + let result: Result<(Vec<_>, (Vec<_>, (Vec<_>, (Vec<_>, (Vec<_>, Vec<_>)))))> = + flattened_obkv_chunks + .par_bridge() + .map(|flattened_obkv_chunks| { + send_and_extract_flattened_documents_data( + flattened_obkv_chunks, + indexer, + lmdb_writer_sx.clone(), + &searchable_fields, + &faceted_fields, + primary_key_id, + geo_fields_ids, + &stop_words, + max_positions_per_attributes, + ) + }) + .collect(); let ( docid_word_positions_chunks, @@ -78,7 +79,10 @@ pub(crate) fn data_from_obkv_documents( docid_fid_facet_numbers_chunks, ( docid_fid_facet_strings_chunks, - (facet_is_null_docids_chunks, facet_exists_docids_chunks), + ( + 
facet_is_null_docids_chunks, + (facet_is_empty_docids_chunks, facet_exists_docids_chunks), + ), ), ), ) = result?; @@ -115,6 +119,22 @@ pub(crate) fn data_from_obkv_documents( }); } + // merge facet_is_empty_docids and send them as a typed chunk + { + let lmdb_writer_sx = lmdb_writer_sx.clone(); + rayon::spawn(move || { + debug!("merge {} database", "facet-id-is-empty-docids"); + match facet_is_empty_docids_chunks.merge(merge_cbo_roaring_bitmaps, &indexer) { + Ok(reader) => { + let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsEmptyDocids(reader))); + } + Err(e) => { + let _ = lmdb_writer_sx.send(Err(e)); + } + } + }); + } + spawn_extraction_task::<_, _, Vec>>( docid_word_positions_chunks.clone(), indexer, @@ -254,7 +274,10 @@ fn send_and_extract_flattened_documents_data( grenad::Reader, ( grenad::Reader, - (grenad::Reader, (grenad::Reader, grenad::Reader)), + ( + grenad::Reader, + (grenad::Reader, (grenad::Reader, grenad::Reader)), + ), ), )> { let flattened_documents_chunk = @@ -304,6 +327,7 @@ fn send_and_extract_flattened_documents_data( docid_fid_facet_numbers_chunk, docid_fid_facet_strings_chunk, fid_facet_is_null_docids_chunk, + fid_facet_is_empty_docids_chunk, fid_facet_exists_docids_chunk, } = extract_fid_docid_facet_values( flattened_documents_chunk.clone(), @@ -331,7 +355,10 @@ fn send_and_extract_flattened_documents_data( docid_fid_facet_numbers_chunk, ( docid_fid_facet_strings_chunk, - (fid_facet_is_null_docids_chunk, fid_facet_exists_docids_chunk), + ( + fid_facet_is_null_docids_chunk, + (fid_facet_is_empty_docids_chunk, fid_facet_exists_docids_chunk), + ), ), )) }, diff --git a/milli/src/update/index_documents/typed_chunk.rs b/milli/src/update/index_documents/typed_chunk.rs index 79f2e2c55..e1fc01ca9 100644 --- a/milli/src/update/index_documents/typed_chunk.rs +++ b/milli/src/update/index_documents/typed_chunk.rs @@ -40,6 +40,7 @@ pub(crate) enum TypedChunk { FieldIdFacetNumberDocids(grenad::Reader), FieldIdFacetExistsDocids(grenad::Reader), 
FieldIdFacetIsNullDocids(grenad::Reader), + FieldIdFacetIsEmptyDocids(grenad::Reader), GeoPoints(grenad::Reader), ScriptLanguageDocids(HashMap<(Script, Language), RoaringBitmap>), } @@ -173,6 +174,17 @@ pub(crate) fn write_typed_chunk_into_index( )?; is_merged_database = true; } + TypedChunk::FieldIdFacetIsEmptyDocids(facet_id_is_empty_docids) => { + append_entries_into_database( + facet_id_is_empty_docids, + &index.facet_id_is_empty_docids, + wtxn, + index_is_empty, + |value, _buffer| Ok(value), + merge_cbo_roaring_bitmaps, + )?; + is_merged_database = true; + } TypedChunk::WordPairProximityDocids(word_pair_proximity_docids_iter) => { append_entries_into_database( word_pair_proximity_docids_iter, From d5881519cb5b364b42f2b2780cea48752c1ec79c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 15 Mar 2023 11:01:51 +0100 Subject: [PATCH 16/56] Make the json flattener return the original values --- flatten-serde-json/src/lib.rs | 64 ++++++++++++++++++++++++++--------- 1 file changed, 48 insertions(+), 16 deletions(-) diff --git a/flatten-serde-json/src/lib.rs b/flatten-serde-json/src/lib.rs index e1b2b20c7..b2e36d5b3 100644 --- a/flatten-serde-json/src/lib.rs +++ b/flatten-serde-json/src/lib.rs @@ -3,45 +3,45 @@ use serde_json::{Map, Value}; pub fn flatten(json: &Map) -> Map { - let mut obj = Map::new(); - let mut all_keys = vec![]; - insert_object(&mut obj, None, json, &mut all_keys); - for key in all_keys { - obj.entry(key).or_insert(Value::Array(vec![])); + let mut obj = json.clone(); + let mut all_entries = vec![]; + insert_object(&mut obj, None, json, &mut all_entries); + for (key, old_val) in all_entries { + obj.entry(key).or_insert(old_val.clone()); } obj } -fn insert_object( +fn insert_object<'a>( base_json: &mut Map, base_key: Option<&str>, - object: &Map, - all_keys: &mut Vec, + object: &'a Map, + all_entries: &mut Vec<(String, &'a Value)>, ) { for (key, value) in object { let new_key = base_key.map_or_else(|| key.clone(), 
|base_key| format!("{base_key}.{key}")); - all_keys.push(new_key.clone()); + all_entries.push((new_key.clone(), value)); if let Some(array) = value.as_array() { - insert_array(base_json, &new_key, array, all_keys); + insert_array(base_json, &new_key, array, all_entries); } else if let Some(object) = value.as_object() { - insert_object(base_json, Some(&new_key), object, all_keys); + insert_object(base_json, Some(&new_key), object, all_entries); } else { insert_value(base_json, &new_key, value.clone()); } } } -fn insert_array( +fn insert_array<'a>( base_json: &mut Map, base_key: &str, - array: &Vec, - all_keys: &mut Vec, + array: &'a Vec, + all_entries: &mut Vec<(String, &'a Value)>, ) { for value in array { if let Some(object) = value.as_object() { - insert_object(base_json, Some(base_key), object, all_keys); + insert_object(base_json, Some(base_key), object, all_entries); } else if let Some(sub_array) = value.as_array() { - insert_array(base_json, base_key, sub_array, all_keys); + insert_array(base_json, base_key, sub_array, all_entries); } else { insert_value(base_json, base_key, value.clone()); } @@ -302,4 +302,36 @@ mod tests { .unwrap() ); } + + #[test] + fn flatten_nested_values_keep_original_values() { + let mut base: Value = json!({ + "tags": { + "t1": "v1" + }, + "prices": { + "p1": [null] + } + }); + let json = std::mem::take(base.as_object_mut().unwrap()); + let flat = flatten(&json); + + println!("{}", serde_json::to_string_pretty(&flat).unwrap()); + + assert_eq!( + &flat, + json!({ + "tags": { + "t1": "v1" + }, + "tags.t1": "v1", + "prices": { + "p1": [null] + }, + "prices.p1": [null] + }) + .as_object() + .unwrap() + ); + } } From 72123c458b91fcf349d9b186bedfa05fb165934a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 15 Mar 2023 11:20:34 +0100 Subject: [PATCH 17/56] Fix the tests to make flattening work --- flatten-serde-json/src/lib.rs | 68 +++++++++++++++++++++++++---------- 1 file changed, 49 insertions(+), 19 
deletions(-) diff --git a/flatten-serde-json/src/lib.rs b/flatten-serde-json/src/lib.rs index b2e36d5b3..c3346e15e 100644 --- a/flatten-serde-json/src/lib.rs +++ b/flatten-serde-json/src/lib.rs @@ -3,7 +3,7 @@ use serde_json::{Map, Value}; pub fn flatten(json: &Map) -> Map { - let mut obj = json.clone(); + let mut obj = Map::new(); let mut all_entries = vec![]; insert_object(&mut obj, None, json, &mut all_entries); for (key, old_val) in all_entries { @@ -26,7 +26,7 @@ fn insert_object<'a>( } else if let Some(object) = value.as_object() { insert_object(base_json, Some(&new_key), object, all_entries); } else { - insert_value(base_json, &new_key, value.clone()); + insert_value(base_json, &new_key, value.clone(), false); } } } @@ -43,12 +43,17 @@ fn insert_array<'a>( } else if let Some(sub_array) = value.as_array() { insert_array(base_json, base_key, sub_array, all_entries); } else { - insert_value(base_json, base_key, value.clone()); + insert_value(base_json, base_key, value.clone(), true); } } } -fn insert_value(base_json: &mut Map, key: &str, to_insert: Value) { +fn insert_value( + base_json: &mut Map, + key: &str, + to_insert: Value, + came_from_array: bool, +) { debug_assert!(!to_insert.is_object()); debug_assert!(!to_insert.is_array()); @@ -63,6 +68,8 @@ fn insert_value(base_json: &mut Map, key: &str, to_insert: Value) base_json[key] = Value::Array(vec![value, to_insert]); } // if it does not exist we can push the value untouched + } else if came_from_array { + base_json.insert(key.to_string(), Value::Array(vec![to_insert])); } else { base_json.insert(key.to_string(), to_insert); } @@ -113,7 +120,11 @@ mod tests { assert_eq!( &flat, json!({ - "a": [], + "a": { + "b": "c", + "d": "e", + "f": "g" + }, "a.b": "c", "a.d": "e", "a.f": "g" @@ -164,7 +175,7 @@ mod tests { assert_eq!( &flat, json!({ - "a": 42, + "a": [42], "a.b": ["c", "d", "e"], }) .as_object() @@ -186,7 +197,7 @@ mod tests { assert_eq!( &flat, json!({ - "a": null, + "a": [null], "a.b": ["c", "d", "e"], 
}) .as_object() @@ -208,7 +219,9 @@ mod tests { assert_eq!( &flat, json!({ - "a": [], + "a": { + "b": "c" + }, "a.b": ["c", "d"], }) .as_object() @@ -234,7 +247,7 @@ mod tests { json!({ "a.b": ["c", "d", "f"], "a.c": "e", - "a": 35, + "a": [35], }) .as_object() .unwrap() @@ -310,8 +323,10 @@ mod tests { "t1": "v1" }, "prices": { - "p1": [null] - } + "p1": [null], + "p1000": {"tamo": {"le": {}}} + }, + "kiki": [[]] }); let json = std::mem::take(base.as_object_mut().unwrap()); let flat = flatten(&json); @@ -321,14 +336,29 @@ mod tests { assert_eq!( &flat, json!({ - "tags": { - "t1": "v1" - }, - "tags.t1": "v1", - "prices": { - "p1": [null] - }, - "prices.p1": [null] + "prices": { + "p1": [null], + "p1000": { + "tamo": { + "le": {} + } + } + }, + "prices.p1": [null], + "prices.p1000": { + "tamo": { + "le": {} + } + }, + "prices.p1000.tamo": { + "le": {} + }, + "prices.p1000.tamo.le": {}, + "tags": { + "t1": "v1" + }, + "tags.t1": "v1", + "kiki": [[]] }) .as_object() .unwrap() From 64571c8288b358096fff5bd992d2ee168516e4dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 15 Mar 2023 14:57:17 +0100 Subject: [PATCH 18/56] Improve the testing of the filters --- milli/src/snapshot_tests.rs | 8 +++ milli/src/update/index_documents/mod.rs | 77 ++++++++++++++++++++++++- milli/tests/search/filters.rs | 6 ++ milli/tests/search/mod.rs | 51 ++++++++-------- 4 files changed, 114 insertions(+), 28 deletions(-) diff --git a/milli/src/snapshot_tests.rs b/milli/src/snapshot_tests.rs index c6ea8f3dd..85d1bc626 100644 --- a/milli/src/snapshot_tests.rs +++ b/milli/src/snapshot_tests.rs @@ -276,6 +276,11 @@ pub fn snap_facet_id_is_null_docids(index: &Index) -> String { &format!("{facet_id:<3} {}", display_bitmap(&docids)) }) } +pub fn snap_facet_id_is_empty_docids(index: &Index) -> String { + make_db_snap_from_iter!(index, facet_id_is_empty_docids, |(facet_id, docids)| { + &format!("{facet_id:<3} {}", display_bitmap(&docids)) + }) +} pub fn 
snap_facet_id_string_docids(index: &Index) -> String { make_db_snap_from_iter!(index, facet_id_string_docids, |( FacetGroupKey { field_id, level, left_bound }, @@ -503,6 +508,9 @@ macro_rules! full_snap_of_db { ($index:ident, facet_id_is_null_docids) => {{ $crate::snapshot_tests::snap_facet_id_is_null_docids(&$index) }}; + ($index:ident, facet_id_is_empty_docids) => {{ + $crate::snapshot_tests::snap_facet_id_is_empty_docids(&$index) + }}; ($index:ident, documents_ids) => {{ $crate::snapshot_tests::snap_documents_ids(&$index) }}; diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs index 7b9bd7834..a0bf1400d 100644 --- a/milli/src/update/index_documents/mod.rs +++ b/milli/src/update/index_documents/mod.rs @@ -1766,6 +1766,10 @@ mod tests { "id": 0, "colour": null, }, + { + "id": 1, + "colour": [null], // must not be returned + }, { "id": 6, "colour": { @@ -1835,14 +1839,14 @@ mod tests { .get(&rtxn, &BEU16::new(colour_green_id)) .unwrap() .unwrap(); - assert_eq!(bitmap_colour_green.into_iter().collect::>(), vec![1]); + assert_eq!(bitmap_colour_green.into_iter().collect::>(), vec![2]); let bitmap_colour_blue = index .facet_id_is_null_docids .get(&rtxn, &BEU16::new(colour_blue_id)) .unwrap() .unwrap(); - assert_eq!(bitmap_colour_blue.into_iter().collect::>(), vec![2]); + assert_eq!(bitmap_colour_blue.into_iter().collect::>(), vec![3]); }; let faceted_fields = hashset!(S("colour")); @@ -1866,6 +1870,75 @@ mod tests { check_ok(&index); } + #[test] + fn index_documents_check_is_empty_database() { + let content = || { + documents!([ + {"id": 0, "tags": null }, + {"id": 1, "tags": [null] }, + {"id": 2, "tags": [] }, + {"id": 3, "tags": ["hello","world"] }, + {"id": 4, "tags": [""] }, + {"id": 5 }, + {"id": 6, "tags": {} }, + {"id": 7, "tags": {"green": "cool"} }, + {"id": 8, "tags": {"green": ""} }, + {"id": 9, "tags": "" }, + {"id": 10, "tags": { "green": null } }, + {"id": 11, "tags": { "green": { "blue": null } } }, + {"id": 12, 
"tags": { "green": { "blue": [] } } } + ]) + }; + + let check_ok = |index: &Index| { + let rtxn = index.read_txn().unwrap(); + let facets = index.faceted_fields(&rtxn).unwrap(); + assert_eq!(facets, hashset!(S("tags"), S("tags.green"), S("tags.green.blue"))); + + let tags_id = index.fields_ids_map(&rtxn).unwrap().id("tags").unwrap(); + let tags_green_id = index.fields_ids_map(&rtxn).unwrap().id("tags.green").unwrap(); + let tags_blue_id = index.fields_ids_map(&rtxn).unwrap().id("tags.green.blue").unwrap(); + + let bitmap_empty_tags = + index.facet_id_is_empty_docids.get(&rtxn, &BEU16::new(tags_id)).unwrap().unwrap(); + assert_eq!(bitmap_empty_tags.into_iter().collect::>(), vec![2, 6, 9]); + + let bitmap_tags_green = index + .facet_id_is_empty_docids + .get(&rtxn, &BEU16::new(tags_green_id)) + .unwrap() + .unwrap(); + assert_eq!(bitmap_tags_green.into_iter().collect::>(), vec![8]); + + let bitmap_tags_blue = index + .facet_id_is_empty_docids + .get(&rtxn, &BEU16::new(tags_blue_id)) + .unwrap() + .unwrap(); + assert_eq!(bitmap_tags_blue.into_iter().collect::>(), vec![12]); + }; + + let faceted_fields = hashset!(S("tags")); + + let index = TempIndex::new(); + index.add_documents(content()).unwrap(); + index + .update_settings(|settings| { + settings.set_filterable_fields(faceted_fields.clone()); + }) + .unwrap(); + check_ok(&index); + + let index = TempIndex::new(); + index + .update_settings(|settings| { + settings.set_filterable_fields(faceted_fields.clone()); + }) + .unwrap(); + index.add_documents(content()).unwrap(); + check_ok(&index); + } + #[test] fn primary_key_must_not_contain_floats() { let index = TempIndex::new_with_map_size(4096 * 100); diff --git a/milli/tests/search/filters.rs b/milli/tests/search/filters.rs index 57ad6a40b..db5a004e0 100644 --- a/milli/tests/search/filters.rs +++ b/milli/tests/search/filters.rs @@ -93,6 +93,12 @@ test_filter!(null_filter_1_not, vec![Right("opt1 IS NOT NULL")]); test_filter!(null_filter_1_not_alt, vec![Right("NOT opt1 
IS NULL")]); test_filter!(null_filter_1_double_not, vec![Right("NOT opt1 IS NOT NULL")]); +test_filter!(empty_filter_1, vec![Right("opt1 IS EMPTY")]); +test_filter!(empty_filter_2, vec![Right("opt1.opt2 IS EMPTY")]); +test_filter!(empty_filter_1_not, vec![Right("opt1 IS NOT EMPTY")]); +test_filter!(empty_filter_1_not_alt, vec![Right("NOT opt1 IS EMPTY")]); +test_filter!(empty_filter_1_double_not, vec![Right("NOT opt1 IS NOT EMPTY")]); + test_filter!(in_filter, vec![Right("tag_in IN[1, 2, 3, four, five]")]); test_filter!(not_in_filter, vec![Right("tag_in NOT IN[1, 2, 3, four, five]")]); test_filter!(not_not_in_filter, vec![Right("NOT tag_in NOT IN[1, 2, 3, four, five]")]); diff --git a/milli/tests/search/mod.rs b/milli/tests/search/mod.rs index 51852cced..23744c005 100644 --- a/milli/tests/search/mod.rs +++ b/milli/tests/search/mod.rs @@ -212,10 +212,22 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option { } else if matches!(filter, "opt1.opt2 IS NULL") { if document.opt1opt2.as_ref().map_or(false, |v| v.is_null()) { id = Some(document.id.clone()); - } else if let Some(opt1) = &document.opt1 { - if !opt1.is_null() { - id = contains_null_rec(opt1, "opt2").then(|| document.id.clone()); - } + } + } else if matches!(filter, "opt1 IS EMPTY" | "NOT opt1 IS NOT EMPTY") { + id = document + .opt1 + .as_ref() + .map_or(false, |v| is_empty_value(v)) + .then(|| document.id.clone()); + } else if matches!(filter, "NOT opt1 IS EMPTY" | "opt1 IS NOT EMPTY") { + id = document + .opt1 + .as_ref() + .map_or(true, |v| !is_empty_value(v)) + .then(|| document.id.clone()); + } else if matches!(filter, "opt1.opt2 IS EMPTY") { + if document.opt1opt2.as_ref().map_or(false, |v| is_empty_value(v)) { + id = Some(document.id.clone()); } } else if matches!( filter, @@ -230,6 +242,15 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option { id } +pub fn is_empty_value(v: &serde_json::Value) -> bool { + match v { + serde_json::Value::String(s) => s.is_empty(), + 
serde_json::Value::Array(a) => a.is_empty(), + serde_json::Value::Object(o) => o.is_empty(), + _ => false, + } +} + pub fn contains_key_rec(v: &serde_json::Value, key: &str) -> bool { match v { serde_json::Value::Array(v) => { @@ -252,28 +273,6 @@ pub fn contains_key_rec(v: &serde_json::Value, key: &str) -> bool { } } -pub fn contains_null_rec(v: &serde_json::Value, key: &str) -> bool { - match v { - serde_json::Value::Object(v) => { - for (k, v) in v.iter() { - if k == key && v.is_null() || contains_null_rec(v, key) { - return true; - } - } - false - } - serde_json::Value::Array(v) => { - for v in v.iter() { - if contains_null_rec(v, key) { - return true; - } - } - false - } - _ => false, - } -} - pub fn expected_filtered_ids(filters: Vec, &str>>) -> HashSet { let dataset: Vec = serde_json::Deserializer::from_str(CONTENT).into_iter().map(|r| r.unwrap()).collect(); From 1a9c58a7abe9820a716b49d5f869e9bd4162b8db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 15 Mar 2023 16:56:44 +0100 Subject: [PATCH 19/56] Fix a bug with the new flattening rules --- .../extract/extract_docid_word_positions.rs | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/milli/src/update/index_documents/extract/extract_docid_word_positions.rs b/milli/src/update/index_documents/extract/extract_docid_word_positions.rs index 2d51fcc1a..ac5148363 100644 --- a/milli/src/update/index_documents/extract/extract_docid_word_positions.rs +++ b/milli/src/update/index_documents/extract/extract_docid_word_positions.rs @@ -107,7 +107,7 @@ fn json_to_string<'a>(value: &'a Value, buffer: &'a mut String) -> Option<&'a st fn inner(value: &Value, output: &mut String) -> bool { use std::fmt::Write; match value { - Value::Null => false, + Value::Null | Value::Object(_) => false, Value::Bool(boolean) => write!(output, "{}", boolean).is_ok(), Value::Number(number) => write!(output, "{}", number).is_ok(), Value::String(string) => write!(output, "{}", 
string).is_ok(), @@ -122,23 +122,6 @@ fn json_to_string<'a>(value: &'a Value, buffer: &'a mut String) -> Option<&'a st // check that at least one value was written count != 0 } - Value::Object(object) => { - let mut buffer = String::new(); - let mut count = 0; - for (key, value) in object { - buffer.clear(); - let _ = write!(&mut buffer, "{}: ", key); - if inner(value, &mut buffer) { - buffer.push_str(". "); - // We write the "key: value. " pair only when - // we are sure that the value can be written. - output.push_str(&buffer); - count += 1; - } - } - // check that at least one value was written - count != 0 - } } } From cf34d1c95f33d7ec6adeb1180901698e4d6ea916 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Wed, 15 Mar 2023 16:57:09 +0100 Subject: [PATCH 20/56] Fix a test that forget to match a Null value --- milli/tests/search/mod.rs | 34 ++++++++++++++++++++++++++++------ 1 file changed, 28 insertions(+), 6 deletions(-) diff --git a/milli/tests/search/mod.rs b/milli/tests/search/mod.rs index 23744c005..7f072ef95 100644 --- a/milli/tests/search/mod.rs +++ b/milli/tests/search/mod.rs @@ -212,13 +212,13 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option { } else if matches!(filter, "opt1.opt2 IS NULL") { if document.opt1opt2.as_ref().map_or(false, |v| v.is_null()) { id = Some(document.id.clone()); + } else if let Some(opt1) = &document.opt1 { + if !opt1.is_null() { + id = contains_null_rec(opt1, "opt2").then(|| document.id.clone()); + } } } else if matches!(filter, "opt1 IS EMPTY" | "NOT opt1 IS NOT EMPTY") { - id = document - .opt1 - .as_ref() - .map_or(false, |v| is_empty_value(v)) - .then(|| document.id.clone()); + id = document.opt1.as_ref().map_or(false, is_empty_value).then(|| document.id.clone()); } else if matches!(filter, "NOT opt1 IS EMPTY" | "opt1 IS NOT EMPTY") { id = document .opt1 @@ -226,7 +226,7 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option { .map_or(true, |v| !is_empty_value(v)) 
.then(|| document.id.clone()); } else if matches!(filter, "opt1.opt2 IS EMPTY") { - if document.opt1opt2.as_ref().map_or(false, |v| is_empty_value(v)) { + if document.opt1opt2.as_ref().map_or(false, is_empty_value) { id = Some(document.id.clone()); } } else if matches!( @@ -273,6 +273,28 @@ pub fn contains_key_rec(v: &serde_json::Value, key: &str) -> bool { } } +pub fn contains_null_rec(v: &serde_json::Value, key: &str) -> bool { + match v { + serde_json::Value::Object(v) => { + for (k, v) in v.iter() { + if k == key && v.is_null() || contains_null_rec(v, key) { + return true; + } + } + false + } + serde_json::Value::Array(v) => { + for v in v.iter() { + if contains_null_rec(v, key) { + return true; + } + } + false + } + _ => false, + } +} + pub fn expected_filtered_ids(filters: Vec, &str>>) -> HashSet { let dataset: Vec = serde_json::Deserializer::from_str(CONTENT).into_iter().map(|r| r.unwrap()).collect(); From a8531053a0083ba9673d8470a935a70407fafccb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Thu, 16 Mar 2023 11:09:20 +0100 Subject: [PATCH 21/56] Make sure the parser reject invalid syntax --- filter-parser/src/lib.rs | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/filter-parser/src/lib.rs b/filter-parser/src/lib.rs index 69eb6700f..640009983 100644 --- a/filter-parser/src/lib.rs +++ b/filter-parser/src/lib.rs @@ -745,6 +745,39 @@ pub mod tests { Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes. 5:7 NOT OR EXISTS AND EXISTS NOT EXISTS "###); + + insta::assert_display_snapshot!(p(r#"value NULL"#), @r###" + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`. 
+ 1:11 value NULL + "###); + insta::assert_display_snapshot!(p(r#"value NOT NULL"#), @r###" + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`. + 1:15 value NOT NULL + "###); + insta::assert_display_snapshot!(p(r#"value EMPTY"#), @r###" + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`. + 1:12 value EMPTY + "###); + insta::assert_display_snapshot!(p(r#"value NOT EMPTY"#), @r###" + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`. + 1:16 value NOT EMPTY + "###); + insta::assert_display_snapshot!(p(r#"value IS"#), @r###" + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS`. + 1:9 value IS + "###); + insta::assert_display_snapshot!(p(r#"value IS NOT"#), @r###" + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`. + 1:13 value IS NOT + "###); + insta::assert_display_snapshot!(p(r#"value IS EXISTS"#), @r###" + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`. 
+ 1:16 value IS EXISTS + "###); + insta::assert_display_snapshot!(p(r#"value IS NOT EXISTS"#), @r###" + Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`. + 1:20 value IS NOT EXISTS + "###); } #[test] From a94e78ffb051193ece752a9dd19858a05922f706 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Wed, 12 Apr 2023 10:53:00 +0200 Subject: [PATCH 22/56] Disable autobatching of additions and deletions --- index-scheduler/src/autobatcher.rs | 103 +++++++++-------------------- index-scheduler/src/lib.rs | 99 --------------------------- 2 files changed, 33 insertions(+), 169 deletions(-) diff --git a/index-scheduler/src/autobatcher.rs b/index-scheduler/src/autobatcher.rs index 31634237f..24625a7fb 100644 --- a/index-scheduler/src/autobatcher.rs +++ b/index-scheduler/src/autobatcher.rs @@ -311,18 +311,9 @@ impl BatchKind { }) } ( - BatchKind::DocumentOperation { method, allow_index_creation, primary_key, mut operation_ids }, + this @ BatchKind::DocumentOperation { .. 
}, K::DocumentDeletion, - ) => { - operation_ids.push(id); - - Continue(BatchKind::DocumentOperation { - method, - allow_index_creation, - primary_key, - operation_ids, - }) - } + ) => Break(this), // but we can't autobatch documents if it's not the same kind // this match branch MUST be AFTER the previous one ( @@ -345,35 +336,7 @@ impl BatchKind { deletion_ids.push(id); Continue(BatchKind::DocumentClear { ids: deletion_ids }) } - // we can autobatch the deletion and import if the index already exists - ( - BatchKind::DocumentDeletion { mut deletion_ids }, - K::DocumentImport { method, allow_index_creation, primary_key } - ) if index_already_exists => { - deletion_ids.push(id); - - Continue(BatchKind::DocumentOperation { - method, - allow_index_creation, - primary_key, - operation_ids: deletion_ids, - }) - } - // we can autobatch the deletion and import if both can't create an index - ( - BatchKind::DocumentDeletion { mut deletion_ids }, - K::DocumentImport { method, allow_index_creation, primary_key } - ) if !allow_index_creation => { - deletion_ids.push(id); - - Continue(BatchKind::DocumentOperation { - method, - allow_index_creation, - primary_key, - operation_ids: deletion_ids, - }) - } - // we can't autobatch a deletion and an import if the index does not exists but would be created by an addition + // we can't autobatch a deletion and an import ( this @ BatchKind::DocumentDeletion { .. }, K::DocumentImport { .. 
} @@ -674,36 +637,36 @@ mod tests { debug_snapshot!(autobatch_from(false,None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))"); debug_snapshot!(autobatch_from(false,None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))"); - // We can autobatch document addition with document deletion - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###); - debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), 
@r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))"); - debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###); - debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###); - debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), 
@r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - // And the other way around - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, 
Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))"); - debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); - debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###); + // We can't autobatch document addition with document deletion + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(true, None, 
[doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###); + debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###); + debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))"); + debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, None), 
doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###); + debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###); + debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###); + // we also can't do it the other way around + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), 
doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); + debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))"); } #[test] diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index b402985e3..0f82fb47d 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -1869,105 +1869,6 @@ mod tests { snapshot!(snapshot_index_scheduler(&index_scheduler), name: "both_task_succeeded"); } - #[test] - fn document_addition_and_document_deletion() { - let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); - - let content = r#"[ - { "id": 1, "doggo": "jean bob" }, - { "id": 2, "catto": "jorts" }, - { "id": 3, "doggo": "bork" } - ]"#; - - let (uuid, mut 
file) = index_scheduler.create_update_file_with_uuid(0).unwrap(); - let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap(); - file.persist().unwrap(); - index_scheduler - .register(KindWithContent::DocumentAdditionOrUpdate { - index_uid: S("doggos"), - primary_key: Some(S("id")), - method: ReplaceDocuments, - content_file: uuid, - documents_count, - allow_index_creation: true, - }) - .unwrap(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); - index_scheduler - .register(KindWithContent::DocumentDeletion { - index_uid: S("doggos"), - documents_ids: vec![S("1"), S("2")], - }) - .unwrap(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); - - handle.advance_one_successful_batch(); // The addition AND deletion should've been batched together - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_processing_the_batch"); - - let index = index_scheduler.index("doggos").unwrap(); - let rtxn = index.read_txn().unwrap(); - let field_ids_map = index.fields_ids_map(&rtxn).unwrap(); - let field_ids = field_ids_map.ids().collect::>(); - let documents = index - .all_documents(&rtxn) - .unwrap() - .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap()) - .collect::>(); - snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents"); - } - - #[test] - fn document_deletion_and_document_addition() { - let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); - index_scheduler - .register(KindWithContent::DocumentDeletion { - index_uid: S("doggos"), - documents_ids: vec![S("1"), S("2")], - }) - .unwrap(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); - - let content = r#"[ - { "id": 1, "doggo": "jean bob" }, - { "id": 2, "catto": "jorts" }, - { "id": 3, "doggo": "bork" } - ]"#; - - let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap(); 
- let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap(); - file.persist().unwrap(); - index_scheduler - .register(KindWithContent::DocumentAdditionOrUpdate { - index_uid: S("doggos"), - primary_key: Some(S("id")), - method: ReplaceDocuments, - content_file: uuid, - documents_count, - allow_index_creation: true, - }) - .unwrap(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); - - // The deletion should have failed because it can't create an index - handle.advance_one_failed_batch(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_failing_the_deletion"); - - // The addition should works - handle.advance_one_successful_batch(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_last_successful_addition"); - - let index = index_scheduler.index("doggos").unwrap(); - let rtxn = index.read_txn().unwrap(); - let field_ids_map = index.fields_ids_map(&rtxn).unwrap(); - let field_ids = field_ids_map.ids().collect::>(); - let documents = index - .all_documents(&rtxn) - .unwrap() - .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap()) - .collect::>(); - snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents"); - } - #[test] fn do_not_batch_task_of_different_indexes() { let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); From a5f44a5ceb3ca143e1c4f414017b09c2d5cb9bb7 Mon Sep 17 00:00:00 2001 From: gui machiavelli Date: Wed, 12 Apr 2023 16:27:04 +0200 Subject: [PATCH 23/56] Update references to new docs website With the launch of the new website, we need to update the README so it references the correct URLs. 
Two minor details: - we have removed the contact page from the documentation (it had the same links present in this readme and on the community section of the landing page) - we have recently separated filtering and faceted search into two separate articles --- README.md | 43 +++++++++++++++++++++---------------------- 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/README.md b/README.md index b97a10e7d..679097e70 100644 --- a/README.md +++ b/README.md @@ -7,8 +7,8 @@ Website | Roadmap | Blog | - Documentation | - FAQ | + Documentation | + FAQ | Discord @@ -36,27 +36,27 @@ Meilisearch helps you shape a delightful search experience in a snap, offering f ## ✨ Features - **Search-as-you-type:** find search results in less than 50 milliseconds -- **[Typo tolerance](https://docs.meilisearch.com/learn/getting_started/customizing_relevancy.html#typo-tolerance):** get relevant matches even when queries contain typos and misspellings -- **[Filtering and faceted search](https://docs.meilisearch.com/learn/advanced/filtering_and_faceted_search.html):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code -- **[Sorting](https://docs.meilisearch.com/learn/advanced/sorting.html):** sort results based on price, date, or pretty much anything else your users need -- **[Synonym support](https://docs.meilisearch.com/learn/getting_started/customizing_relevancy.html#synonyms):** configure synonyms to include more relevant content in your search results -- **[Geosearch](https://docs.meilisearch.com/learn/advanced/geosearch.html):** filter and sort documents based on geographic data -- **[Extensive language support](https://docs.meilisearch.com/learn/what_is_meilisearch/language.html):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet -- **[Security management](https://docs.meilisearch.com/learn/security/master_api_keys.html):** 
control which users can access what data with API keys that allow fine-grained permissions handling -- **[Multi-Tenancy](https://docs.meilisearch.com/learn/security/tenant_tokens.html):** personalize search results for any number of application tenants +- **[Typo tolerance](https://meilisearch.com/docs/learn/getting_started/customizing_relevancy#typo-tolerance):** get relevant matches even when queries contain typos and misspellings +- **[Filtering](https://meilisearch.com/docs/learn/advanced/filtering) and [faceted search](https://meilisearch.com/docs/learn/advanced/faceted_search):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code +- **[Sorting](https://meilisearch.com/docs/learn/advanced/sorting):** sort results based on price, date, or pretty much anything else your users need +- **[Synonym support](https://meilisearch.com/docs/learn/getting_started/customizing_relevancy#synonyms):** configure synonyms to include more relevant content in your search results +- **[Geosearch](https://meilisearch.com/docs/learn/advanced/geosearch):** filter and sort documents based on geographic data +- **[Extensive language support](https://meilisearch.com/docs/learn/what_is_meilisearch/language):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet +- **[Security management](https://meilisearch.com/docs/learn/security/master_api_keys):** control which users can access what data with API keys that allow fine-grained permissions handling +- **[Multi-Tenancy](https://meilisearch.com/docs/learn/security/tenant_tokens):** personalize search results for any number of application tenants - **Highly Customizable:** customize Meilisearch to your specific needs or use our out-of-the-box and hassle-free presets -- **[RESTful API](https://docs.meilisearch.com/reference/api/overview.html):** integrate Meilisearch in your technical stack with our 
plugins and SDKs +- **[RESTful API](https://meilisearch.com/docs/reference/api/overview):** integrate Meilisearch in your technical stack with our plugins and SDKs - **Easy to install, deploy, and maintain** ## 📖 Documentation -You can consult Meilisearch's documentation at [https://docs.meilisearch.com](https://docs.meilisearch.com/). +You can consult Meilisearch's documentation at [https://docs.meilisearch.com](https://meilisearch.com/docs/). ## 🚀 Getting started -For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://docs.meilisearch.com/learn/getting_started/quick_start.html) guide. +For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://meilisearch.com/docs/learn/getting_started/quick_start) guide. -You may also want to check out [Meilisearch 101](https://docs.meilisearch.com/learn/getting_started/filtering_and_sorting.html) for an introduction to some of Meilisearch's most popular features. +You may also want to check out [Meilisearch 101](https://meilisearch.com/docs/learn/getting_started/filtering_and_sorting) for an introduction to some of Meilisearch's most popular features. ## ☁️ Meilisearch cloud @@ -66,25 +66,25 @@ Let us manage your infrastructure so you can focus on integrating a great search Install one of our SDKs in your project for seamless integration between Meilisearch and your favorite language or framework! -Take a look at the complete [Meilisearch integration list](https://docs.meilisearch.com/learn/what_is_meilisearch/sdks.html). +Take a look at the complete [Meilisearch integration list](https://meilisearch.com/docs/learn/what_is_meilisearch/sdks). 
-[![Logos belonging to different languages and frameworks supported by Meilisearch, including React, Ruby on Rails, Go, Rust, and PHP](assets/integrations.png)](https://docs.meilisearch.com/learn/what_is_meilisearch/sdks.html) +[![Logos belonging to different languages and frameworks supported by Meilisearch, including React, Ruby on Rails, Go, Rust, and PHP](assets/integrations.png)](https://meilisearch.com/docs/learn/what_is_meilisearch/sdks) ## ⚙️ Advanced usage -Experienced users will want to keep our [API Reference](https://docs.meilisearch.com/reference/api) close at hand. +Experienced users will want to keep our [API Reference](https://meilisearch.com/docs/reference/api) close at hand. -We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://docs.meilisearch.com/learn/advanced/filtering_and_faceted_search.html), [sorting](https://docs.meilisearch.com/learn/advanced/sorting.html), [geosearch](https://docs.meilisearch.com/learn/advanced/geosearch.html), [API keys](https://docs.meilisearch.com/learn/security/master_api_keys.html), and [tenant tokens](https://docs.meilisearch.com/learn/security/tenant_tokens.html). +We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://meilisearch.com/docs/learn/advanced/filtering), [sorting](https://meilisearch.com/docs/learn/advanced/sorting), [geosearch](https://meilisearch.com/docs/learn/advanced/geosearch), [API keys](https://meilisearch.com/docs/learn/security/master_api_keys), and [tenant tokens](https://meilisearch.com/docs/learn/security/tenant_tokens). -Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://docs.meilisearch.com/learn/core_concepts/documents.html) and [indexes](https://docs.meilisearch.com/learn/core_concepts/indexes.html). 
+Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://meilisearch.com/docs/learn/core_concepts/documents) and [indexes](https://meilisearch.com/docs/learn/core_concepts/indexes). ## 📊 Telemetry -Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html#how-to-disable-data-collection) whenever you want. +Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://meilisearch.com/docs/learn/what_is_meilisearch/telemetry#how-to-disable-data-collection) whenever you want. To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data. -If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html) of our documentation. +If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://meilisearch.com/docs/learn/what_is_meilisearch/telemetry) of our documentation. ## 📫 Get in touch! @@ -97,7 +97,6 @@ Meilisearch is a search engine created by [Meili](https://www.welcometothejungle - For feature requests, please visit our [product repository](https://github.com/meilisearch/product/discussions) - Found a bug? Open an [issue](https://github.com/meilisearch/meilisearch/issues)! - Want to be part of our Discord community? [Join us!](https://discord.gg/meilisearch) -- For everything else, please check [this page listing some of the other places where you can find us](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html) Thank you for your support! 
From 5cfb066b0ac8bf26c5c95bc234895ad352944a6c Mon Sep 17 00:00:00 2001 From: gui machiavelli Date: Wed, 12 Apr 2023 16:29:20 +0200 Subject: [PATCH 24/56] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 679097e70..96a15e050 100644 --- a/README.md +++ b/README.md @@ -50,7 +50,7 @@ Meilisearch helps you shape a delightful search experience in a snap, offering f ## 📖 Documentation -You can consult Meilisearch's documentation at [https://docs.meilisearch.com](https://meilisearch.com/docs/). +You can consult Meilisearch's documentation at [https://meilisearch.com/docs](https://meilisearch.com/docs/). ## 🚀 Getting started From be69ab320dbf13a859ef07a00433089a809d08c4 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 6 Apr 2023 18:26:27 +0200 Subject: [PATCH 25/56] stops receiving tasks once the task queue is full --- index-scheduler/src/error.rs | 4 ++ index-scheduler/src/lib.rs | 7 +++ meilisearch/src/option.rs | 2 +- meilisearch/tests/tasks/mod.rs | 91 +++++++++++++++++++++++++++++++++- 4 files changed, 102 insertions(+), 2 deletions(-) diff --git a/index-scheduler/src/error.rs b/index-scheduler/src/error.rs index 3264bda7a..14623871b 100644 --- a/index-scheduler/src/error.rs +++ b/index-scheduler/src/error.rs @@ -61,6 +61,8 @@ pub enum Error { SwapDuplicateIndexesFound(Vec), #[error("Index `{0}` not found.")] SwapIndexNotFound(String), + #[error("No space left in database. 
Free some space by deleting tasks.")] + NoSpaceLeftInTaskQueue, #[error( "Indexes {} not found.", .0.iter().map(|s| format!("`{}`", s)).collect::>().join(", ") @@ -152,6 +154,8 @@ impl ErrorCode for Error { Error::TaskNotFound(_) => Code::TaskNotFound, Error::TaskDeletionWithEmptyQuery => Code::MissingTaskFilters, Error::TaskCancelationWithEmptyQuery => Code::MissingTaskFilters, + // TODO: not sure of the Code to use + Error::NoSpaceLeftInTaskQueue => Code::NoSpaceLeftOnDevice, Error::Dump(e) => e.error_code(), Error::Milli(e) => e.error_code(), Error::ProcessBatchPanicked => Code::Internal, diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index b402985e3..692888404 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -820,6 +820,13 @@ impl IndexScheduler { pub fn register(&self, kind: KindWithContent) -> Result { let mut wtxn = self.env.write_txn()?; + // if the task doesn't delete anything and 90% of the task queue is full, we must refuse to enqueue the incomming task + if !matches!(&kind, KindWithContent::TaskDeletion { tasks, .. } if !tasks.is_empty()) + && (self.env.real_disk_size()? * 100) / self.env.map_size()? as u64 > 90 + { + return Err(Error::NoSpaceLeftInTaskQueue); + } + let mut task = Task { uid: self.next_task_id(&wtxn)?, enqueued_at: OffsetDateTime::now_utc(), diff --git a/meilisearch/src/option.rs b/meilisearch/src/option.rs index 0c6457e7a..d419e0875 100644 --- a/meilisearch/src/option.rs +++ b/meilisearch/src/option.rs @@ -68,7 +68,7 @@ const DEFAULT_LOG_EVERY_N: usize = 100_000; // The actual size of the virtual address space is computed at startup to determine how many 2TiB indexes can be // opened simultaneously. 
pub const INDEX_SIZE: u64 = 2 * 1024 * 1024 * 1024 * 1024; // 2 TiB -pub const TASK_DB_SIZE: u64 = 10 * 1024 * 1024 * 1024; // 10 GiB +pub const TASK_DB_SIZE: u64 = 11 * 1024 * 1024 * 1024; // 11 GiB #[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)] #[serde(rename_all = "UPPERCASE")] diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index e9b5a2325..6124de4b7 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -1,11 +1,14 @@ mod errors; +use byte_unit::{Byte, ByteUnit}; use meili_snap::insta::assert_json_snapshot; +use meili_snap::{json_string, snapshot}; use serde_json::json; +use tempfile::TempDir; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; -use crate::common::Server; +use crate::common::{default_settings, Server}; #[actix_rt::test] async fn error_get_unexisting_task_status() { @@ -1000,3 +1003,89 @@ async fn test_summarized_dump_creation() { } "###); } + +#[actix_web::test] +async fn test_task_queue_is_full() { + let dir = TempDir::new().unwrap(); + let mut options = default_settings(dir.path()); + options.max_task_db_size = Byte::from_unit(500.0, ByteUnit::B).unwrap(); + + let server = Server::new_with_options(options).await.unwrap(); + + // the first task should be enqueued without issue + let (result, code) = server.create_index(json!({ "uid": "doggo" })).await; + snapshot!(code, @"202 Accepted"); + snapshot!(json_string!(result, { ".enqueuedAt" => "[date]" }), @r###" + { + "taskUid": 0, + "indexUid": "doggo", + "status": "enqueued", + "type": "indexCreation", + "enqueuedAt": "[date]" + } + "###); + + loop { + let (res, _code) = server.create_index(json!({ "uid": "doggo" })).await; + if res["taskUid"] == json!(null) { + break; + } + } + + let (result, code) = server.create_index(json!({ "uid": "doggo" })).await; + snapshot!(code, @"422 Unprocessable Entity"); + snapshot!(json_string!(result), @r###" + { + "message": "No space left in database. 
Free some space by deleting tasks.", + "code": "no_space_left_on_device", + "type": "system", + "link": "https://docs.meilisearch.com/errors#no_space_left_on_device" + } + "###); + + // But we should still be able to register tasks deletion IF they delete something + let (result, code) = server.delete_tasks("uids=0").await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(result, { ".enqueuedAt" => "[date]", ".taskUid" => "uid" }), @r###" + { + "taskUid": "uid", + "indexUid": null, + "status": "enqueued", + "type": "taskDeletion", + "enqueuedAt": "[date]" + } + "###); + + // we're going to fill up the queue once again + loop { + let (res, _code) = server.create_index(json!({ "uid": "doggo" })).await; + if res["taskUid"] == json!(null) { + break; + } + } + + // But we should NOT be able to register this task because it doesn't match any tasks + let (result, code) = server.delete_tasks("uids=0").await; + snapshot!(code, @"422 Unprocessable Entity"); + snapshot!(json_string!(result), @r###" + { + "message": "No space left in database. 
Free some space by deleting tasks.", + "code": "no_space_left_on_device", + "type": "system", + "link": "https://docs.meilisearch.com/errors#no_space_left_on_device" + } + "###); + + // The deletion still works + let (result, code) = server.delete_tasks("uids=*").await; + snapshot!(code, @"200 OK"); + snapshot!(json_string!(result, { ".enqueuedAt" => "[date]", ".taskUid" => "uid" }), @r###" + { + "taskUid": "uid", + "indexUid": null, + "status": "enqueued", + "type": "taskDeletion", + "enqueuedAt": "[date]" + } + "###); +} From 9350a7b01739bdb10ba3e017705a35f449c48b8c Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 11 Apr 2023 16:30:56 +0200 Subject: [PATCH 26/56] improve the test and try to understand the issue happening on windows --- meilisearch/tests/tasks/mod.rs | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index 6124de4b7..a9ed5c00f 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -1026,10 +1026,16 @@ async fn test_task_queue_is_full() { "###); loop { - let (res, _code) = server.create_index(json!({ "uid": "doggo" })).await; - if res["taskUid"] == json!(null) { + let (res, code) = server.create_index(json!({ "uid": "doggo" })).await; + if code == 422 { break; } + if res["taskUid"] == json!(null) { + panic!( + "Encountered the strange case:\n{}", + serde_json::to_string_pretty(&res).unwrap() + ); + } } let (result, code) = server.create_index(json!({ "uid": "doggo" })).await; @@ -1058,10 +1064,16 @@ async fn test_task_queue_is_full() { // we're going to fill up the queue once again loop { - let (res, _code) = server.create_index(json!({ "uid": "doggo" })).await; - if res["taskUid"] == json!(null) { + let (res, code) = server.create_index(json!({ "uid": "doggo" })).await; + if code == 422 { break; } + if res["taskUid"] == json!(null) { + panic!( + "Encountered the strange case:\n{}", + 
serde_json::to_string_pretty(&res).unwrap() + ); + } } // But we should NOT be able to register this task because it doesn't match any tasks From b4fabce36dc4e918474b4cfd7314713d51b988fb Mon Sep 17 00:00:00 2001 From: Tamo Date: Wed, 12 Apr 2023 18:46:24 +0200 Subject: [PATCH 27/56] update the error message + update the task db size to 20GiB with a limit at 50% --- index-scheduler/src/error.rs | 2 +- index-scheduler/src/lib.rs | 4 ++-- meilisearch/src/option.rs | 2 +- meilisearch/tests/tasks/mod.rs | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/index-scheduler/src/error.rs b/index-scheduler/src/error.rs index 14623871b..7b884e0a4 100644 --- a/index-scheduler/src/error.rs +++ b/index-scheduler/src/error.rs @@ -61,7 +61,7 @@ pub enum Error { SwapDuplicateIndexesFound(Vec), #[error("Index `{0}` not found.")] SwapIndexNotFound(String), - #[error("No space left in database. Free some space by deleting tasks.")] + #[error("Meilisearch cannot receive write operations because the limit of the task database has been reached. Please delete tasks to continue performing write operations.")] NoSpaceLeftInTaskQueue, #[error( "Indexes {} not found.", diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index 692888404..9c52b008d 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -820,9 +820,9 @@ impl IndexScheduler { pub fn register(&self, kind: KindWithContent) -> Result { let mut wtxn = self.env.write_txn()?; - // if the task doesn't delete anything and 90% of the task queue is full, we must refuse to enqueue the incomming task + // if the task doesn't delete anything and 50% of the task queue is full, we must refuse to enqueue the incomming task if !matches!(&kind, KindWithContent::TaskDeletion { tasks, .. } if !tasks.is_empty()) - && (self.env.real_disk_size()? * 100) / self.env.map_size()? as u64 > 90 + && (self.env.real_disk_size()? * 100) / self.env.map_size()? 
as u64 > 50 { return Err(Error::NoSpaceLeftInTaskQueue); } diff --git a/meilisearch/src/option.rs b/meilisearch/src/option.rs index d419e0875..563bc3496 100644 --- a/meilisearch/src/option.rs +++ b/meilisearch/src/option.rs @@ -68,7 +68,7 @@ const DEFAULT_LOG_EVERY_N: usize = 100_000; // The actual size of the virtual address space is computed at startup to determine how many 2TiB indexes can be // opened simultaneously. pub const INDEX_SIZE: u64 = 2 * 1024 * 1024 * 1024 * 1024; // 2 TiB -pub const TASK_DB_SIZE: u64 = 11 * 1024 * 1024 * 1024; // 11 GiB +pub const TASK_DB_SIZE: u64 = 20 * 1024 * 1024 * 1024; // 20 GiB #[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)] #[serde(rename_all = "UPPERCASE")] diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index a9ed5c00f..27e212f39 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -1042,7 +1042,7 @@ async fn test_task_queue_is_full() { snapshot!(code, @"422 Unprocessable Entity"); snapshot!(json_string!(result), @r###" { - "message": "No space left in database. Free some space by deleting tasks.", + "message": "Meilisearch cannot receive write operations because the limit of the task database has been reached. Please delete tasks to continue performing write operations.", "code": "no_space_left_on_device", "type": "system", "link": "https://docs.meilisearch.com/errors#no_space_left_on_device" @@ -1081,7 +1081,7 @@ async fn test_task_queue_is_full() { snapshot!(code, @"422 Unprocessable Entity"); snapshot!(json_string!(result), @r###" { - "message": "No space left in database. Free some space by deleting tasks.", + "message": "Meilisearch cannot receive write operations because the limit of the task database has been reached. 
Please delete tasks to continue performing write operations.", "code": "no_space_left_on_device", "type": "system", "link": "https://docs.meilisearch.com/errors#no_space_left_on_device" From b3f60ee8057f837a5de9107db42faa1cfd4fde17 Mon Sep 17 00:00:00 2001 From: Tamo Date: Thu, 13 Apr 2023 10:18:58 +0200 Subject: [PATCH 28/56] try to fix the ci --- meilisearch/tests/tasks/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index 27e212f39..88f83bb70 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -1064,7 +1064,7 @@ async fn test_task_queue_is_full() { // we're going to fill up the queue once again loop { - let (res, code) = server.create_index(json!({ "uid": "doggo" })).await; + let (res, code) = server.delete_tasks("uids=0").await; if code == 422 { break; } From cd45d21d6e92df4f0768c3a338447ceb57d30d50 Mon Sep 17 00:00:00 2001 From: dureuill Date: Thu, 13 Apr 2023 13:25:10 +0000 Subject: [PATCH 29/56] Update version for the next release (v1.1.1) in Cargo.toml --- Cargo.lock | 26 +++++++++++++------------- Cargo.toml | 2 +- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 853d1a896..f1ff389c0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -410,7 +410,7 @@ checksum = "b645a089122eccb6111b4f81cbc1a49f5900ac4666bb93ac027feaecf15607bf" [[package]] name = "benchmarks" -version = "1.1.0" +version = "1.1.1" dependencies = [ "anyhow", "bytes", @@ -1150,7 +1150,7 @@ dependencies = [ [[package]] name = "dump" -version = "1.1.0" +version = "1.1.1" dependencies = [ "anyhow", "big_s", @@ -1371,7 +1371,7 @@ dependencies = [ [[package]] name = "file-store" -version = "1.1.0" +version = "1.1.1" dependencies = [ "faux", "tempfile", @@ -1393,7 +1393,7 @@ dependencies = [ [[package]] name = "filter-parser" -version = "1.1.0" +version = "1.1.1" dependencies = [ "insta", "nom", @@ -1413,7 +1413,7 @@ dependencies = [ 
[[package]] name = "flatten-serde-json" -version = "1.1.0" +version = "1.1.1" dependencies = [ "criterion", "serde_json", @@ -1890,7 +1890,7 @@ dependencies = [ [[package]] name = "index-scheduler" -version = "1.1.0" +version = "1.1.1" dependencies = [ "anyhow", "big_s", @@ -2049,7 +2049,7 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "1.1.0" +version = "1.1.1" dependencies = [ "criterion", "serde_json", @@ -2445,7 +2445,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "meili-snap" -version = "1.1.0" +version = "1.1.1" dependencies = [ "insta", "md5", @@ -2454,7 +2454,7 @@ dependencies = [ [[package]] name = "meilisearch" -version = "1.1.0" +version = "1.1.1" dependencies = [ "actix-cors", "actix-http", @@ -2542,7 +2542,7 @@ dependencies = [ [[package]] name = "meilisearch-auth" -version = "1.1.0" +version = "1.1.1" dependencies = [ "base64 0.13.1", "enum-iterator", @@ -2561,7 +2561,7 @@ dependencies = [ [[package]] name = "meilisearch-types" -version = "1.1.0" +version = "1.1.1" dependencies = [ "actix-web", "anyhow", @@ -2615,7 +2615,7 @@ dependencies = [ [[package]] name = "milli" -version = "1.1.0" +version = "1.1.1" dependencies = [ "big_s", "bimap", @@ -2969,7 +2969,7 @@ checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" [[package]] name = "permissive-json-pointer" -version = "1.1.0" +version = "1.1.1" dependencies = [ "big_s", "serde_json", diff --git a/Cargo.toml b/Cargo.toml index b1f475410..c8d4dd1ef 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,7 +17,7 @@ members = [ ] [workspace.package] -version = "1.1.0" +version = "1.1.1" authors = ["Quentin de Quelen ", "Clément Renault "] description = "Meilisearch HTTP server" homepage = "https://meilisearch.com" From fd583501d7ad34258a81f35aa7b8f0c827293188 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 13 Apr 2023 17:07:44 +0200 Subject: [PATCH 30/56] Use non_free_pages_size instead of 
real_disk_size to check task db space taken --- index-scheduler/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index 3bde39040..b0ecef0f3 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -822,7 +822,7 @@ impl IndexScheduler { // if the task doesn't delete anything and 50% of the task queue is full, we must refuse to enqueue the incomming task if !matches!(&kind, KindWithContent::TaskDeletion { tasks, .. } if !tasks.is_empty()) - && (self.env.real_disk_size()? * 100) / self.env.map_size()? as u64 > 50 + && (self.env.non_free_pages_size()? * 100) / self.env.map_size()? as u64 > 50 { return Err(Error::NoSpaceLeftInTaskQueue); } From 066c6bd87582c2eb89cade3e6f9487acfe975d6b Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 13 Apr 2023 17:20:06 +0200 Subject: [PATCH 31/56] test task db full now checks that a task can be successfully added after deleting tasks --- meilisearch/tests/tasks/mod.rs | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index 88f83bb70..723c628bb 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -1050,7 +1050,7 @@ async fn test_task_queue_is_full() { "###); // But we should still be able to register tasks deletion IF they delete something - let (result, code) = server.delete_tasks("uids=0").await; + let (result, code) = server.delete_tasks("uids=*").await; snapshot!(code, @"200 OK"); snapshot!(json_string!(result, { ".enqueuedAt" => "[date]", ".taskUid" => "uid" }), @r###" { @@ -1062,6 +1062,19 @@ async fn test_task_queue_is_full() { } "###); + // But we should still be able to register tasks deletion IF they delete something + let (result, code) = server.create_index(json!({ "uid": "doggo" })).await; + snapshot!(code, @"202 Accepted"); + snapshot!(json_string!(result, { ".enqueuedAt" => "[date]", 
".taskUid" => "uid" }), @r###" + { + "taskUid": "uid", + "indexUid": "doggo", + "status": "enqueued", + "type": "indexCreation", + "enqueuedAt": "[date]" + } + "###); + // we're going to fill up the queue once again loop { let (res, code) = server.delete_tasks("uids=0").await; From 1e6cbcaf12a4f39be46b6b65f98c994574044bba Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 13 Apr 2023 17:27:12 +0200 Subject: [PATCH 32/56] Update test comment Co-authored-by: Tamo --- meilisearch/tests/tasks/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index 723c628bb..b608040da 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -1062,7 +1062,7 @@ async fn test_task_queue_is_full() { } "###); - // But we should still be able to register tasks deletion IF they delete something + // Now we should be able to register tasks again let (result, code) = server.create_index(json!({ "uid": "doggo" })).await; snapshot!(code, @"202 Accepted"); snapshot!(json_string!(result, { ".enqueuedAt" => "[date]", ".taskUid" => "uid" }), @r###" From c2f4b6ced09530ec969fec256a864c635a93f760 Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Thu, 13 Apr 2023 18:22:42 +0200 Subject: [PATCH 33/56] Test: await for the deletion task to complete before trying to add another task --- meilisearch/tests/tasks/mod.rs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/meilisearch/tests/tasks/mod.rs b/meilisearch/tests/tasks/mod.rs index b608040da..40093dc41 100644 --- a/meilisearch/tests/tasks/mod.rs +++ b/meilisearch/tests/tasks/mod.rs @@ -1062,6 +1062,9 @@ async fn test_task_queue_is_full() { } "###); + let result = server.wait_task(result["taskUid"].as_u64().unwrap()).await; + snapshot!(json_string!(result["status"]), @r###""succeeded""###); + // Now we should be able to register tasks again let (result, code) = server.create_index(json!({ "uid": "doggo" })).await; snapshot!(code, @"202 
Accepted"); From f0b4046c43f8861cec80a154b89323f842f8d894 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 13 Apr 2023 17:03:48 +0000 Subject: [PATCH 34/56] Bump h2 from 0.3.15 to 0.3.17 Bumps [h2](https://github.com/hyperium/h2) from 0.3.15 to 0.3.17. - [Release notes](https://github.com/hyperium/h2/releases) - [Changelog](https://github.com/hyperium/h2/blob/master/CHANGELOG.md) - [Commits](https://github.com/hyperium/h2/compare/v0.3.15...v0.3.17) --- updated-dependencies: - dependency-name: h2 dependency-type: indirect ... Signed-off-by: dependabot[bot] --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 853d1a896..a2f70ff4b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1668,9 +1668,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.15" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f9f29bc9dda355256b2916cf526ab02ce0aeaaaf2bad60d65ef3f12f11dd0f4" +checksum = "66b91535aa35fea1523ad1b86cb6b53c28e0ae566ba4a460f4457e936cad7c6f" dependencies = [ "bytes", "fnv", From dfd9c384aad6b60e12f6eb1634bf6899c1e502db Mon Sep 17 00:00:00 2001 From: inductor Date: Tue, 7 Mar 2023 18:08:08 +0900 Subject: [PATCH 35/56] use docker cache --- .dockerignore | 1 + .github/workflows/publish-docker-images.yml | 7 +++++++ Dockerfile | 9 ++++++--- 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/.dockerignore b/.dockerignore index 8c6bdbdeb..c87217b16 100644 --- a/.dockerignore +++ b/.dockerignore @@ -2,3 +2,4 @@ target Dockerfile .dockerignore .gitignore +.git \ No newline at end of file diff --git a/.github/workflows/publish-docker-images.yml b/.github/workflows/publish-docker-images.yml index 9ceeaaaa4..fa1f145da 100644 --- a/.github/workflows/publish-docker-images.yml +++ b/.github/workflows/publish-docker-images.yml @@ -58,9 +58,13 @@ jobs: - name: Set up QEMU uses: 
docker/setup-qemu-action@v2 + with: + platforms: linux/amd64,linux/arm64 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 + with: + platforms: linux/amd64,linux/arm64 - name: Login to Docker Hub uses: docker/login-action@v2 @@ -88,10 +92,13 @@ jobs: push: true platforms: linux/amd64,linux/arm64 tags: ${{ steps.meta.outputs.tags }} + builder: ${{ steps.buildx.outputs.name }} build-args: | COMMIT_SHA=${{ github.sha }} COMMIT_DATE=${{ steps.build-metadata.outputs.date }} GIT_TAG=${{ github.ref_name }} + cache-from: type=gha + cache-to: type=gha,mode=max # /!\ Don't touch this without checking with Cloud team - name: Send CI information to Cloud team diff --git a/Dockerfile b/Dockerfile index 70950f338..b828894ce 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,4 @@ +# syntax=docker/dockerfile:1.4 # Compile FROM rust:alpine3.16 AS compiler @@ -11,8 +12,10 @@ ARG GIT_TAG ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG} ENV RUSTFLAGS="-C target-feature=-crt-static" -COPY . . -RUN set -eux; \ +COPY --link . . +RUN --mount=type=cache,target=/usr/local/cargo/registry \ + --mount=type=cache,target=/meilisearch/target \ + set -eux; \ apkArch="$(apk --print-arch)"; \ if [ "$apkArch" = "aarch64" ]; then \ export JEMALLOC_SYS_WITH_LG_PAGE=16; \ @@ -30,7 +33,7 @@ RUN apk update --quiet \ # add meilisearch to the `/bin` so you can run it from anywhere and it's easy # to find. 
-COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch +COPY --from=compiler --link /meilisearch/target/release/meilisearch /bin/meilisearch # To stay compatible with the older version of the container (pre v0.27.0) we're # going to symlink the meilisearch binary in the path to `/meilisearch` RUN ln -s /bin/meilisearch /meilisearch From 3e4a35663870d73dd4b5c9770a26da1c9a9616bc Mon Sep 17 00:00:00 2001 From: inductor Date: Tue, 7 Mar 2023 18:08:58 +0900 Subject: [PATCH 36/56] EOF --- .dockerignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.dockerignore b/.dockerignore index c87217b16..51bea9b35 100644 --- a/.dockerignore +++ b/.dockerignore @@ -2,4 +2,4 @@ target Dockerfile .dockerignore .gitignore -.git \ No newline at end of file +.git From 85182497abcf8169e2e303253aee8ddfad9c7fea Mon Sep 17 00:00:00 2001 From: inductor Date: Tue, 18 Apr 2023 15:15:33 +0900 Subject: [PATCH 37/56] revert mount --- Dockerfile | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index b828894ce..0d7593f79 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,9 +13,7 @@ ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} ENV RUSTFLAGS="-C target-feature=-crt-static" COPY --link . . 
-RUN --mount=type=cache,target=/usr/local/cargo/registry \ - --mount=type=cache,target=/meilisearch/target \ - set -eux; \ +RUN set -eux; \ apkArch="$(apk --print-arch)"; \ if [ "$apkArch" = "aarch64" ]; then \ export JEMALLOC_SYS_WITH_LG_PAGE=16; \ From 11f47249578871c7f61f3b2fa9c2184c635d2f77 Mon Sep 17 00:00:00 2001 From: inductor Date: Tue, 18 Apr 2023 16:32:31 +0900 Subject: [PATCH 38/56] ignore all .git --- .dockerignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.dockerignore b/.dockerignore index 51bea9b35..e5278220d 100644 --- a/.dockerignore +++ b/.dockerignore @@ -2,4 +2,5 @@ target Dockerfile .dockerignore .gitignore -.git +**/.git + From 47b66e49b84a4cb4a082fbf6a1f44772a16c962c Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 11 Apr 2023 15:23:51 +0200 Subject: [PATCH 39/56] Upgrade the compatible versions of the dependencies --- Cargo.lock | 920 +++++++++++++++++++++-------------- benchmarks/Cargo.toml | 16 +- dump/Cargo.toml | 24 +- file-store/Cargo.toml | 8 +- filter-parser/Cargo.toml | 6 +- index-scheduler/Cargo.toml | 24 +- meili-snap/Cargo.toml | 4 +- meilisearch-auth/Cargo.toml | 14 +- meilisearch-types/Cargo.toml | 32 +- meilisearch/Cargo.toml | 106 ++-- milli/Cargo.toml | 34 +- 11 files changed, 692 insertions(+), 496 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a6cc41fe2..473891871 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -36,16 +36,16 @@ dependencies = [ [[package]] name = "actix-http" -version = "3.3.0" +version = "3.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0070905b2c4a98d184c4e81025253cb192aa8a73827553f38e9410801ceb35bb" +checksum = "c2079246596c18b4a33e274ae10c0e50613f4d32a4198e09c7b93771013fed74" dependencies = [ "actix-codec", "actix-rt", "actix-service", "actix-tls", "actix-utils", - "ahash", + "ahash 0.8.3", "base64 0.21.0", "bitflags", "brotli", @@ -59,7 +59,7 @@ dependencies = [ "http", "httparse", "httpdate", - "itoa 1.0.5", + "itoa", 
"language-tags", "local-channel", "mime", @@ -81,7 +81,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6" dependencies = [ "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -99,9 +99,9 @@ dependencies = [ [[package]] name = "actix-rt" -version = "2.7.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ea16c295198e958ef31930a6ef37d0fb64e9ca3b6116e6b93a8bdae96ee1000" +checksum = "15265b6b8e2347670eb363c47fc8c75208b4a4994b27192f345fcbe707804f3e" dependencies = [ "actix-macros", "futures-core", @@ -167,9 +167,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.3.0" +version = "4.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "464e0fddc668ede5f26ec1f9557a8d44eda948732f40c6b0ad79126930eb775f" +checksum = "cd3cb42f9566ab176e1ef0b8b3a896529062b4efc6be0123046095914c4c1c96" dependencies = [ "actix-codec", "actix-http", @@ -181,7 +181,7 @@ dependencies = [ "actix-tls", "actix-utils", "actix-web-codegen", - "ahash", + "ahash 0.7.6", "bytes", "bytestring", "cfg-if", @@ -191,7 +191,7 @@ dependencies = [ "futures-core", "futures-util", "http", - "itoa 1.0.5", + "itoa", "language-tags", "log", "mime", @@ -209,14 +209,14 @@ dependencies = [ [[package]] name = "actix-web-codegen" -version = "4.1.0" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa9362663c8643d67b2d5eafba49e4cb2c8a053a29ed00a0bea121f17c76b13" +checksum = "2262160a7ae29e3415554a3f1fc04c764b1540c116aa524683208078b7a75bc9" dependencies = [ "actix-router", "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -268,6 +268,18 @@ dependencies = [ "version_check", ] +[[package]] +name = "ahash" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" 
+dependencies = [ + "cfg-if", + "getrandom", + "once_cell", + "version_check", +] + [[package]] name = "aho-corasick" version = "0.7.20" @@ -299,10 +311,50 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] -name = "anyhow" -version = "1.0.68" +name = "anstream" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cb2f989d18dd141ab8ae82f64d1a8cdd37e0840f73a406896cf5e99502fab61" +checksum = "342258dd14006105c2b75ab1bd7543a03bdf0cfc94383303ac212a04939dff6f" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-wincon", + "concolor-override", + "concolor-query", + "is-terminal", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" + +[[package]] +name = "anstyle-parse" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7d1bb534e9efed14f3e5f44e7dd1a4f709384023a4165199a4241e18dff0116" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-wincon" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3127af6145b149f3287bb9a0d10ad9c5692dba8c53ad48285e5bec4063834fa" +dependencies = [ + "anstyle", + "windows-sys 0.45.0", +] + +[[package]] +name = "anyhow" +version = "1.0.70" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4" dependencies = [ "backtrace", ] @@ -319,34 +371,35 @@ dependencies = [ [[package]] name = "async-stream" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dad5c83079eae9969be7fadefe640a1c566901f05ff91ab221de4b6f68d9507e" +checksum = 
"cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" dependencies = [ "async-stream-impl", "futures-core", + "pin-project-lite", ] [[package]] name = "async-stream-impl" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" +checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.14", ] [[package]] name = "async-trait" -version = "0.1.61" +version = "0.1.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "705339e0e4a9690e2908d2b3d049d85682cf19fbd5782494498fbf7003a6a282" +checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.14", ] [[package]] @@ -435,9 +488,9 @@ checksum = "199edb7b90631283b10c2422e6a0bc8b7d987bf732995ba1de53b576c97e51a8" [[package]] name = "bimap" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc0455254eb5c6964c4545d8bac815e1a1be4f3afe0ae695ea539c12d728d44b" +checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" dependencies = [ "serde", ] @@ -465,9 +518,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "block-buffer" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" dependencies = [ "generic-array", ] @@ -485,9 +538,9 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "2.3.2" +version = "2.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ad2d4653bf5ca36ae797b1f4bb4dbddb60ce49ca4aed8a2ce4829f60425b80" 
+checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -495,21 +548,9 @@ dependencies = [ [[package]] name = "bstr" -version = "0.2.17" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3569f383e8f1598449f1a423e72e99569137b47740b1da11ef19af3d5c3223" -dependencies = [ - "lazy_static", - "memchr", - "regex-automata", - "serde", -] - -[[package]] -name = "bstr" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45ea9b00a7b3f2988e9a65ad3917e62123c38dba709b666506207be96d1790b" +checksum = "c3d4260bcc2e8fc9df1eac4919a720effeb63a3f0952f5bf4944adfa18897f09" dependencies = [ "memchr", "once_cell", @@ -525,9 +566,9 @@ checksum = "572f695136211188308f16ad2ca5c851a712c464060ae6974944458eb83880ba" [[package]] name = "byte-unit" -version = "4.0.18" +version = "4.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3348673602e04848647fffaa8e9a861e7b5d5cae6570727b41bde0f722514484" +checksum = "da78b32057b8fdfc352504708feeba7216dcd65a2c9ab02978cbd288d1279b6c" dependencies = [ "serde", "utf8-width", @@ -541,22 +582,22 @@ checksum = "2c676a478f63e9fa2dd5368a42f28bba0d6c560b775f38583c8bbaa7fcd67c9c" [[package]] name = "bytemuck" -version = "1.12.3" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aaa3a8d9a1ca92e282c96a32d6511b695d7d994d1d102ba85d279f9b2756947f" +checksum = "17febce684fd15d89027105661fec94afb475cb995fbc59d2865198446ba2eea" dependencies = [ "bytemuck_derive", ] [[package]] name = "bytemuck_derive" -version = "1.3.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fe233b960f12f8007e3db2d136e3cb1c291bfd7396e384ee76025fc1a3932b4" +checksum = "fdde5c9cd29ebd706ce1b35600920a33550e402fc998a2e53ad3b42c3c47a192" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.14", 
] [[package]] @@ -567,9 +608,9 @@ checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb24e866b15a1af2a1b663f10c6b6b8f397a84aadb828f12e5b289ec23a3a3c" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" [[package]] name = "bytestring" @@ -619,9 +660,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.78" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a20104e2335ce8a659d6dd92a51a767a0c062599c73b343fd152cb401e828c3d" +checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" dependencies = [ "jobserver", ] @@ -726,30 +767,38 @@ dependencies = [ [[package]] name = "clap" -version = "4.0.32" +version = "4.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7db700bc935f9e43e88d00b0850dae18a63773cfbec6d8e070fccf7fef89a39" +checksum = "046ae530c528f252094e4a77886ee1374437744b2bff1497aa898bbddbbb29b3" dependencies = [ - "bitflags", + "clap_builder", "clap_derive", - "clap_lex 0.3.0", - "is-terminal", "once_cell", +] + +[[package]] +name = "clap_builder" +version = "4.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "223163f58c9a40c3b0a43e1c4b50a9ce09f007ea2cb1ec258a687945b4b7929f" +dependencies = [ + "anstream", + "anstyle", + "bitflags", + "clap_lex 0.4.1", "strsim", - "termcolor", ] [[package]] name = "clap_derive" -version = "4.0.21" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014" +checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4" dependencies = [ "heck", - "proc-macro-error", "proc-macro2", "quote", - "syn", + "syn 2.0.14", ] 
[[package]] @@ -763,12 +812,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.3.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d4198f73e42b4936b35b5bb248d81d2b595ecb170da0bac7655c54eedfa8da8" -dependencies = [ - "os_str_bytes", -] +checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1" [[package]] name = "concat-arrays" @@ -778,19 +824,34 @@ checksum = "1df715824eb382e34b7afb7463b0247bf41538aeba731fba05241ecdb5dc3747" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", +] + +[[package]] +name = "concolor-override" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a855d4a1978dc52fb0536a04d384c2c0c1aa273597f08b77c8c4d3b2eec6037f" + +[[package]] +name = "concolor-query" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88d11d52c3d7ca2e6d0040212be9e4dbbcd78b6447f535b6b561f449427944cf" +dependencies = [ + "windows-sys 0.45.0", ] [[package]] name = "console" -version = "0.15.4" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9b6515d269224923b26b5febea2ed42b2d5f2ce37284a4dd670fedd6cb8347a" +checksum = "c3d79fbe8970a77e3e34151cc13d3b3e248aa0faaecb9f6091fa07ebefe5ad60" dependencies = [ "encode_unicode", "lazy_static", "libc", - "windows-sys", + "windows-sys 0.42.0", ] [[package]] @@ -833,9 +894,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "cow-utils" @@ -845,9 +906,9 @@ checksum = "79bb3adfaf5f75d24b01aee375f7555907840fa2800e5ec8fa3b9e2031830173" [[package]] name = "cpufeatures" -version = "0.2.5" +version = "0.2.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320" +checksum = "280a9f2d8b3a38871a3c8a46fb80db65e5e5ed97da80c4d08bf27fb63e35e181" dependencies = [ "libc", ] @@ -919,9 +980,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.6" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" +checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" dependencies = [ "cfg-if", "crossbeam-utils", @@ -929,9 +990,9 @@ dependencies = [ [[package]] name = "crossbeam-deque" -version = "0.8.2" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ "cfg-if", "crossbeam-epoch", @@ -940,9 +1001,9 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.13" +version = "0.9.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01a9af1f4c2ef74bb8aa1f7e19706bc72d03598c8a570bb5de72243c7a9d9d5a" +checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" dependencies = [ "autocfg", "cfg-if", @@ -963,9 +1024,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.14" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb766fa798726286dbbb842f174001dab8abc7b627a1dd86e0b7222a95d929f" +checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" dependencies = [ "cfg-if", ] @@ -982,13 +1043,12 @@ dependencies = [ [[package]] name = "csv" -version = "1.1.6" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22813a6dc45b335f9bade10bf7271dc477e81113e89eb251a0bc2a8a81c536e1" 
+checksum = "0b015497079b9a9d69c02ad25de6c0a6edef051ea6360a327d0bd05802ef64ad" dependencies = [ - "bstr 0.2.17", "csv-core", - "itoa 0.4.8", + "itoa", "ryu", "serde", ] @@ -1004,9 +1064,9 @@ dependencies = [ [[package]] name = "darling" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0dd3cd20dc6b5a876612a6e5accfe7f3dd883db6d07acfbf14c128f61550dfa" +checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" dependencies = [ "darling_core", "darling_macro", @@ -1014,27 +1074,27 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f" +checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn", + "syn 1.0.109", ] [[package]] name = "darling_macro" -version = "0.14.2" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e" +checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ "darling_core", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -1055,7 +1115,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -1065,7 +1125,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f0314b72bed045f3a68671b3c86328386762c93f82d98c65c3cb5e5f573dd68" dependencies = [ "derive_builder_core", - "syn", + "syn 1.0.109", ] [[package]] @@ -1078,7 +1138,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn", + "syn 1.0.109", ] [[package]] @@ -1107,7 +1167,7 @@ dependencies = [ "convert_case 0.5.0", "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -1170,14 +1230,14 @@ 
dependencies = [ "tempfile", "thiserror", "time", - "uuid 1.2.2", + "uuid 1.3.1", ] [[package]] name = "either" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" +checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" dependencies = [ "serde", ] @@ -1254,9 +1314,9 @@ checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569" [[package]] name = "encoding_rs" -version = "0.8.31" +version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9852635589dc9f9ea1b6fe9f05b50ef208c85c834a562f0c6abb1c475736ec2b" +checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ "cfg-if", ] @@ -1272,22 +1332,22 @@ dependencies = [ [[package]] name = "enum-iterator" -version = "1.2.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91a4ec26efacf4aeff80887a175a419493cb6f8b5480d26387eb0bd038976187" +checksum = "706d9e7cf1c7664859d79cd524e4e53ea2b67ea03c98cc2870c5e539695d597e" dependencies = [ "enum-iterator-derive", ] [[package]] name = "enum-iterator-derive" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "828de45d0ca18782232dfb8f3ea9cc428e8ced380eb26a520baaacfc70de39ce" +checksum = "355f93763ef7b0ae1c43c4d8eccc9d5848d84ad1a1d8ce61c421d1ac85a19d05" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -1327,6 +1387,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "errno" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "errno-dragonfly" version = "0.1.2" @@ -1339,9 +1410,9 @@ 
dependencies = [ [[package]] name = "fastrand" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" dependencies = [ "instant", ] @@ -1365,7 +1436,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn", + "syn 1.0.109", "uuid 0.8.2", ] @@ -1376,7 +1447,7 @@ dependencies = [ "faux", "tempfile", "thiserror", - "uuid 1.2.2", + "uuid 1.3.1", ] [[package]] @@ -1387,8 +1458,8 @@ checksum = "4e884668cd0c7480504233e951174ddc3b382f7c2666e3b7310b5c4e7b0c37f9" dependencies = [ "cfg-if", "libc", - "redox_syscall", - "windows-sys", + "redox_syscall 0.2.16", + "windows-sys 0.42.0", ] [[package]] @@ -1442,9 +1513,9 @@ checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a" [[package]] name = "futures" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38390104763dc37a5145a53c29c63c1290b5d316d6086ec32c293f6736051bb0" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" dependencies = [ "futures-channel", "futures-core", @@ -1457,9 +1528,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52ba265a92256105f45b719605a571ffe2d1f0fea3807304b522c1d778f79eed" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" dependencies = [ "futures-core", "futures-sink", @@ -1467,15 +1538,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04909a7a7e4633ae6c4a9ab280aeb86da1236243a77b694a49eacd659a4bd3ac" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" [[package]] name = 
"futures-executor" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7acc85df6714c176ab5edf386123fafe217be88c0840ec11f199441134a074e2" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" dependencies = [ "futures-core", "futures-task", @@ -1484,38 +1555,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00f5fb52a06bdcadeb54e8d3671f8888a39697dcb0b81b23b55174030427f4eb" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" [[package]] name = "futures-macro" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.14", ] [[package]] name = "futures-sink" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39c15cf1a4aa79df40f1bb462fb39676d0ad9e366c2a33b590d7c66f4f81fcf9" +checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" [[package]] name = "futures-task" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ffb393ac5d9a6eaa9d3fdf37ae2776656b706e200c8e16b1bdb227f5198e6ea" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" [[package]] name = "futures-util" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "197676987abd2f9cadff84926f410af1c183608d36641465df73ae8211dc65d6" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ "futures-channel", "futures-core", @@ -1535,7 +1606,7 @@ 
version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee76e8096c3fcd82ab23177edddcc9b81b72c123caab54bb1e2dc19fd09d2dec" dependencies = [ - "ahash", + "ahash 0.7.6", "bit-vec", "cc", "cfg-if", @@ -1570,7 +1641,7 @@ checksum = "30ce01e8bbb3e7e0758dcf907fe799f5998a54368963f766ae94b84624ba60c8" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -1584,9 +1655,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", @@ -1627,7 +1698,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -1638,9 +1709,9 @@ checksum = "dec7af912d60cdbd3677c1af9352ebae6fb8394d165568a2234df0fa00f87793" [[package]] name = "git2" -version = "0.15.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2994bee4a3a6a51eb90c218523be382fd7ea09b16380b9312e9dbe955ff7c7d1" +checksum = "ccf7f68c2995f392c49fffb4f95ae2c873297830eb25c6bc4c114ce8f4562acc" dependencies = [ "bitflags", "libc", @@ -1706,7 +1777,7 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash", + "ahash 0.7.6", ] [[package]] @@ -1724,9 +1795,9 @@ dependencies = [ [[package]] name = "heck" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "heed" @@ -1780,6 +1851,12 @@ dependencies = [ "libc", ] 
+[[package]] +name = "hermit-abi" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" + [[package]] name = "hex" version = "0.4.3" @@ -1797,13 +1874,13 @@ dependencies = [ [[package]] name = "http" -version = "0.2.8" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ "bytes", "fnv", - "itoa 1.0.5", + "itoa", ] [[package]] @@ -1837,9 +1914,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.23" +version = "0.14.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "034711faac9d2166cb1baf1a2fb0b60b1f277f8492fd72176c17f3515e1abd3c" +checksum = "cc5e554ff619822309ffd57d8734d77cd5ce6238bc956f037ea06c58238c9899" dependencies = [ "bytes", "futures-channel", @@ -1850,7 +1927,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa 1.0.5", + "itoa", "pin-project-lite", "socket2", "tokio", @@ -1915,14 +1992,14 @@ dependencies = [ "tempfile", "thiserror", "time", - "uuid 1.2.2", + "uuid 1.3.1", ] [[package]] name = "indexmap" -version = "1.9.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1885e79c1fc4b10f0e172c475f458b7f7b93061064d98c3293e98c5ba0c8b399" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown", @@ -1931,9 +2008,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.26.0" +version = "1.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f0f08b46e4379744de2ab67aa8f7de3ffd1da3e275adc41fcc82053ede46ff" +checksum = "9a28d25139df397cbca21408bb742cf6837e04cdbebf1b07b760caf971d6a972" 
dependencies = [ "console", "lazy_static", @@ -1956,19 +2033,20 @@ dependencies = [ [[package]] name = "io-lifetimes" -version = "1.0.3" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46112a93252b123d31a119a8d1a1ac19deac4fac6e0e8b0df58f0d4e5870e63c" +checksum = "9c66c74d2ae7e79a5a8f7ac924adbe38ee42a859c6539ad869eb51f0b52dc220" dependencies = [ + "hermit-abi 0.3.1", "libc", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] name = "ipnet" -version = "2.7.1" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30e22bd8629359895450b59ea7a776c850561b96a3b1d31321c1949d9e6c9146" +checksum = "12b6ee2129af8d4fb011108c73d99a1b83a85977f23b82460c0ae2e25bb4b57f" [[package]] name = "irg-kvariants" @@ -1983,14 +2061,14 @@ dependencies = [ [[package]] name = "is-terminal" -version = "0.4.2" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28dfb6c8100ccc63462345b67d1bbc3679177c75ee4bf59bf29c8b1d110b8189" +checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi 0.3.1", "io-lifetimes", - "rustix", - "windows-sys", + "rustix 0.37.11", + "windows-sys 0.48.0", ] [[package]] @@ -2004,15 +2082,9 @@ dependencies = [ [[package]] name = "itoa" -version = "0.4.8" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" - -[[package]] -name = "itoa" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440" +checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "jieba-rs" @@ -2031,9 +2103,9 @@ dependencies = [ [[package]] name = "jobserver" -version = "0.1.25" +version = "0.1.26" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b" +checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" dependencies = [ "libc", ] @@ -2057,11 +2129,11 @@ dependencies = [ [[package]] name = "jsonwebtoken" -version = "8.2.0" +version = "8.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09f4f04699947111ec1733e71778d763555737579e44b85844cae8e1940a1828" +checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" dependencies = [ - "base64 0.13.1", + "base64 0.21.0", "pem", "ring", "serde", @@ -2101,9 +2173,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.139" +version = "0.2.141" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" +checksum = "3304a64d199bb964be99741b7a14d26972741915b3649639149b2479bb46f4b5" [[package]] name = "libgit2-sys" @@ -2125,9 +2197,9 @@ checksum = "348108ab3fba42ec82ff6e9564fc4ca0247bdccdc68dd8af9764bbc79c3c8ffb" [[package]] name = "libmimalloc-sys" -version = "0.1.30" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8c7cbf8b89019683667e347572e6d55a7df7ea36b0c4ce69961b0cde67b174" +checksum = "43a558e3d911bc3c7bfc8c78bc580b404d6e51c1cefbf656e176a94b49b0df40" dependencies = [ "cc", "libc", @@ -2350,6 +2422,12 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f051f77a7c8e6957c0696eac88f26b0117e54f52d3fc682ab19397a8812846a4" +[[package]] +name = "linux-raw-sys" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d59d8c75012853d2e872fb56bc8a2e53718e2cafe1a4c823143141c6d90c322f" + [[package]] name = "lmdb-rkv-sys" version = "0.15.1" @@ -2416,7 +2494,7 @@ dependencies = [ "log", "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] 
@@ -2428,7 +2506,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn", + "syn 1.0.109", ] [[package]] @@ -2468,11 +2546,11 @@ dependencies = [ "async-trait", "atty", "brotli", - "bstr 1.1.0", + "bstr", "byte-unit", "bytes", "cargo_toml", - "clap 4.0.32", + "clap 4.2.1", "crossbeam-channel", "deserr", "dump", @@ -2533,7 +2611,7 @@ dependencies = [ "tokio-stream", "toml", "urlencoding", - "uuid 1.2.2", + "uuid 1.3.1", "vergen", "walkdir", "yaup", @@ -2556,7 +2634,7 @@ dependencies = [ "sha2", "thiserror", "time", - "uuid 1.2.2", + "uuid 1.3.1", ] [[package]] @@ -2586,7 +2664,7 @@ dependencies = [ "thiserror", "time", "tokio", - "uuid 1.2.2", + "uuid 1.3.1", ] [[package]] @@ -2597,18 +2675,18 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" -version = "0.5.8" +version = "0.5.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b182332558b18d807c4ce1ca8ca983b34c3ee32765e47b3f0f69b90355cc1dc" +checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" dependencies = [ "libc", ] [[package]] name = "memoffset" -version = "0.7.1" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" +checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" dependencies = [ "autocfg", ] @@ -2620,7 +2698,7 @@ dependencies = [ "big_s", "bimap", "bincode", - "bstr 1.1.0", + "bstr", "byteorder", "charabia", "concat-arrays", @@ -2661,23 +2739,23 @@ dependencies = [ "tempfile", "thiserror", "time", - "uuid 1.2.2", + "uuid 1.3.1", ] [[package]] name = "mimalloc" -version = "0.1.34" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dcb174b18635f7561a0c6c9fc2ce57218ac7523cf72c50af80e2d79ab8f3ba1" +checksum = "3d88dad3f985ec267a3fcb7a1726f5cb1a7e8cad8b646e70a84f967210df23da" dependencies = [ 
"libmimalloc-sys", ] [[package]] name = "mime" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] name = "mime_guess" @@ -2706,14 +2784,14 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d732bc30207a6423068df043e3d02e0735b155ad7ce1a6f76fe2baa5b158de" +checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" dependencies = [ "libc", "log", "wasi", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] @@ -2723,9 +2801,9 @@ source = "git+https://github.com/meilisearch/nelson.git?rev=675f13885548fb415ead [[package]] name = "nom" -version = "7.1.2" +version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5507769c4919c998e69e49c839d9dc6e693ede4cc4290d6ad8b41d4f09c548c" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", @@ -2733,9 +2811,9 @@ dependencies = [ [[package]] name = "nom_locate" -version = "4.0.0" +version = "4.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37794436ca3029a3089e0b95d42da1f0b565ad271e4d3bb4bad0c7bb70b10605" +checksum = "b1e299bf5ea7b212e811e71174c5d1a5d065c4c0ad0c8691ecb1f97e3e66025e" dependencies = [ "bytecount", "memchr", @@ -2828,9 +2906,9 @@ checksum = "f69e48cd7c8e5bb52a1da1287fdbfd877c32673176583ce664cd63b201aba385" [[package]] name = "once_cell" -version = "1.17.0" +version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" +checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] name = "oorandom" @@ 
-2846,18 +2924,18 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "ordered-float" -version = "3.4.0" +version = "3.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d84eb1409416d254e4a9c8fa56cc24701755025b458f0fcd8e59e1f5f40c23bf" +checksum = "13a384337e997e6860ffbaa83708b2ef329fd8c54cb67a5f64d421e0f943254f" dependencies = [ "num-traits", ] [[package]] name = "os_str_bytes" -version = "6.4.1" +version = "6.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7820b9daea5457c9f21c69448905d723fbd21136ccf521748f23fd49e723ee" +checksum = "ceedf44fb00f2d1984b0bc98102627ce622e083e49a5bacdb3e514fa4238e267" [[package]] name = "overload" @@ -2897,15 +2975,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.5" +version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ff9f3fef3968a3ec5945535ed654cb38ff72d7495a25619e2247fb15a2ed9ba" +checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" dependencies = [ "cfg-if", "libc", - "redox_syscall", + "redox_syscall 0.2.16", "smallvec", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] @@ -2921,9 +2999,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d01a5bd0424d00070b0098dd17ebca6f961a959dead1dbcbbbc1d1cd8d3deeba" +checksum = "9f746c4065a8fa3fe23974dd82f15431cc8d40779821001404d10d2e79ca7d79" [[package]] name = "path-matchers" @@ -2977,9 +3055,9 @@ dependencies = [ [[package]] name = "pest" -version = "2.5.3" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4257b4a04d91f7e9e6290be5d3da4804dd5784fafde3a497d73eb2b4a158c30a" +checksum = "7b1403e8401ad5dedea73c626b99758535b342502f8d1e361f4a2dd952749122" dependencies = [ "thiserror", "ucd-trie", @@ -2987,9 +3065,9 @@ 
dependencies = [ [[package]] name = "pest_derive" -version = "2.5.3" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "241cda393b0cdd65e62e07e12454f1f25d57017dcc514b1514cd3c4645e3a0a6" +checksum = "be99c4c1d2fc2769b1d00239431d711d08f6efedcecb8b6e30707160aee99c15" dependencies = [ "pest", "pest_generator", @@ -2997,22 +3075,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.5.3" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46b53634d8c8196302953c74d5352f33d0c512a9499bd2ce468fc9f4128fa27c" +checksum = "e56094789873daa36164de2e822b3888c6ae4b4f9da555a1103587658c805b1e" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn", + "syn 2.0.14", ] [[package]] name = "pest_meta" -version = "2.5.3" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ef4f1332a8d4678b41966bb4cc1d0676880e84183a1ecc3f4b69f03e99c7a51" +checksum = "6733073c7cff3d8459fda0e42f13a047870242aed8b509fe98000928975f359e" dependencies = [ "once_cell", "pest", @@ -3133,7 +3211,7 @@ dependencies = [ "proc-macro-error-attr", "proc-macro2", "quote", - "syn", + "syn 1.0.109", "version_check", ] @@ -3150,9 +3228,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.49" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a8eca9f9c4ffde41714334dee777596264c7825420f521abc92b5b5deb63a5" +checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" dependencies = [ "unicode-ident", ] @@ -3167,7 +3245,7 @@ dependencies = [ "byteorder", "hex", "lazy_static", - "rustix", + "rustix 0.36.11", ] [[package]] @@ -3195,9 +3273,9 @@ checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94" [[package]] name = "quote" -version = "1.0.23" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" dependencies = [ "proc-macro2", ] @@ -3234,9 +3312,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.6.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7" +checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" dependencies = [ "either", "rayon-core", @@ -3244,9 +3322,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.10.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cac410af5d00ab6884528b4ab69d1e8e146e8d471201800fa1b4524126de6ad3" +checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" dependencies = [ "crossbeam-channel", "crossbeam-deque", @@ -3263,6 +3341,15 @@ dependencies = [ "bitflags", ] +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags", +] + [[package]] name = "redox_users" version = "0.4.3" @@ -3270,15 +3357,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ "getrandom", - "redox_syscall", + "redox_syscall 0.2.16", "thiserror", ] [[package]] name = "regex" -version = "1.7.1" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733" +checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" dependencies = [ "aho-corasick", "memchr", @@ -3293,26 +3380,17 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" [[package]] name = 
"regex-syntax" -version = "0.6.28" +version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848" - -[[package]] -name = "remove_dir_all" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" -dependencies = [ - "winapi", -] +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "reqwest" -version = "0.11.13" +version = "0.11.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68cc60575865c7831548863cc02356512e3f1dc2f3f82cb837d7fc4cc8f3c97c" +checksum = "27b71749df584b7f4cac2c426c127a7c785a5106cc98f7a8feb044115f0fa254" dependencies = [ - "base64 0.13.1", + "base64 0.21.0", "bytes", "encoding_rs", "futures-core", @@ -3407,23 +3485,37 @@ dependencies = [ [[package]] name = "rustix" -version = "0.36.6" +version = "0.36.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4feacf7db682c6c329c4ede12649cd36ecab0f3be5b7d74e6a20304725db4549" +checksum = "db4165c9963ab29e422d6c26fbc1d37f15bace6b2810221f9d925023480fcf0e" dependencies = [ "bitflags", - "errno", + "errno 0.2.8", "io-lifetimes", "libc", - "linux-raw-sys", - "windows-sys", + "linux-raw-sys 0.1.4", + "windows-sys 0.45.0", +] + +[[package]] +name = "rustix" +version = "0.37.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85597d61f83914ddeba6a47b3b8ffe7365107221c2e557ed94426489fefb5f77" +dependencies = [ + "bitflags", + "errno 0.3.1", + "io-lifetimes", + "libc", + "linux-raw-sys 0.3.1", + "windows-sys 0.48.0", ] [[package]] name = "rustls" -version = "0.20.7" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "539a2bfe908f471bfa933876bd1eb6a19cf2176d375f82ef7f99530a40e48c2c" +checksum = 
"fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" dependencies = [ "log", "ring", @@ -3442,15 +3534,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" +checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06" [[package]] name = "ryu" -version = "1.0.12" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde" +checksum = "f91339c0467de62360649f8d3e185ca8de4224ff281f66000de5eb2a77a79041" [[package]] name = "same-file" @@ -3493,15 +3585,15 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a" +checksum = "bebd363326d05ec3e2f532ab7660680f3b02130d780c299bca73469d521bc0ed" [[package]] name = "serde" -version = "1.0.152" +version = "1.0.160" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb" +checksum = "bb2f3770c8bce3bcda7e149193a069a0f4365bda1fa5cd88e03bca26afc1216c" dependencies = [ "serde_derive", ] @@ -3517,23 +3609,23 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.152" +version = "1.0.160" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" +checksum = "291a097c63d8497e00160b166a967a4a79c64f3facdd01cbd7502231688d77df" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.14", ] [[package]] name = "serde_json" -version = "1.0.91" +version = "1.0.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883" +checksum = "d721eca97ac802aa7777b701877c8004d950fc142651367300d21c1cc0194744" dependencies = [ "indexmap", - "itoa 1.0.5", + "itoa", "ryu", "serde", ] @@ -3545,7 +3637,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.5", + "itoa", "ryu", "serde", ] @@ -3585,9 +3677,9 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" dependencies = [ "libc", ] @@ -3618,9 +3710,9 @@ checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de" [[package]] name = "slab" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" dependencies = [ "autocfg", ] @@ -3660,9 +3752,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" dependencies = [ "libc", "winapi", @@ -3720,9 +3812,20 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601" [[package]] name = "syn" -version = "1.0.107" +version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" +checksum = 
"72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcf316d5356ed6847742d036f8a39c3b8435cac10bd528a4bd461928a6ab34d5" dependencies = [ "proc-macro2", "quote", @@ -3746,7 +3849,7 @@ checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", "unicode-xid", ] @@ -3778,32 +3881,31 @@ dependencies = [ [[package]] name = "temp-env" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a30d48359f77fbb6af3d7b928cc2d092e1dc90b44f397e979ef08ae15733ed65" +checksum = "5ee95b343d943e5a0d2221fb73029e8040f3c91d6d06afec86c664682a361681" dependencies = [ - "once_cell", + "parking_lot", ] [[package]] name = "tempfile" -version = "3.3.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" dependencies = [ "cfg-if", "fastrand", - "libc", - "redox_syscall", - "remove_dir_all", - "winapi", + "redox_syscall 0.3.5", + "rustix 0.37.11", + "windows-sys 0.45.0", ] [[package]] name = "termcolor" -version = "1.1.3" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" dependencies = [ "winapi-util", ] @@ -3816,31 +3918,31 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.14", ] [[package]] name = "time" -version = "0.3.17" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376" +checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890" dependencies = [ - "itoa 1.0.5", + "itoa", "serde", "time-core", "time-macros", @@ -3854,9 +3956,9 @@ checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd" [[package]] name = "time-macros" -version = "0.2.6" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2" +checksum = "fd80a657e71da814b8e5d60d3374fc6d35045062245d80224748ae522dd76f36" dependencies = [ "time-core", ] @@ -3882,20 +3984,19 @@ dependencies = [ [[package]] name = "tinyvec_macros" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.24.2" +version = "1.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a12a59981d9e3c38d216785b0c37399f6e415e8d0712047620f189371b0bb" +checksum = "d0de47a4eecbe11f498978a9b29d792f0d2692d1dd003650c24c76510e3bc001" dependencies = [ 
"autocfg", "bytes", "libc", - "memchr", "mio", "num_cpus", "parking_lot", @@ -3903,18 +4004,18 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.45.0", ] [[package]] name = "tokio-macros" -version = "1.8.2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" +checksum = "61a573bdc87985e9d6ddeed1b3d864e8a302c847e40d647746df2f1de209d1ce" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.14", ] [[package]] @@ -3930,9 +4031,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d660770404473ccd7bc9f8b28494a811bc18542b915c0855c51e8f419d5223ce" +checksum = "8fb52b74f05dbf495a8fba459fdc331812b96aa086d9eb78101fa0d4569c3313" dependencies = [ "futures-core", "pin-project-lite", @@ -3941,9 +4042,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.4" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb2e075f03b3d66d8d8785356224ba688d2906a371015e225beeb65ca92c740" +checksum = "5427d89453009325de0d8f342c9490009f76e999cb7672d77e46267448f7e6b2" dependencies = [ "bytes", "futures-core", @@ -3955,9 +4056,9 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.10" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1333c76748e868a4d9d1017b5ab53171dfd095f70c712fdb4653a406547f598f" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" dependencies = [ "serde", ] @@ -4018,9 +4119,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.8" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" +checksum = 
"92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-blocks" @@ -4030,9 +4131,9 @@ checksum = "9de2be6bad6f56ce8373d377e611cbb2265de3a656138065609ce82e217aad70" [[package]] name = "unicode-ident" -version = "1.0.6" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" +checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" [[package]] name = "unicode-normalization" @@ -4090,6 +4191,12 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1" +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + [[package]] name = "uuid" version = "0.8.2" @@ -4101,9 +4208,9 @@ dependencies = [ [[package]] name = "uuid" -version = "1.2.2" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c" +checksum = "5b55a3fef2a1e3b3a00ce878640918820d3c51081576ac657d23af9fc7928fdb" dependencies = [ "getrandom", "serde", @@ -4117,9 +4224,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vergen" -version = "7.5.0" +version = "7.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "571b69f690c855821462709b6f41d42ceccc316fbd17b60bd06d06928cfe6a99" +checksum = "f21b881cd6636ece9735721cf03c1fe1e774fe258683d084bb2812ab67435749" dependencies = [ "anyhow", "cfg-if", @@ -4139,12 +4246,11 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "walkdir" -version = "2.3.2" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" dependencies = [ "same-file", - "winapi", "winapi-util", ] @@ -4196,7 +4302,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn", + "syn 1.0.109", "wasm-bindgen-shared", ] @@ -4230,7 +4336,7 @@ checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.109", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4317,56 +4423,146 @@ version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.0", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + 
"windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +dependencies = [ + "windows_aarch64_gnullvm 0.48.0", + "windows_aarch64_msvc 0.48.0", + "windows_i686_gnu 0.48.0", + "windows_i686_msvc 0.48.0", + "windows_x86_64_gnu 0.48.0", + "windows_x86_64_gnullvm 0.48.0", + "windows_x86_64_msvc 0.48.0", ] [[package]] name = "windows_aarch64_gnullvm" -version = "0.42.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" [[package]] name = "windows_aarch64_msvc" -version = "0.42.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" [[package]] name = "windows_i686_gnu" -version = "0.42.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" [[package]] name = "windows_i686_msvc" -version = "0.42.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" [[package]] name = "windows_x86_64_gnu" -version = "0.42.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" [[package]] name = "windows_x86_64_gnullvm" -version = "0.42.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" [[package]] name = "windows_x86_64_msvc" -version = "0.42.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] 
+name = "windows_x86_64_msvc" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winreg" @@ -4428,15 +4624,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb" dependencies = [ "proc-macro2", - "syn", + "syn 1.0.109", "synstructure", ] [[package]] name = "zip" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "537ce7411d25e54e8ae21a7ce0b15840e7bfcff15b51d697ec3266cc76bdf080" +checksum = "0445d0fbc924bb93539b4316c11afb121ea39296f99a3c4c9edad09e3658cdef" dependencies = [ "aes", "byteorder", diff --git a/benchmarks/Cargo.toml b/benchmarks/Cargo.toml index 5203a7601..9a9682320 100644 --- a/benchmarks/Cargo.toml +++ b/benchmarks/Cargo.toml @@ -11,11 +11,11 @@ edition.workspace = true license.workspace = true [dependencies] -anyhow = "1.0.65" -csv = "1.1.6" +anyhow = "1.0.70" +csv = "1.2.1" milli = { path = "../milli", default-features = false } -mimalloc = { version = "0.1.29", default-features = false } -serde_json = { version = "1.0.85", features = ["preserve_order"] } +mimalloc = { version = "0.1.36", default-features = false } +serde_json = { version = "1.0.95", features = ["preserve_order"] } [dev-dependencies] criterion = { version = "0.4.0", features = ["html_reports"] } @@ -24,11 +24,11 @@ rand_chacha = "0.3.1" roaring = "0.10.1" [build-dependencies] -anyhow = "1.0.65" -bytes = "1.2.1" +anyhow = "1.0.70" +bytes = "1.4.0" convert_case = "0.6.0" -flate2 = "1.0.24" -reqwest = { version = "0.11.12", features = ["blocking", "rustls-tls"], default-features = false } +flate2 = "1.0.25" +reqwest = { version = "0.11.16", features = ["blocking", "rustls-tls"], default-features = false } [features] default = ["milli/default"] diff --git a/dump/Cargo.toml b/dump/Cargo.toml index 
d0ed46d7c..6834d5c26 100644 --- a/dump/Cargo.toml +++ b/dump/Cargo.toml @@ -11,22 +11,22 @@ readme.workspace = true license.workspace = true [dependencies] -anyhow = "1.0.65" -flate2 = "1.0.22" -http = "0.2.8" +anyhow = "1.0.70" +flate2 = "1.0.25" +http = "0.2.9" log = "0.4.17" meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-types = { path = "../meilisearch-types" } -once_cell = "1.15.0" -regex = "1.6.0" -roaring = { version = "0.10.0", features = ["serde"] } -serde = { version = "1.0.136", features = ["derive"] } -serde_json = { version = "1.0.85", features = ["preserve_order"] } +once_cell = "1.17.1" +regex = "1.7.3" +roaring = { version = "0.10.1", features = ["serde"] } +serde = { version = "1.0.160", features = ["derive"] } +serde_json = { version = "1.0.95", features = ["preserve_order"] } tar = "0.4.38" -tempfile = "3.3.0" -thiserror = "1.0.30" -time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } -uuid = { version = "1.1.2", features = ["serde", "v4"] } +tempfile = "3.5.0" +thiserror = "1.0.40" +time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] } +uuid = { version = "1.3.1", features = ["serde", "v4"] } [dev-dependencies] big_s = "1.0.2" diff --git a/file-store/Cargo.toml b/file-store/Cargo.toml index b9ab9ea36..d213c8963 100644 --- a/file-store/Cargo.toml +++ b/file-store/Cargo.toml @@ -11,9 +11,9 @@ edition.workspace = true license.workspace = true [dependencies] -tempfile = "3.3.0" -thiserror = "1.0.30" -uuid = { version = "1.1.2", features = ["serde", "v4"] } +tempfile = "3.5.0" +thiserror = "1.0.40" +uuid = { version = "1.3.1", features = ["serde", "v4"] } [dev-dependencies] -faux = "0.1.8" +faux = "0.1.9" diff --git a/filter-parser/Cargo.toml b/filter-parser/Cargo.toml index e6652a489..58111ee08 100644 --- a/filter-parser/Cargo.toml +++ b/filter-parser/Cargo.toml @@ -12,8 +12,8 @@ edition.workspace = true license.workspace = true [dependencies] 
-nom = "7.1.1" -nom_locate = "4.0.0" +nom = "7.1.3" +nom_locate = "4.1.0" [dev-dependencies] -insta = "1.21.0" +insta = "1.29.0" diff --git a/index-scheduler/Cargo.toml b/index-scheduler/Cargo.toml index 99dfaa493..b90b9786e 100644 --- a/index-scheduler/Cargo.toml +++ b/index-scheduler/Cargo.toml @@ -11,29 +11,29 @@ edition.workspace = true license.workspace = true [dependencies] -anyhow = "1.0.64" +anyhow = "1.0.70" bincode = "1.3.3" -csv = "1.1.6" +csv = "1.2.1" derive_builder = "0.11.2" dump = { path = "../dump" } -enum-iterator = "1.1.3" +enum-iterator = "1.4.0" file-store = { path = "../file-store" } -log = "0.4.14" +log = "0.4.17" meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-types = { path = "../meilisearch-types" } page_size = "0.5.0" -roaring = { version = "0.10.0", features = ["serde"] } -serde = { version = "1.0.136", features = ["derive"] } -serde_json = { version = "1.0.85", features = ["preserve_order"] } +roaring = { version = "0.10.1", features = ["serde"] } +serde = { version = "1.0.160", features = ["derive"] } +serde_json = { version = "1.0.95", features = ["preserve_order"] } synchronoise = "1.0.1" -tempfile = "3.3.0" -thiserror = "1.0.30" -time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } -uuid = { version = "1.1.2", features = ["serde", "v4"] } +tempfile = "3.5.0" +thiserror = "1.0.40" +time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] } +uuid = { version = "1.3.1", features = ["serde", "v4"] } [dev-dependencies] big_s = "1.0.2" crossbeam = "0.8.2" -insta = { version = "1.19.1", features = ["json", "redactions"] } +insta = { version = "1.29.0", features = ["json", "redactions"] } meili-snap = { path = "../meili-snap" } nelson = { git = "https://github.com/meilisearch/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"} diff --git a/meili-snap/Cargo.toml b/meili-snap/Cargo.toml index 8aeb30141..ab7aa78c7 100644 --- 
a/meili-snap/Cargo.toml +++ b/meili-snap/Cargo.toml @@ -11,6 +11,6 @@ edition.workspace = true license.workspace = true [dependencies] -insta = { version = "^1.19.1", features = ["json", "redactions"] } +insta = { version = "^1.29.0", features = ["json", "redactions"] } md5 = "0.7.0" -once_cell = "1.15" +once_cell = "1.17" diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 9a00140fa..2065c6f4c 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -12,15 +12,15 @@ license.workspace = true [dependencies] base64 = "0.13.1" -enum-iterator = "1.1.3" +enum-iterator = "1.4.0" hmac = "0.12.1" maplit = "1.0.2" meilisearch-types = { path = "../meilisearch-types" } rand = "0.8.5" -roaring = { version = "0.10.0", features = ["serde"] } -serde = { version = "1.0.145", features = ["derive"] } -serde_json = { version = "1.0.85", features = ["preserve_order"] } +roaring = { version = "0.10.1", features = ["serde"] } +serde = { version = "1.0.160", features = ["derive"] } +serde_json = { version = "1.0.95", features = ["preserve_order"] } sha2 = "0.10.6" -thiserror = "1.0.37" -time = { version = "0.3.15", features = ["serde-well-known", "formatting", "parsing", "macros"] } -uuid = { version = "1.1.2", features = ["serde", "v4"] } +thiserror = "1.0.40" +time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] } +uuid = { version = "1.3.1", features = ["serde", "v4"] } diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 9828c5f49..017359d25 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -11,31 +11,31 @@ edition.workspace = true license.workspace = true [dependencies] -actix-web = { version = "4.2.1", default-features = false } -anyhow = "1.0.65" +actix-web = { version = "4.3.1", default-features = false } +anyhow = "1.0.70" convert_case = "0.6.0" -csv = "1.1.6" +csv = "1.2.1" deserr = "0.5.0" -either = { version = "1.6.1", features = 
["serde"] } -enum-iterator = "1.1.3" +either = { version = "1.8.1", features = ["serde"] } +enum-iterator = "1.4.0" file-store = { path = "../file-store" } -flate2 = "1.0.24" +flate2 = "1.0.25" fst = "0.4.7" -memmap2 = "0.5.7" +memmap2 = "0.5.10" milli = { path = "../milli", default-features = false } -roaring = { version = "0.10.0", features = ["serde"] } -serde = { version = "1.0.145", features = ["derive"] } +roaring = { version = "0.10.1", features = ["serde"] } +serde = { version = "1.0.160", features = ["derive"] } serde-cs = "0.2.4" -serde_json = "1.0.85" +serde_json = "1.0.95" tar = "0.4.38" -tempfile = "3.3.0" -thiserror = "1.0.30" -time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } -tokio = "1.24" -uuid = { version = "1.1.2", features = ["serde", "v4"] } +tempfile = "3.5.0" +thiserror = "1.0.40" +time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] } +tokio = "1.27" +uuid = { version = "1.3.1", features = ["serde", "v4"] } [dev-dependencies] -insta = "1.19.1" +insta = "1.29.0" meili-snap = { path = "../meili-snap" } [features] diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index d4b772322..09b461417 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -13,97 +13,97 @@ license.workspace = true default-run = "meilisearch" [dependencies] -actix-cors = "0.6.3" -actix-http = { version = "3.2.2", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] } -actix-web = { version = "4.2.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] } +actix-cors = "0.6.4" +actix-http = { version = "3.3.1", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] } +actix-web = { version = "4.3.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] } actix-web-static-files = { git = 
"https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true } -anyhow = { version = "1.0.65", features = ["backtrace"] } -async-stream = "0.3.3" -async-trait = "0.1.57" -bstr = "1.0.1" -byte-unit = { version = "4.0.14", default-features = false, features = ["std", "serde"] } -bytes = "1.2.1" -clap = { version = "4.0.9", features = ["derive", "env"] } -crossbeam-channel = "0.5.6" +anyhow = { version = "1.0.70", features = ["backtrace"] } +async-stream = "0.3.5" +async-trait = "0.1.68" +bstr = "1.4.0" +byte-unit = { version = "4.0.19", default-features = false, features = ["std", "serde"] } +bytes = "1.4.0" +clap = { version = "4.2.1", features = ["derive", "env"] } +crossbeam-channel = "0.5.8" deserr = "0.5.0" dump = { path = "../dump" } -either = "1.8.0" -env_logger = "0.9.1" +either = "1.8.1" +env_logger = "0.9.3" file-store = { path = "../file-store" } -flate2 = "1.0.24" +flate2 = "1.0.25" fst = "0.4.7" -futures = "0.3.24" -futures-util = "0.3.24" -http = "0.2.8" +futures = "0.3.28" +futures-util = "0.3.28" +http = "0.2.9" index-scheduler = { path = "../index-scheduler" } -indexmap = { version = "1.9.1", features = ["serde-1"] } +indexmap = { version = "1.9.3", features = ["serde-1"] } itertools = "0.10.5" -jsonwebtoken = "8.1.1" +jsonwebtoken = "8.3.0" lazy_static = "1.4.0" log = "0.4.17" meilisearch-auth = { path = "../meilisearch-auth" } meilisearch-types = { path = "../meilisearch-types" } -mimalloc = { version = "0.1.29", default-features = false } -mime = "0.3.16" -num_cpus = "1.13.1" +mimalloc = { version = "0.1.36", default-features = false } +mime = "0.3.17" +num_cpus = "1.15.0" obkv = "0.2.0" -once_cell = "1.15.0" +once_cell = "1.17.1" parking_lot = "0.12.1" permissive-json-pointer = { path = "../permissive-json-pointer" } pin-project-lite = "0.2.9" platform-dirs = "0.3.0" -prometheus = { version = "0.13.2", features = ["process"] } +prometheus = { version = "0.13.3", features = ["process"] } rand = "0.8.5" -rayon = "1.5.3" 
-regex = "1.6.0" -reqwest = { version = "0.11.12", features = ["rustls-tls", "json"], default-features = false } -rustls = "0.20.6" -rustls-pemfile = "1.0.1" -segment = { version = "0.2.1", optional = true } -serde = { version = "1.0.145", features = ["derive"] } -serde_json = { version = "1.0.85", features = ["preserve_order"] } +rayon = "1.7.0" +regex = "1.7.3" +reqwest = { version = "0.11.16", features = ["rustls-tls", "json"], default-features = false } +rustls = "0.20.8" +rustls-pemfile = "1.0.2" +segment = { version = "0.2.2", optional = true } +serde = { version = "1.0.160", features = ["derive"] } +serde_json = { version = "1.0.95", features = ["preserve_order"] } sha2 = "0.10.6" siphasher = "0.3.10" slice-group-by = "0.3.0" static-files = { version = "0.2.3", optional = true } -sysinfo = "0.26.4" +sysinfo = "0.26.9" tar = "0.4.38" -tempfile = "3.3.0" -thiserror = "1.0.37" -time = { version = "0.3.15", features = ["serde-well-known", "formatting", "parsing", "macros"] } -tokio = { version = "1.24.2", features = ["full"] } -tokio-stream = "0.1.10" -toml = "0.5.9" -uuid = { version = "1.1.2", features = ["serde", "v4"] } -walkdir = "2.3.2" -yaup = "0.2.0" +tempfile = "3.5.0" +thiserror = "1.0.40" +time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] } +tokio = { version = "1.27.0", features = ["full"] } +tokio-stream = "0.1.12" +toml = "0.5.11" +uuid = { version = "1.3.1", features = ["serde", "v4"] } +walkdir = "2.3.3" +yaup = "0.2.1" serde_urlencoded = "0.7.1" actix-utils = "3.0.1" atty = "0.2.14" -termcolor = "1.1.3" +termcolor = "1.2.0" [dev-dependencies] -actix-rt = "2.7.0" +actix-rt = "2.8.0" assert-json-diff = "2.0.2" brotli = "3.3.4" -insta = "1.19.1" +insta = "1.29.0" manifest-dir-macros = "0.1.16" maplit = "1.0.2" meili-snap = {path = "../meili-snap"} -temp-env = "0.3.1" +temp-env = "0.3.3" urlencoding = "2.1.2" yaup = "0.2.1" [build-dependencies] -anyhow = { version = "1.0.65", optional = true } 
-cargo_toml = { version = "0.14.0", optional = true } +anyhow = { version = "1.0.70", optional = true } +cargo_toml = { version = "0.14.1", optional = true } hex = { version = "0.4.3", optional = true } -reqwest = { version = "0.11.12", features = ["blocking", "rustls-tls"], default-features = false, optional = true } -sha-1 = { version = "0.10.0", optional = true } +reqwest = { version = "0.11.16", features = ["blocking", "rustls-tls"], default-features = false, optional = true } +sha-1 = { version = "0.10.1", optional = true } static-files = { version = "0.2.3", optional = true } -tempfile = { version = "3.3.0", optional = true } -vergen = { version = "7.4.2", default-features = false, features = ["git"] } -zip = { version = "0.6.2", optional = true } +tempfile = { version = "3.5.0", optional = true } +vergen = { version = "7.5.1", default-features = false, features = ["git"] } +zip = { version = "0.6.4", optional = true } [features] default = ["analytics", "meilisearch-types/default", "mini-dashboard"] diff --git a/milli/Cargo.toml b/milli/Cargo.toml index 04591e8fd..1b9a88700 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -12,40 +12,40 @@ readme.workspace = true license.workspace = true [dependencies] -bimap = { version = "0.6.2", features = ["serde"] } +bimap = { version = "0.6.3", features = ["serde"] } bincode = "1.3.3" -bstr = "1.0.1" +bstr = "1.4.0" byteorder = "1.4.3" charabia = { version = "0.7.1", default-features = false } concat-arrays = "0.1.2" -crossbeam-channel = "0.5.6" +crossbeam-channel = "0.5.8" deserr = "0.5.0" -either = "1.8.0" +either = "1.8.1" flatten-serde-json = { path = "../flatten-serde-json" } fst = "0.4.7" fxhash = "0.2.1" geoutils = "0.5.1" -grenad = { version = "0.4.3", default-features = false, features = ["tempfile"] } +grenad = { version = "0.4.4", default-features = false, features = ["tempfile"] } heed = { git = "https://github.com/meilisearch/heed", tag = "v0.12.5", default-features = false, features = ["lmdb", 
"sync-read-txn"] } json-depth-checker = { path = "../json-depth-checker" } levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] } -memmap2 = "0.5.7" +memmap2 = "0.5.10" obkv = "0.2.0" -once_cell = "1.15.0" -ordered-float = "3.2.0" -rayon = "1.5.3" +once_cell = "1.17.1" +ordered-float = "3.6.0" +rayon = "1.7.0" roaring = "0.10.1" rstar = { version = "0.9.3", features = ["serde"] } -serde = { version = "1.0.145", features = ["derive"] } -serde_json = { version = "1.0.85", features = ["preserve_order"] } +serde = { version = "1.0.160", features = ["derive"] } +serde_json = { version = "1.0.95", features = ["preserve_order"] } slice-group-by = "0.3.0" smallstr = { version = "0.3.0", features = ["serde"] } smallvec = "1.10.0" smartstring = "1.0.1" -tempfile = "3.3.0" -thiserror = "1.0.37" -time = { version = "0.3.15", features = ["serde-well-known", "formatting", "parsing", "macros"] } -uuid = { version = "1.1.2", features = ["v4"] } +tempfile = "3.5.0" +thiserror = "1.0.40" +time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] } +uuid = { version = "1.3.1", features = ["v4"] } filter-parser = { path = "../filter-parser" } @@ -55,11 +55,11 @@ itertools = "0.10.5" # logging log = "0.4.17" logging_timer = "1.1.0" -csv = "1.1.6" +csv = "1.2.1" [dev-dependencies] big_s = "1.0.2" -insta = "1.21.0" +insta = "1.29.0" maplit = "1.0.2" md5 = "0.7.0" rand = {version = "0.8.5", features = ["small_rng"] } From 2d8060df80a62c62d7a7bda8eae3a89f8cc5f169 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 11 Apr 2023 15:26:57 +0200 Subject: [PATCH 40/56] Fix the tests --- meilisearch/tests/auth/errors.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/meilisearch/tests/auth/errors.rs b/meilisearch/tests/auth/errors.rs index 904bb182d..cd1e9acf2 100644 --- a/meilisearch/tests/auth/errors.rs +++ b/meilisearch/tests/auth/errors.rs @@ -60,7 +60,7 @@ async fn create_api_key_bad_uid() { snapshot!(code, @"400 
Bad Request"); snapshot!(json_string!(response), @r###" { - "message": "Invalid value at `.uid`: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-zA-Z], found `o` at 2", + "message": "Invalid value at `.uid`: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `o` at 2", "code": "invalid_api_key_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_api_key_uid" From a109802d45796b8788ba396039847b27f5c515c5 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 11 Apr 2023 15:55:23 +0200 Subject: [PATCH 41/56] Upgrade the incompatible versions of the dependencies --- Cargo.lock | 102 +++++++++++++++++++++++------------- index-scheduler/Cargo.toml | 2 +- meilisearch-auth/Cargo.toml | 2 +- meilisearch/Cargo.toml | 8 +-- milli/Cargo.toml | 2 +- 5 files changed, 73 insertions(+), 43 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 473891871..454749dab 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -644,9 +644,9 @@ dependencies = [ [[package]] name = "cargo_toml" -version = "0.14.1" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bfbc36312494041e2cdd5f06697b7e89d4b76f42773a0b5556ac290ff22acc2" +checksum = "7f83bc2e401ed041b7057345ebc488c005efa0341d5541ce7004d30458d0090b" dependencies = [ "serde", "toml", @@ -1099,18 +1099,18 @@ dependencies = [ [[package]] name = "derive_builder" -version = "0.11.2" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07adf7be193b71cc36b193d0f5fe60b918a3a9db4dad0449f57bcfd519704a3" +checksum = "8d67778784b508018359cbc8696edb3db78160bab2c2a28ba7f56ef6932997f8" dependencies = [ "derive_builder_macro", ] [[package]] name = "derive_builder_core" -version = "0.11.2" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f91d4cfa921f1c05904dc3c57b4a32c38aed3340cce209f3a6fd1478babafc4" +checksum = 
"c11bdc11a0c47bc7d37d582b5285da6849c96681023680b906673c5707af7b0f" dependencies = [ "darling", "proc-macro2", @@ -1120,9 +1120,9 @@ dependencies = [ [[package]] name = "derive_builder_macro" -version = "0.11.2" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f0314b72bed045f3a68671b3c86328386762c93f82d98c65c3cb5e5f573dd68" +checksum = "ebcda35c7a396850a55ffeac740804b40ffec779b98fffbb1738f4033f0ee79e" dependencies = [ "derive_builder_core", "syn 1.0.109", @@ -1350,19 +1350,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "env_logger" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" -dependencies = [ - "atty", - "humantime", - "log", - "regex", - "termcolor", -] - [[package]] name = "env_logger" version = "0.10.0" @@ -1789,7 +1776,7 @@ dependencies = [ "atomic-polyfill", "hash32", "rustc_version", - "spin 0.9.4", + "spin 0.9.8", "stable_deref_trait", ] @@ -2256,7 +2243,7 @@ dependencies = [ "byteorder", "csv", "encoding", - "env_logger 0.10.0", + "env_logger", "glob", "lindera-core", "lindera-decompress", @@ -2344,7 +2331,7 @@ dependencies = [ "csv", "encoding_rs", "encoding_rs_io", - "env_logger 0.10.0", + "env_logger", "glob", "lindera-compress", "lindera-core", @@ -2382,7 +2369,7 @@ dependencies = [ "byteorder", "csv", "encoding", - "env_logger 0.10.0", + "env_logger", "glob", "lindera-compress", "lindera-core", @@ -2402,7 +2389,7 @@ dependencies = [ "byteorder", "csv", "encoding", - "env_logger 0.10.0", + "env_logger", "glob", "lindera-core", "lindera-decompress", @@ -2555,7 +2542,7 @@ dependencies = [ "deserr", "dump", "either", - "env_logger 0.9.3", + "env_logger", "file-store", "flate2", "fst", @@ -2622,7 +2609,7 @@ dependencies = [ name = "meilisearch-auth" version = "1.1.1" dependencies = [ - "base64 0.13.1", + "base64 0.21.0", "enum-iterator", "hmac", "maplit", @@ -3458,9 +3445,9 @@ 
dependencies = [ [[package]] name = "rstar" -version = "0.9.3" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b40f1bfe5acdab44bc63e6699c28b74f75ec43afb59f3eda01e145aff86a25fa" +checksum = "1f39465655a1e3d8ae79c6d9e007f4953bfc5d55297602df9dc38f9ae9f1359a" dependencies = [ "heapless", "num-traits", @@ -3630,6 +3617,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0efd8caf556a6cebd3b285caf480045fcc1ac04f6bd786b09a6f11af30c4fcf4" +dependencies = [ + "serde", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -3768,9 +3764,9 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] name = "spin" -version = "0.9.4" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f6002a767bff9e83f8eeecf883ecb8011875a21ae8da43bffb817a57e78cc09" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" dependencies = [ "lock_api", ] @@ -3855,9 +3851,9 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.26.9" +version = "0.28.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c18a6156d1f27a9592ee18c1a846ca8dd5c258b7179fc193ae87c74ebb666f5" +checksum = "b4c2f3ca6693feb29a89724516f016488e9aafc7f37264f898593ee4b942f31b" dependencies = [ "cfg-if", "core-foundation-sys", @@ -4056,11 +4052,36 @@ dependencies = [ [[package]] name = "toml" -version = "0.5.11" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +checksum = "b403acf6f2bb0859c93c7f0d967cb4a75a7ac552100f9322faf64dc047669b21" dependencies = [ "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.19.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", ] [[package]] @@ -4564,6 +4585,15 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +[[package]] +name = "winnow" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae8970b36c66498d8ff1d66685dc86b91b29db0c7739899012f63a63814b4b28" +dependencies = [ + "memchr", +] + [[package]] name = "winreg" version = "0.10.1" diff --git a/index-scheduler/Cargo.toml b/index-scheduler/Cargo.toml index b90b9786e..36c28cd67 100644 --- a/index-scheduler/Cargo.toml +++ b/index-scheduler/Cargo.toml @@ -14,7 +14,7 @@ license.workspace = true anyhow = "1.0.70" bincode = "1.3.3" csv = "1.2.1" -derive_builder = "0.11.2" +derive_builder = "0.12.0" dump = { path = "../dump" } enum-iterator = "1.4.0" file-store = { path = "../file-store" } diff --git a/meilisearch-auth/Cargo.toml b/meilisearch-auth/Cargo.toml index 2065c6f4c..51be88f9e 100644 --- a/meilisearch-auth/Cargo.toml +++ b/meilisearch-auth/Cargo.toml @@ -11,7 +11,7 @@ edition.workspace = true license.workspace = true [dependencies] -base64 = "0.13.1" +base64 = "0.21.0" enum-iterator = "1.4.0" hmac = "0.12.1" maplit = "1.0.2" diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index 09b461417..92588a063 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -28,7 +28,7 @@ crossbeam-channel = "0.5.8" deserr = "0.5.0" dump = { path = "../dump" } either = "1.8.1" -env_logger = "0.9.3" +env_logger = "0.10.0" 
file-store = { path = "../file-store" } flate2 = "1.0.25" fst = "0.4.7" @@ -66,14 +66,14 @@ sha2 = "0.10.6" siphasher = "0.3.10" slice-group-by = "0.3.0" static-files = { version = "0.2.3", optional = true } -sysinfo = "0.26.9" +sysinfo = "0.28.4" tar = "0.4.38" tempfile = "3.5.0" thiserror = "1.0.40" time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] } tokio = { version = "1.27.0", features = ["full"] } tokio-stream = "0.1.12" -toml = "0.5.11" +toml = "0.7.3" uuid = { version = "1.3.1", features = ["serde", "v4"] } walkdir = "2.3.3" yaup = "0.2.1" @@ -96,7 +96,7 @@ yaup = "0.2.1" [build-dependencies] anyhow = { version = "1.0.70", optional = true } -cargo_toml = { version = "0.14.1", optional = true } +cargo_toml = { version = "0.15.2", optional = true } hex = { version = "0.4.3", optional = true } reqwest = { version = "0.11.16", features = ["blocking", "rustls-tls"], default-features = false, optional = true } sha-1 = { version = "0.10.1", optional = true } diff --git a/milli/Cargo.toml b/milli/Cargo.toml index 1b9a88700..0e0cee595 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -35,7 +35,7 @@ once_cell = "1.17.1" ordered-float = "3.6.0" rayon = "1.7.0" roaring = "0.10.1" -rstar = { version = "0.9.3", features = ["serde"] } +rstar = { version = "0.10.0", features = ["serde"] } serde = { version = "1.0.160", features = ["derive"] } serde_json = { version = "1.0.95", features = ["preserve_order"] } slice-group-by = "0.3.0" From a3cf104736b9d5340fd5f4c1e72ac16e478a98f6 Mon Sep 17 00:00:00 2001 From: Kerollmops Date: Tue, 11 Apr 2023 15:55:39 +0200 Subject: [PATCH 42/56] Fix the compilation --- meilisearch-auth/src/lib.rs | 3 ++- meilisearch/src/option.rs | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/meilisearch-auth/src/lib.rs b/meilisearch-auth/src/lib.rs index 05f320dfb..2a02776bd 100644 --- a/meilisearch-auth/src/lib.rs +++ b/meilisearch-auth/src/lib.rs @@ -310,6 +310,7 @@ pub const 
MASTER_KEY_MIN_SIZE: usize = 16; const MASTER_KEY_GEN_SIZE: usize = 32; pub fn generate_master_key() -> String { + use base64::Engine; use rand::rngs::OsRng; use rand::RngCore; @@ -320,5 +321,5 @@ pub fn generate_master_key() -> String { // let's encode the random bytes to base64 to make them human-readable and not too long. // We're using the URL_SAFE alphabet that will produce keys without =, / or other unusual characters. - base64::encode_config(buf, base64::URL_SAFE_NO_PAD) + base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(buf) } diff --git a/meilisearch/src/option.rs b/meilisearch/src/option.rs index 563bc3496..8e6ca9006 100644 --- a/meilisearch/src/option.rs +++ b/meilisearch/src/option.rs @@ -323,10 +323,10 @@ impl Opt { .clone() .unwrap_or_else(|| PathBuf::from(DEFAULT_CONFIG_FILE_PATH)); - match std::fs::read(&config_file_path) { + match std::fs::read_to_string(&config_file_path) { Ok(config) => { // If the file is successfully read, we deserialize it with `toml`. - let opt_from_config = toml::from_slice::(&config)?; + let opt_from_config = toml::from_str::(&config)?; // Return an error if config file contains 'config_file_path' // Using that key in the config file doesn't make sense bc it creates a logical loop (config file referencing itself) if opt_from_config.config_file_path.is_some() { From 729fa3770dd353dbb203feabefe91fafdba30f02 Mon Sep 17 00:00:00 2001 From: curquiza Date: Tue, 25 Apr 2023 15:50:38 +0200 Subject: [PATCH 43/56] Remove Uffizzi because not used by the team --- .github/uffizzi/Dockerfile | 19 --- .github/uffizzi/docker-compose.uffizzi.yml | 26 ---- .github/uffizzi/nginx/nginx.conf | 28 ----- .github/workflows/uffizzi-build.yml | 120 ------------------- .github/workflows/uffizzi-preview-deploy.yml | 103 ---------------- 5 files changed, 296 deletions(-) delete mode 100644 .github/uffizzi/Dockerfile delete mode 100644 .github/uffizzi/docker-compose.uffizzi.yml delete mode 100644 .github/uffizzi/nginx/nginx.conf delete mode 
100644 .github/workflows/uffizzi-build.yml delete mode 100644 .github/workflows/uffizzi-preview-deploy.yml diff --git a/.github/uffizzi/Dockerfile b/.github/uffizzi/Dockerfile deleted file mode 100644 index a05b28a9c..000000000 --- a/.github/uffizzi/Dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -# Run -FROM uffizzi/ttyd:alpine - -ENV MEILI_HTTP_ADDR 0.0.0.0:7700 -ENV MEILI_SERVER_PROVIDER docker -ENV MEILI_NO_ANALYTICS true - -RUN apk update --quiet \ - && apk add -q --no-cache libgcc tini curl - -COPY target/x86_64-unknown-linux-musl/release/meilisearch /bin/meilisearch -RUN ln -s /bin/meilisearch /meilisearch - -WORKDIR /meili_data - -EXPOSE 7700/tcp - -ENTRYPOINT ["tini", "--"] -CMD ["ttyd", "/bin/zsh"] diff --git a/.github/uffizzi/docker-compose.uffizzi.yml b/.github/uffizzi/docker-compose.uffizzi.yml deleted file mode 100644 index 17f241238..000000000 --- a/.github/uffizzi/docker-compose.uffizzi.yml +++ /dev/null @@ -1,26 +0,0 @@ -version: "3" - -x-uffizzi: - ingress: - service: nginx - port: 8081 - -services: - meilisearch: - image: "${MEILISEARCH_IMAGE}" - restart: unless-stopped - ports: - - "7681:7681" - - "7700:7700" - deploy: - resources: - limits: - memory: 500M - - nginx: - image: nginx:alpine - restart: unless-stopped - ports: - - "8081:8081" - volumes: - - ./.github/uffizzi/nginx:/etc/nginx diff --git a/.github/uffizzi/nginx/nginx.conf b/.github/uffizzi/nginx/nginx.conf deleted file mode 100644 index 6eca6b6f0..000000000 --- a/.github/uffizzi/nginx/nginx.conf +++ /dev/null @@ -1,28 +0,0 @@ - -events { - worker_connections 4096; ## Default: 1024 -} - -http { - map $http_upgrade $connection_upgrade { - default upgrade; - '' close; - } - - server { - listen 8081; - - location / { - proxy_pass http://localhost:7681; - proxy_http_version 1.1; - proxy_set_header Upgrade $http_upgrade; - proxy_set_header Connection $connection_upgrade; - } - - location /meilisearch/ { - # rewrite /meilisearch/(.*) /$1 break; - proxy_pass http://localhost:7700/; - } - } -} - 
diff --git a/.github/workflows/uffizzi-build.yml b/.github/workflows/uffizzi-build.yml deleted file mode 100644 index 1e2ae7018..000000000 --- a/.github/workflows/uffizzi-build.yml +++ /dev/null @@ -1,120 +0,0 @@ -name: Uffizzi - Build PR Image -on: - pull_request: - types: [opened,synchronize,reopened,closed] - -jobs: - build-meilisearch: - name: Build and push `meilisearch` - runs-on: ubuntu-latest - outputs: - tags: ${{ steps.meta.outputs.tags }} - if: ${{ github.event.action != 'closed' }} - steps: - - name: checkout - uses: actions/checkout@v3 - - - run: sudo apt-get install musl-tools - - - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - override: true - target: x86_64-unknown-linux-musl - - - name: Cache dependencies - uses: Swatinem/rust-cache@v2.2.1 - - - name: Run cargo check without any default features - uses: actions-rs/cargo@v1 - with: - command: build - args: --target x86_64-unknown-linux-musl --release - - - name: Remove dockerignore so we can use the target folder in our docker build - run: rm -f .dockerignore - - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Generate UUID image name - id: uuid - run: echo "UUID_TAG=$(uuidgen)" >> $GITHUB_ENV - - - name: Docker metadata - id: meta - uses: docker/metadata-action@v4 - with: - images: registry.uffizzi.com/${{ env.UUID_TAG }} - tags: | - type=raw,value=60d - - - name: Build Image - uses: docker/build-push-action@v4 - with: - context: ./ - file: .github/uffizzi/Dockerfile - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - push: true - cache-from: type=gha - cache-to: type=gha,mode=max - - render-compose-file: - name: Render Docker Compose File - # Pass output of this workflow to another triggered by `workflow_run` event. 
- runs-on: ubuntu-latest - needs: - - build-meilisearch - outputs: - compose-file-cache-key: ${{ env.COMPOSE_FILE_HASH }} - steps: - - name: Checkout git repo - uses: actions/checkout@v3 - - name: Render Compose File - run: | - MEILISEARCH_IMAGE=$(echo ${{ needs.build-meilisearch.outputs.tags }}) - export MEILISEARCH_IMAGE - # Render simple template from environment variables. - envsubst < .github/uffizzi/docker-compose.uffizzi.yml > docker-compose.rendered.yml - cat docker-compose.rendered.yml - - name: Upload Rendered Compose File as Artifact - uses: actions/upload-artifact@v3 - with: - name: preview-spec - path: docker-compose.rendered.yml - retention-days: 2 - - name: Serialize PR Event to File - run: | - cat << EOF > event.json - ${{ toJSON(github.event) }} - - EOF - - name: Upload PR Event as Artifact - uses: actions/upload-artifact@v3 - with: - name: preview-spec - path: event.json - retention-days: 2 - - delete-preview: - name: Call for Preview Deletion - runs-on: ubuntu-latest - if: ${{ github.event.action == 'closed' }} - steps: - # If this PR is closing, we will not render a compose file nor pass it to the next workflow. 
- - name: Serialize PR Event to File - run: | - cat << EOF > event.json - ${{ toJSON(github.event) }} - - EOF - - name: Upload PR Event as Artifact - uses: actions/upload-artifact@v3 - with: - name: preview-spec - path: event.json - retention-days: 2 diff --git a/.github/workflows/uffizzi-preview-deploy.yml b/.github/workflows/uffizzi-preview-deploy.yml deleted file mode 100644 index fce60eb01..000000000 --- a/.github/workflows/uffizzi-preview-deploy.yml +++ /dev/null @@ -1,103 +0,0 @@ -name: Uffizzi - Deploy Preview - -on: - workflow_run: - workflows: - - "Uffizzi - Build PR Image" - types: - - completed - -jobs: - cache-compose-file: - name: Cache Compose File - runs-on: ubuntu-latest - if: ${{ github.event.workflow_run.conclusion == 'success' }} - outputs: - compose-file-cache-key: ${{ env.COMPOSE_FILE_HASH }} - pr-number: ${{ env.PR_NUMBER }} - expected-url: ${{ env.EXPECTED_URL }} - steps: - - name: 'Download artifacts' - # Fetch output (zip archive) from the workflow run that triggered this workflow. 
- uses: actions/github-script@v6 - with: - script: | - let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ - owner: context.repo.owner, - repo: context.repo.repo, - run_id: context.payload.workflow_run.id, - }); - let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { - return artifact.name == "preview-spec" - })[0]; - let download = await github.rest.actions.downloadArtifact({ - owner: context.repo.owner, - repo: context.repo.repo, - artifact_id: matchArtifact.id, - archive_format: 'zip', - }); - let fs = require('fs'); - fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/preview-spec.zip`, Buffer.from(download.data)); - - - name: 'Unzip artifact' - run: unzip preview-spec.zip - - - name: Read Event into ENV - run: | - echo 'EVENT_JSON<> $GITHUB_ENV - cat event.json >> $GITHUB_ENV - echo 'EOF' >> $GITHUB_ENV - - - name: Hash Rendered Compose File - id: hash - # If the previous workflow was triggered by a PR close event, we will not have a compose file artifact. 
- if: ${{ fromJSON(env.EVENT_JSON).action != 'closed' }} - run: echo "COMPOSE_FILE_HASH=$(md5sum docker-compose.rendered.yml | awk '{ print $1 }')" >> $GITHUB_ENV - - - name: Cache Rendered Compose File - if: ${{ fromJSON(env.EVENT_JSON).action != 'closed' }} - uses: actions/cache@v3 - with: - path: docker-compose.rendered.yml - key: ${{ env.COMPOSE_FILE_HASH }} - - - name: Read PR Number From Event Object - id: pr - run: echo "PR_NUMBER=${{ fromJSON(env.EVENT_JSON).number }}" >> $GITHUB_ENV - - - name: DEBUG - Print Job Outputs - if: ${{ runner.debug }} - run: | - echo "PR number: ${{ env.PR_NUMBER }}" - echo "Compose file hash: ${{ env.COMPOSE_FILE_HASH }}" - cat event.json - - - name: Add expected URL env var - if: ${{ runner.debug }} - run: | - REPO=$(echo ${{ github.repository }} | sed 's/\./+/g') - echo "EXPECTED_URL=${{ inputs.server }}/github.com/$REPO/pull/${{ env.PR_NUMBER }}" >> $GITHUB_ENV - - deploy-uffizzi-preview: - name: Use Remote Workflow to Preview on Uffizzi - needs: - - cache-compose-file - uses: UffizziCloud/preview-action/.github/workflows/reusable.yaml@v2 - with: - # If this workflow was triggered by a PR close event, cache-key will be an empty string - # and this reusable workflow will delete the preview deployment. - compose-file-cache-key: ${{ needs.cache-compose-file.outputs.compose-file-cache-key }} - compose-file-cache-path: docker-compose.rendered.yml - server: https://app.uffizzi.com - pr-number: ${{ needs.cache-compose-file.outputs.pr-number }} - description: | - The meilisearch preview environment contains a web terminal from where you can run the - `meilisearch` command. You should be able to access this instance of meilisearch running in - the preview from the link Meilisearch Endpoint link given below. 
- - Web Terminal Endpoint : - Meilisearch Endpoint : /meilisearch - permissions: - contents: read - pull-requests: write - id-token: write \ No newline at end of file From e0730b55b30f5d5b75900a64a14e36503b06cb7c Mon Sep 17 00:00:00 2001 From: curquiza Date: Tue, 25 Apr 2023 16:05:28 +0200 Subject: [PATCH 44/56] Update clippy toolchain from v1.67 to v1.69 --- .github/workflows/test-suite.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml index 820fcb656..46e1e87dc 100644 --- a/.github/workflows/test-suite.yml +++ b/.github/workflows/test-suite.yml @@ -138,7 +138,7 @@ jobs: - uses: actions-rs/toolchain@v1 with: profile: minimal - toolchain: 1.67.0 + toolchain: 1.69.0 override: true components: clippy - name: Cache dependencies From cfd1b2cc97a2fece6c893da2af2455e881ab09ae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Tue, 25 Apr 2023 16:40:32 +0200 Subject: [PATCH 45/56] Fix the clippy warnings --- dump/src/reader/compat/v2_to_v3.rs | 1 - meilisearch/tests/documents/add_documents.rs | 3 +- milli/src/search/criteria/exactness.rs | 34 ++++++++--------- milli/src/search/facet/facet_distribution.rs | 38 +++++++++---------- .../search/facet/facet_distribution_iter.rs | 4 +- milli/src/search/facet/facet_range_search.rs | 8 ++-- .../src/search/facet/facet_sort_ascending.rs | 4 +- .../src/search/facet/facet_sort_descending.rs | 4 +- milli/src/search/query_tree.rs | 3 +- milli/src/update/facet/incremental.rs | 32 ++++++++-------- milli/src/update/index_documents/mod.rs | 1 - 11 files changed, 64 insertions(+), 68 deletions(-) diff --git a/dump/src/reader/compat/v2_to_v3.rs b/dump/src/reader/compat/v2_to_v3.rs index 14fc0ee4d..1d5f4e153 100644 --- a/dump/src/reader/compat/v2_to_v3.rs +++ b/dump/src/reader/compat/v2_to_v3.rs @@ -25,7 +25,6 @@ impl CompatV2ToV3 { CompatV2ToV3::Compat(compat) => compat.index_uuid(), }; v2_uuids - .into_iter() .into_iter() .map(|index| 
v3::meta::IndexUuid { uid: index.uid, uuid: index.uuid }) .collect() diff --git a/meilisearch/tests/documents/add_documents.rs b/meilisearch/tests/documents/add_documents.rs index 164d68582..c2ba2ccaa 100644 --- a/meilisearch/tests/documents/add_documents.rs +++ b/meilisearch/tests/documents/add_documents.rs @@ -1773,7 +1773,7 @@ async fn error_add_documents_payload_size() { "content": "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec metus erat, consequat in blandit venenatis, ultricies eu ipsum. Etiam luctus elit et mollis ultrices. Nam turpis risus, dictum non eros in, eleifend feugiat elit. Morbi non dolor pulvinar, sagittis mi sed, ultricies lorem. Nulla ultricies sem metus. Donec at suscipit quam, sed elementum mi. Suspendisse potenti. Fusce pharetra turpis tortor, sed eleifend odio dapibus ut. Nulla facilisi. Suspendisse elementum, dui eget aliquet dignissim, ex tellus aliquam nisl, at eleifend nisl metus tempus diam. Mauris fermentum sollicitudin efficitur. Donec dignissim est vitae elit finibus faucibus" } ); - let documents: Vec<_> = (0..16000).into_iter().map(|_| document.clone()).collect(); + let documents: Vec<_> = (0..16000).map(|_| document.clone()).collect(); let documents = json!(documents); let (response, code) = index.add_documents(documents, None).await; @@ -1934,7 +1934,6 @@ async fn batch_several_documents_addition() { let index = server.index("test"); let mut documents: Vec<_> = (0..150usize) - .into_iter() .map(|id| { json!( { diff --git a/milli/src/search/criteria/exactness.rs b/milli/src/search/criteria/exactness.rs index 078a9cd6c..6ca5735b7 100644 --- a/milli/src/search/criteria/exactness.rs +++ b/milli/src/search/criteria/exactness.rs @@ -561,7 +561,7 @@ mod tests { #[test] fn compute_combinations_1() { - let b0: RoaringBitmap = (0..).into_iter().map(|x| 2 * x).take_while(|x| *x < 150).collect(); + let b0: RoaringBitmap = (0..).map(|x| 2 * x).take_while(|x| *x < 150).collect(); let parts_candidates = vec![b0]; @@ -575,8 
+575,8 @@ mod tests { #[test] fn compute_combinations_2() { - let b0: RoaringBitmap = (0..).into_iter().map(|x| 2 * x).take_while(|x| *x < 150).collect(); - let b1: RoaringBitmap = (0..).into_iter().map(|x| 3 * x).take_while(|x| *x < 150).collect(); + let b0: RoaringBitmap = (0..).map(|x| 2 * x).take_while(|x| *x < 150).collect(); + let b1: RoaringBitmap = (0..).map(|x| 3 * x).take_while(|x| *x < 150).collect(); let parts_candidates = vec![b0, b1]; @@ -589,10 +589,10 @@ mod tests { #[test] fn compute_combinations_4() { - let b0: RoaringBitmap = (0..).into_iter().map(|x| 2 * x).take_while(|x| *x < 150).collect(); - let b1: RoaringBitmap = (0..).into_iter().map(|x| 3 * x).take_while(|x| *x < 150).collect(); - let b2: RoaringBitmap = (0..).into_iter().map(|x| 5 * x).take_while(|x| *x < 150).collect(); - let b3: RoaringBitmap = (0..).into_iter().map(|x| 7 * x).take_while(|x| *x < 150).collect(); + let b0: RoaringBitmap = (0..).map(|x| 2 * x).take_while(|x| *x < 150).collect(); + let b1: RoaringBitmap = (0..).map(|x| 3 * x).take_while(|x| *x < 150).collect(); + let b2: RoaringBitmap = (0..).map(|x| 5 * x).take_while(|x| *x < 150).collect(); + let b3: RoaringBitmap = (0..).map(|x| 7 * x).take_while(|x| *x < 150).collect(); let parts_candidates = vec![b0, b1, b2, b3]; @@ -610,10 +610,10 @@ mod tests { } #[test] fn compute_combinations_4_with_empty_results_at_end() { - let b0: RoaringBitmap = (1..).into_iter().map(|x| 2 * x).take_while(|x| *x < 150).collect(); - let b1: RoaringBitmap = (1..).into_iter().map(|x| 3 * x).take_while(|x| *x < 150).collect(); - let b2: RoaringBitmap = (1..).into_iter().map(|x| 5 * x).take_while(|x| *x < 150).collect(); - let b3: RoaringBitmap = (1..).into_iter().map(|x| 7 * x).take_while(|x| *x < 150).collect(); + let b0: RoaringBitmap = (1..).map(|x| 2 * x).take_while(|x| *x < 150).collect(); + let b1: RoaringBitmap = (1..).map(|x| 3 * x).take_while(|x| *x < 150).collect(); + let b2: RoaringBitmap = (1..).map(|x| 5 * x).take_while(|x| *x < 
150).collect(); + let b3: RoaringBitmap = (1..).map(|x| 7 * x).take_while(|x| *x < 150).collect(); let parts_candidates = vec![b0, b1, b2, b3]; @@ -632,11 +632,11 @@ mod tests { #[test] fn compute_combinations_4_with_some_equal_bitmaps() { - let b0: RoaringBitmap = (0..).into_iter().map(|x| 2 * x).take_while(|x| *x < 150).collect(); - let b1: RoaringBitmap = (0..).into_iter().map(|x| 3 * x).take_while(|x| *x < 150).collect(); - let b2: RoaringBitmap = (0..).into_iter().map(|x| 5 * x).take_while(|x| *x < 150).collect(); + let b0: RoaringBitmap = (0..).map(|x| 2 * x).take_while(|x| *x < 150).collect(); + let b1: RoaringBitmap = (0..).map(|x| 3 * x).take_while(|x| *x < 150).collect(); + let b2: RoaringBitmap = (0..).map(|x| 5 * x).take_while(|x| *x < 150).collect(); // b3 == b1 - let b3: RoaringBitmap = (0..).into_iter().map(|x| 3 * x).take_while(|x| *x < 150).collect(); + let b3: RoaringBitmap = (0..).map(|x| 3 * x).take_while(|x| *x < 150).collect(); let parts_candidates = vec![b0, b1, b2, b3]; @@ -659,7 +659,7 @@ mod tests { let parts_candidates: Vec = dividers .iter() .map(|÷r| { - (0..).into_iter().map(|x| divider * x).take_while(|x| *x <= 210).collect() + (0..).map(|x| divider * x).take_while(|x| *x <= 210).collect() }) .collect(); @@ -689,7 +689,7 @@ mod tests { let parts_candidates: Vec = dividers .iter() .map(|divider| { - (0..).into_iter().map(|x| divider * x).take_while(|x| *x <= 100).collect() + (0..).map(|x| divider * x).take_while(|x| *x <= 100).collect() }) .collect(); diff --git a/milli/src/search/facet/facet_distribution.rs b/milli/src/search/facet/facet_distribution.rs index 2aae78bb2..d2a026165 100644 --- a/milli/src/search/facet/facet_distribution.rs +++ b/milli/src/search/facet/facet_distribution.rs @@ -505,7 +505,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..10_000).into_iter().collect()) + .candidates((0..10_000).collect()) .execute() .unwrap(); @@ -513,7 +513,7 @@ mod tests 
{ let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..5_000).into_iter().collect()) + .candidates((0..5_000).collect()) .execute() .unwrap(); @@ -521,7 +521,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..5_000).into_iter().collect()) + .candidates((0..5_000).collect()) .execute() .unwrap(); @@ -529,7 +529,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..5_000).into_iter().collect()) + .candidates((0..5_000).collect()) .max_values_per_facet(1) .execute() .unwrap(); @@ -546,7 +546,7 @@ mod tests { .update_settings(|settings| settings.set_filterable_fields(hashset! { S("colour") })) .unwrap(); - let facet_values = (0..1000).into_iter().map(|x| format!("{x:x}")).collect::>(); + let facet_values = (0..1000).map(|x| format!("{x:x}")).collect::>(); let mut documents = vec![]; for i in 0..10_000 { @@ -582,7 +582,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..10_000).into_iter().collect()) + .candidates((0..10_000).collect()) .execute() .unwrap(); @@ -590,7 +590,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..5_000).into_iter().collect()) + .candidates((0..5_000).collect()) .execute() .unwrap(); @@ -606,7 +606,7 @@ mod tests { .update_settings(|settings| settings.set_filterable_fields(hashset! 
{ S("colour") })) .unwrap(); - let facet_values = (0..1000).into_iter().collect::>(); + let facet_values = (0..1000).collect::>(); let mut documents = vec![]; for i in 0..1000 { @@ -634,7 +634,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..1000).into_iter().collect()) + .candidates((0..1000).collect()) .compute_stats() .unwrap(); @@ -642,7 +642,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((217..777).into_iter().collect()) + .candidates((217..777).collect()) .compute_stats() .unwrap(); @@ -658,7 +658,7 @@ mod tests { .update_settings(|settings| settings.set_filterable_fields(hashset! { S("colour") })) .unwrap(); - let facet_values = (0..1000).into_iter().collect::>(); + let facet_values = (0..1000).collect::>(); let mut documents = vec![]; for i in 0..1000 { @@ -686,7 +686,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..1000).into_iter().collect()) + .candidates((0..1000).collect()) .compute_stats() .unwrap(); @@ -694,7 +694,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((217..777).into_iter().collect()) + .candidates((217..777).collect()) .compute_stats() .unwrap(); @@ -710,7 +710,7 @@ mod tests { .update_settings(|settings| settings.set_filterable_fields(hashset! 
{ S("colour") })) .unwrap(); - let facet_values = (0..1000).into_iter().collect::>(); + let facet_values = (0..1000).collect::>(); let mut documents = vec![]; for i in 0..1000 { @@ -738,7 +738,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..1000).into_iter().collect()) + .candidates((0..1000).collect()) .compute_stats() .unwrap(); @@ -746,7 +746,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((217..777).into_iter().collect()) + .candidates((217..777).collect()) .compute_stats() .unwrap(); @@ -762,7 +762,7 @@ mod tests { .update_settings(|settings| settings.set_filterable_fields(hashset! { S("colour") })) .unwrap(); - let facet_values = (0..1000).into_iter().collect::>(); + let facet_values = (0..1000).collect::>(); let mut documents = vec![]; for i in 0..1000 { @@ -794,7 +794,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((0..1000).into_iter().collect()) + .candidates((0..1000).collect()) .compute_stats() .unwrap(); @@ -802,7 +802,7 @@ mod tests { let map = FacetDistribution::new(&txn, &index) .facets(std::iter::once("colour")) - .candidates((217..777).into_iter().collect()) + .candidates((217..777).collect()) .compute_stats() .unwrap(); diff --git a/milli/src/search/facet/facet_distribution_iter.rs b/milli/src/search/facet/facet_distribution_iter.rs index bb3c75343..d355b981a 100644 --- a/milli/src/search/facet/facet_distribution_iter.rs +++ b/milli/src/search/facet/facet_distribution_iter.rs @@ -142,7 +142,7 @@ mod tests { let indexes = [get_simple_index(), get_random_looking_index()]; for (i, index) in indexes.iter().enumerate() { let txn = index.env.read_txn().unwrap(); - let candidates = (0..=255).into_iter().collect::(); + let candidates = (0..=255).collect::(); let mut results = String::new(); iterate_over_facet_distribution( &txn, @@ -166,7 +166,7 @@ mod tests 
{ let indexes = [get_simple_index(), get_random_looking_index()]; for (i, index) in indexes.iter().enumerate() { let txn = index.env.read_txn().unwrap(); - let candidates = (0..=255).into_iter().collect::(); + let candidates = (0..=255).collect::(); let mut results = String::new(); let mut nbr_facets = 0; iterate_over_facet_distribution( diff --git a/milli/src/search/facet/facet_range_search.rs b/milli/src/search/facet/facet_range_search.rs index b1ab6f71f..26854bc1a 100644 --- a/milli/src/search/facet/facet_range_search.rs +++ b/milli/src/search/facet/facet_range_search.rs @@ -410,7 +410,7 @@ mod tests { let mut results = String::new(); - for i in (0..=255).into_iter().rev() { + for i in (0..=255).rev() { let i = i as f64; let start = Bound::Included(i); let end = Bound::Included(255.); @@ -431,7 +431,7 @@ mod tests { let mut results = String::new(); - for i in (0..=255).into_iter().rev() { + for i in (0..=255).rev() { let i = i as f64; let start = Bound::Excluded(i); let end = Bound::Excluded(255.); @@ -466,7 +466,7 @@ mod tests { let mut results = String::new(); - for i in (0..=128).into_iter().rev() { + for i in (0..=128).rev() { let i = i as f64; let start = Bound::Included(i); let end = Bound::Included(255. - i); @@ -491,7 +491,7 @@ mod tests { let mut results = String::new(); - for i in (0..=128).into_iter().rev() { + for i in (0..=128).rev() { let i = i as f64; let start = Bound::Excluded(i); let end = Bound::Excluded(255. 
- i); diff --git a/milli/src/search/facet/facet_sort_ascending.rs b/milli/src/search/facet/facet_sort_ascending.rs index f59b884de..892401c08 100644 --- a/milli/src/search/facet/facet_sort_ascending.rs +++ b/milli/src/search/facet/facet_sort_ascending.rs @@ -132,7 +132,7 @@ mod tests { let indexes = [get_simple_index(), get_random_looking_index()]; for (i, index) in indexes.iter().enumerate() { let txn = index.env.read_txn().unwrap(); - let candidates = (200..=300).into_iter().collect::(); + let candidates = (200..=300).collect::(); let mut results = String::new(); let iter = ascending_facet_sort(&txn, index.content, 0, candidates).unwrap(); for el in iter { @@ -154,7 +154,7 @@ mod tests { ]; for (i, index) in indexes.iter().enumerate() { let txn = index.env.read_txn().unwrap(); - let candidates = (200..=300).into_iter().collect::(); + let candidates = (200..=300).collect::(); let mut results = String::new(); let iter = ascending_facet_sort(&txn, index.content, 0, candidates.clone()).unwrap(); for el in iter { diff --git a/milli/src/search/facet/facet_sort_descending.rs b/milli/src/search/facet/facet_sort_descending.rs index 454b12859..549f50f0a 100644 --- a/milli/src/search/facet/facet_sort_descending.rs +++ b/milli/src/search/facet/facet_sort_descending.rs @@ -142,7 +142,7 @@ mod tests { ]; for (i, index) in indexes.iter().enumerate() { let txn = index.env.read_txn().unwrap(); - let candidates = (200..=300).into_iter().collect::(); + let candidates = (200..=300).collect::(); let mut results = String::new(); let db = index.content.remap_key_type::>(); let iter = descending_facet_sort(&txn, db, 0, candidates).unwrap(); @@ -165,7 +165,7 @@ mod tests { ]; for (i, index) in indexes.iter().enumerate() { let txn = index.env.read_txn().unwrap(); - let candidates = (200..=300).into_iter().collect::(); + let candidates = (200..=300).collect::(); let mut results = String::new(); let db = index.content.remap_key_type::>(); let iter = descending_facet_sort(&txn, db, 0, 
candidates.clone()).unwrap(); diff --git a/milli/src/search/query_tree.rs b/milli/src/search/query_tree.rs index 50f16c2d9..15b18b565 100755 --- a/milli/src/search/query_tree.rs +++ b/milli/src/search/query_tree.rs @@ -629,7 +629,7 @@ fn create_matching_words( // create a CONSECUTIVE matchings words wrapping all word in the phrase PrimitiveQueryPart::Phrase(words) => { let ids: Vec<_> = - (0..words.len()).into_iter().map(|i| id + i as PrimitiveWordId).collect(); + (0..words.len()).map(|i| id + i as PrimitiveWordId).collect(); // Require that all words of the phrase have a corresponding MatchingWord // before adding any of them to the matching_words result if let Some(phrase_matching_words) = words @@ -687,7 +687,6 @@ fn create_matching_words( }) .collect(); let ids: Vec<_> = (0..words.len()) - .into_iter() .map(|i| id + i as PrimitiveWordId) .collect(); diff --git a/milli/src/update/facet/incremental.rs b/milli/src/update/facet/incremental.rs index aaef93b48..a921d4115 100644 --- a/milli/src/update/facet/incremental.rs +++ b/milli/src/update/facet/incremental.rs @@ -699,21 +699,21 @@ mod tests { #[test] fn many_field_ids_prepend() { let index = FacetIndex::::new(4, 8, 5); - for i in (0..256).into_iter().rev() { + for i in (0..256).rev() { let mut bitmap = RoaringBitmap::new(); bitmap.insert(i as u32); let mut txn = index.env.write_txn().unwrap(); index.insert(&mut txn, 0, &(i as f64), &bitmap); txn.commit().unwrap(); } - for i in (0..256).into_iter().rev() { + for i in (0..256).rev() { let mut bitmap = RoaringBitmap::new(); bitmap.insert(i as u32); let mut txn = index.env.write_txn().unwrap(); index.insert(&mut txn, 2, &(i as f64), &bitmap); txn.commit().unwrap(); } - for i in (0..256).into_iter().rev() { + for i in (0..256).rev() { let mut bitmap = RoaringBitmap::new(); bitmap.insert(i as u32); let mut txn = index.env.write_txn().unwrap(); @@ -733,7 +733,7 @@ mod tests { let index = FacetIndex::::new(4, 8, 5); let mut txn = index.env.write_txn().unwrap(); - for i 
in (0..256).into_iter().rev() { + for i in (0..256).rev() { let mut bitmap = RoaringBitmap::new(); bitmap.insert(i); index.insert(&mut txn, 0, &(i as f64), &bitmap); @@ -749,7 +749,7 @@ mod tests { let index = FacetIndex::::new(4, 8, 5); let mut txn = index.env.write_txn().unwrap(); - let mut keys = (0..256).into_iter().collect::>(); + let mut keys = (0..256).collect::>(); let mut rng = rand::rngs::SmallRng::from_seed([0; 32]); keys.shuffle(&mut rng); @@ -768,7 +768,7 @@ mod tests { let index = FacetIndex::::new(4, 8, 5); let mut txn = index.env.write_txn().unwrap(); - let mut keys = (0..256).into_iter().collect::>(); + let mut keys = (0..256).collect::>(); let mut rng = rand::rngs::SmallRng::from_seed([0; 32]); keys.shuffle(&mut rng); @@ -796,7 +796,7 @@ mod tests { index.insert(&mut txn, 0, &(i as f64), &bitmap); } - for i in (200..256).into_iter().rev() { + for i in (200..256).rev() { index.verify_structure_validity(&txn, 0); index.delete_single_docid(&mut txn, 0, &(i as f64), i as u32); } @@ -805,7 +805,7 @@ mod tests { milli_snap!(format!("{index}"), 200); let mut txn = index.env.write_txn().unwrap(); - for i in (150..200).into_iter().rev() { + for i in (150..200).rev() { index.verify_structure_validity(&txn, 0); index.delete_single_docid(&mut txn, 0, &(i as f64), i as u32); } @@ -813,7 +813,7 @@ mod tests { txn.commit().unwrap(); milli_snap!(format!("{index}"), 150); let mut txn = index.env.write_txn().unwrap(); - for i in (100..150).into_iter().rev() { + for i in (100..150).rev() { index.verify_structure_validity(&txn, 0); index.delete_single_docid(&mut txn, 0, &(i as f64), i as u32); } @@ -821,7 +821,7 @@ mod tests { txn.commit().unwrap(); milli_snap!(format!("{index}"), 100); let mut txn = index.env.write_txn().unwrap(); - for i in (17..100).into_iter().rev() { + for i in (17..100).rev() { index.verify_structure_validity(&txn, 0); index.delete_single_docid(&mut txn, 0, &(i as f64), i as u32); } @@ -829,14 +829,14 @@ mod tests { txn.commit().unwrap(); 
milli_snap!(format!("{index}"), 17); let mut txn = index.env.write_txn().unwrap(); - for i in (15..17).into_iter().rev() { + for i in (15..17).rev() { index.delete_single_docid(&mut txn, 0, &(i as f64), i as u32); } index.verify_structure_validity(&txn, 0); txn.commit().unwrap(); milli_snap!(format!("{index}"), 15); let mut txn = index.env.write_txn().unwrap(); - for i in (0..15).into_iter().rev() { + for i in (0..15).rev() { index.verify_structure_validity(&txn, 0); index.delete_single_docid(&mut txn, 0, &(i as f64), i as u32); } @@ -893,7 +893,7 @@ mod tests { index.insert(&mut txn, 0, &(i as f64), &bitmap); } - let mut keys = (0..256).into_iter().collect::>(); + let mut keys = (0..256).collect::>(); let mut rng = rand::rngs::SmallRng::from_seed([0; 32]); keys.shuffle(&mut rng); @@ -930,7 +930,7 @@ mod tests { let index = FacetIndex::::new(4, 8, 5); let mut txn = index.env.write_txn().unwrap(); - let mut keys = (0..16).into_iter().collect::>(); + let mut keys = (0..16).collect::>(); let mut rng = rand::rngs::SmallRng::from_seed([0; 32]); keys.shuffle(&mut rng); for i in 0..4 { @@ -951,7 +951,7 @@ mod tests { let index = FacetIndex::::new(4, 8, 5); let mut txn = index.env.write_txn().unwrap(); - let mut keys = (0..64).into_iter().collect::>(); + let mut keys = (0..64).collect::>(); let mut rng = rand::rngs::SmallRng::from_seed([0; 32]); keys.shuffle(&mut rng); @@ -983,7 +983,7 @@ mod tests { let index = FacetIndex::::new(4, 8, 5); let mut txn = index.env.write_txn().unwrap(); - let mut keys = (1000..1064).into_iter().collect::>(); + let mut keys = (1000..1064).collect::>(); let mut rng = rand::rngs::SmallRng::from_seed([0; 32]); keys.shuffle(&mut rng); diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs index 2a7930f84..ade217beb 100644 --- a/milli/src/update/index_documents/mod.rs +++ b/milli/src/update/index_documents/mod.rs @@ -1613,7 +1613,6 @@ mod tests { // Create 200 documents with a long text let content = { let 
documents_iter = (0..200i32) - .into_iter() .map(|i| serde_json::json!({ "id": i, "script": script })) .filter_map(|json| match json { serde_json::Value::Object(object) => Some(object), From 8195d366faa123530def1990cc909d2987864414 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Tue, 25 Apr 2023 16:48:25 +0200 Subject: [PATCH 46/56] Update .dockerignore --- .dockerignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.dockerignore b/.dockerignore index e5278220d..ef2e39bfc 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,4 +3,3 @@ Dockerfile .dockerignore .gitignore **/.git - From 14293f6c8ff0d2c3515b548be77587bcfa18d45c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Tue, 25 Apr 2023 16:55:39 +0200 Subject: [PATCH 47/56] Make rustfmt happy --- milli/src/search/criteria/exactness.rs | 8 ++------ milli/src/search/query_tree.rs | 8 +++----- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/milli/src/search/criteria/exactness.rs b/milli/src/search/criteria/exactness.rs index 6ca5735b7..879bdcdb3 100644 --- a/milli/src/search/criteria/exactness.rs +++ b/milli/src/search/criteria/exactness.rs @@ -658,9 +658,7 @@ mod tests { let dividers = [2, 3, 5, 7, 11, 6, 15, 35, 18, 14]; let parts_candidates: Vec = dividers .iter() - .map(|÷r| { - (0..).map(|x| divider * x).take_while(|x| *x <= 210).collect() - }) + .map(|÷r| (0..).map(|x| divider * x).take_while(|x| *x <= 210).collect()) .collect(); let combinations = create_disjoint_combinations(parts_candidates); @@ -688,9 +686,7 @@ mod tests { ]; let parts_candidates: Vec = dividers .iter() - .map(|divider| { - (0..).map(|x| divider * x).take_while(|x| *x <= 100).collect() - }) + .map(|divider| (0..).map(|x| divider * x).take_while(|x| *x <= 100).collect()) .collect(); let combinations = create_non_disjoint_combinations(parts_candidates.clone()); diff --git a/milli/src/search/query_tree.rs b/milli/src/search/query_tree.rs index 
15b18b565..b2418c9c5 100755 --- a/milli/src/search/query_tree.rs +++ b/milli/src/search/query_tree.rs @@ -628,8 +628,7 @@ fn create_matching_words( } // create a CONSECUTIVE matchings words wrapping all word in the phrase PrimitiveQueryPart::Phrase(words) => { - let ids: Vec<_> = - (0..words.len()).map(|i| id + i as PrimitiveWordId).collect(); + let ids: Vec<_> = (0..words.len()).map(|i| id + i as PrimitiveWordId).collect(); // Require that all words of the phrase have a corresponding MatchingWord // before adding any of them to the matching_words result if let Some(phrase_matching_words) = words @@ -686,9 +685,8 @@ fn create_matching_words( } }) .collect(); - let ids: Vec<_> = (0..words.len()) - .map(|i| id + i as PrimitiveWordId) - .collect(); + let ids: Vec<_> = + (0..words.len()).map(|i| id + i as PrimitiveWordId).collect(); if let Some(synonyms) = ctx.synonyms(&words)? { for synonym in synonyms { From 0b2200e6e74679e098e631d88bbea1264933868c Mon Sep 17 00:00:00 2001 From: Tamo Date: Tue, 25 Apr 2023 17:55:27 +0200 Subject: [PATCH 48/56] remove the unused snapshot files --- .../after_processing_the_batch.snap | 43 ----------------- .../documents.snap | 9 ---- .../registered_the_first_task.snap | 37 --------------- .../registered_the_second_task.snap | 40 ---------------- .../after_failing_the_deletion.snap | 43 ----------------- .../after_last_successful_addition.snap | 46 ------------------- .../documents.snap | 17 ------- .../registered_the_first_task.snap | 36 --------------- .../registered_the_second_task.snap | 40 ---------------- 9 files changed, 311 deletions(-) delete mode 100644 index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/after_processing_the_batch.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/documents.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_first_task.snap delete mode 100644 
index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_second_task.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_failing_the_deletion.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_last_successful_addition.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/documents.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_first_task.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_second_task.snap diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/after_processing_the_batch.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/after_processing_the_batch.snap deleted file mode 100644 index b27288a0f..000000000 --- a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/after_processing_the_batch.snap +++ /dev/null @@ -1,43 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} -1 {uid: 1, status: succeeded, details: { received_document_ids: 2, deleted_documents: Some(2) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} ----------------------------------------------------------------------- -### Status: -enqueued [] -succeeded [0,1,] 
----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [0,] -"documentDeletion" [1,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,] ----------------------------------------------------------------------- -### Index Mapper: -doggos: { number_of_documents: 1, field_distribution: {"doggo": 1, "id": 1} } - ----------------------------------------------------------------------- -### Canceled By: - ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] ----------------------------------------------------------------------- -### Started At: -[timestamp] [0,1,] ----------------------------------------------------------------------- -### Finished At: -[timestamp] [0,1,] ----------------------------------------------------------------------- -### File Store: - ----------------------------------------------------------------------- - diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/documents.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/documents.snap deleted file mode 100644 index 2b56b71d1..000000000 --- a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/documents.snap +++ /dev/null @@ -1,9 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -[ - { - "id": 3, - "doggo": "bork" - } -] diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_first_task.snap deleted file mode 100644 index d26e62bff..000000000 --- a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_first_task.snap +++ /dev/null @@ -1,37 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -### Autobatching Enabled = true -### 
Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [0,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [0,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,] ----------------------------------------------------------------------- -### Index Mapper: - ----------------------------------------------------------------------- -### Canceled By: - ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] ----------------------------------------------------------------------- -### Started At: ----------------------------------------------------------------------- -### Finished At: ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 - ----------------------------------------------------------------------- - diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_second_task.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_second_task.snap deleted file mode 100644 index e0f371120..000000000 --- a/index-scheduler/src/snapshots/lib.rs/document_addition_and_document_deletion/registered_the_second_task.snap +++ /dev/null @@ -1,40 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -### Autobatching Enabled = true -### Processing Tasks: -[] 
----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} -1 {uid: 1, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} ----------------------------------------------------------------------- -### Status: -enqueued [0,1,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [0,] -"documentDeletion" [1,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,] ----------------------------------------------------------------------- -### Index Mapper: - ----------------------------------------------------------------------- -### Canceled By: - ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] ----------------------------------------------------------------------- -### Started At: ----------------------------------------------------------------------- -### Finished At: ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 - ----------------------------------------------------------------------- - diff --git a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_failing_the_deletion.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_failing_the_deletion.snap deleted file mode 100644 index 1d4aa24e2..000000000 --- a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_failing_the_deletion.snap +++ 
/dev/null @@ -1,43 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} -1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [1,] -failed [0,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [1,] -"documentDeletion" [0,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,] ----------------------------------------------------------------------- -### Index Mapper: - ----------------------------------------------------------------------- -### Canceled By: - ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] ----------------------------------------------------------------------- -### Started At: -[timestamp] [0,] ----------------------------------------------------------------------- -### Finished At: -[timestamp] [0,] ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 - ----------------------------------------------------------------------- - diff --git 
a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_last_successful_addition.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_last_successful_addition.snap deleted file mode 100644 index 0f9dfd3e6..000000000 --- a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/after_last_successful_addition.snap +++ /dev/null @@ -1,46 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} -1 {uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [] -succeeded [1,] -failed [0,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [1,] -"documentDeletion" [0,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,] ----------------------------------------------------------------------- -### Index Mapper: -doggos: { number_of_documents: 3, field_distribution: {"catto": 1, "doggo": 2, "id": 3} } - ----------------------------------------------------------------------- -### Canceled By: - 
----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] ----------------------------------------------------------------------- -### Started At: -[timestamp] [0,] -[timestamp] [1,] ----------------------------------------------------------------------- -### Finished At: -[timestamp] [0,] -[timestamp] [1,] ----------------------------------------------------------------------- -### File Store: - ----------------------------------------------------------------------- - diff --git a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/documents.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/documents.snap deleted file mode 100644 index 8204d059b..000000000 --- a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/documents.snap +++ /dev/null @@ -1,17 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -[ - { - "id": 1, - "doggo": "jean bob" - }, - { - "id": 2, - "catto": "jorts" - }, - { - "id": 3, - "doggo": "bork" - } -] diff --git a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_first_task.snap deleted file mode 100644 index 5753db7e6..000000000 --- a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_first_task.snap +++ /dev/null @@ -1,36 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} ----------------------------------------------------------------------- -### Status: -enqueued [0,] 
----------------------------------------------------------------------- -### Kind: -"documentDeletion" [0,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,] ----------------------------------------------------------------------- -### Index Mapper: - ----------------------------------------------------------------------- -### Canceled By: - ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] ----------------------------------------------------------------------- -### Started At: ----------------------------------------------------------------------- -### Finished At: ----------------------------------------------------------------------- -### File Store: - ----------------------------------------------------------------------- - diff --git a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_second_task.snap b/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_second_task.snap deleted file mode 100644 index 0b6191f9e..000000000 --- a/index-scheduler/src/snapshots/lib.rs/document_deletion_and_document_addition/registered_the_second_task.snap +++ /dev/null @@ -1,40 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }} -1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }} 
----------------------------------------------------------------------- -### Status: -enqueued [0,1,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [1,] -"documentDeletion" [0,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,] ----------------------------------------------------------------------- -### Index Mapper: - ----------------------------------------------------------------------- -### Canceled By: - ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] ----------------------------------------------------------------------- -### Started At: ----------------------------------------------------------------------- -### Finished At: ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 - ----------------------------------------------------------------------- - From ff2cf2a5aeb93687c97c131d5c99339479fabfd2 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 26 Apr 2023 14:56:54 +0200 Subject: [PATCH 49/56] Update charabia in milli --- milli/Cargo.toml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/milli/Cargo.toml b/milli/Cargo.toml index 0e0cee595..880dceeb6 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -16,7 +16,7 @@ bimap = { version = "0.6.3", features = ["serde"] } bincode = "1.3.3" bstr = "1.4.0" byteorder = "1.4.3" -charabia = { version = "0.7.1", default-features = false } +charabia = { version = "0.7.2", default-features = false } concat-arrays = "0.1.2" crossbeam-channel = "0.5.8" deserr = "0.5.0" @@ -89,3 +89,6 @@ korean = ["charabia/korean"] # allow thai specialized tokenization thai = ["charabia/thai"] + +# allow greek specialized tokenization +greek = [] From 249053e51479c026f781f078945d90f5b3b37995 Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 
26 Apr 2023 14:58:32 +0200 Subject: [PATCH 50/56] Update feature flags --- meilisearch-types/Cargo.toml | 3 +++ meilisearch/Cargo.toml | 1 + milli/Cargo.toml | 2 +- 3 files changed, 5 insertions(+), 1 deletion(-) diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 017359d25..77a3fd53b 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -50,3 +50,6 @@ hebrew = ["milli/hebrew"] japanese = ["milli/japanese"] # thai specialized tokenization thai = ["milli/thai"] + +# allow greek specialized tokenization +greek = ["milli/greek"] diff --git a/meilisearch/Cargo.toml b/meilisearch/Cargo.toml index 92588a063..80c5ee6c1 100644 --- a/meilisearch/Cargo.toml +++ b/meilisearch/Cargo.toml @@ -113,6 +113,7 @@ chinese = ["meilisearch-types/chinese"] hebrew = ["meilisearch-types/hebrew"] japanese = ["meilisearch-types/japanese"] thai = ["meilisearch-types/thai"] +greek = ["meilisearch-types/greek"] [package.metadata.mini-dashboard] assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.7/build.zip" diff --git a/milli/Cargo.toml b/milli/Cargo.toml index 880dceeb6..e2d5994a9 100644 --- a/milli/Cargo.toml +++ b/milli/Cargo.toml @@ -91,4 +91,4 @@ korean = ["charabia/korean"] thai = ["charabia/thai"] # allow greek specialized tokenization -greek = [] +greek = ["charabia/greek"] From ed9cc1af5598247bdd058889ea20210e68e54b8e Mon Sep 17 00:00:00 2001 From: Louis Dureuil Date: Tue, 7 Feb 2023 09:28:58 +0100 Subject: [PATCH 51/56] Remove CLI changes for clippy --- .github/workflows/test-suite.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml index 46e1e87dc..5a64feda8 100644 --- a/.github/workflows/test-suite.yml +++ b/.github/workflows/test-suite.yml @@ -147,8 +147,7 @@ jobs: uses: actions-rs/cargo@v1 with: command: clippy - # allow unlined_format_args https://github.com/rust-lang/rust-clippy/issues/10087 - args: 
--all-targets -- --deny warnings --allow clippy::uninlined_format_args + args: --all-targets -- --deny warnings fmt: name: Run Rustfmt From 1bf2694604d79125543583b045c3b7ab425c23ca Mon Sep 17 00:00:00 2001 From: ManyTheFish Date: Wed, 26 Apr 2023 17:41:29 +0200 Subject: [PATCH 52/56] Update cargo lock --- Cargo.lock | 93 +++++++++++++++++++++++++++++++++++------------------- 1 file changed, 61 insertions(+), 32 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 454749dab..c9ffb0843 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -694,19 +694,18 @@ dependencies = [ [[package]] name = "charabia" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad3d9667a6b4e03813162c22c4d58235c2dc25d580d60837ce29199038341c9" +checksum = "413155d93157bff9130895c3bd83970ac7f35659ca57226a96aa35cf1e8e102c" dependencies = [ "cow-utils", "csv", "deunicode", + "finl_unicode", "fst", "irg-kvariants", "jieba-rs", "lindera", - "lindera-ipadic", - "lindera-ko-dic", "once_cell", "pinyin", "serde", @@ -1458,6 +1457,12 @@ dependencies = [ "nom_locate", ] +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" + [[package]] name = "flate2" version = "1.0.25" @@ -2206,9 +2211,9 @@ dependencies = [ [[package]] name = "lindera" -version = "0.21.1" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f33a20bb9cbf95572b2d2f40d7040c8d8c7ad09ae20e1f6513db6ef2564dfc5" +checksum = "72be283281bec2768687b1784be03a678609b51f2f90f6f9d9b4f07953e6dd25" dependencies = [ "anyhow", "bincode", @@ -2218,9 +2223,8 @@ dependencies = [ "lindera-cc-cedict-builder", "lindera-core", "lindera-dictionary", - "lindera-ipadic", + "lindera-filter", "lindera-ipadic-builder", - "lindera-ko-dic", "lindera-ko-dic-builder", "lindera-unidic-builder", "regex", @@ -2234,9 +2238,9 @@ 
dependencies = [ [[package]] name = "lindera-cc-cedict-builder" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60c3b379251edadbac7a5fdb31e482274e11dae6ab6cc789d0d86cf34369cf49" +checksum = "10fbafd37adab44ccc2668a40fba2dbc4e665cb3c36018c15dfe2e2b830e28ce" dependencies = [ "anyhow", "bincode", @@ -2253,9 +2257,9 @@ dependencies = [ [[package]] name = "lindera-compress" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d0ea3de5625e2381cac94e518d3b56103fde56bc0dce840fe875c1e871b125" +checksum = "ed9196bf5995503f6878a090dfee6114ba86430c72f67ef3624246b564869937" dependencies = [ "anyhow", "flate2", @@ -2264,9 +2268,9 @@ dependencies = [ [[package]] name = "lindera-core" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2281747b98fdd46bcc54ce7fdb6870dad9f67ddb3dc086c47b6704f3e1178cd5" +checksum = "e5f0baa9932f682e9c5b388897330f155d3c40de80016e60125897fde5e0e246" dependencies = [ "anyhow", "bincode", @@ -2281,9 +2285,9 @@ dependencies = [ [[package]] name = "lindera-decompress" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52101bd454754c506305ab897af5ac2ae41fe91e3272c1ff5c6a02a089dfaefd" +checksum = "a6e63fa6ef0bc3ce2c26d372aa6185b7a316194494a84f81678f5da2893bf4a2" dependencies = [ "anyhow", "flate2", @@ -2292,21 +2296,46 @@ dependencies = [ [[package]] name = "lindera-dictionary" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af1c6668848f1d30d216c99093a3ed3fe125c105fa12a4aeed5a1861dc01dd52" +checksum = "fd765c36166016de87a1f447ea971573e4c63e334836c46ad0020f0408c88bfc" dependencies = [ "anyhow", "bincode", "byteorder", "lindera-core", + "lindera-ipadic", + "lindera-ko-dic", + "serde", +] + +[[package]] +name = "lindera-filter" 
+version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5345e37fb9521ab3cee19283bed135d46b3521dc1fd13a49fa0992379056203" +dependencies = [ + "anyhow", + "bincode", + "byteorder", + "kanaria", + "lindera-core", + "lindera-dictionary", + "once_cell", + "regex", + "serde", + "serde_json", + "unicode-blocks", + "unicode-normalization", + "unicode-segmentation", + "yada", ] [[package]] name = "lindera-ipadic" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693098007200fa43fd5cdc9ca8740f371327369672ce812cd87a1f6344971e31" +checksum = "60eeb356295f784e7db4cfd2c6772f2bd059e565a7744e246642a07bc333a88a" dependencies = [ "bincode", "byteorder", @@ -2321,9 +2350,9 @@ dependencies = [ [[package]] name = "lindera-ipadic-builder" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b6b7240d097a8fc37ee8f90ebff02c4db0ba5325ecb0dacb6da3724596798c9" +checksum = "0a16a2a88db9d956f5086bc976deb9951ca2dbbfef41a002df0a7bfb2c845aab" dependencies = [ "anyhow", "bincode", @@ -2343,9 +2372,9 @@ dependencies = [ [[package]] name = "lindera-ko-dic" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abd3c5a4addeb61ca66788a3dd1fd51093e6cd8fea1d997042ada5aa60e8cc5e" +checksum = "abb479b170a841b8cfbe602d772e30849ffe0562b219190a378368968b8c8f66" dependencies = [ "bincode", "byteorder", @@ -2360,9 +2389,9 @@ dependencies = [ [[package]] name = "lindera-ko-dic-builder" -version = "0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512bb1393a9281e0b13704319d1343b7931416865852d9d7b7c0178431518326" +checksum = "9b9b58213552560717c48e7833444a20d2d7fe26a6e565f7ce0cbbf85784c7cf" dependencies = [ "anyhow", "bincode", @@ -2380,9 +2409,9 @@ dependencies = [ [[package]] name = "lindera-unidic-builder" -version = 
"0.21.0" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f575a27f8ba67c15fe16ebf7d277a0ac04e8c8a0f72670ebc2443da9d41c450" +checksum = "6858147cdaf4a7b564c08a247449d3aca38e9b4812499651af08afbf85324596" dependencies = [ "anyhow", "bincode", @@ -4167,9 +4196,9 @@ dependencies = [ [[package]] name = "unicode-segmentation" -version = "1.10.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" @@ -4277,9 +4306,9 @@ dependencies = [ [[package]] name = "wana_kana" -version = "2.1.2" +version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5770f395a1c9d6d64bee602f0a36763d7861ef5715f9d4f707cb0086f82dba54" +checksum = "477976a5c56fb7b014795df5a2ce08d2de8bcd4d5980844c5bd3978a7fd1c30b" dependencies = [ "fnv", "itertools", From feaf25a95d1aeafa410a99ac13fa8fb73b33136f Mon Sep 17 00:00:00 2001 From: TATHAGATA ROY <98920199+roy9495@users.noreply.github.com> Date: Fri, 28 Apr 2023 20:52:03 +0000 Subject: [PATCH 53/56] Updated messages pointing to the docs website --- download-latest.sh | 2 +- meilisearch-types/src/versioning.rs | 4 ++-- meilisearch/src/main.rs | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/download-latest.sh b/download-latest.sh index aa0b6e4d7..c533d6616 100644 --- a/download-latest.sh +++ b/download-latest.sh @@ -103,7 +103,7 @@ not_available_failure_usage() { printf "$RED%s\n$DEFAULT" 'ERROR: Meilisearch binary is not available for your OS distribution or your architecture yet.' echo '' echo 'However, you can easily compile the binary from the source files.' 
- echo 'Follow the steps at the page ("Source" tab): https://docs.meilisearch.com/learn/getting_started/installation.html' + echo 'Follow the steps at the page ("Source" tab): https://www.meilisearch.com/docs/learn/getting_started/installation' } fetch_release_failure_usage() { diff --git a/meilisearch-types/src/versioning.rs b/meilisearch-types/src/versioning.rs index f429a6128..3c4726403 100644 --- a/meilisearch-types/src/versioning.rs +++ b/meilisearch-types/src/versioning.rs @@ -46,7 +46,7 @@ pub fn check_version_file(db_path: &Path) -> anyhow::Result<()> { pub enum VersionFileError { #[error( "Meilisearch (v{}) failed to infer the version of the database. - To update Meilisearch please follow our guide on https://docs.meilisearch.com/learn/update_and_migration/updating.html.", + To update Meilisearch please follow our guide on https://www.meilisearch.com/docs/learn/update_and_migration/updating.", env!("CARGO_PKG_VERSION").to_string() )] MissingVersionFile, @@ -54,7 +54,7 @@ pub enum VersionFileError { MalformedVersionFile, #[error( "Your database version ({major}.{minor}.{patch}) is incompatible with your current engine version ({}).\n\ - To migrate data between Meilisearch versions, please follow our guide on https://docs.meilisearch.com/learn/update_and_migration/updating.html.", + To migrate data between Meilisearch versions, please follow our guide on https://www.meilisearch.com/docs/learn/update_and_migration/updating.", env!("CARGO_PKG_VERSION").to_string() )] VersionMismatch { major: String, minor: String, patch: String }, diff --git a/meilisearch/src/main.rs b/meilisearch/src/main.rs index c80245c7e..1b95291fd 100644 --- a/meilisearch/src/main.rs +++ b/meilisearch/src/main.rs @@ -149,7 +149,7 @@ pub fn print_launch_resume( " Thank you for using Meilisearch! -\nWe collect anonymized analytics to improve our product and your experience. 
To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html +\nWe collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry Anonymous telemetry:\t\"Enabled\"" ); From 0cba9192283e9a50b3f5e9bb4cdf8379cc944f95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar?= Date: Fri, 28 Apr 2023 17:59:04 +0200 Subject: [PATCH 54/56] Add SDKs test in a CI --- .github/workflows/sdks-tests.yml | 200 +++++++++++++++++++++++++++++++ 1 file changed, 200 insertions(+) create mode 100644 .github/workflows/sdks-tests.yml diff --git a/.github/workflows/sdks-tests.yml b/.github/workflows/sdks-tests.yml new file mode 100644 index 000000000..617b446d1 --- /dev/null +++ b/.github/workflows/sdks-tests.yml @@ -0,0 +1,200 @@ +# If any test fails, the engine team should ensure the "breaking" changes are expected and contact the integration team +name: SDKs tests + +on: + workflow_dispatch: + schedule: + - cron: "0 6 * * MON" # Every Monday at 6:00AM + +env: + MEILI_MASTER_KEY: 'masterKey' + MEILI_NO_ANALYTICS: 'true' + +jobs: + + meilisearch-js-tests: + name: JS SDK tests + runs-on: ubuntu-latest + services: + meilisearch: + image: getmeili/meilisearch:nightly + env: + MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }} + MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }} + ports: + - '7700:7700' + steps: + - uses: actions/checkout@v3 + with: + repository: meilisearch/meilisearch-js + - name: Setup node + uses: actions/setup-node@v3 + with: + cache: 'yarn' + - name: Install dependencies + run: yarn --dev + - name: Run tests + run: yarn test + - name: Build project + run: yarn build + - name: Run ESM env + run: yarn test:env:esm + - name: Run Node.js env + run: yarn test:env:nodejs + - name: Run node typescript 
env + run: yarn test:env:node-ts + - name: Run Browser env + run: yarn test:env:browser + + instant-meilisearch-tests: + name: instant-meilisearch tests + runs-on: ubuntu-latest + services: + meilisearch: + image: getmeili/meilisearch:nightly + env: + MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }} + MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }} + ports: + - '7700:7700' + steps: + - uses: actions/checkout@v3 + with: + repository: meilisearch/instant-meilisearch + - name: Setup node + uses: actions/setup-node@v3 + with: + cache: yarn + - name: Install dependencies + run: yarn install + - name: Run tests + run: yarn test + - name: Build all the playgrounds and the packages + run: yarn build + + meilisearch-php-tests: + name: PHP SDK tests + runs-on: ubuntu-latest + services: + meilisearch: + image: getmeili/meilisearch:nightly + env: + MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }} + MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }} + ports: + - '7700:7700' + steps: + - uses: actions/checkout@v3 + with: + repository: meilisearch/meilisearch-php + - name: Install PHP + uses: shivammathur/setup-php@v2 + with: + coverage: none + - name: Validate composer.json and composer.lock + run: composer validate + - name: Install dependencies + run: | + composer remove --dev friendsofphp/php-cs-fixer --no-update --no-interaction + composer update --prefer-dist --no-progress + - name: Run test suite - default HTTP client (Guzzle 7) + run: | + sh scripts/tests.sh + composer remove --dev guzzlehttp/guzzle http-interop/http-factory-guzzle + + meilisearch-python-tests: + name: Python SDK tests + runs-on: ubuntu-latest + services: + meilisearch: + image: getmeili/meilisearch:nightly + env: + MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }} + MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }} + ports: + - '7700:7700' + steps: + - uses: actions/checkout@v3 + with: + repository: meilisearch/meilisearch-python + - name: Set up Python + uses: actions/setup-python@v4 + - name: Install pipenv + 
uses: dschep/install-pipenv-action@v1 + - name: Install dependencies + run: pipenv install --dev --python=${{ matrix.python-version }} + - name: Test with pytest + run: pipenv run pytest + + meilisearch-go-tests: + name: Go SDK tests + runs-on: ubuntu-latest + services: + meilisearch: + image: getmeili/meilisearch:nightly + env: + MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }} + MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }} + ports: + - '7700:7700' + steps: + - name: Set up Go + uses: actions/setup-go@v3 + with: + go-version: stable + - uses: actions/checkout@v3 + with: + repository: meilisearch/meilisearch-go + - name: Get dependencies + run: | + go get -v -t -d ./... + if [ -f Gopkg.toml ]; then + curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh + dep ensure + fi + - name: Run integration tests + run: go test -v ./... + + meilisearch-ruby-tests: + name: Ruby SDK tests + runs-on: ubuntu-latest + services: + meilisearch: + image: getmeili/meilisearch:nightly + env: + MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }} + MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }} + ports: + - '7700:7700' + steps: + - uses: actions/checkout@v3 + with: + repository: meilisearch/meilisearch-ruby + - name: Set up Ruby 3 + uses: ruby/setup-ruby@v1 + with: + ruby-version: 3 + - name: Install ruby dependencies + run: bundle install --with test + - name: Run test suite + run: bundle exec rspec + + meilisearch-rust-tests: + name: Rust SDK tests + runs-on: ubuntu-latest + services: + meilisearch: + image: getmeili/meilisearch:nightly + env: + MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }} + MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }} + ports: + - '7700:7700' + steps: + - uses: actions/checkout@v3 + with: + repository: meilisearch/meilisearch-rust + - name: Build + run: cargo build --verbose + - name: Run tests + run: cargo test --verbose From f284a9c0dd83cf2b2d93469206a5575a2f5dbe87 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Tue, 2 
May 2023 13:51:44 +0200 Subject: [PATCH 55/56] Fix the README.md broken links --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 96a15e050..2fc829467 100644 --- a/README.md +++ b/README.md @@ -68,11 +68,11 @@ Install one of our SDKs in your project for seamless integration between Meilise Take a look at the complete [Meilisearch integration list](https://meilisearch.com/docs/learn/what_is_meilisearch/sdks). -[![Logos belonging to different languages and frameworks supported by Meilisearch, including React, Ruby on Rails, Go, Rust, and PHP](assets/integrations.png)](https://meilisearch.com/docs/learn/what_is_meilisearch/sdks.html) +[![Logos belonging to different languages and frameworks supported by Meilisearch, including React, Ruby on Rails, Go, Rust, and PHP](assets/integrations.png)](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks) ## ⚙️ Advanced usage -Experienced users will want to keep our [API Reference](https://meilisearch.com/docs/reference/api) close at hand. +Experienced users will want to keep our [API Reference](https://www.meilisearch.com/docs/reference/api/overview) close at hand. We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://meilisearch.com/docs/learn/advanced/filtering), [sorting](https://meilisearch.com/docs/learn/advanced/sorting), [geosearch](https://meilisearch.com/docs/learn/advanced/geosearch), [API keys](https://meilisearch.com/docs/learn/security/master_api_keys), and [tenant tokens](https://meilisearch.com/docs/learn/security/tenant_tokens). 
From d89d2efb7eda7c50cc51f0ab4177924f97c162a9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Renault?= Date: Tue, 2 May 2023 13:53:36 +0200 Subject: [PATCH 56/56] Change the text of a link --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a4255f1e7..564d2438b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,7 +18,7 @@ If Meilisearch does not offer optimized support for your language, please consid ## Assumptions -1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.** +1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests (PR)](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests) workflow.** 2. **You've read the Meilisearch [documentation](https://docs.meilisearch.com).** 3. **You know about the [Meilisearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html). Please use this for help.**