Fixes for Rust v1.79

commit 0a8f50695e
parent 09d9b63e1c
@@ -152,6 +152,7 @@ impl Settings<Unchecked> {
 }

 #[derive(Debug, Clone, Deserialize)]
+#[allow(dead_code)] // otherwise rustc complains that the fields go unused
 #[cfg_attr(test, derive(serde::Serialize))]
 #[serde(deny_unknown_fields)]
 #[serde(rename_all = "camelCase")]
@@ -182,6 +182,7 @@ impl Settings<Unchecked> {
     }
 }

+#[allow(dead_code)] // otherwise rustc complains that the fields go unused
 #[derive(Debug, Clone, Deserialize)]
 #[cfg_attr(test, derive(serde::Serialize))]
 #[serde(deny_unknown_fields)]
@@ -200,6 +200,7 @@ impl std::ops::Deref for IndexUid {
     }
 }

+#[allow(dead_code)] // otherwise rustc complains that the fields go unused
 #[derive(Debug)]
 #[cfg_attr(test, derive(serde::Serialize))]
 #[cfg_attr(test, serde(rename_all = "camelCase"))]
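The three hunks above apply the same fix: these dump-compatibility types are only ever filled in through Deserialize (and Serialize in tests), so their fields are never read directly, and the dead-code warnings that appear under Rust 1.79 report them as unused. Below is a minimal sketch of the pattern, with a hypothetical type name and fields, assuming serde and serde_json are available; it is an illustration, not code from the repository.

#[allow(dead_code)] // otherwise rustc complains that the fields go unused
#[derive(Debug, Clone, serde::Deserialize)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
struct LegacySettings {
    // These fields exist only so deserialization can validate the payload;
    // nothing reads them afterwards, which is what the lint objects to.
    displayed_attributes: Option<Vec<String>>,
    ranking_rules: Option<Vec<String>>,
}

fn main() {
    let parsed: LegacySettings =
        serde_json::from_str(r#"{ "displayedAttributes": ["title"] }"#).unwrap();
    println!("{parsed:?}");
}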
@@ -71,10 +71,10 @@ csv = "1.3.0"
 candle-core = { version = "0.4.1" }
 candle-transformers = { version = "0.4.1" }
 candle-nn = { version = "0.4.1" }
-tokenizers = { git = "https://github.com/huggingface/tokenizers.git", tag = "v0.15.2", version = "0.15.2", default_features = false, features = [
+tokenizers = { git = "https://github.com/huggingface/tokenizers.git", tag = "v0.15.2", version = "0.15.2", default-features = false, features = [
     "onig",
 ] }
-hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls", default_features = false, features = [
+hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls", default-features = false, features = [
     "online",
 ] }
 tiktoken-rs = "0.5.8"
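Both Cargo.toml hunks in this commit (this one and the reqwest one further down) are the same rename: recent Cargo releases deprecate the underscore spelling `default_features` in favour of `default-features`, and the toolchain shipped alongside Rust 1.79 warns about it. A minimal sketch with a hypothetical dependency, not one from the workspace:

[dependencies]
# The hyphenated key is the spelling Cargo documents; the underscore form
# still resolves the same way but emits a deprecation warning.
some-crate = { version = "1.0", default-features = false, features = ["std"] }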
@@ -22,7 +22,7 @@ pub enum SearchEvents {
     RankingRuleStartIteration { ranking_rule_idx: usize, universe_len: u64 },
     RankingRuleNextBucket { ranking_rule_idx: usize, universe_len: u64, bucket_len: u64 },
     RankingRuleSkipBucket { ranking_rule_idx: usize, bucket_len: u64 },
-    RankingRuleEndIteration { ranking_rule_idx: usize, universe_len: u64 },
+    RankingRuleEndIteration { ranking_rule_idx: usize },
     ExtendResults { new: Vec<u32> },
     ProximityGraph { graph: RankingRuleGraph<ProximityGraph> },
     ProximityPaths { paths: Vec<Vec<Interned<ProximityCondition>>> },
@@ -123,12 +123,9 @@ impl SearchLogger<QueryGraph> for VisualSearchLogger {
         &mut self,
         ranking_rule_idx: usize,
         _ranking_rule: &dyn RankingRule<QueryGraph>,
-        universe: &RoaringBitmap,
+        _universe: &RoaringBitmap,
     ) {
-        self.events.push(SearchEvents::RankingRuleEndIteration {
-            ranking_rule_idx,
-            universe_len: universe.len(),
-        });
+        self.events.push(SearchEvents::RankingRuleEndIteration { ranking_rule_idx });
         self.location.pop();
     }
     fn add_to_results(&mut self, docids: &[u32]) {
@@ -326,7 +323,7 @@ impl<'ctx> DetailedLoggerFinish<'ctx> {
                 assert!(ranking_rule_idx == self.rr_action_counter.len() - 1);
                 self.write_skip_bucket(bucket_len)?;
             }
-            SearchEvents::RankingRuleEndIteration { ranking_rule_idx, universe_len: _ } => {
+            SearchEvents::RankingRuleEndIteration { ranking_rule_idx } => {
                 assert!(ranking_rule_idx == self.rr_action_counter.len() - 1);
                 self.write_end_iteration()?;
             }
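The previous three hunks are one change seen from three sides: the `universe_len` payload of `RankingRuleEndIteration` was stored when the event was pushed but never read when the log was written out, so rustc now flags it as an unused field. Rather than silencing the lint, the field is removed from the enum variant, the producer, and the consumer. Below is a minimal, self-contained sketch of that pattern with hypothetical names; it mirrors the shape of the change, not the actual Meilisearch types.

enum Event {
    EndIteration { idx: usize },
}

struct Logger {
    events: Vec<Event>,
}

impl Logger {
    fn end_iteration(&mut self, idx: usize) {
        // Producer side: store only what the consumer actually uses.
        self.events.push(Event::EndIteration { idx });
    }

    fn replay(&self) {
        for event in &self.events {
            match event {
                // Consumer side: no `universe_len: _` left to ignore.
                Event::EndIteration { idx } => println!("end of iteration for rule {idx}"),
            }
        }
    }
}

fn main() {
    let mut logger = Logger { events: Vec::new() };
    logger.end_iteration(0);
    logger.replay();
}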
@@ -53,7 +53,6 @@ pub struct Transform<'a, 'i> {
     fields_ids_map: FieldsIdsMap,

     indexer_settings: &'a IndexerConfig,
-    pub autogenerate_docids: bool,
     pub index_documents_method: IndexDocumentsMethod,
     available_documents_ids: AvailableDocumentsIds,

@@ -107,7 +106,7 @@ impl<'a, 'i> Transform<'a, 'i> {
         index: &'i Index,
         indexer_settings: &'a IndexerConfig,
         index_documents_method: IndexDocumentsMethod,
-        autogenerate_docids: bool,
+        _autogenerate_docids: bool,
     ) -> Result<Self> {
         // We must choose the appropriate merge function for when two or more documents
         // with the same user id must be merged or fully replaced in the same batch.
@@ -141,7 +140,6 @@ impl<'a, 'i> Transform<'a, 'i> {
             index,
             fields_ids_map: index.fields_ids_map(wtxn)?,
             indexer_settings,
-            autogenerate_docids,
             available_documents_ids: AvailableDocumentsIds::from_documents_ids(&documents_ids),
             original_sorter,
             flattened_sorter,
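The three `Transform` hunks above remove a field that was stored but never read. The constructor keeps the same number of arguments so call sites stay untouched; renaming the parameter to `_autogenerate_docids` tells rustc the value is intentionally ignored. A minimal sketch of that technique with hypothetical names, not the real `Transform` type:

struct Indexer {
    method: String,
}

impl Indexer {
    // The boolean is still accepted so existing callers keep compiling, but it
    // is no longer stored anywhere; the leading underscore silences the
    // unused-variable warning.
    fn new(method: String, _autogenerate_ids: bool) -> Self {
        Indexer { method }
    }
}

fn main() {
    let indexer = Indexer::new("replace".to_string(), true);
    println!("indexing with method: {}", indexer.method);
}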
@@ -21,7 +21,7 @@ reqwest = { version = "0.11.23", features = [
     "stream",
     "json",
     "rustls-tls",
-], default_features = false }
+], default-features = false }
 serde = { version = "1.0.195", features = ["derive"] }
 serde_json = "1.0.111"
 sha2 = "0.10.8"