Introduce a rustfmt file

Clément Renault 2022-10-20 18:00:07 +02:00
parent dd57e051d7
commit 3f6bd7fb11
92 changed files with 1251 additions and 2857 deletions
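
The .rustfmt.toml that this commit introduces is not itself shown in this view. Judging from the changes below (imports split one module per `use` and grouped into std / external / crate blocks, short multi-line expressions collapsed onto a single line), a configuration along these lines would produce them; this is a sketch inferred from the diff rather than the file's verbatim contents, and note that imports_granularity and group_imports are nightly-only options, hence unstable_features:

    unstable_features = true
    use_small_heuristics = "Max"
    imports_granularity = "Module"
    group_imports = "StdExternalCrate"

With such a file at the repository root, the reformatting below is what `cargo +nightly fmt` applies across the workspace.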


@@ -1,12 +1,13 @@
-use std::{any::Any, sync::Arc};
+use std::any::Any;
+use std::sync::Arc;
 
 use actix_web::HttpRequest;
 use meilisearch_types::InstanceUid;
 use serde_json::Value;
 
-use crate::{routes::indexes::documents::UpdateDocumentsQuery, Opt};
-
 use super::{find_user_id, Analytics};
+use crate::routes::indexes::documents::UpdateDocumentsQuery;
+use crate::Opt;
 
 pub struct MockAnalytics {
     instance_uid: Option<InstanceUid>,


@@ -9,14 +9,13 @@ use std::str::FromStr;
 
 use actix_web::HttpRequest;
 use meilisearch_types::InstanceUid;
+pub use mock_analytics::MockAnalytics;
 use once_cell::sync::Lazy;
 use platform_dirs::AppDirs;
 use serde_json::Value;
 
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
 
-pub use mock_analytics::MockAnalytics;
-
 // if we are in debug mode OR the analytics feature is disabled
 // the `SegmentAnalytics` point to the mock instead of the real analytics
 #[cfg(any(debug_assertions, not(feature = "analytics")))]
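
The comment in the hunk above describes a compile-time switch: in debug builds, or when the analytics feature is disabled, the name SegmentAnalytics resolves to the mock. The item that performs the aliasing sits outside this hunk; a plausible shape for it, shown purely as an illustration:

    // Hypothetical sketch: under the cfg condition quoted above, the rest
    // of the crate sees MockAnalytics under the SegmentAnalytics name.
    #[cfg(any(debug_assertions, not(feature = "analytics")))]
    pub type SegmentAnalytics = mock_analytics::MockAnalytics;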
@@ -42,12 +41,7 @@ fn config_user_id_path(db_path: &Path) -> Option<PathBuf> {
     db_path
         .canonicalize()
         .ok()
-        .map(|path| {
-            path.join("instance-uid")
-                .display()
-                .to_string()
-                .replace('/', "-")
-        })
+        .map(|path| path.join("instance-uid").display().to_string().replace('/', "-"))
         .zip(MEILISEARCH_CONFIG_PATH.as_ref())
         .map(|(filename, config_path)| config_path.join(filename.trim_start_matches('-')))
 }
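
Beyond the formatting change, the hunk above is a compact summary of what config_user_id_path computes: the canonicalized db path is flattened into a single file name ('/' replaced by '-', the leading '-' trimmed) inside the Meilisearch config directory. A minimal standalone sketch of that flattening, with hypothetical sample paths, and with canonicalization and the MEILISEARCH_CONFIG_PATH lookup stubbed out so it runs anywhere:

    use std::path::{Path, PathBuf};

    // Same flattening as `config_user_id_path`, minus `canonicalize()`
    // and the lazily resolved config directory.
    fn user_id_file(db_path: &Path, config_dir: &Path) -> PathBuf {
        let flat = db_path.join("instance-uid").display().to_string().replace('/', "-");
        config_dir.join(flat.trim_start_matches('-'))
    }

    fn main() {
        let config_dir = Path::new("/home/user/.config/MeiliSearch");
        let file = user_id_file(Path::new("/tmp/data.ms"), config_dir);
        // Prints /home/user/.config/MeiliSearch/tmp-data.ms-instance-uid
        println!("{}", file.display());
    }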


@@ -21,6 +21,7 @@ use tokio::select;
 use tokio::sync::mpsc::{self, Receiver, Sender};
 use uuid::Uuid;
 
+use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
 use crate::analytics::Analytics;
 use crate::option::default_http_addr;
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
@@ -31,16 +32,13 @@ use crate::search::{
 };
 use crate::Opt;
 
-use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
-
 const ANALYTICS_HEADER: &str = "X-Meilisearch-Client";
 
 /// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors.
 fn write_user_id(db_path: &Path, user_id: &InstanceUid) {
     let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes());
-    if let Some((meilisearch_config_path, user_id_path)) = MEILISEARCH_CONFIG_PATH
-        .as_ref()
-        .zip(config_user_id_path(db_path))
+    if let Some((meilisearch_config_path, user_id_path)) =
+        MEILISEARCH_CONFIG_PATH.as_ref().zip(config_user_id_path(db_path))
     {
         let _ = fs::create_dir_all(&meilisearch_config_path);
         let _ = fs::write(user_id_path, user_id.to_string());
@@ -84,22 +82,16 @@ impl SegmentAnalytics {
         let instance_uid = instance_uid.unwrap_or_else(|| Uuid::new_v4());
         write_user_id(&opt.db_path, &instance_uid);
 
-        let client = reqwest::Client::builder()
-            .connect_timeout(Duration::from_secs(10))
-            .build();
+        let client = reqwest::Client::builder().connect_timeout(Duration::from_secs(10)).build();
 
         // if reqwest throws an error we won't be able to send analytics
         if client.is_err() {
             return super::MockAnalytics::new(opt);
         }
 
-        let client = HttpClient::new(
-            client.unwrap(),
-            "https://telemetry.meilisearch.com".to_string(),
-        );
-        let user = User::UserId {
-            user_id: instance_uid.to_string(),
-        };
+        let client =
+            HttpClient::new(client.unwrap(), "https://telemetry.meilisearch.com".to_string());
+        let user = User::UserId { user_id: instance_uid.to_string() };
         let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
 
         // If Meilisearch is Launched for the first time:
@@ -108,9 +100,7 @@ impl SegmentAnalytics {
         if first_time_run {
             let _ = batcher
                 .push(Track {
-                    user: User::UserId {
-                        user_id: "total_launch".to_string(),
-                    },
+                    user: User::UserId { user_id: "total_launch".to_string() },
                     event: "Launched".to_string(),
                     ..Default::default()
                 })
@@ -139,11 +129,7 @@ impl SegmentAnalytics {
         });
         tokio::spawn(segment.run(index_scheduler.clone()));
 
-        let this = Self {
-            instance_uid,
-            sender,
-            user: user.clone(),
-        };
+        let this = Self { instance_uid, sender, user: user.clone() };
 
         Arc::new(this)
     }
@@ -164,21 +150,15 @@ impl super::Analytics for SegmentAnalytics {
             properties: send,
             ..Default::default()
         };
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::BatchMessage(event.into()));
+        let _ = self.sender.try_send(AnalyticsMsg::BatchMessage(event.into()));
     }
 
     fn get_search(&self, aggregate: SearchAggregator) {
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::AggregateGetSearch(aggregate));
+        let _ = self.sender.try_send(AnalyticsMsg::AggregateGetSearch(aggregate));
     }
 
     fn post_search(&self, aggregate: SearchAggregator) {
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
+        let _ = self.sender.try_send(AnalyticsMsg::AggregatePostSearch(aggregate));
     }
 
     fn add_documents(
@@ -188,9 +168,7 @@ impl super::Analytics for SegmentAnalytics {
         request: &HttpRequest,
     ) {
         let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::AggregateAddDocuments(aggregate));
+        let _ = self.sender.try_send(AnalyticsMsg::AggregateAddDocuments(aggregate));
     }
 
     fn update_documents(
@@ -200,9 +178,7 @@ impl super::Analytics for SegmentAnalytics {
         request: &HttpRequest,
     ) {
         let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request);
-        let _ = self
-            .sender
-            .try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
+        let _ = self.sender.try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
     }
 }
 
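Every trait method in the hunks above funnels into the same fire-and-forget send: `let _ = self.sender.try_send(...)`. try_send never blocks, so when the channel is full or the receiving end is gone the analytics event is silently dropped rather than slowing down or failing the request. A minimal sketch of the pattern, with a plain String standing in for AnalyticsMsg:

    use tokio::sync::mpsc;

    #[tokio::main]
    async fn main() {
        let (sender, mut receiver) = mpsc::channel::<String>(10);

        // Non-blocking: if the buffer is full or the receiver was dropped,
        // the Err is deliberately ignored and the event is lost.
        let _ = sender.try_send("Launched".to_string());

        if let Some(event) = receiver.recv().await {
            println!("got event: {event}");
        }
    }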
@@ -261,11 +237,8 @@ impl Segment {
             infos
         };
 
-        let number_of_documents = stats
-            .indexes
-            .values()
-            .map(|index| index.number_of_documents)
-            .collect::<Vec<u64>>();
+        let number_of_documents =
+            stats.indexes.values().map(|index| index.number_of_documents).collect::<Vec<u64>>();
 
         json!({
             "start_since_days": FIRST_START_TIMESTAMP.elapsed().as_secs() / (60 * 60 * 24), // one day
@@ -412,11 +385,7 @@ impl SearchAggregator {
                 let syntax = match filter {
                     Value::String(_) => "string".to_string(),
                     Value::Array(values) => {
-                        if values
-                            .iter()
-                            .map(|v| v.to_string())
-                            .any(|s| RE.is_match(&s))
-                        {
+                        if values.iter().map(|v| v.to_string()).any(|s| RE.is_match(&s)) {
                             "mixed".to_string()
                         } else {
                             "array".to_string()
@@ -436,8 +405,7 @@ impl SearchAggregator {
             ret.max_terms_number = q.split_whitespace().count();
         }
 
-        ret.matching_strategy
-            .insert(format!("{:?}", query.matching_strategy), 1);
+        ret.matching_strategy.insert(format!("{:?}", query.matching_strategy), 1);
 
         ret.max_limit = query.limit;
         ret.max_offset = query.offset.unwrap_or_default();
@@ -472,17 +440,14 @@ impl SearchAggregator {
         self.time_spent.append(&mut other.time_spent);
         // sort
         self.sort_with_geo_point |= other.sort_with_geo_point;
-        self.sort_sum_of_criteria_terms = self
-            .sort_sum_of_criteria_terms
-            .saturating_add(other.sort_sum_of_criteria_terms);
-        self.sort_total_number_of_criteria = self
-            .sort_total_number_of_criteria
-            .saturating_add(other.sort_total_number_of_criteria);
+        self.sort_sum_of_criteria_terms =
+            self.sort_sum_of_criteria_terms.saturating_add(other.sort_sum_of_criteria_terms);
+        self.sort_total_number_of_criteria =
+            self.sort_total_number_of_criteria.saturating_add(other.sort_total_number_of_criteria);
         // filter
         self.filter_with_geo_radius |= other.filter_with_geo_radius;
-        self.filter_sum_of_criteria_terms = self
-            .filter_sum_of_criteria_terms
-            .saturating_add(other.filter_sum_of_criteria_terms);
+        self.filter_sum_of_criteria_terms =
+            self.filter_sum_of_criteria_terms.saturating_add(other.filter_sum_of_criteria_terms);
         self.filter_total_number_of_criteria = self
             .filter_total_number_of_criteria
             .saturating_add(other.filter_total_number_of_criteria);