fix the main

This commit is contained in:
Tamo 2024-10-17 08:38:11 +02:00
parent e4ace98004
commit 7382fb21e4
5 changed files with 33 additions and 33 deletions

View File

@@ -5,8 +5,11 @@ pub mod segment_analytics;
use std::fs; use std::fs;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc;
use actix_web::HttpRequest; use actix_web::HttpRequest;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::InstanceUid; use meilisearch_types::InstanceUid;
use mopa::mopafy; use mopa::mopafy;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
@@ -17,6 +20,8 @@ pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
pub use segment_analytics::SearchAggregator; pub use segment_analytics::SearchAggregator;
pub use segment_analytics::SimilarAggregator; pub use segment_analytics::SimilarAggregator;
use crate::Opt;
use self::segment_analytics::extract_user_agents; use self::segment_analytics::extract_user_agents;
pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator; pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator;
pub type FacetSearchAggregator = segment_analytics::FacetSearchAggregator; pub type FacetSearchAggregator = segment_analytics::FacetSearchAggregator;
@@ -137,17 +142,22 @@ macro_rules! aggregate_methods {
}; };
} }
#[derive(Clone)]
pub struct Analytics { pub struct Analytics {
segment: Option<SegmentAnalytics>, segment: Option<Arc<SegmentAnalytics>>,
} }
impl Analytics { impl Analytics {
fn no_analytics() -> Self { pub async fn new(
opt: &Opt,
index_scheduler: Arc<IndexScheduler>,
auth_controller: Arc<AuthController>,
) -> Self {
if opt.no_analytics {
Self { segment: None } Self { segment: None }
} else {
Self { segment: SegmentAnalytics::new(opt, index_scheduler, auth_controller).await }
} }
fn segment_analytics(segment: SegmentAnalytics) -> Self {
Self { segment: Some(segment) }
} }
pub fn instance_uid(&self) -> Option<&InstanceUid> { pub fn instance_uid(&self) -> Option<&InstanceUid> {

View File

@@ -102,7 +102,7 @@ impl SegmentAnalytics {
opt: &Opt, opt: &Opt,
index_scheduler: Arc<IndexScheduler>, index_scheduler: Arc<IndexScheduler>,
auth_controller: Arc<AuthController>, auth_controller: Arc<AuthController>,
) -> Arc<Analytics> { ) -> Option<Arc<Self>> {
let instance_uid = super::find_user_id(&opt.db_path); let instance_uid = super::find_user_id(&opt.db_path);
let first_time_run = instance_uid.is_none(); let first_time_run = instance_uid.is_none();
let instance_uid = instance_uid.unwrap_or_else(Uuid::new_v4); let instance_uid = instance_uid.unwrap_or_else(Uuid::new_v4);
@@ -112,7 +112,7 @@ impl SegmentAnalytics {
// if reqwest throws an error we won't be able to send analytics // if reqwest throws an error we won't be able to send analytics
if client.is_err() { if client.is_err() {
return Arc::new(Analytics::no_analytics()); return None;
} }
let client = let client =
@@ -148,13 +148,13 @@ impl SegmentAnalytics {
user: user.clone(), user: user.clone(),
opt: opt.clone(), opt: opt.clone(),
batcher, batcher,
events: todo!(), events: HashMap::new(),
}); });
tokio::spawn(segment.run(index_scheduler.clone(), auth_controller.clone())); tokio::spawn(segment.run(index_scheduler.clone(), auth_controller.clone()));
let this = Self { instance_uid, sender, user: user.clone() }; let this = Self { instance_uid, sender, user: user.clone() };
Arc::new(Analytics::segment_analytics(this)) Some(Arc::new(this))
} }
} }
@@ -595,7 +595,7 @@ pub struct SearchAggregator<Method: AggregateMethod> {
impl<Method: AggregateMethod> SearchAggregator<Method> { impl<Method: AggregateMethod> SearchAggregator<Method> {
#[allow(clippy::field_reassign_with_default)] #[allow(clippy::field_reassign_with_default)]
pub fn from_query(query: &SearchQuery, request: &HttpRequest) -> Self { pub fn from_query(query: &SearchQuery) -> Self {
let SearchQuery { let SearchQuery {
q, q,
vector, vector,

View File

@@ -120,7 +120,7 @@ pub fn create_app(
search_queue: Data<SearchQueue>, search_queue: Data<SearchQueue>,
opt: Opt, opt: Opt,
logs: (LogRouteHandle, LogStderrHandle), logs: (LogRouteHandle, LogStderrHandle),
analytics: Arc<Analytics>, analytics: Data<Analytics>,
enable_dashboard: bool, enable_dashboard: bool,
) -> actix_web::App< ) -> actix_web::App<
impl ServiceFactory< impl ServiceFactory<
@@ -473,14 +473,14 @@ pub fn configure_data(
search_queue: Data<SearchQueue>, search_queue: Data<SearchQueue>,
opt: &Opt, opt: &Opt,
(logs_route, logs_stderr): (LogRouteHandle, LogStderrHandle), (logs_route, logs_stderr): (LogRouteHandle, LogStderrHandle),
analytics: Arc<Analytics>, analytics: Data<Analytics>,
) { ) {
let http_payload_size_limit = opt.http_payload_size_limit.as_u64() as usize; let http_payload_size_limit = opt.http_payload_size_limit.as_u64() as usize;
config config
.app_data(index_scheduler) .app_data(index_scheduler)
.app_data(auth) .app_data(auth)
.app_data(search_queue) .app_data(search_queue)
.app_data(web::Data::from(analytics)) .app_data(analytics)
.app_data(web::Data::new(logs_route)) .app_data(web::Data::new(logs_route))
.app_data(web::Data::new(logs_stderr)) .app_data(web::Data::new(logs_stderr))
.app_data(web::Data::new(opt.clone())) .app_data(web::Data::new(opt.clone()))

View File

@@ -124,19 +124,12 @@ async fn try_main() -> anyhow::Result<()> {
let (index_scheduler, auth_controller) = setup_meilisearch(&opt)?; let (index_scheduler, auth_controller) = setup_meilisearch(&opt)?;
#[cfg(all(not(debug_assertions), feature = "analytics"))] let analytics =
let analytics = if !opt.no_analytics { analytics::Analytics::new(&opt, index_scheduler.clone(), auth_controller.clone()).await;
analytics::SegmentAnalytics::new(&opt, index_scheduler.clone(), auth_controller.clone())
.await
} else {
analytics::MockAnalytics::new(&opt)
};
#[cfg(any(debug_assertions, not(feature = "analytics")))]
let analytics = analytics::MockAnalytics::new(&opt);
print_launch_resume(&opt, analytics.clone(), config_read_from); print_launch_resume(&opt, analytics.clone(), config_read_from);
run_http(index_scheduler, auth_controller, opt, log_handle, analytics).await?; run_http(index_scheduler, auth_controller, opt, log_handle, Arc::new(analytics)).await?;
Ok(()) Ok(())
} }
@@ -146,12 +139,13 @@ async fn run_http(
auth_controller: Arc<AuthController>, auth_controller: Arc<AuthController>,
opt: Opt, opt: Opt,
logs: (LogRouteHandle, LogStderrHandle), logs: (LogRouteHandle, LogStderrHandle),
analytics: Arc<dyn Analytics>, analytics: Arc<Analytics>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let enable_dashboard = &opt.env == "development"; let enable_dashboard = &opt.env == "development";
let opt_clone = opt.clone(); let opt_clone = opt.clone();
let index_scheduler = Data::from(index_scheduler); let index_scheduler = Data::from(index_scheduler);
let auth_controller = Data::from(auth_controller); let auth_controller = Data::from(auth_controller);
let analytics = Data::from(analytics);
let search_queue = SearchQueue::new( let search_queue = SearchQueue::new(
opt.experimental_search_queue_size, opt.experimental_search_queue_size,
available_parallelism() available_parallelism()
@@ -187,11 +181,7 @@ async fn run_http(
Ok(()) Ok(())
} }
pub fn print_launch_resume( pub fn print_launch_resume(opt: &Opt, analytics: Analytics, config_read_from: Option<PathBuf>) {
opt: &Opt,
analytics: Arc<dyn Analytics>,
config_read_from: Option<PathBuf>,
) {
let build_info = build_info::BuildInfo::from_build(); let build_info = build_info::BuildInfo::from_build();
let protocol = let protocol =

View File

@@ -238,7 +238,7 @@ pub async fn search_with_url_query(
add_search_rules(&mut query.filter, search_rules); add_search_rules(&mut query.filter, search_rules);
} }
let mut aggregate = SearchAggregator::<SearchGET>::from_query(&query, &req); let mut aggregate = SearchAggregator::<SearchGET>::from_query(&query);
let index = index_scheduler.index(&index_uid)?; let index = index_scheduler.index(&index_uid)?;
let features = index_scheduler.features(); let features = index_scheduler.features();
@@ -281,7 +281,7 @@ pub async fn search_with_post(
add_search_rules(&mut query.filter, search_rules); add_search_rules(&mut query.filter, search_rules);
} }
let mut aggregate = SearchAggregator::<SearchPOST>::from_query(&query, &req); let mut aggregate = SearchAggregator::<SearchPOST>::from_query(&query);
let index = index_scheduler.index(&index_uid)?; let index = index_scheduler.index(&index_uid)?;