mirror of https://github.com/meilisearch/MeiliSearch
synced 2025-07-04 12:27:13 +02:00

fix clippy

parent e35bc48ba6
commit 3283bb0454

31 changed files with 125 additions and 124 deletions
```diff
@@ -87,21 +87,20 @@ pub fn create_app(
         .configure(|s| dashboard(s, enable_dashboard));
     #[cfg(feature = "metrics")]
     let app = app.configure(|s| configure_metrics_route(s, opt.enable_metrics_route));
-    let app = app
-        .wrap(
-            Cors::default()
-                .send_wildcard()
-                .allow_any_header()
-                .allow_any_origin()
-                .allow_any_method()
-                .max_age(86_400), // 24h
-        )
-        .wrap(middleware::Logger::default())
-        .wrap(middleware::Compress::default())
-        .wrap(middleware::NormalizePath::new(middleware::TrailingSlash::Trim));
+
     #[cfg(feature = "metrics")]
     let app = app.wrap(Condition::new(opt.enable_metrics_route, route_metrics::RouteMetrics));
-    app
+    app.wrap(
+        Cors::default()
+            .send_wildcard()
+            .allow_any_header()
+            .allow_any_origin()
+            .allow_any_method()
+            .max_age(86_400), // 24h
+    )
+    .wrap(middleware::Logger::default())
+    .wrap(middleware::Compress::default())
+    .wrap(middleware::NormalizePath::new(middleware::TrailingSlash::Trim))
 }

 // TODO: TAMO: Finish setting up things
```
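Each hunk in this commit addresses a specific clippy lint. This first one resolves `let_and_return`: the middleware chain was bound to a local `app` only to be returned on the next line, so the chain is now returned directly as the function's tail expression. A minimal sketch of the lint with illustrative names, not code from the repository:

```rust
// What clippy::let_and_return flags: a binding that exists only to be
// returned immediately afterwards.
fn greeting_with_binding(name: &str) -> String {
    let message = format!("hello, {name}");
    message // clippy: return the expression directly instead
}

// The fixed form makes the expression the function's tail value.
fn greeting(name: &str) -> String {
    format!("hello, {name}")
}

fn main() {
    assert_eq!(greeting_with_binding("meilisearch"), greeting("meilisearch"));
    println!("{}", greeting("meilisearch"));
}
```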
```diff
@@ -148,7 +147,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(IndexScheduler, AuthContr
             Ok(()) => (index_scheduler, auth_controller),
             Err(e) => {
                 std::fs::remove_dir_all(&opt.db_path)?;
-                return Err(e.into());
+                return Err(e);
             }
         }
     } else if !empty_db && !opt.ignore_dump_if_db_exists {
```
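Both `setup_meilisearch` hunks fix clippy's `useless_conversion`: evidently `e` already has the error type the function returns, so `e.into()` converted a value into its own type. A self-contained sketch of the lint (the port parser is illustrative):

```rust
use std::num::ParseIntError;

// clippy::useless_conversion fires when a value is converted into the
// type it already has. The arm's error already matches the function's
// error type, so no `.into()` is needed.
fn parse_port(input: &str) -> Result<u16, ParseIntError> {
    match input.parse::<u16>() {
        Ok(port) => Ok(port),
        // `Err(e.into())` would be a ParseIntError -> ParseIntError no-op.
        Err(e) => Err(e),
    }
}

fn main() {
    assert_eq!(parse_port("7700"), Ok(7700));
    assert!(parse_port("meili").is_err());
}
```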
```diff
@@ -164,7 +163,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(IndexScheduler, AuthContr
             Ok(()) => (index_scheduler, auth_controller),
             Err(e) => {
                 std::fs::remove_dir_all(&opt.db_path)?;
-                return Err(e.into());
+                return Err(e);
             }
         }
     }
```
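The same `Err(e.into())` → `Err(e)` simplification lands in this second dump-handling branch of `setup_meilisearch`.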
```diff
@@ -232,7 +231,7 @@ fn import_dump(
         keys.push(key);
     }

-    let indexer_config = index_scheduler.indexer_config().clone();
+    let indexer_config = index_scheduler.indexer_config();

     // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
     // try to process tasks while we're trying to import the indexes.
```
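`import_dump` was cloning the indexer configuration even though it is only read afterwards; keeping the borrow that `indexer_config()` returns avoids the copy. A minimal sketch of the borrow-instead-of-clone shape, with illustrative types (the real `IndexScheduler` is far richer):

```rust
// Illustrative stand-ins for the scheduler and its configuration.
struct IndexerConfig {
    max_memory: usize,
}

struct Scheduler {
    indexer_config: IndexerConfig,
}

impl Scheduler {
    // Returning a borrow lets read-only callers skip cloning entirely.
    fn indexer_config(&self) -> &IndexerConfig {
        &self.indexer_config
    }
}

fn main() {
    let scheduler = Scheduler { indexer_config: IndexerConfig { max_memory: 512 } };
    // Before the fix: `scheduler.indexer_config().clone()` copied the data.
    // After: a shared borrow is enough.
    let config = scheduler.indexer_config();
    println!("max_memory: {} MiB", config.max_memory);
}
```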
```diff
@@ -147,7 +147,7 @@ Anonymous telemetry:\t\"Enabled\""
     }

     if let Some(instance_uid) = analytics.instance_uid() {
-        eprintln!("Instance UID:\t\t\"{}\"", instance_uid.to_string());
+        eprintln!("Instance UID:\t\t\"{}\"", instance_uid);
     }

     eprintln!();
```
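This is clippy's `to_string_in_format_args`: `{}` already formats via `Display`, so `instance_uid.to_string()` allocated an intermediate `String` only to format it again. A sketch (any `Display` type demonstrates it; the real value is an instance UUID):

```rust
use std::net::Ipv4Addr;

fn main() {
    let instance_uid = Ipv4Addr::LOCALHOST;
    // Flagged: builds a String just so `{}` can format it a second time.
    eprintln!("Instance UID:\t\t\"{}\"", instance_uid.to_string());
    // Fixed: `{}` formats the value directly, no extra allocation.
    eprintln!("Instance UID:\t\t\"{}\"", instance_uid);
}
```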
```diff
@@ -262,7 +262,7 @@ async fn document_addition(
         Err(index_scheduler::Error::FileStore(file_store::Error::IoError(e)))
             if e.kind() == ErrorKind::NotFound =>
         {
-            ()
+
         }
         Err(e) => {
             log::warn!("Unknown error happened while deleting a malformed update file with uuid {uuid}: {e}");
```
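The dropped `()` was an explicit unit value used as the match-arm result, which clippy reports as `unused_unit`; an empty block already evaluates to `()`. A reduced sketch:

```rust
fn main() {
    let result: Result<(), std::io::Error> = Ok(());
    match result {
        // Writing `Ok(()) => { () }` spells out the unit value for no
        // reason; clippy::unused_unit prefers the bare empty block.
        Ok(()) => {}
        Err(e) => eprintln!("cleanup failed: {e}"),
    }
}
```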
```diff
@@ -45,7 +45,7 @@ macro_rules! make_setting_route {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
     let task = KindWithContent::SettingsUpdate {
         index_uid,
-        new_settings,
+        new_settings: Box::new(new_settings),
         is_deletion: true,
         allow_index_creation,
     };
```
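Boxing `new_settings` here (and in the three settings hunks below) looks like a fix for clippy's `large_enum_variant`: a Rust enum is as large as its largest variant, so an inline `Settings<Unchecked>` would inflate every `KindWithContent` value, while a `Box` keeps the variant pointer-sized. A sketch with an assumed payload size, not the real struct:

```rust
// Illustrative enum; the boxed array stands in for the wide
// Settings<Unchecked> struct from the commit.
enum KindSketch {
    TaskDeletion,
    // Unboxed, a [u8; 4096] payload would make *every* KindSketch at
    // least 4096 bytes; boxed, only the heap allocation is that big.
    SettingsUpdate(Box<[u8; 4096]>),
}

fn main() {
    println!("size of KindSketch: {} bytes", std::mem::size_of::<KindSketch>());
    let _cheap = KindSketch::TaskDeletion;
    let _update = KindSketch::SettingsUpdate(Box::new([0u8; 4096]));
}
```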
```diff
@@ -84,7 +84,7 @@ macro_rules! make_setting_route {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
     let task = KindWithContent::SettingsUpdate {
         index_uid,
-        new_settings,
+        new_settings: Box::new(new_settings),
         is_deletion: false,
         allow_index_creation,
     };
```
```diff
@@ -443,7 +443,7 @@ pub async fn update_all(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
     let task = KindWithContent::SettingsUpdate {
         index_uid,
-        new_settings,
+        new_settings: Box::new(new_settings),
         is_deletion: false,
         allow_index_creation,
     };
```
```diff
@@ -474,8 +474,8 @@ pub async fn delete_all(
     let allow_index_creation = index_scheduler.filters().allow_index_creation;
     let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
     let task = KindWithContent::SettingsUpdate {
-        index_uid: index_uid,
-        new_settings,
+        index_uid,
+        new_settings: Box::new(new_settings),
         is_deletion: true,
         allow_index_creation,
     };
```
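Alongside the recurring `Box::new(new_settings)` change, `delete_all` also fixes `index_uid: index_uid`, which clippy reports as `redundant_field_names`: when a local and a struct field share a name, field-init shorthand says the same thing. Sketch:

```rust
struct SettingsTask {
    index_uid: String,
}

fn main() {
    let index_uid = String::from("movies");
    // `SettingsTask { index_uid: index_uid }` is flagged as a redundant
    // field name; the shorthand below is equivalent.
    let task = SettingsTask { index_uid };
    println!("updating settings for \"{}\"", task.index_uid);
}
```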
```diff
@@ -275,7 +275,7 @@ pub fn create_all_stats(
         limit: Some(1),
         ..Query::default()
     })?;
-    let processing_index = processing_task.first().and_then(|task| task.index_uid().clone());
+    let processing_index = processing_task.first().and_then(|task| task.index_uid());
     for (name, index) in index_scheduler.indexes()? {
         if !search_rules.is_index_authorized(&name) {
             continue;
```
```diff
@@ -286,7 +286,7 @@ pub fn create_all_stats(
         let rtxn = index.read_txn()?;
         let stats = IndexStats {
             number_of_documents: index.number_of_documents(&rtxn)?,
-            is_indexing: processing_index.as_deref().map_or(false, |index_name| name == index_name),
+            is_indexing: processing_index.map_or(false, |index_name| name == index_name),
             field_distribution: index.field_distribution(&rtxn)?,
         };
```
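The two `create_all_stats` hunks are one cleanup: `task.index_uid()` evidently already yields an `Option<&str>`, so `.clone()` merely copied a reference (clippy's `clone_on_copy`), and once the clone is gone there is nothing left for `as_deref` to do. A sketch under that assumption:

```rust
fn main() {
    // Assumption: index_uid() returns Option<&str>, as the fixed code implies.
    let index_uid: Option<&str> = Some("movies");
    // `&str` is Copy, so `.clone()` added nothing, and with an
    // Option<&str> in hand there is nothing for `as_deref` to deref.
    let processing_index = index_uid;
    let is_indexing = processing_index.map_or(false, |name| name == "movies");
    assert!(is_indexing);
}
```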
```diff
@@ -113,14 +113,14 @@ pub struct DetailsView {
     pub dump_uid: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
     #[serde(flatten)]
-    pub settings: Option<Settings<Unchecked>>,
+    pub settings: Option<Box<Settings<Unchecked>>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub indexes: Option<Vec<(String, String)>>,
 }

 impl From<Details> for DetailsView {
     fn from(details: Details) -> Self {
-        match details.clone() {
+        match details {
             Details::DocumentAdditionOrUpdate { received_documents, indexed_documents } => {
                 DetailsView {
                     received_documents: Some(received_documents),
```
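Two changes meet in this hunk: the `settings` field of `DetailsView` gets boxed for the same size reason as `KindWithContent` above, and `match details.clone()` becomes `match details`, since `from` owns `details` and never touches it again (clippy's `redundant_clone`). A reduced sketch of the match fix:

```rust
// Reduced stand-in for the commit's Details enum.
enum DetailsSketch {
    Dump { dump_uid: String },
}

fn describe(details: DetailsSketch) -> String {
    // Cloning before the match would deep-copy the String only to
    // discard the original; matching the owned value moves it instead.
    match details {
        DetailsSketch::Dump { dump_uid } => format!("dump {dump_uid}"),
    }
}

fn main() {
    let details = DetailsSketch::Dump { dump_uid: "20221001-1".into() };
    println!("{}", describe(details));
}
```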
```diff
@@ -471,7 +471,7 @@ async fn get_task(
     filters.uid = Some(vec![task_id]);

     if let Some(task) = index_scheduler.get_tasks(filters)?.first() {
-        let task_view = TaskView::from_task(&task);
+        let task_view = TaskView::from_task(task);
         Ok(HttpResponse::Ok().json(task_view))
     } else {
         Err(index_scheduler::Error::TaskNotFound(task_id).into())
```
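`.first()` already returns a reference, so `TaskView::from_task(&task)` handed over a `&&Task` that the compiler silently dereferenced; clippy calls this `needless_borrow`, and the next hunk removes the same stray `&` from `is_index_authorized(&name)`. Sketch:

```rust
fn describe(name: &str) -> String {
    format!("index \"{name}\"")
}

fn main() {
    let names = vec![String::from("movies")];
    if let Some(name) = names.first() {
        // `name` is already &String here; `describe(&name)` would pass a
        // needless double reference (clippy::needless_borrow).
        println!("{}", describe(name));
    }
}
```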
```diff
@@ -494,7 +494,7 @@ fn filter_out_inaccessible_indexes_from_query<const ACTION: u8>(
     match indexes {
         Some(indexes) => {
             for name in indexes.iter() {
-                if search_rules.is_index_authorized(&name) {
+                if search_rules.is_index_authorized(name) {
                     query = query.with_index(name.to_string());
                 }
             }
```
```diff
@@ -543,7 +543,7 @@ pub(crate) mod date_deserializer {
             DeserializeDateOption::After => {
                 let datetime = datetime
                     .checked_add(Duration::days(1))
-                    .ok_or(serde::de::Error::custom("date overflow"))?;
+                    .ok_or_else(|| serde::de::Error::custom("date overflow"))?;
                 Ok(datetime)
             }
         }
```
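`ok_or` evaluates its argument eagerly, so the custom error was constructed even when `checked_add` succeeded; clippy's `or_fun_call` suggests the lazy `ok_or_else`. The saving is small here, but the lint applies uniformly. Sketch:

```rust
fn main() {
    let datetime: Option<&str> = Some("2022-10-02T00:00:00Z");
    // Eager: the error String is built on every call, even for Some.
    let eager: Result<&str, String> = datetime.ok_or("date overflow".to_string());
    // Lazy: the closure only runs if the Option is None.
    let lazy: Result<&str, String> = datetime.ok_or_else(|| "date overflow".to_string());
    assert_eq!(eager, lazy);
}
```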
```diff
@@ -552,7 +552,7 @@ fn parse_filter(facets: &Value) -> Result<Option<Filter>, MeilisearchHttpError>
             Ok(condition)
         }
         Value::Array(arr) => parse_filter_array(arr),
-        v => Err(MeilisearchHttpError::InvalidExpression(&["Array"], v.clone()).into()),
+        v => Err(MeilisearchHttpError::InvalidExpression(&["Array"], v.clone())),
     }
 }
```
```diff
@@ -570,8 +570,7 @@ fn parse_filter_array(arr: &[Value]) -> Result<Option<Filter>, MeilisearchHttpEr
                 return Err(MeilisearchHttpError::InvalidExpression(
                     &["String"],
                     v.clone(),
-                )
-                .into())
+                ))
             }
         }
     }
```
```diff
@@ -581,8 +580,7 @@ fn parse_filter_array(arr: &[Value]) -> Result<Option<Filter>, MeilisearchHttpEr
                 return Err(MeilisearchHttpError::InvalidExpression(
                     &["String", "[String]"],
                     v.clone(),
-                )
-                .into())
+                ))
             }
         }
     }
```
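The three filter-parsing hunks above all drop an `.into()` applied to an error value that already is a `MeilisearchHttpError`, the same `useless_conversion` cleanup seen in `setup_meilisearch`; collapsing `)` plus `.into())` into `))` is also why the last two hunks shrink from eight lines to seven.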
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue