mirror of https://github.com/meilisearch/MeiliSearch (synced 2025-07-04 04:17:10 +02:00)
fixes for review
Commit dc9ca2ebc9 (parent 40d7396d90)
20 changed files with 66 additions and 64 deletions
@@ -92,7 +92,13 @@ impl DataInner {
         // convert attributes to their names
         let frequency: HashMap<_, _> = fields_frequency
             .into_iter()
-            .map(|(a, c)| (schema.name(a).unwrap().to_string(), c))
+            .filter_map(|(a, c)| {
+                if let Some(name) = schema.name(a) {
+                    return Some((name.to_string(), c));
+                } else {
+                    return None;
+                }
+            })
             .collect();

         index
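The hunk above swaps a `map` with an `unwrap()` for a `filter_map`, so an attribute id that has no name in the schema is skipped instead of panicking. A minimal standalone sketch of the same pattern, assuming a plain `HashMap` stands in for the real `Schema` type:

use std::collections::HashMap;

fn main() {
    // Hypothetical stand-in for `schema.name(attr)`: attribute id -> name.
    let names: HashMap<u16, &str> = [(0, "title"), (1, "description")].into_iter().collect();
    let fields_frequency = vec![(0u16, 3usize), (7u16, 1usize)]; // id 7 has no name

    let frequency: HashMap<String, usize> = fields_frequency
        .into_iter()
        .filter_map(|(a, c)| names.get(&a).map(|name| (name.to_string(), c)))
        .collect();

    assert_eq!(frequency.len(), 1); // the unknown attribute is dropped, no panic
    println!("{frequency:?}");
}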
@@ -2,10 +2,10 @@ use std::fmt::Display;

 use http::status::StatusCode;
 use log::{error, warn};
+use meilisearch_core::{FstError, HeedError};
 use serde::{Deserialize, Serialize};
 use tide::IntoResponse;
 use tide::Response;
-use meilisearch_core::{HeedError, FstError};

 use crate::helpers::meilisearch::Error as SearchError;

@@ -206,7 +206,8 @@ impl<'a> SearchBuilder<'a> {
         query_builder.with_fetch_timeout(self.timeout);

         let start = Instant::now();
-        let docs = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
+        let docs =
+            query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
         let time_ms = start.elapsed().as_millis() as usize;

         let mut hits = Vec::with_capacity(self.limit);
@@ -115,7 +115,7 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
         .delete(|ctx| into_response(stop_words::delete(ctx)));

     app.at("/indexes/:index/stats")
-        .get(|ctx| into_response(stats::index_stat(ctx)));
+        .get(|ctx| into_response(stats::index_stats(ctx)));

     app.at("/keys/")
         .get(|ctx| into_response(key::list(ctx)))
@@ -64,7 +64,7 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
         let attributes_to_crop = schema
             .displayed_name()
             .iter()
-            .map(|attr| ((*attr).to_string(), crop_length))
+            .map(|attr| (attr.to_string(), crop_length))
             .collect();
         search_builder.attributes_to_crop(attributes_to_crop);
     } else {
@@ -81,7 +81,7 @@ pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
             schema
                 .displayed_name()
                 .iter()
-                .map(|s| (*s).to_string())
+                .map(|s| s.to_string())
                 .collect()
         } else {
            attributes_to_highlight
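The two hunks above drop an explicit dereference before `.to_string()`. A small standalone sketch (the attribute names are made up for illustration) showing that `.to_string()` on a double reference to a `str` yields the same owned string, since `Display` is implemented for references:

fn main() {
    let attrs: Vec<&str> = vec!["title", "description"];
    // `iter()` yields `&&str`; `to_string` still resolves through `Display`.
    let owned: Vec<String> = attrs.iter().map(|s| s.to_string()).collect();
    assert_eq!(owned, vec!["title".to_string(), "description".to_string()]);
}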
@@ -21,7 +21,7 @@ struct IndexStatsResponse {
     fields_frequency: HashMap<String, usize>,
 }

-pub async fn index_stat(ctx: Request<Data>) -> SResult<Response> {
+pub async fn index_stats(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(Admin)?;
     let index_uid = ctx.url_param("index")?;
     let index = ctx.index()?;