Mirror of https://github.com/meilisearch/MeiliSearch (synced 2025-07-03 20:07:09 +02:00)
Replace logging timer by spans
parent f4a6261dea
commit 25f64ce7df

5 changed files with 15 additions and 30 deletions
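The commit swaps the `logging_timer::time` attribute macro, which logged each function's elapsed time on return, for `tracing` spans, which carry the same timing information but nest hierarchically and can be filtered per target. A minimal sketch of the new attribute style on a toy function (the function itself is hypothetical, not from this commit):

// Toy function (hypothetical, not from this commit) showing the new style.
// `level = "trace"` keeps the span below the default log levels, `skip_all`
// avoids recording the arguments as span fields, and `target` groups the
// span under a filterable name such as "search".
#[tracing::instrument(level = "trace", skip_all, target = "search")]
fn resolve(query: &str) -> Vec<u32> {
    // The span opens when the function is entered and closes when it
    // returns, so the span's lifetime is the measured duration.
    query.bytes().map(u32::from).collect()
}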
@@ -15,7 +15,7 @@ pub struct BucketSortOutput {
 
 // TODO: would probably be good to regroup some of these inside of a struct?
 #[allow(clippy::too_many_arguments)]
-#[logging_timer::time]
+#[tracing::instrument(level = "trace", skip_all, target = "search::bucket_sort")]
 pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
     ctx: &mut SearchContext<'ctx>,
     mut ranking_rules: Vec<BoxRankingRule<'ctx, Q>>,
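The per-call timer log lines do not disappear for good: a subscriber can report a duration whenever a span closes. A minimal setup sketch, assuming the `tracing-subscriber` crate with its `env-filter` feature (none of this setup is part of the commit):

use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter};

fn main() {
    tracing_subscriber::fmt()
        // Emit an event each time a span closes; the event carries
        // `time.busy` / `time.idle`, standing in for the old timer logs.
        .with_span_events(FmtSpan::CLOSE)
        // Enable trace-level spans for the "search" target and its
        // children such as "search::bucket_sort" (prefix match).
        .with_env_filter(EnvFilter::new("search=trace"))
        .init();
}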
@@ -191,7 +191,7 @@ fn resolve_maximally_reduced_query_graph(
     Ok(docids)
 }
 
-#[logging_timer::time]
+#[tracing::instrument(level = "trace", skip_all, target = "search")]
 fn resolve_universe(
     ctx: &mut SearchContext,
     initial_universe: &RoaringBitmap,
@@ -557,7 +557,7 @@ pub fn execute_vector_search(
 }
 
 #[allow(clippy::too_many_arguments)]
-#[logging_timer::time]
+#[tracing::instrument(level = "trace", skip_all, target = "search")]
 pub fn execute_search(
     ctx: &mut SearchContext,
     query: Option<&str>,
@@ -577,6 +577,9 @@ pub fn execute_search(
 
     let mut located_query_terms = None;
     let query_terms = if let Some(query) = query {
+        let span = tracing::trace_span!(target: "search::tokens", "tokenizer_builder");
+        let entered = span.enter();
+
         // We make sure that the analyzer is aware of the stop words
         // this ensures that the query builder is able to properly remove them.
         let mut tokbuilder = TokenizerBuilder::new();
@@ -605,7 +608,12 @@ pub fn execute_search(
         }
 
         let tokenizer = tokbuilder.build();
+        drop(entered);
+
+        let span = tracing::trace_span!(target: "search::tokens", "tokenize");
+        let entered = span.enter();
         let tokens = tokenizer.tokenize(query);
+        drop(entered);
 
         let query_terms = located_query_terms_from_tokens(ctx, tokens, words_limit)?;
        if query_terms.is_empty() {
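Inside `execute_search` the attribute macro cannot be used, because the tokenizer-building and tokenizing phases each cover only part of the function body, so the spans are opened and closed by hand: `enter()` returns a guard, and dropping the guard ends the timed region. The same pattern on standalone toy code (hypothetical, not from the commit):

fn tokenize_demo(query: &str) -> Vec<String> {
    // Open a span for this phase only; it stays current on this thread
    // until the `entered` guard is dropped.
    let span = tracing::trace_span!(target: "search::tokens", "tokenize");
    let entered = span.enter();
    let tokens = query.split_whitespace().map(String::from).collect();
    drop(entered); // close the span explicitly, as the commit does

    tokens
}

`Span::in_scope(|| …)` would scope the guard automatically, but the explicit `drop` mirrors the commit's style of ending one phase before opening the next.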
@@ -9,7 +9,7 @@ use crate::search::new::query_term::{Lazy, Phrase, QueryTerm};
 use crate::{Result, SearchContext, MAX_WORD_LENGTH};
 
 /// Convert the tokenised search query into a list of located query terms.
-#[logging_timer::time]
+#[tracing::instrument(level = "trace", skip_all, target = "search::query")]
 pub fn located_query_terms_from_tokens(
     ctx: &mut SearchContext,
     query: NormalizedTokenIter,