Structures a bunch of logs

parent d71b77f18b
commit 08af0e690c
14 changed files with 77 additions and 67 deletions

Across these files, the commit replaces unstructured debug! messages with structured debug_span! calls from the tracing crate, turning interpolated strings into named fields.
@@ -15,7 +15,7 @@ use std::io::BufReader;
 use crossbeam_channel::Sender;
 use rayon::prelude::*;
-use tracing::debug;
+use tracing::{debug_span};
 
 use self::extract_docid_word_positions::extract_docid_word_positions;
 use self::extract_facet_number_docids::extract_facet_number_docids;
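For context, here is a minimal sketch of the two macros being swapped, assuming the tracing 0.1 crate; the function and its argument are illustrative, not Meilisearch code:

    use tracing::{debug, debug_span};

    fn merge_database(name: &str) {
        // Old style: a one-off event with the database name interpolated
        // into the message string.
        debug!("merge {} database", name);

        // New style: a span named "merge" with a structured `database`
        // field that subscribers can filter on. The span is active only
        // while the `_entered` guard is alive.
        let span = debug_span!("merge", database = name);
        let _entered = span.enter();
    }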
@@ -114,7 +114,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug!("merge {} database", "facet-id-exists-docids");
+            debug_span!("merge", database = "facet-id-exists-docids");
             match facet_exists_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetExistsDocids(reader)));
@@ -130,7 +130,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            debug!("merge {} database", "facet-id-is-null-docids");
+            debug_span!("merge", database = "facet-id-is-null-docids");
             match facet_is_null_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsNullDocids(reader)));
@@ -146,7 +146,7 @@ pub(crate) fn data_from_obkv_documents(
     {
         let lmdb_writer_sx = lmdb_writer_sx.clone();
        rayon::spawn(move || {
-            debug!("merge {} database", "facet-id-is-empty-docids");
+            debug_span!("merge", database = "facet-id-is-empty-docids");
             match facet_is_empty_docids_chunks.merge(merge_deladd_cbo_roaring_bitmaps, &indexer) {
                 Ok(reader) => {
                     let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsEmptyDocids(reader)));
@@ -272,7 +272,7 @@ fn spawn_extraction_task<FE, FS, M>(
         Ok(chunks) => {
             let child_span = tracing::trace_span!(target: "indexing::details", parent: &current_span, "merge_multiple_chunks");
             let _entered = child_span.enter();
-            debug!("merge {} database", name);
+            debug_span!("merge", database = name);
             puffin::profile_scope!("merge_multiple_chunks", name);
             let reader = chunks.merge(merge_fn, &indexer);
             let _ = lmdb_writer_sx.send(reader.map(serialize_fn));
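One subtlety in the hunk above: debug_span! only creates a span; it does not wrap the code that follows unless the span is entered, which is what the surrounding child_span.enter() guard already does for the trace span. A minimal sketch of that guard pattern, assuming tracing 0.1 (current_span stands in for whatever parent span the caller captured):

    use tracing::{trace_span, Span};

    fn merge_multiple_chunks(current_span: &Span) {
        let child_span = trace_span!(
            target: "indexing::details",
            parent: current_span,
            "merge_multiple_chunks"
        );
        // Work done while this guard is alive is attributed to
        // `child_span` by the active subscriber.
        let _entered = child_span.enter();
        // ... the chunk merge itself would run here ...
    }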

@@ -17,7 +17,7 @@ use rand::SeedableRng;
 use roaring::RoaringBitmap;
 use serde::{Deserialize, Serialize};
 use slice_group_by::GroupBy;
-use tracing::debug;
+use tracing::{debug_span};
 use typed_chunk::{write_typed_chunk_into_index, TypedChunk};
 
 use self::enrich::enrich_documents_batch;
@@ -506,10 +506,7 @@ where
                 documents_seen: documents_seen_count as usize,
                 total_documents: documents_count,
             });
-            debug!(
-                "We have seen {} documents on {} total document so far",
-                documents_seen_count, documents_count
-            );
+            debug_span!("Seen", documents = documents_seen_count, total = documents_count);
         }
         if is_merged_database {
             databases_seen += 1;
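The last hunk collapses a four-line formatted message into a single span with named fields. A standalone sketch of how those fields render, assuming the tracing and tracing-subscriber crates (the subscriber setup here is illustrative, not what Meilisearch configures):

    use tracing::debug_span;
    use tracing_subscriber::fmt::format::FmtSpan;

    fn main() {
        tracing_subscriber::fmt()
            .with_max_level(tracing::Level::DEBUG)
            .with_span_events(FmtSpan::NEW) // log a line when each span is created
            .init();

        let (documents_seen_count, documents_count) = (250u64, 1000u64);

        // Prints roughly: DEBUG Seen{documents=250 total=1000}: new
        let span = debug_span!("Seen", documents = documents_seen_count, total = documents_count);
        let _entered = span.enter();
    }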