Count exported documents by index name, not pattern

Mubelotix 2025-07-01 11:14:59 +02:00
parent 9cfbef478e
commit 259fc067d3
GPG key ID: 0406DF6C3A69B942 (no known key found for this signature in database)
2 changed files with 11 additions and 12 deletions
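
In a nutshell: the export task used to key its per-index details by the IndexUidPattern that selected each index, but a single pattern can match several indexes, so their document counts overwrote one another under the same key. The diff below keys the map by the concrete index uid instead. A minimal sketch of the difference, with the scheduler's types reduced to plain strings and counts (the data and names here are illustrative, not the actual Meilisearch code):

use std::collections::BTreeMap;

// Minimal sketch (hypothetical data, simplified types): one pattern such as
// "movies-*" can match several indexes. A map keyed by the pattern collapses
// them into a single entry, while a map keyed by the index name keeps one
// document count per exported index.
fn main() {
    // (pattern, index uid, exported document count)
    let exported = [
        ("movies-*", "movies-fr", 1_200_u64),
        ("movies-*", "movies-en", 3_400_u64),
    ];

    // Keyed by pattern: the second insert overwrites the first entry.
    let by_pattern: BTreeMap<&str, u64> =
        exported.iter().map(|(pattern, _, count)| (*pattern, *count)).collect();
    assert_eq!(by_pattern.len(), 1);

    // Keyed by index uid: one entry, and one count, per exported index.
    let by_uid: BTreeMap<&str, u64> =
        exported.iter().map(|(_, uid, count)| (*uid, *count)).collect();
    assert_eq!(by_uid.len(), 2);

    println!("by pattern: {by_pattern:?}");
    println!("by uid: {by_uid:?}");
}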


@@ -30,7 +30,7 @@ impl IndexScheduler {
         payload_size: Option<&Byte>,
         indexes: &BTreeMap<IndexUidPattern, ExportIndexSettings>,
         progress: Progress,
-    ) -> Result<BTreeMap<IndexUidPattern, DetailsExportIndexSettings>> {
+    ) -> Result<BTreeMap<String, DetailsExportIndexSettings>> {
         #[cfg(test)]
         self.maybe_fail(crate::test_utils::FailureLocation::ProcessExport)?;
@@ -48,7 +48,7 @@ impl IndexScheduler {
         let mut output = BTreeMap::new();
         let agent = ureq::AgentBuilder::new().timeout(Duration::from_secs(5)).build();
         let must_stop_processing = self.scheduler.must_stop_processing.clone();
-        for (i, (pattern, uid, export_settings)) in indexes.iter().enumerate() {
+        for (i, (_pattern, uid, export_settings)) in indexes.iter().enumerate() {
             if must_stop_processing.get() {
                 return Err(Error::AbortedTask);
             }
@@ -63,9 +63,8 @@ impl IndexScheduler {
             let index = self.index(uid)?;
             let index_rtxn = index.read_txn()?;
-            let url = format!("{base_url}/indexes/{uid}");
             // First, check if the index already exists
+            let url = format!("{base_url}/indexes/{uid}");
             let response = retry(&must_stop_processing, || {
                 let mut request = agent.get(&url);
                 if let Some(api_key) = api_key {
@@ -158,7 +157,7 @@ impl IndexScheduler {
             progress.update_progress(progress_step);
             output.insert(
-                (*pattern).clone(),
+                uid.clone(),
                 DetailsExportIndexSettings {
                     settings: (*export_settings).clone(),
                     matched_documents: Some(total_documents as u64),


@@ -289,12 +289,12 @@ impl KindWithContent {
             }),
             KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }),
             KindWithContent::SnapshotCreation => None,
-            KindWithContent::Export { url, api_key, payload_size, indexes } => {
+            KindWithContent::Export { url, api_key, payload_size, indexes: _ } => {
                 Some(Details::Export {
                     url: url.clone(),
                     api_key: api_key.clone(),
                     payload_size: *payload_size,
-                    indexes: indexes.iter().map(|(p, s)| (p.clone(), s.clone().into())).collect(),
+                    indexes: BTreeMap::new(),
                 })
             }
             KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
@@ -363,12 +363,12 @@ impl KindWithContent {
             }),
             KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }),
             KindWithContent::SnapshotCreation => None,
-            KindWithContent::Export { url, api_key, payload_size, indexes } => {
+            KindWithContent::Export { url, api_key, payload_size, indexes: _ } => {
                 Some(Details::Export {
                     url: url.clone(),
                     api_key: api_key.clone(),
                     payload_size: *payload_size,
-                    indexes: indexes.iter().map(|(p, s)| (p.clone(), s.clone().into())).collect(),
+                    indexes: BTreeMap::new(),
                 })
             }
             KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
@@ -419,12 +419,12 @@ impl From<&KindWithContent> for Option<Details> {
             }),
             KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }),
             KindWithContent::SnapshotCreation => None,
-            KindWithContent::Export { url, api_key, payload_size, indexes } => {
+            KindWithContent::Export { url, api_key, payload_size, indexes: _ } => {
                 Some(Details::Export {
                     url: url.clone(),
                     api_key: api_key.clone(),
                     payload_size: *payload_size,
-                    indexes: indexes.iter().map(|(p, s)| (p.clone(), s.clone().into())).collect(),
+                    indexes: BTreeMap::new(),
                 })
             }
             KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
@@ -696,7 +696,7 @@ pub enum Details {
         url: String,
         api_key: Option<String>,
         payload_size: Option<Byte>,
-        indexes: BTreeMap<IndexUidPattern, DetailsExportIndexSettings>,
+        indexes: BTreeMap<String, DetailsExportIndexSettings>,
     },
     UpgradeDatabase {
         from: (u32, u32, u32),
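
Because Details::Export is now keyed by the concrete index name, the placeholder details created when the task is registered can no longer be derived from the requested patterns up front, which is why the default details above start from an empty map, presumably to be replaced by the per-index map that process_export returns once the export has run. For orientation, a reduced sketch of the resulting shape (stand-in types, not the actual Meilisearch definitions):

use std::collections::BTreeMap;

// Stand-ins for the real types: only the parts touched by this commit are kept.
#[derive(Debug)]
struct DetailsExportIndexSettings {
    matched_documents: Option<u64>,
}

// `indexes` is keyed by the concrete index uid (a String) rather than by the
// IndexUidPattern that selected it, so an export that expanded one pattern
// into several indexes reports one entry, and one document count, per index.
#[derive(Debug)]
struct ExportDetails {
    url: String,
    payload_size: Option<u64>,
    indexes: BTreeMap<String, DetailsExportIndexSettings>,
}

fn main() {
    let mut indexes = BTreeMap::new();
    indexes.insert(
        "movies-fr".to_string(),
        DetailsExportIndexSettings { matched_documents: Some(1_200) },
    );
    indexes.insert(
        "movies-en".to_string(),
        DetailsExportIndexSettings { matched_documents: Some(3_400) },
    );

    let details = ExportDetails {
        url: "https://replica.example.com".to_string(), // illustrative URL
        payload_size: None,
        indexes,
    };
    println!("{details:#?}");
}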