Mirror of https://github.com/meilisearch/MeiliSearch

Support task cancelation

commit ed96556296
parent ba84865dbc

@@ -15,6 +15,7 @@ use meilisearch_types::tasks::ExportIndexSettings;
 use serde::Deserialize;
 use ureq::{json, Agent};
 
+use super::MustStopProcessing;
 use crate::processing::AtomicDocumentStep;
 use crate::{Error, IndexScheduler, Result};
 
@@ -41,9 +42,8 @@ impl IndexScheduler {
             .collect();
 
         let agent: Agent = ureq::AgentBuilder::new().timeout(Duration::from_secs(5)).build();
+        let must_stop_processing = self.scheduler.must_stop_processing.clone();
         for (i, (uid, settings)) in indexes.iter().enumerate() {
-            let must_stop_processing = self.scheduler.must_stop_processing.clone();
-
             if must_stop_processing.get() {
                 return Err(Error::AbortedTask);
             }
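
For readers outside the index-scheduler crate: must_stop_processing is the scheduler's shared cancellation flag. A minimal sketch of such a flag, assuming it wraps an Arc<AtomicBool> (the real MustStopProcessing type lives elsewhere in the crate and may differ), shows why cloning the handle once, before the loop, is enough for later cancellations to be observed:

use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

// Illustrative stand-in for the scheduler's MustStopProcessing flag: every
// clone shares the same atomic bool, so a handle cloned before the loop
// still observes a cancellation requested while the loop is running.
#[derive(Clone, Default)]
pub struct MustStopProcessing(Arc<AtomicBool>);

impl MustStopProcessing {
    pub fn get(&self) -> bool {
        self.0.load(Ordering::Relaxed)
    }

    pub fn must_stop(&self) {
        self.0.store(true, Ordering::Relaxed);
    }
}

Because every clone observes the same atomic, hoisting the clone() above the for loop, as the hunk does, changes nothing about cancellation semantics; it only avoids one clone per exported index.
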
@@ -59,9 +59,9 @@ impl IndexScheduler {
             let index_rtxn = index.read_txn()?;
 
             // Send the primary key
-            let primary_key = index.primary_key(&index_rtxn).unwrap();
+            let primary_key = index.primary_key(&index_rtxn)?;
             let url = format!("{base_url}/indexes");
-            retry(|| {
+            retry(&must_stop_processing, || {
                 let mut request = agent.post(&url);
                 if let Some(api_key) = api_key {
                     request = request.set("Authorization", &format!("Bearer {api_key}"));
@@ -79,7 +79,7 @@ impl IndexScheduler {
             }
             // Retry logic for sending settings
             let url = format!("{base_url}/indexes/{uid}/settings");
-            retry(|| {
+            retry(&must_stop_processing, || {
                 let mut request = agent.patch(&url);
                 if let Some(api_key) = api_key {
                     request = request.set("Authorization", &format!("Bearer {api_key}"));
@@ -115,6 +115,8 @@ impl IndexScheduler {
                 progress.update_progress(progress_step);
 
                 let limit = 50 * 1024 * 1024; // 50 MiB
+                let documents_url = format!("{base_url}/indexes/{uid}/documents");
+
                 let mut buffer = Vec::new();
                 let mut tmp_buffer = Vec::new();
                 for (i, docid) in universe.into_iter().enumerate() {
@@ -185,7 +187,14 @@ impl IndexScheduler {
                         .map_err(|e| Error::from_milli(e.into(), Some(uid.to_string())))?;
 
                     if buffer.len() + tmp_buffer.len() > limit {
-                        post_serialized_documents(&agent, base_url, uid, api_key, &buffer).unwrap();
+                        retry(&must_stop_processing, || {
+                            let mut request = agent.post(&documents_url);
+                            request = request.set("Content-Type", "application/x-ndjson");
+                            if let Some(api_key) = api_key {
+                                request = request.set("Authorization", &(format!("Bearer {api_key}")));
+                            }
+                            request.send_bytes(&buffer).map_err(into_backoff_error)
+                        })?;
                         buffer.clear();
                     }
                     buffer.extend_from_slice(&tmp_buffer);
@@ -195,7 +204,14 @@ impl IndexScheduler {
                     }
                 }
 
-                post_serialized_documents(&agent, base_url, uid, api_key, &buffer).unwrap();
+                retry(&must_stop_processing, || {
+                    let mut request = agent.post(&documents_url);
+                    request = request.set("Content-Type", "application/x-ndjson");
+                    if let Some(api_key) = api_key {
+                        request = request.set("Authorization", &(format!("Bearer {api_key}")));
+                    }
+                    request.send_bytes(&buffer).map_err(into_backoff_error)
+                })?;
                 step.store(total_documents, atomic::Ordering::Relaxed);
             }
 
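
The two inlined upload sites above follow one batching pattern: each document is serialized into tmp_buffer, buffer is flushed as NDJSON whenever appending would exceed the 50 MiB limit, and the final partial batch is flushed once the loop ends. A generic sketch of that pattern (flush_in_batches and its parameters are illustrative names, not code from the crate):

// Generic accumulate-and-flush sketch mirroring the export loop: send
// stands in for the retried POST to the documents endpoint.
fn flush_in_batches<E>(
    docs: impl IntoIterator<Item = Vec<u8>>,
    limit: usize,
    mut send: impl FnMut(&[u8]) -> Result<(), E>,
) -> Result<(), E> {
    let mut buffer = Vec::new();
    for tmp_buffer in docs {
        // Flush before the buffer would grow past the limit.
        if buffer.len() + tmp_buffer.len() > limit {
            send(&buffer)?;
            buffer.clear();
        }
        buffer.extend_from_slice(&tmp_buffer);
    }
    // Final partial batch, flushed after the loop.
    send(&buffer)?;
    Ok(())
}
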
@@ -203,10 +219,14 @@ impl IndexScheduler {
     }
 }
 
-fn retry<F>(send_request: F) -> Result<ureq::Response>
+fn retry<F>(must_stop_processing: &MustStopProcessing, send_request: F) -> Result<ureq::Response>
 where
     F: Fn() -> Result<ureq::Response, backoff::Error<ureq::Error>>,
 {
+    if must_stop_processing.get() {
+        return Err(Error::AbortedTask);
+    }
+
     match backoff::retry(ExponentialBackoff::default(), || send_request()) {
         Ok(response) => Ok(response),
         Err(backoff::Error::Permanent(e)) => Err(ureq_error_into_error(e)),
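
Two details of the new retry are easy to miss: the cancellation check runs once per call, before the backoff loop starts, so a cancellation that arrives while backoff is sleeping is only observed by the next retry call; and the closure-based signature is what lets the call sites above capture documents_url and the buffer directly. A self-contained sketch with the same control flow, using the illustrative flag above and String errors in place of the crate's Error and ureq types:

use backoff::ExponentialBackoff;

// Simplified version of the patched retry(): abort check first, then
// exponential backoff around the request closure.
fn retry_sketch<F>(must_stop_processing: &MustStopProcessing, send_request: F) -> Result<String, String>
where
    F: Fn() -> Result<String, backoff::Error<String>>,
{
    // Checked once per call; not re-checked between individual retries.
    if must_stop_processing.get() {
        return Err("aborted task".to_string());
    }
    match backoff::retry(ExponentialBackoff::default(), || send_request()) {
        Ok(response) => Ok(response),
        Err(backoff::Error::Permanent(e)) => Err(e),
        Err(backoff::Error::Transient { err, .. }) => Err(err),
    }
}

In the patched function the same branches convert the ureq error back into the scheduler's Error via ureq_error_into_error, as the hunk shows.
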
@@ -214,24 +234,6 @@ where
     }
 }
 
-fn post_serialized_documents(
-    agent: &Agent,
-    base_url: &str,
-    uid: &str,
-    api_key: Option<&str>,
-    buffer: &[u8],
-) -> Result<ureq::Response> {
-    let url = format!("{base_url}/indexes/{uid}/documents");
-    retry(|| {
-        let mut request = agent.post(&url);
-        request = request.set("Content-Type", "application/x-ndjson");
-        if let Some(api_key) = api_key {
-            request = request.set("Authorization", &(format!("Bearer {api_key}")));
-        }
-        request.send_bytes(buffer).map_err(into_backoff_error)
-    })
-}
-
 fn into_backoff_error(err: ureq::Error) -> backoff::Error<ureq::Error> {
     match err {
         // Those code status must trigger an automatic retry
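
The final hunk cuts off just inside into_backoff_error, whose body this commit does not change. For orientation only, a helper of this shape typically maps retryable HTTP statuses to transient backoff errors and everything else to permanent ones; the status list below is an assumption for illustration, not taken from the commit:

// Illustrative sketch of the transient/permanent split; the exact set of
// retryable status codes is assumed, not taken from the commit.
fn into_backoff_error_sketch(err: ureq::Error) -> backoff::Error<ureq::Error> {
    match err {
        // Status codes worth retrying automatically.
        ureq::Error::Status(408 | 429 | 500 | 502 | 503 | 504, _) => {
            backoff::Error::Transient { err, retry_after: None }
        }
        // Everything else, including transport failures, is permanent.
        _ => backoff::Error::Permanent(err),
    }
}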