Mirror of https://github.com/meilisearch/MeiliSearch (synced 2025-07-04 04:17:10 +02:00)

Commit 446f1f31e0: rename Succeded to Succeeded
Parent: ab39df9693
10 changed files with 111 additions and 17 deletions
@@ -4,9 +4,10 @@ use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
 use uuid::Uuid;
 
+use super::v4::{Task, TaskEvent};
 use crate::index::{Settings, Unchecked};
 use crate::index_resolver::IndexUid;
-use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult};
+use crate::tasks::task::{DocumentDeletion, TaskContent, TaskId, TaskResult};
 
 use super::v2;
 

@@ -187,7 +188,7 @@ impl From<(UpdateStatus, String, TaskId)> for Task {
         // Dummy task
         let mut task = Task {
             id: task_id,
-            index_uid: Some(IndexUid::new(uid).unwrap()),
+            index_uid: IndexUid::new(uid).unwrap(),
             content: TaskContent::IndexDeletion,
             events: Vec::new(),
         };

@@ -1 +1,67 @@
+use meilisearch_error::ResponseError;
+use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
+
+use crate::tasks::batch::BatchId;
+use crate::tasks::task::{TaskContent, TaskEvent as NewTaskEvent, TaskId, TaskResult};
+use crate::IndexUid;
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Task {
+    pub id: TaskId,
+    pub index_uid: IndexUid,
+    pub content: TaskContent,
+    pub events: Vec<TaskEvent>,
+}
+
+impl From<Task> for crate::tasks::task::Task {
+    fn from(other: Task) -> Self {
+        Self {
+            id: other.id,
+            index_uid: Some(other.index_uid),
+            content: other.content,
+            events: other.events.into_iter().map(Into::into).collect(),
+        }
+    }
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub enum TaskEvent {
+    Created(#[serde(with = "time::serde::rfc3339")] OffsetDateTime),
+    Batched {
+        #[serde(with = "time::serde::rfc3339")]
+        timestamp: OffsetDateTime,
+        batch_id: BatchId,
+    },
+    Processing(#[serde(with = "time::serde::rfc3339")] OffsetDateTime),
+    Succeded {
+        result: TaskResult,
+        #[serde(with = "time::serde::rfc3339")]
+        timestamp: OffsetDateTime,
+    },
+    Failed {
+        error: ResponseError,
+        #[serde(with = "time::serde::rfc3339")]
+        timestamp: OffsetDateTime,
+    },
+}
+
+impl From<TaskEvent> for NewTaskEvent {
+    fn from(other: TaskEvent) -> Self {
+        match other {
+            TaskEvent::Created(x) => NewTaskEvent::Created(x),
+            TaskEvent::Batched {
+                timestamp,
+                batch_id,
+            } => NewTaskEvent::Batched {
+                timestamp,
+                batch_id,
+            },
+            TaskEvent::Processing(x) => NewTaskEvent::Processing(x),
+            TaskEvent::Succeded { result, timestamp } => {
+                NewTaskEvent::Succeeded { result, timestamp }
+            }
+            TaskEvent::Failed { error, timestamp } => NewTaskEvent::Failed { error, timestamp },
+        }
+    }
+}

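Note on the hunk above: the compat `TaskEvent` deliberately keeps the old `Succeded` spelling so that task events already written into existing v4 dumps still deserialize, while the `From` impl maps them onto the renamed `Succeeded` variant of the live enum. Below is a minimal, self-contained sketch of that round trip; it assumes serde's default external tagging and uses a placeholder `MiniResult` plus plain string timestamps instead of the real `TaskResult`, `ResponseError`, and `OffsetDateTime`.

// Sketch only: stand-ins for the real meilisearch-lib types.
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct MiniResult {
    number_of_documents: u64,
}

// Old on-disk shape, with the misspelled variant, mirroring compat::v4::TaskEvent.
#[derive(Debug, Deserialize)]
enum OldTaskEvent {
    Succeded { result: MiniResult, timestamp: String },
}

// New in-memory shape, mirroring tasks::task::TaskEvent after the rename.
#[derive(Debug, Serialize)]
enum NewTaskEvent {
    Succeeded { result: MiniResult, timestamp: String },
}

impl From<OldTaskEvent> for NewTaskEvent {
    fn from(old: OldTaskEvent) -> Self {
        match old {
            OldTaskEvent::Succeded { result, timestamp } => {
                NewTaskEvent::Succeeded { result, timestamp }
            }
        }
    }
}

fn main() -> serde_json::Result<()> {
    // One event as an old dump would have serialized it (external tag, old spelling).
    let raw = r#"{"Succeded":{"result":{"number_of_documents":42},"timestamp":"2022-05-30T00:00:00Z"}}"#;
    let old: OldTaskEvent = serde_json::from_str(raw)?;
    // Re-serialized through the new enum, the tag becomes "Succeeded".
    println!("{}", serde_json::to_string(&NewTaskEvent::from(old))?);
    Ok(())
}

Running this prints the event re-tagged as "Succeeded", which is the rename the commit applies to the live task type while leaving the dump-compat spelling untouched.
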
@@ -9,11 +9,11 @@ use log::info;
 use tempfile::tempdir;
 use uuid::Uuid;
 
-use crate::dump::compat::v3;
+use crate::dump::compat::{self, v3};
 use crate::dump::Metadata;
 use crate::index_resolver::meta_store::{DumpEntry, IndexMeta};
 use crate::options::IndexerOpts;
-use crate::tasks::task::{Task, TaskId};
+use crate::tasks::task::TaskId;
 
 /// dump structure for V3:
 /// .

@@ -124,7 +124,7 @@ fn patch_updates(
                 .clone();
             serde_json::to_writer(
                 &mut dst_file,
-                &Task::from((entry.update, name, task_id as TaskId)),
+                &compat::v4::Task::from((entry.update, name, task_id as TaskId)),
             )?;
             dst_file.write_all(b"\n")?;
             Ok(())

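The change above makes `patch_updates` in the v3 loader emit `compat::v4::Task` values, built from the tuple `(entry.update, name, task_id as TaskId)` via the `From<(UpdateStatus, String, TaskId)>` impl touched in the first file. A rough stand-alone sketch of that tuple-conversion pattern, with hypothetical `OldUpdate` and `MiniTask` types standing in for the real `UpdateStatus` and `Task`:

// Hypothetical stand-ins; the real conversion lives in
// impl From<(UpdateStatus, String, TaskId)> for Task.
type TaskId = u64;

#[derive(Debug)]
struct OldUpdate {
    description: String,
}

#[derive(Debug)]
struct MiniTask {
    id: TaskId,
    index_uid: String,
    description: String,
}

impl From<(OldUpdate, String, TaskId)> for MiniTask {
    fn from((update, index_uid, id): (OldUpdate, String, TaskId)) -> Self {
        MiniTask {
            id,
            index_uid,
            description: update.description,
        }
    }
}

fn main() {
    let update = OldUpdate { description: "documents addition".into() };
    // Same call shape as &compat::v4::Task::from((entry.update, name, task_id as TaskId)).
    let task = MiniTask::from((update, "movies".to_string(), 12 as TaskId));
    println!("{task:?}");
}
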
@@ -1,12 +1,14 @@
-use std::fs;
+use std::fs::{self, create_dir_all, File};
+use std::io::Write;
 use std::path::Path;
 
 use fs_extra::dir::{self, CopyOptions};
 use log::info;
 use tempfile::tempdir;
 
-use crate::dump::Metadata;
+use crate::dump::{compat, Metadata};
 use crate::options::IndexerOpts;
+use crate::tasks::task::Task;
 
 pub fn load_dump(
     meta: Metadata,

@@ -38,7 +40,7 @@ pub fn load_dump(
     )?;
 
     // Updates
-    dir::copy(src.as_ref().join("updates"), patched_dir.path(), &options)?;
+    patch_updates(&src, &patched_dir)?;
 
     // Keys
     if src.as_ref().join("keys").exists() {

@@ -54,3 +56,26 @@ pub fn load_dump(
         indexing_options,
     )
 }
+
+fn patch_updates(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> anyhow::Result<()> {
+    let updates_path = src.as_ref().join("updates/data.jsonl");
+    let output_updates_path = dst.as_ref().join("updates/data.jsonl");
+    create_dir_all(output_updates_path.parent().unwrap())?;
+    let updates_file = File::open(updates_path)?;
+    let mut output_update_file = File::create(output_updates_path)?;
+
+    serde_json::Deserializer::from_reader(updates_file)
+        .into_iter::<compat::v4::Task>()
+        .try_for_each(|task| -> anyhow::Result<()> {
+            let task: Task = task?.into();
+
+            serde_json::to_writer(&mut output_update_file, &task)?;
+            output_update_file.write_all(b"\n")?;
+
+            Ok(())
+        })?;
+
+    output_update_file.flush()?;
+
+    Ok(())
+}

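The new `patch_updates` above replaces the plain directory copy of `updates` with a streaming rewrite: each line of `updates/data.jsonl` is deserialized as a `compat::v4::Task`, converted into the current `Task`, and written back as one JSON object per line. A self-contained sketch of the same pattern, with placeholder `OldRecord`/`NewRecord` types instead of the real task types, and with `serde`, `serde_json`, and `anyhow` assumed as dependencies as in the original:

// Minimal sketch of the JSONL streaming-rewrite pattern used by patch_updates.
use std::fs::File;
use std::io::Write;
use std::path::Path;

use serde::{Deserialize, Serialize};

#[derive(Deserialize)]
struct OldRecord {
    id: u64,
}

#[derive(Serialize)]
struct NewRecord {
    id: u64,
}

impl From<OldRecord> for NewRecord {
    fn from(old: OldRecord) -> Self {
        NewRecord { id: old.id }
    }
}

fn rewrite_jsonl(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> anyhow::Result<()> {
    let input = File::open(src)?;
    let mut output = File::create(dst)?;

    // serde_json's stream deserializer yields one value per JSON document,
    // which matches the one-object-per-line layout of data.jsonl.
    serde_json::Deserializer::from_reader(input)
        .into_iter::<OldRecord>()
        .try_for_each(|record| -> anyhow::Result<()> {
            let record: NewRecord = record?.into();
            serde_json::to_writer(&mut output, &record)?;
            output.write_all(b"\n")?;
            Ok(())
        })?;

    output.flush()?;
    Ok(())
}

fn main() -> anyhow::Result<()> {
    // Example wiring: write a tiny input file, then rewrite it line by line.
    std::fs::write("old.jsonl", "{\"id\":1}\n{\"id\":2}\n")?;
    rewrite_jsonl("old.jsonl", "new.jsonl")?;
    Ok(())
}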