Mirror of https://github.com/meilisearch/MeiliSearch (synced 2025-07-03 11:57:07 +02:00)
chore: get rid of chrono in favor of time

Chrono has been unmaintained for a few months now and there is a CVE filed against it. Also make clippy happy and bump milli.
This commit is contained in:
parent
216965e9d9
commit
05c8d81e65
33 changed files with 369 additions and 226 deletions
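
For reference, the core substitution applied throughout the diff below, sketched in isolation; this assumes a 0.3-era time crate, which the time::serde::rfc3339 helpers used further down imply:

// Before (chrono):
//     use chrono::{DateTime, Utc};
//     let now: DateTime<Utc> = Utc::now();

// After (time 0.3):
use time::OffsetDateTime;

fn main() {
    // Same role as chrono's Utc::now(): the current instant with a UTC offset.
    let now: OffsetDateTime = OffsetDateTime::now_utc();
    println!("{now} (unix: {})", now.unix_timestamp());
}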
@@ -5,12 +5,12 @@ use std::ops::Deref;
 use std::path::Path;
 use std::sync::Arc;
 
-use chrono::{DateTime, Utc};
 use heed::{EnvOpenOptions, RoTxn};
 use milli::update::{IndexerConfig, Setting};
 use milli::{obkv_to_json, FieldDistribution, FieldId};
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value};
+use time::OffsetDateTime;
 use uuid::Uuid;
 
 use crate::EnvSizer;
@@ -24,8 +24,10 @@ pub type Document = Map<String, Value>;
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct IndexMeta {
-    pub created_at: DateTime<Utc>,
-    pub updated_at: DateTime<Utc>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339")]
+    pub updated_at: OffsetDateTime,
     pub primary_key: Option<String>,
 }
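
A note on the new serde attributes: chrono's DateTime<Utc> serializes to an RFC 3339 string by default, while time's OffsetDateTime does not, so the explicit `with = "time::serde::rfc3339"` keeps the wire format stable. A minimal sketch, assuming time is built with its serde-well-known feature:

use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Meta {
    // Without this attribute, time would use its own (non-RFC 3339)
    // serde representation and break existing API consumers.
    #[serde(with = "time::serde::rfc3339")]
    created_at: OffsetDateTime,
}

fn main() {
    let meta = Meta { created_at: OffsetDateTime::now_utc() };
    // Prints e.g. {"createdAt":"2022-02-15T14:30:00.123456789Z"}
    println!("{}", serde_json::to_string(&meta).unwrap());
}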

@@ -3,9 +3,10 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;
 
 use async_stream::stream;
-use chrono::Utc;
 use futures::{lock::Mutex, stream::StreamExt};
 use log::{error, trace};
+use time::macros::format_description;
+use time::OffsetDateTime;
 use tokio::sync::{mpsc, oneshot, RwLock};
 
 use super::error::{DumpActorError, Result};
@@ -29,7 +30,9 @@ pub struct DumpActor {
 
 /// Generate uid from creation date
 fn generate_uid() -> String {
-    Utc::now().format("%Y%m%d-%H%M%S%3f").to_string()
+    OffsetDateTime::now_utc()
+        .format(format_description!("[year][month][day]-[hour][minute][second][subsecond digits:3]"))
+        .unwrap()
 }
 
 impl DumpActor {
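
The uid format is the one place where the migration is not mechanical: time does not accept strftime patterns, so chrono's "%Y%m%d-%H%M%S%3f" has to be rewritten as a format description, which format_description! validates at compile time. A sketch of the equivalent; the bracketed description is an assumption matching the old strftime string:

use time::macros::format_description;
use time::OffsetDateTime;

fn generate_uid() -> String {
    OffsetDateTime::now_utc()
        .format(format_description!(
            // [year][month][day] ~ %Y%m%d; [subsecond digits:3] ~ %3f
            "[year][month][day]-[hour][minute][second][subsecond digits:3]"
        ))
        .unwrap() // formatting into a String cannot fail for this description
}

fn main() {
    println!("{}", generate_uid()); // e.g. 20220215-143000123
}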
@@ -1,8 +1,8 @@
 use anyhow::bail;
-use chrono::{DateTime, Utc};
 use meilisearch_error::Code;
 use milli::update::IndexDocumentsMethod;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use uuid::Uuid;
 
 use crate::index::{Settings, Unchecked};
@@ -51,7 +51,7 @@ pub enum UpdateMeta {
 pub struct Enqueued {
     pub update_id: u64,
     pub meta: UpdateMeta,
-    pub enqueued_at: DateTime<Utc>,
+    pub enqueued_at: OffsetDateTime,
     pub content: Option<Uuid>,
 }
 
@@ -59,7 +59,7 @@ pub struct Enqueued {
 #[serde(rename_all = "camelCase")]
 pub struct Processed {
     pub success: UpdateResult,
-    pub processed_at: DateTime<Utc>,
+    pub processed_at: OffsetDateTime,
     #[serde(flatten)]
     pub from: Processing,
 }
@@ -69,7 +69,7 @@ pub struct Processed {
 pub struct Processing {
     #[serde(flatten)]
     pub from: Enqueued,
-    pub started_processing_at: DateTime<Utc>,
+    pub started_processing_at: OffsetDateTime,
 }
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
@@ -77,7 +77,7 @@ pub struct Processing {
 pub struct Aborted {
     #[serde(flatten)]
     pub from: Enqueued,
-    pub aborted_at: DateTime<Utc>,
+    pub aborted_at: OffsetDateTime,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
@@ -86,7 +86,7 @@ pub struct Failed {
     #[serde(flatten)]
     pub from: Processing,
     pub error: ResponseError,
-    pub failed_at: DateTime<Utc>,
+    pub failed_at: OffsetDateTime,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
@@ -1,7 +1,7 @@
-use chrono::{DateTime, Utc};
 use meilisearch_error::{Code, ResponseError};
 use milli::update::IndexDocumentsMethod;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use uuid::Uuid;
 
 use crate::index::{Settings, Unchecked};
@@ -107,7 +107,7 @@ pub enum UpdateMeta {
 pub struct Enqueued {
     pub update_id: u64,
     pub meta: Update,
-    pub enqueued_at: DateTime<Utc>,
+    pub enqueued_at: OffsetDateTime,
 }
 
 impl Enqueued {
@@ -122,7 +122,7 @@ impl Enqueued {
 #[serde(rename_all = "camelCase")]
 pub struct Processed {
     pub success: v2::UpdateResult,
-    pub processed_at: DateTime<Utc>,
+    pub processed_at: OffsetDateTime,
     #[serde(flatten)]
     pub from: Processing,
 }
@@ -144,7 +144,7 @@ impl Processed {
 pub struct Processing {
     #[serde(flatten)]
     pub from: Enqueued,
-    pub started_processing_at: DateTime<Utc>,
+    pub started_processing_at: OffsetDateTime,
 }
 
 impl Processing {
@@ -163,7 +163,7 @@ pub struct Failed {
     pub from: Processing,
     pub msg: String,
     pub code: Code,
-    pub failed_at: DateTime<Utc>,
+    pub failed_at: OffsetDateTime,
 }
 
 impl Failed {
@@ -3,9 +3,9 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;
 
 use anyhow::bail;
-use chrono::{DateTime, Utc};
 use log::{info, trace};
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 
 pub use actor::DumpActor;
 pub use handle_impl::*;
@@ -40,7 +40,7 @@ pub struct Metadata {
     db_version: String,
     index_db_size: usize,
     update_db_size: usize,
-    dump_date: DateTime<Utc>,
+    dump_date: OffsetDateTime,
 }
 
 impl Metadata {
@@ -49,7 +49,7 @@ impl Metadata {
             db_version: env!("CARGO_PKG_VERSION").to_string(),
             index_db_size,
             update_db_size,
-            dump_date: Utc::now(),
+            dump_date: OffsetDateTime::now_utc(),
         }
     }
 }
@@ -144,7 +144,7 @@ impl MetadataVersion {
         }
    }
 
-    pub fn dump_date(&self) -> Option<&DateTime<Utc>> {
+    pub fn dump_date(&self) -> Option<&OffsetDateTime> {
        match self {
            MetadataVersion::V1(_) => None,
            MetadataVersion::V2(meta) | MetadataVersion::V3(meta) | MetadataVersion::V4(meta) => {
@@ -169,9 +169,13 @@ pub struct DumpInfo {
     pub status: DumpStatus,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub error: Option<String>,
-    started_at: DateTime<Utc>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    finished_at: Option<DateTime<Utc>>,
+    #[serde(with = "time::serde::rfc3339")]
+    started_at: OffsetDateTime,
+    #[serde(
+        skip_serializing_if = "Option::is_none",
+        with = "time::serde::rfc3339::option"
+    )]
+    finished_at: Option<OffsetDateTime>,
 }
 
 impl DumpInfo {
@@ -180,19 +184,19 @@ impl DumpInfo {
             uid,
             status,
             error: None,
-            started_at: Utc::now(),
+            started_at: OffsetDateTime::now_utc(),
             finished_at: None,
         }
    }
 
    pub fn with_error(&mut self, error: String) {
        self.status = DumpStatus::Failed;
-        self.finished_at = Some(Utc::now());
+        self.finished_at = Some(OffsetDateTime::now_utc());
        self.error = Some(error);
    }
 
    pub fn done(&mut self) {
-        self.finished_at = Some(Utc::now());
+        self.finished_at = Some(OffsetDateTime::now_utc());
        self.status = DumpStatus::Done;
    }
 
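
For the optional finished_at, the rfc3339::option helper pairs with skip_serializing_if so a pending dump still omits the field entirely. A minimal sketch, under the same serde-well-known assumption as above:

use serde::Serialize;
use time::OffsetDateTime;

#[derive(Serialize)]
struct DumpTimes {
    #[serde(with = "time::serde::rfc3339")]
    started_at: OffsetDateTime,
    // rfc3339::option formats Some(_) as an RFC 3339 string;
    // skip_serializing_if drops the key while the dump is still running.
    #[serde(
        skip_serializing_if = "Option::is_none",
        with = "time::serde::rfc3339::option"
    )]
    finished_at: Option<OffsetDateTime>,
}

fn main() {
    let d = DumpTimes { started_at: OffsetDateTime::now_utc(), finished_at: None };
    println!("{}", serde_json::to_string(&d).unwrap()); // no "finished_at" key
}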

@@ -8,11 +8,11 @@ use std::time::Duration;
 
 use actix_web::error::PayloadError;
 use bytes::Bytes;
-use chrono::{DateTime, Utc};
 use futures::Stream;
 use futures::StreamExt;
 use milli::update::IndexDocumentsMethod;
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use tokio::sync::{mpsc, RwLock};
 use tokio::task::spawn_blocking;
 use tokio::time::sleep;
@@ -107,7 +107,7 @@ impl fmt::Display for DocumentAdditionFormat {
 #[serde(rename_all = "camelCase")]
 pub struct Stats {
     pub database_size: u64,
-    pub last_update: Option<DateTime<Utc>>,
+    pub last_update: Option<OffsetDateTime>,
     pub indexes: BTreeMap<String, IndexStats>,
 }
 
@@ -579,7 +579,7 @@ where
    }
 
    pub async fn get_all_stats(&self, search_rules: &SearchRules) -> Result<Stats> {
-        let mut last_task: Option<DateTime<_>> = None;
+        let mut last_task: Option<OffsetDateTime> = None;
        let mut indexes = BTreeMap::new();
        let mut database_size = 0;
        let processing_tasks = self.scheduler.read().await.get_processing_tasks().await?;
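
OffsetDateTime is Ord, so the last_task bookkeeping in get_all_stats ports without changes; tracking the most recent update is still an ordinary max. A hedged sketch of that pattern (the helper name is illustrative, not from the codebase):

use time::OffsetDateTime;

// Keep the most recent of an optional running maximum and a new timestamp,
// as get_all_stats does while folding over per-index stats.
fn newest(acc: Option<OffsetDateTime>, seen: OffsetDateTime) -> Option<OffsetDateTime> {
    Some(match acc {
        Some(prev) => prev.max(seen),
        None => seen,
    })
}

fn main() {
    let now = OffsetDateTime::now_utc();
    assert_eq!(newest(None, now), Some(now));
}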

@@ -6,7 +6,6 @@ use std::convert::{TryFrom, TryInto};
 use std::path::Path;
 use std::sync::Arc;
 
-use chrono::Utc;
 use error::{IndexResolverError, Result};
 use heed::Env;
 use index_store::{IndexStore, MapIndexStore};
@@ -14,6 +13,7 @@ use meilisearch_error::ResponseError;
 use meta_store::{HeedMetaStore, IndexMetaStore};
 use milli::update::{DocumentDeletionResult, IndexerConfig};
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use tokio::sync::oneshot;
 use tokio::task::spawn_blocking;
 use uuid::Uuid;
@@ -115,18 +115,19 @@ where
             self.process_document_addition_batch(batch).await
         } else {
             if let Some(task) = batch.tasks.first_mut() {
-                task.events.push(TaskEvent::Processing(Utc::now()));
+                task.events
+                    .push(TaskEvent::Processing(OffsetDateTime::now_utc()));
 
                 match self.process_task(task).await {
                     Ok(success) => {
                         task.events.push(TaskEvent::Succeded {
                             result: success,
-                            timestamp: Utc::now(),
+                            timestamp: OffsetDateTime::now_utc(),
                         });
                     }
                     Err(err) => task.events.push(TaskEvent::Failed {
                         error: err.into(),
-                        timestamp: Utc::now(),
+                        timestamp: OffsetDateTime::now_utc(),
                     }),
                 }
             }
@@ -225,7 +226,7 @@ where
 
         // If the index doesn't exist and we are not allowed to create it with the first
         // task, we must fail the whole batch.
-        let now = Utc::now();
+        let now = OffsetDateTime::now_utc();
         let index = match index {
             Ok(index) => index,
             Err(e) => {
@@ -253,17 +254,17 @@ where
 
         let event = match result {
             Ok(Ok(result)) => TaskEvent::Succeded {
-                timestamp: Utc::now(),
+                timestamp: OffsetDateTime::now_utc(),
                 result: TaskResult::DocumentAddition {
                     indexed_documents: result.indexed_documents,
                 },
             },
             Ok(Err(e)) => TaskEvent::Failed {
-                timestamp: Utc::now(),
+                timestamp: OffsetDateTime::now_utc(),
                 error: e.into(),
             },
             Err(e) => TaskEvent::Failed {
-                timestamp: Utc::now(),
+                timestamp: OffsetDateTime::now_utc(),
                 error: IndexResolverError::from(e).into(),
             },
         };
@@ -524,7 +525,7 @@ mod test {
         };
         if primary_key.is_some() {
             mocker.when::<String, IndexResult<IndexMeta>>("update_primary_key")
-                .then(move |_| Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None }));
+                .then(move |_| Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None }));
         }
         mocker.when::<(IndexDocumentsMethod, Option<String>, UpdateFileStore, IntoIter<Uuid>), IndexResult<DocumentAdditionResult>>("update_documents")
             .then(move |(_, _, _, _)| result());
@@ -569,7 +570,7 @@ mod test {
             | TaskContent::IndexCreation { primary_key } => {
                 if primary_key.is_some() {
                     let result = move || if !index_op_fails {
-                        Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None })
+                        Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None })
                     } else {
                         // return this error because it's easy to generate...
                         Err(IndexError::DocumentNotFound("a doc".into()))
@@ -640,7 +641,7 @@ mod test {
         let update_file_store = UpdateFileStore::mock(mocker);
         let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store);
 
-        let batch = Batch { id: 1, created_at: Utc::now(), tasks: vec![task.clone()] };
+        let batch = Batch { id: 1, created_at: OffsetDateTime::now_utc(), tasks: vec![task.clone()] };
         let result = index_resolver.process_batch(batch).await;
 
         // Test for some expected output scenarios:

@@ -1,4 +1,4 @@
-use chrono::{DateTime, Utc};
+use time::OffsetDateTime;
 
 use super::task::Task;
 
@@ -7,7 +7,7 @@ pub type BatchId = u64;
 #[derive(Debug)]
 pub struct Batch {
     pub id: BatchId,
-    pub created_at: DateTime<Utc>,
+    pub created_at: OffsetDateTime,
     pub tasks: Vec<Task>,
 }
 
@@ -6,8 +6,8 @@ use std::sync::Arc;
 use std::time::Duration;
 
 use atomic_refcell::AtomicRefCell;
-use chrono::Utc;
 use milli::update::IndexDocumentsMethod;
+use time::OffsetDateTime;
 use tokio::sync::{watch, RwLock};
 
 use crate::options::SchedulerConfig;
@@ -357,7 +357,7 @@ impl Scheduler {
         tasks.iter_mut().for_each(|t| {
             t.events.push(TaskEvent::Batched {
                 batch_id: id,
-                timestamp: Utc::now(),
+                timestamp: OffsetDateTime::now_utc(),
             })
         });
 
@@ -365,7 +365,7 @@ impl Scheduler {
 
         let batch = Batch {
             id,
-            created_at: Utc::now(),
+            created_at: OffsetDateTime::now_utc(),
             tasks,
         };
 
@@ -1,9 +1,9 @@
 use std::path::PathBuf;
 
-use chrono::{DateTime, Utc};
 use meilisearch_error::ResponseError;
 use milli::update::{DocumentAdditionResult, IndexDocumentsMethod};
 use serde::{Deserialize, Serialize};
+use time::OffsetDateTime;
 use tokio::sync::oneshot;
 use uuid::Uuid;
 
@@ -36,22 +36,22 @@ impl From<DocumentAdditionResult> for TaskResult {
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
 #[cfg_attr(test, derive(proptest_derive::Arbitrary))]
 pub enum TaskEvent {
-    Created(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] DateTime<Utc>),
+    Created(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] OffsetDateTime),
     Batched {
         #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
-        timestamp: DateTime<Utc>,
+        timestamp: OffsetDateTime,
         batch_id: BatchId,
     },
-    Processing(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] DateTime<Utc>),
+    Processing(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] OffsetDateTime),
     Succeded {
         result: TaskResult,
         #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
-        timestamp: DateTime<Utc>,
+        timestamp: OffsetDateTime,
     },
     Failed {
         error: ResponseError,
         #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))]
-        timestamp: DateTime<Utc>,
+        timestamp: OffsetDateTime,
     },
 }
 
@@ -165,7 +165,7 @@ mod test {
         ]
    }
 
-    pub(super) fn datetime_strategy() -> impl Strategy<Value = DateTime<Utc>> {
-        Just(Utc::now())
+    pub(super) fn datetime_strategy() -> impl Strategy<Value = OffsetDateTime> {
+        Just(OffsetDateTime::now_utc())
    }
 }
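
OffsetDateTime does not implement proptest's Arbitrary trait, which is why every timestamp field above carries an explicit strategy override; a constant Just(now) is the simplest strategy that type-checks. A sketch of how the override is consumed (the test body is illustrative):

use proptest::prelude::*;
use time::OffsetDateTime;

// Minimal stand-in for a generated timestamp; a richer strategy could map
// an i64 range through OffsetDateTime::from_unix_timestamp.
fn datetime_strategy() -> impl Strategy<Value = OffsetDateTime> {
    Just(OffsetDateTime::now_utc())
}

proptest! {
    #[test]
    fn strategy_yields_valid_timestamps(dt in datetime_strategy()) {
        prop_assert!(dt.unix_timestamp() > 0);
    }
}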

@@ -5,9 +5,9 @@ use std::io::{BufWriter, Write};
 use std::path::Path;
 use std::sync::Arc;
 
-use chrono::Utc;
 use heed::{Env, RwTxn};
 use log::debug;
+use time::OffsetDateTime;
 
 use super::error::TaskError;
 use super::task::{Task, TaskContent, TaskId};
@@ -72,7 +72,7 @@ impl TaskStore {
         let task = tokio::task::spawn_blocking(move || -> Result<Task> {
             let mut txn = store.wtxn()?;
             let next_task_id = store.next_task_id(&mut txn)?;
-            let created_at = TaskEvent::Created(Utc::now());
+            let created_at = TaskEvent::Created(OffsetDateTime::now_utc());
             let task = Task {
                 id: next_task_id,
                 index_uid,
@@ -1,7 +1,7 @@
 use std::sync::Arc;
 use std::time::Duration;
 
-use chrono::Utc;
+use time::OffsetDateTime;
 use tokio::sync::{watch, RwLock};
 use tokio::time::interval_at;
 
@@ -63,7 +63,8 @@ where
         match pending {
             Pending::Batch(mut batch) => {
                 for task in &mut batch.tasks {
-                    task.events.push(TaskEvent::Processing(Utc::now()));
+                    task.events
+                        .push(TaskEvent::Processing(OffsetDateTime::now_utc()));
                 }
 
                 batch.tasks = {