chore: get rid of chrono in favor of time

Chrono has been unmaintained for a few months now and there is a CVE on it.

make clippy happy

bump milli
This commit is contained in:
Irevoire 2022-02-14 15:32:41 +01:00
parent 216965e9d9
commit 05c8d81e65
No known key found for this signature in database
GPG key ID: 7A6A970C96104F1B
33 changed files with 369 additions and 226 deletions

View file

@ -3,9 +3,10 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use async_stream::stream;
use chrono::Utc;
use futures::{lock::Mutex, stream::StreamExt};
use log::{error, trace};
use time::macros::format_description;
use time::OffsetDateTime;
use tokio::sync::{mpsc, oneshot, RwLock};
use super::error::{DumpActorError, Result};
@ -29,7 +30,9 @@ pub struct DumpActor {
/// Generate a dump uid from the creation date, e.g. `20220214-153241713`.
fn generate_uid() -> String {
    // NOTE: `time`'s `format_description!` macro uses bracketed component
    // syntax, not chrono's strftime `%`-specifiers — a description like
    // "%Y%m%d-%H%M%S%3f" is rejected at compile time. The description below
    // reproduces the exact same output the chrono format produced.
    OffsetDateTime::now_utc()
        .format(format_description!(
            "[year repr:full][month repr:numerical padding:zero][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
        ))
        .expect("formatting the current datetime with a valid description cannot fail")
}
impl DumpActor {

View file

@ -1,8 +1,8 @@
use anyhow::bail;
use chrono::{DateTime, Utc};
use meilisearch_error::Code;
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use crate::index::{Settings, Unchecked};
@ -51,7 +51,7 @@ pub enum UpdateMeta {
pub struct Enqueued {
pub update_id: u64,
pub meta: UpdateMeta,
pub enqueued_at: DateTime<Utc>,
pub enqueued_at: OffsetDateTime,
pub content: Option<Uuid>,
}
@ -59,7 +59,7 @@ pub struct Enqueued {
#[serde(rename_all = "camelCase")]
pub struct Processed {
pub success: UpdateResult,
pub processed_at: DateTime<Utc>,
pub processed_at: OffsetDateTime,
#[serde(flatten)]
pub from: Processing,
}
@ -69,7 +69,7 @@ pub struct Processed {
pub struct Processing {
#[serde(flatten)]
pub from: Enqueued,
pub started_processing_at: DateTime<Utc>,
pub started_processing_at: OffsetDateTime,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
@ -77,7 +77,7 @@ pub struct Processing {
pub struct Aborted {
#[serde(flatten)]
pub from: Enqueued,
pub aborted_at: DateTime<Utc>,
pub aborted_at: OffsetDateTime,
}
#[derive(Debug, Serialize, Deserialize)]
@ -86,7 +86,7 @@ pub struct Failed {
#[serde(flatten)]
pub from: Processing,
pub error: ResponseError,
pub failed_at: DateTime<Utc>,
pub failed_at: OffsetDateTime,
}
#[derive(Debug, Serialize, Deserialize)]

View file

@ -1,7 +1,7 @@
use chrono::{DateTime, Utc};
use meilisearch_error::{Code, ResponseError};
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use crate::index::{Settings, Unchecked};
@ -107,7 +107,7 @@ pub enum UpdateMeta {
pub struct Enqueued {
pub update_id: u64,
pub meta: Update,
pub enqueued_at: DateTime<Utc>,
pub enqueued_at: OffsetDateTime,
}
impl Enqueued {
@ -122,7 +122,7 @@ impl Enqueued {
#[serde(rename_all = "camelCase")]
pub struct Processed {
pub success: v2::UpdateResult,
pub processed_at: DateTime<Utc>,
pub processed_at: OffsetDateTime,
#[serde(flatten)]
pub from: Processing,
}
@ -144,7 +144,7 @@ impl Processed {
pub struct Processing {
#[serde(flatten)]
pub from: Enqueued,
pub started_processing_at: DateTime<Utc>,
pub started_processing_at: OffsetDateTime,
}
impl Processing {
@ -163,7 +163,7 @@ pub struct Failed {
pub from: Processing,
pub msg: String,
pub code: Code,
pub failed_at: DateTime<Utc>,
pub failed_at: OffsetDateTime,
}
impl Failed {

View file

@ -3,9 +3,9 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use anyhow::bail;
use chrono::{DateTime, Utc};
use log::{info, trace};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
pub use actor::DumpActor;
pub use handle_impl::*;
@ -40,7 +40,7 @@ pub struct Metadata {
db_version: String,
index_db_size: usize,
update_db_size: usize,
dump_date: DateTime<Utc>,
dump_date: OffsetDateTime,
}
impl Metadata {
@ -49,7 +49,7 @@ impl Metadata {
db_version: env!("CARGO_PKG_VERSION").to_string(),
index_db_size,
update_db_size,
dump_date: Utc::now(),
dump_date: OffsetDateTime::now_utc(),
}
}
}
@ -144,7 +144,7 @@ impl MetadataVersion {
}
}
pub fn dump_date(&self) -> Option<&DateTime<Utc>> {
pub fn dump_date(&self) -> Option<&OffsetDateTime> {
match self {
MetadataVersion::V1(_) => None,
MetadataVersion::V2(meta) | MetadataVersion::V3(meta) | MetadataVersion::V4(meta) => {
@ -169,9 +169,13 @@ pub struct DumpInfo {
pub status: DumpStatus,
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
started_at: DateTime<Utc>,
#[serde(skip_serializing_if = "Option::is_none")]
finished_at: Option<DateTime<Utc>>,
#[serde(with = "time::serde::rfc3339")]
started_at: OffsetDateTime,
#[serde(
skip_serializing_if = "Option::is_none",
with = "time::serde::rfc3339::option"
)]
finished_at: Option<OffsetDateTime>,
}
impl DumpInfo {
@ -180,19 +184,19 @@ impl DumpInfo {
uid,
status,
error: None,
started_at: Utc::now(),
started_at: OffsetDateTime::now_utc(),
finished_at: None,
}
}
/// Mark the dump as failed, recording the failure time and the error message.
pub fn with_error(&mut self, error: String) {
    self.status = DumpStatus::Failed;
    self.finished_at = Some(OffsetDateTime::now_utc());
    self.error = Some(error);
}
/// Mark the dump as successfully finished, recording the completion time.
pub fn done(&mut self) {
    self.finished_at = Some(OffsetDateTime::now_utc());
    self.status = DumpStatus::Done;
}

View file

@ -8,11 +8,11 @@ use std::time::Duration;
use actix_web::error::PayloadError;
use bytes::Bytes;
use chrono::{DateTime, Utc};
use futures::Stream;
use futures::StreamExt;
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use tokio::sync::{mpsc, RwLock};
use tokio::task::spawn_blocking;
use tokio::time::sleep;
@ -107,7 +107,7 @@ impl fmt::Display for DocumentAdditionFormat {
#[serde(rename_all = "camelCase")]
pub struct Stats {
pub database_size: u64,
pub last_update: Option<DateTime<Utc>>,
pub last_update: Option<OffsetDateTime>,
pub indexes: BTreeMap<String, IndexStats>,
}
@ -579,7 +579,7 @@ where
}
pub async fn get_all_stats(&self, search_rules: &SearchRules) -> Result<Stats> {
let mut last_task: Option<DateTime<_>> = None;
let mut last_task: Option<OffsetDateTime> = None;
let mut indexes = BTreeMap::new();
let mut database_size = 0;
let processing_tasks = self.scheduler.read().await.get_processing_tasks().await?;