Implement backups

* trigger backup importation via an HTTP route
* follow backup progress via a status route
* import a backup via the command line
* let the user choose the batch size of documents to import (command-line option, sketched below)

closes #884
closes #840
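
For illustration only, a minimal sketch of what the command-line side could look like, assuming a structopt-based options struct; the flag names and defaults are hypothetical and not part of this excerpt:

use std::path::PathBuf;
use structopt::StructOpt;

// Hypothetical flags mirroring the commit message; real names/defaults may differ.
#[derive(Debug, StructOpt)]
struct BackupOpt {
    /// Path of a backup archive to import at startup (hypothetical flag).
    #[structopt(long)]
    import_backup: Option<PathBuf>,

    /// Number of documents written per batch while importing a backup (hypothetical flag).
    #[structopt(long, default_value = "1000")]
    backup_batch_size: usize,
}

// Parsed once at startup, e.g. `let opt = BackupOpt::from_args();`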
many 2020-07-28 14:41:49 +02:00
parent efe5984d54
commit c254320860
26 changed files with 1313 additions and 167 deletions


@@ -1,14 +1,16 @@
use actix_web::{web, HttpResponse};
use actix_web::{delete, get, post, put};
use actix_web::{web, HttpResponse};
use chrono::{DateTime, Utc};
use log::error;
use meilisearch_core::{Database, MainReader, UpdateReader};
use meilisearch_core::update::UpdateStatus;
use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize};
use crate::Data;
use crate::error::{Error, ResponseError};
use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;
pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(list_indexes)
@@ -29,19 +31,17 @@ fn generate_uid() -> String {
.collect()
}
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
struct IndexResponse {
name: String,
uid: String,
pub struct IndexResponse {
pub name: String,
pub uid: String,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
primary_key: Option<String>,
pub primary_key: Option<String>,
}
#[get("/indexes", wrap = "Authentication::Private")]
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?;
pub fn list_indexes_sync(data: &web::Data<Data>, reader: &MainReader) -> Result<Vec<IndexResponse>, ResponseError> {
let mut indexes = Vec::new();
for index_uid in data.db.indexes_uids() {
@@ -49,23 +49,23 @@ async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseErr
match index {
Some(index) => {
let name = index.main.name(&reader)?.ok_or(Error::internal(
let name = index.main.name(reader)?.ok_or(Error::internal(
"Impossible to get the name of an index",
))?;
let created_at = index
.main
.created_at(&reader)?
.created_at(reader)?
.ok_or(Error::internal(
"Impossible to get the create date of an index",
))?;
let updated_at = index
.main
.updated_at(&reader)?
.updated_at(reader)?
.ok_or(Error::internal(
"Impossible to get the last update date of an index",
))?;
let primary_key = match index.main.schema(&reader) {
let primary_key = match index.main.schema(reader) {
Ok(Some(schema)) => match schema.primary_key() {
Some(primary_key) => Some(primary_key.to_owned()),
None => None,
@@ -89,6 +89,14 @@ async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseErr
}
}
Ok(indexes)
}
#[get("/indexes", wrap = "Authentication::Private")]
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
let reader = data.db.main_read_txn()?;
let indexes = list_indexes_sync(&data, &reader)?;
Ok(HttpResponse::Ok().json(indexes))
}
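
A usage note, as a sketch only (assuming the module context above): the extracted list_indexes_sync helper can now be called outside an HTTP handler, for instance from synchronous backup code that opens its own read transaction:

// Hypothetical caller, e.g. from the backup writer: same logic as GET /indexes,
// but driven by a plain MainReader instead of an HTTP request.
fn indexes_snapshot(data: &web::Data<Data>) -> Result<Vec<IndexResponse>, ResponseError> {
    let reader = data.db.main_read_txn()?;
    list_indexes_sync(data, &reader)
}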
@@ -145,6 +153,55 @@ struct IndexCreateRequest {
primary_key: Option<String>,
}
pub fn create_index_sync(
database: &std::sync::Arc<Database>,
uid: String,
name: String,
primary_key: Option<String>,
) -> Result<IndexResponse, Error> {
let created_index = database
.create_index(&uid)
.map_err(|e| match e {
meilisearch_core::Error::IndexAlreadyExists => Error::IndexAlreadyExists(uid.clone()),
_ => Error::create_index(e)
})?;
let index_response = database.main_write::<_, _, Error>(|mut write_txn| {
created_index.main.put_name(&mut write_txn, &name)?;
let created_at = created_index
.main
.created_at(&write_txn)?
.ok_or(Error::internal("Impossible to read created at"))?;
let updated_at = created_index
.main
.updated_at(&write_txn)?
.ok_or(Error::internal("Impossible to read updated at"))?;
if let Some(id) = primary_key.clone() {
if let Some(mut schema) = created_index.main.schema(&write_txn)? {
schema
.set_primary_key(&id)
.map_err(Error::bad_request)?;
created_index.main.put_schema(&mut write_txn, &schema)?;
}
}
let index_response = IndexResponse {
name,
uid,
created_at,
updated_at,
primary_key,
};
Ok(index_response)
})?;
Ok(index_response)
}
#[post("/indexes", wrap = "Authentication::Private")]
async fn create_index(
data: web::Data<Data>,
@@ -175,45 +232,9 @@ async fn create_index(
},
};
let created_index = data
.db
.create_index(&uid)
.map_err(|e| match e {
meilisearch_core::Error::IndexAlreadyExists => e.into(),
_ => ResponseError::from(Error::create_index(e))
})?;
let name = body.name.as_ref().unwrap_or(&uid).to_string();
let index_response = data.db.main_write::<_, _, ResponseError>(|mut writer| {
let name = body.name.as_ref().unwrap_or(&uid);
created_index.main.put_name(&mut writer, name)?;
let created_at = created_index
.main
.created_at(&writer)?
.ok_or(Error::internal("Impossible to read created at"))?;
let updated_at = created_index
.main
.updated_at(&writer)?
.ok_or(Error::internal("Impossible to read updated at"))?;
if let Some(id) = body.primary_key.clone() {
if let Some(mut schema) = created_index.main.schema(&writer)? {
schema
.set_primary_key(&id)
.map_err(Error::bad_request)?;
created_index.main.put_schema(&mut writer, &schema)?;
}
}
let index_response = IndexResponse {
name: name.to_string(),
uid,
created_at,
updated_at,
primary_key: body.primary_key.clone(),
};
Ok(index_response)
})?;
let index_response = create_index_sync(&data.db, uid, name, body.primary_key.clone())?;
Ok(HttpResponse::Created().json(index_response))
}
@@ -340,20 +361,28 @@ async fn get_update_status(
)).into()),
}
}
pub fn get_all_updates_status_sync(
data: &web::Data<Data>,
reader: &UpdateReader,
index_uid: &str,
) -> Result<Vec<UpdateStatus>, Error> {
let index = data
.db
.open_index(index_uid)
.ok_or(Error::index_not_found(index_uid))?;
Ok(index.all_updates_status(reader)?)
}
#[get("/indexes/{index_uid}/updates", wrap = "Authentication::Private")]
async fn get_all_updates_status(
data: web::Data<Data>,
path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
let index = data
.db
.open_index(&path.index_uid)
.ok_or(Error::index_not_found(&path.index_uid))?;
let reader = data.db.update_read_txn()?;
let response = index.all_updates_status(&reader)?;
let response = get_all_updates_status_sync(&data, &reader, &path.index_uid)?;
Ok(HttpResponse::Ok().json(response))
}
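
Taken together, the extracted *_sync helpers let backup code reuse the route logic without going through the HTTP layer. A hypothetical sketch of such a caller (the actual backup module is not shown in this excerpt; the function name and return shape are illustrative):

// Hypothetical: gather index metadata and update statuses for a backup,
// reusing the helpers above with plain read transactions.
fn collect_backup_metadata(
    data: &web::Data<Data>,
) -> Result<(Vec<IndexResponse>, Vec<(String, Vec<UpdateStatus>)>), ResponseError> {
    let main_reader = data.db.main_read_txn()?;
    let update_reader = data.db.update_read_txn()?;

    let indexes = list_indexes_sync(data, &main_reader)?;
    let mut updates = Vec::new();
    for index in &indexes {
        let statuses = get_all_updates_status_sync(data, &update_reader, &index.uid)?;
        updates.push((index.uid.clone(), statuses));
    }
    Ok((indexes, updates))
}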