2020-06-24 16:06:04 +02:00
|
|
|
use std::error::Error;
|
2019-10-31 15:00:36 +01:00
|
|
|
use std::ops::Deref;
|
2020-07-28 14:41:49 +02:00
|
|
|
use std::path::PathBuf;
|
2020-12-15 13:05:01 +01:00
|
|
|
use std::sync::{Arc, Mutex};
|
2019-10-31 15:00:36 +01:00
|
|
|
|
2020-09-08 19:16:17 +02:00
|
|
|
use meilisearch_core::{Database, DatabaseOptions, Index};
|
2020-02-06 15:41:11 +01:00
|
|
|
use sha2::Digest;
|
2019-10-31 15:00:36 +01:00
|
|
|
|
2020-09-08 19:16:17 +02:00
|
|
|
use crate::error::{Error as MSError, ResponseError};
|
2020-04-15 10:51:15 +02:00
|
|
|
use crate::index_update_callback;
|
2019-10-31 15:00:36 +01:00
|
|
|
use crate::option::Opt;
|
2020-12-15 13:05:01 +01:00
|
|
|
use crate::dump::DumpInfo;
|
2019-10-31 15:00:36 +01:00
|
|
|
|
|
|
|
/// Shared handle to the server's global state.
///
/// Cloning is cheap: `Data` only wraps an `Arc<DataInner>`, so every
/// request handler can hold its own copy while sharing one inner state.
#[derive(Clone)]
pub struct Data {
    // Reference-counted inner state; the `Deref` impl exposes its fields directly.
    inner: Arc<DataInner>,
}
|
|
|
|
|
|
|
|
impl Deref for Data {
|
|
|
|
type Target = DataInner;
|
|
|
|
|
|
|
|
fn deref(&self) -> &Self::Target {
|
|
|
|
&self.inner
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// The actual server state shared behind `Data`'s `Arc`.
#[derive(Clone)]
pub struct DataInner {
    // Handle to the underlying meilisearch-core database.
    pub db: Arc<Database>,
    // Filesystem location of the database (from the CLI options).
    pub db_path: String,
    // Directory where dumps are written.
    pub dumps_dir: PathBuf,
    // Number of documents processed per batch while dumping.
    pub dump_batch_size: usize,
    // Public/private/master API keys; missing ones are derived from master.
    pub api_keys: ApiKeys,
    // PID of the server process, captured at startup.
    pub server_pid: u32,
    // Maximum accepted HTTP payload size, in bytes.
    pub http_payload_size_limit: usize,
    // Status of the dump currently in progress, `None` when idle.
    pub current_dump: Arc<Mutex<Option<DumpInfo>>>,
}
|
|
|
|
|
2020-04-24 15:00:52 +02:00
|
|
|
/// The three API keys accepted by the server.
///
/// Only `master` comes from configuration; `public` and `private` are
/// derived from it by `generate_missing_api_keys` when not set.
#[derive(Clone)]
pub struct ApiKeys {
    // Key granting read-only (search) access.
    pub public: Option<String>,
    // Key granting read/write access to documents and settings.
    pub private: Option<String>,
    // Key granting full access; source of the derived keys.
    pub master: Option<String>,
}
|
|
|
|
|
|
|
|
impl ApiKeys {
|
|
|
|
pub fn generate_missing_api_keys(&mut self) {
|
|
|
|
if let Some(master_key) = &self.master {
|
|
|
|
if self.private.is_none() {
|
|
|
|
let key = format!("{}-private", master_key);
|
|
|
|
let sha = sha2::Sha256::digest(key.as_bytes());
|
|
|
|
self.private = Some(format!("{:x}", sha));
|
|
|
|
}
|
|
|
|
if self.public.is_none() {
|
|
|
|
let key = format!("{}-public", master_key);
|
|
|
|
let sha = sha2::Sha256::digest(key.as_bytes());
|
|
|
|
self.public = Some(format!("{:x}", sha));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-31 15:00:36 +01:00
|
|
|
impl Data {
|
2020-06-24 16:06:04 +02:00
|
|
|
pub fn new(opt: Opt) -> Result<Data, Box<dyn Error>> {
|
2019-11-19 17:09:06 +01:00
|
|
|
let db_path = opt.db_path.clone();
|
2020-10-13 11:17:02 +02:00
|
|
|
let dumps_dir = opt.dumps_dir.clone();
|
2020-09-29 12:18:09 +02:00
|
|
|
let dump_batch_size = opt.dump_batch_size;
|
2020-06-30 13:11:49 +02:00
|
|
|
let server_pid = std::process::id();
|
2019-10-31 15:00:36 +01:00
|
|
|
|
2020-04-29 00:40:06 +02:00
|
|
|
let db_opt = DatabaseOptions {
|
2020-08-24 14:14:11 +02:00
|
|
|
main_map_size: opt.max_mdb_size,
|
|
|
|
update_map_size: opt.max_udb_size,
|
2020-04-29 00:40:06 +02:00
|
|
|
};
|
|
|
|
|
2020-05-14 17:52:10 +02:00
|
|
|
let http_payload_size_limit = opt.http_payload_size_limit;
|
|
|
|
|
2020-06-24 16:06:04 +02:00
|
|
|
let db = Arc::new(Database::open_or_create(opt.db_path, db_opt)?);
|
2019-10-31 15:00:36 +01:00
|
|
|
|
2020-02-06 15:41:11 +01:00
|
|
|
let mut api_keys = ApiKeys {
|
2020-04-10 19:05:05 +02:00
|
|
|
master: opt.master_key,
|
2020-02-06 15:41:11 +01:00
|
|
|
private: None,
|
|
|
|
public: None,
|
|
|
|
};
|
|
|
|
|
|
|
|
api_keys.generate_missing_api_keys();
|
|
|
|
|
2020-12-15 13:05:01 +01:00
|
|
|
let current_dump = Arc::new(Mutex::new(None));
|
|
|
|
|
2019-10-31 15:00:36 +01:00
|
|
|
let inner_data = DataInner {
|
|
|
|
db: db.clone(),
|
|
|
|
db_path,
|
2020-10-13 11:17:02 +02:00
|
|
|
dumps_dir,
|
2020-09-29 12:18:09 +02:00
|
|
|
dump_batch_size,
|
2020-02-06 15:41:11 +01:00
|
|
|
api_keys,
|
2019-10-31 15:00:36 +01:00
|
|
|
server_pid,
|
2020-05-14 17:52:10 +02:00
|
|
|
http_payload_size_limit,
|
2020-12-15 13:05:01 +01:00
|
|
|
current_dump,
|
2019-10-31 15:00:36 +01:00
|
|
|
};
|
|
|
|
|
|
|
|
let data = Data {
|
|
|
|
inner: Arc::new(inner_data),
|
|
|
|
};
|
|
|
|
|
2019-11-15 17:33:06 +01:00
|
|
|
let callback_context = data.clone();
|
2019-11-19 16:15:49 +01:00
|
|
|
db.set_update_callback(Box::new(move |index_uid, status| {
|
2020-04-08 14:13:45 +02:00
|
|
|
index_update_callback(&index_uid, &callback_context, status);
|
2019-11-15 17:33:06 +01:00
|
|
|
}));
|
2019-10-31 15:00:36 +01:00
|
|
|
|
2020-06-24 16:06:04 +02:00
|
|
|
Ok(data)
|
2019-10-31 15:00:36 +01:00
|
|
|
}
|
2020-09-08 19:16:17 +02:00
|
|
|
|
|
|
|
fn create_index(&self, uid: &str) -> Result<Index, ResponseError> {
|
|
|
|
if !uid
|
|
|
|
.chars()
|
|
|
|
.all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
|
|
|
|
{
|
|
|
|
return Err(MSError::InvalidIndexUid.into());
|
|
|
|
}
|
|
|
|
|
|
|
|
let created_index = self.db.create_index(&uid).map_err(|e| match e {
|
|
|
|
meilisearch_core::Error::IndexAlreadyExists => e.into(),
|
|
|
|
_ => ResponseError::from(MSError::create_index(e)),
|
|
|
|
})?;
|
|
|
|
|
|
|
|
self.db.main_write::<_, _, ResponseError>(|mut writer| {
|
|
|
|
created_index.main.put_name(&mut writer, uid)?;
|
|
|
|
|
|
|
|
created_index
|
|
|
|
.main
|
|
|
|
.created_at(&writer)?
|
|
|
|
.ok_or(MSError::internal("Impossible to read created at"))?;
|
|
|
|
|
|
|
|
created_index
|
|
|
|
.main
|
|
|
|
.updated_at(&writer)?
|
|
|
|
.ok_or(MSError::internal("Impossible to read updated at"))?;
|
|
|
|
Ok(())
|
|
|
|
})?;
|
|
|
|
|
|
|
|
Ok(created_index)
|
|
|
|
}
|
|
|
|
|
2020-12-15 13:05:01 +01:00
|
|
|
pub fn get_current_dump_info(&self) -> Option<DumpInfo> {
|
|
|
|
self.current_dump.lock().unwrap().clone()
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn set_current_dump_info(&self, dump_info: DumpInfo) {
|
|
|
|
self.current_dump.lock().unwrap().replace(dump_info);
|
|
|
|
}
|
|
|
|
|
2020-09-08 19:16:17 +02:00
|
|
|
pub fn get_or_create_index<F, R>(&self, uid: &str, f: F) -> Result<R, ResponseError>
|
|
|
|
where
|
|
|
|
F: FnOnce(&Index) -> Result<R, ResponseError>,
|
|
|
|
{
|
|
|
|
let mut index_has_been_created = false;
|
|
|
|
|
|
|
|
let index = match self.db.open_index(&uid) {
|
|
|
|
Some(index) => index,
|
|
|
|
None => {
|
|
|
|
index_has_been_created = true;
|
|
|
|
self.create_index(&uid)?
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
match f(&index) {
|
|
|
|
Ok(r) => Ok(r),
|
|
|
|
Err(err) => {
|
|
|
|
if index_has_been_created {
|
|
|
|
let _ = self.db.delete_index(&uid);
|
|
|
|
}
|
|
|
|
Err(err)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-10-31 15:00:36 +01:00
|
|
|
}
|