MeiliSearch/meilisearch-http/src/data.rs

use std::collections::HashMap;
use std::ops::Deref;
use std::sync::Arc;

use chrono::{DateTime, Utc};
use heed::types::{SerdeBincode, Str};
use log::error;
use meilisearch_core::{Database, DatabaseOptions, Error as MError, MResult, MainT, UpdateT};
use sha2::Digest;
use sysinfo::Pid;

use crate::index_update_callback;
use crate::option::Opt;

const LAST_UPDATE_KEY: &str = "last-update";

type SerdeDatetime = SerdeBincode<DateTime<Utc>>;
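
/// Handle over the shared server state; cloning it only clones the inner `Arc`,
/// and it derefs to `DataInner`.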
#[derive(Clone)]
pub struct Data {
    inner: Arc<DataInner>,
}

impl Deref for Data {
    type Target = DataInner;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
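
/// State shared by the HTTP handlers: the database handle, its path on disk,
/// the API keys, the server PID and the HTTP payload size limit.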
#[derive(Clone)]
pub struct DataInner {
    pub db: Arc<Database>,
    pub db_path: String,
    pub api_keys: ApiKeys,
    pub server_pid: Pid,
    pub http_payload_size_limit: usize,
}
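
/// The public, private and master API keys known to the server.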
#[derive(Clone)]
pub struct ApiKeys {
    pub public: Option<String>,
    pub private: Option<String>,
    pub master: Option<String>,
}

impl ApiKeys {
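    /// Derives the private and public keys from the master key when they are
    /// missing, by hashing "{master}-private" and "{master}-public" with SHA-256.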
    pub fn generate_missing_api_keys(&mut self) {
        if let Some(master_key) = &self.master {
            if self.private.is_none() {
                let key = format!("{}-private", master_key);
                let sha = sha2::Sha256::digest(key.as_bytes());
                self.private = Some(format!("{:x}", sha));
            }
            if self.public.is_none() {
                let key = format!("{}-public", master_key);
                let sha = sha2::Sha256::digest(key.as_bytes());
                self.public = Some(format!("{:x}", sha));
            }
        }
    }
}

impl DataInner {
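    /// Returns whether the given index currently has an update being processed,
    /// or `None` when the index does not exist.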
    pub fn is_indexing(&self, reader: &heed::RoTxn<UpdateT>, index: &str) -> MResult<Option<bool>> {
        match self.db.open_index(&index) {
            Some(index) => index.current_update_id(&reader).map(|u| Some(u.is_some())),
            None => Ok(None),
        }
    }

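    /// Reads the date of the last update from the common store.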
    pub fn last_update(&self, reader: &heed::RoTxn<MainT>) -> MResult<Option<DateTime<Utc>>> {
        match self
            .db
            .common_store()
            .get::<_, Str, SerdeDatetime>(reader, LAST_UPDATE_KEY)?
        {
            Some(datetime) => Ok(Some(datetime)),
            None => Ok(None),
        }
    }

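    /// Writes the current date as the last update time into the common store.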
    pub fn set_last_update(&self, writer: &mut heed::RwTxn<MainT>) -> MResult<()> {
        self.db
            .common_store()
            .put::<_, Str, SerdeDatetime>(writer, LAST_UPDATE_KEY, &Utc::now())
            .map_err(Into::into)
    }

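    /// Recomputes the fields distribution of the given index (how many documents
    /// contain each field) and stores it in the index main store.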
    pub fn compute_stats(&self, writer: &mut heed::RwTxn<MainT>, index_uid: &str) -> MResult<()> {
        let index = match self.db.open_index(&index_uid) {
            Some(index) => index,
            None => {
                error!("Impossible to retrieve index {}", index_uid);
                return Ok(());
            }
        };

        let schema = match index.main.schema(&writer)? {
            Some(schema) => schema,
            None => return Ok(()),
        };

        let all_documents_fields = index
            .documents_fields_counts
            .all_documents_fields_counts(&writer)?;

        // count fields frequencies
        let mut fields_distribution = HashMap::<_, usize>::new();
        for result in all_documents_fields {
            let (_, attr, _) = result?;
            if let Some(field_id) = schema.indexed_pos_to_field_id(attr) {
                *fields_distribution.entry(field_id).or_default() += 1;
            }
        }

        // convert attributes to their names
        let distribution: HashMap<_, _> = fields_distribution
            .into_iter()
            .filter_map(|(a, c)| schema.name(a).map(|name| (name.to_string(), c)))
            .collect();

        index
            .main
            .put_fields_distribution(writer, &distribution)
            .map_err(MError::Zlmdb)
    }
}

impl Data {
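    /// Builds the shared state from the command line options: opens or creates
    /// the database, derives any missing API keys and registers the update callback.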
    pub fn new(opt: Opt) -> Data {
        let db_path = opt.db_path.clone();
        let server_pid = sysinfo::get_current_pid().unwrap();

        let db_opt = DatabaseOptions {
            main_map_size: opt.main_map_size,
            update_map_size: opt.update_map_size,
        };

        let http_payload_size_limit = opt.http_payload_size_limit;

        let db = Arc::new(Database::open_or_create(opt.db_path, db_opt).unwrap());

        let mut api_keys = ApiKeys {
            master: opt.master_key,
            private: None,
            public: None,
        };

        api_keys.generate_missing_api_keys();

        let inner_data = DataInner {
            db: db.clone(),
            db_path,
            api_keys,
            server_pid,
            http_payload_size_limit,
        };

        let data = Data {
            inner: Arc::new(inner_data),
        };

        let callback_context = data.clone();
        db.set_update_callback(Box::new(move |index_uid, status| {
            index_update_callback(&index_uid, &callback_context, status);
        }));

        data
    }
}