#![allow(rustdoc::private_intra_doc_links)]

#[macro_use]
pub mod error;
pub mod analytics;
#[macro_use]
pub mod extractors;
pub mod option;
pub mod routes;
pub mod search;

#[cfg(feature = "metrics")]
pub mod metrics;
#[cfg(feature = "metrics")]
pub mod route_metrics;

use std::{
    fs::File,
    io::{BufReader, BufWriter},
    path::Path,
    sync::{atomic::AtomicBool, Arc},
};

use actix_web::error::JsonPayloadError;
use actix_web::web::Data;
use actix_web::{web, HttpRequest};
use analytics::Analytics;
use anyhow::bail;
use error::PayloadError;
use extractors::payload::PayloadConfig;
use http::header::CONTENT_TYPE;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::{
    milli::{
        self,
        documents::{DocumentsBatchBuilder, DocumentsBatchReader},
        update::{IndexDocumentsConfig, IndexDocumentsMethod},
    },
    settings::apply_settings_to_builder,
};

use crate::error::MeilisearchHttpError;

pub use option::Opt;
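
/// Whether the scheduler is allowed to automatically batch several compatible
/// tasks together. Disabled by default.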
pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);

/// Check if a db is empty. It does not provide any information on the
/// validity of the data in it.
/// We consider a database as non empty when it's a non empty directory.
fn is_empty_db(db_path: impl AsRef<Path>) -> bool {
    let db_path = db_path.as_ref();

    if !db_path.exists() {
        true
    // if we encounter an error or if the db is a file we consider the db non empty
    } else if let Ok(dir) = db_path.read_dir() {
        dir.count() == 0
    } else {
        false
    }
}
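
// A minimal sketch of a unit test for `is_empty_db`. It assumes `tempfile`
// (already used below to buffer documents) is also usable from tests; the
// module and test names are ours.
#[cfg(test)]
mod is_empty_db_tests {
    use super::is_empty_db;

    #[test]
    fn empty_db_detection() {
        // a path that doesn't exist is considered empty
        assert!(is_empty_db("a/path/that/does/not/exist"));

        // an empty directory is considered empty
        let dir = tempfile::tempdir().unwrap();
        assert!(is_empty_db(dir.path()));

        // a directory containing anything is considered non empty
        std::fs::write(dir.path().join("data.mdb"), b"hello").unwrap();
        assert!(!is_empty_db(dir.path()));
    }
}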

// TODO: TAMO: Finish setting up things
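/// Builds the `IndexScheduler` and the `AuthController` out of the options.
/// When `opt.import_dump` is set, the dump is imported before anything else;
/// if either builder fails, the whole `data.ms` directory is removed before
/// the error is returned.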
pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(IndexScheduler, AuthController)> {
    // we don't want to create anything in the data.ms yet, thus we
    // wrap our two builders in a closure that'll be executed later.
    let auth_controller_builder = || AuthController::new(&opt.db_path, &opt.master_key);
    let index_scheduler_builder = || {
        IndexScheduler::new(
            opt.db_path.join("tasks"),
            opt.db_path.join("update_files"),
            opt.db_path.join("indexes"),
            opt.dumps_dir.clone(),
            opt.max_index_size.get_bytes() as usize,
            (&opt.indexer_options).try_into()?,
            true,
            #[cfg(test)]
            todo!("We'll see later"),
        )
    };
    let meilisearch_builder = || -> anyhow::Result<_> {
        // if anything goes wrong we delete the `data.ms` entirely.
        match (
            index_scheduler_builder().map_err(anyhow::Error::from),
            auth_controller_builder().map_err(anyhow::Error::from),
        ) {
            (Ok(i), Ok(a)) => Ok((i, a)),
            (Err(e), _) | (_, Err(e)) => {
                std::fs::remove_dir_all(&opt.db_path)?;
                Err(e)
            }
        }
    };

    let (index_scheduler, auth_controller) = if let Some(ref _path) = opt.import_snapshot {
        // handle the snapshot with something akin to the dumps
        // + the snapshot interval / spawning a thread
        todo!();
    } else if let Some(ref path) = opt.import_dump {
        let empty_db = is_empty_db(&opt.db_path);
        let src_path_exists = path.exists();

        if empty_db && src_path_exists {
            let (mut index_scheduler, mut auth_controller) = meilisearch_builder()?;
            import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller)?;
            (index_scheduler, auth_controller)
        } else if !empty_db && !opt.ignore_dump_if_db_exists {
            bail!(
                "database already exists at {:?}, try to delete it or rename it",
                opt.db_path.canonicalize().unwrap_or_else(|_| opt.db_path.to_owned())
            )
        } else if !src_path_exists && !opt.ignore_missing_dump {
            bail!("dump doesn't exist at {:?}", path)
        } else {
            // one of the `ignore_*` flags lets us skip the import
            meilisearch_builder()?
        }
    } else {
        meilisearch_builder()?
    };
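
    // To summarize the dump handling above:
    // - empty db and dump present                      => import the dump
    // - non-empty db and no `ignore_dump_if_db_exists` => error out
    // - missing dump and no `ignore_missing_dump`      => error out
    // - any other combination                          => skip the import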

    /*
    TODO: We should start a thread to handle the snapshots.

    meilisearch
        // snapshot
        .set_ignore_missing_snapshot(opt.ignore_missing_snapshot)
        .set_ignore_snapshot_if_db_exists(opt.ignore_snapshot_if_db_exists)
        .set_snapshot_interval(Duration::from_secs(opt.snapshot_interval_sec))
        .set_snapshot_dir(opt.snapshot_dir.clone());

    if let Some(ref path) = opt.import_snapshot {
        meilisearch.set_import_snapshot(path.clone());
    }

    if opt.schedule_snapshot {
        meilisearch.set_schedule_snapshot();
    }
    */

    Ok((index_scheduler, auth_controller))
}

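/// Imports the dump at `dump_path` into the database at `db_path`: first the
/// instance-uid and the API keys, then every index (primary key, settings,
/// documents), and finally the tasks. The tasks must come last so that the
/// scheduler cannot start processing them while the indexes are still being
/// imported.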
fn import_dump(
    db_path: &Path,
    dump_path: &Path,
    index_scheduler: &mut IndexScheduler,
    auth: &mut AuthController,
) -> Result<(), anyhow::Error> {
    let reader = File::open(dump_path)?;
    let mut dump_reader = dump::DumpReader::open(reader)?;

    if let Some(date) = dump_reader.date() {
        log::info!(
            "Importing a dump of meilisearch `{:?}` from the {}",
            dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
            date
        );
    } else {
        log::info!(
            "Importing a dump of meilisearch `{:?}`",
            dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
        );
    }

    let instance_uid = dump_reader.instance_uid()?;

    // 1. Import the instance-uid.
    if let Some(ref instance_uid) = instance_uid {
        // we don't want to panic if there is an error with the instance-uid.
        let _ = std::fs::write(db_path.join("instance-uid"), instance_uid.to_string().as_bytes());
    }

    // 2. Import the `Key`s.
    let mut keys = Vec::new();
    auth.raw_delete_all_keys()?;
    for key in dump_reader.keys()? {
        let key = key?;
        auth.raw_insert_key(key.clone())?;
        keys.push(key);
    }

    let indexer_config = index_scheduler.indexer_config().clone();

    // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
    // try to process tasks while we're trying to import the indexes.

    // 3. Import the indexes.
    for index_reader in dump_reader.indexes()? {
        let mut index_reader = index_reader?;
        let metadata = index_reader.metadata();
        log::info!("Importing index `{}`.", metadata.uid);
        let index = index_scheduler.create_raw_index(&metadata.uid)?;

        let mut wtxn = index.write_txn()?;

        let mut builder = milli::update::Settings::new(&mut wtxn, &index, &indexer_config);

        // 3.1 Import the primary key if there is one.
        if let Some(ref primary_key) = metadata.primary_key {
            builder.set_primary_key(primary_key.to_string());
        }

        // 3.2 Import the settings.
        log::info!("Importing the settings.");
        let settings = index_reader.settings()?;
        apply_settings_to_builder(&settings, &mut builder);
        builder.execute(|indexing_step| log::debug!("update: {:?}", indexing_step))?;

        // 3.3 Import the documents.
        // 3.3.1 We need to recreate the grenad+obkv format accepted by the index.
        log::info!("Importing the documents.");
        let file = tempfile::tempfile()?;
        let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
        for document in index_reader.documents()? {
            builder.append_json_object(&document?)?;
        }

        // This flushes the content of the batch builder.
        let file = builder.into_inner()?.into_inner()?;

        // 3.3.2 We feed it to the milli index.
        let reader = BufReader::new(file);
        let reader = DocumentsBatchReader::from_reader(reader)?;

        let builder = milli::update::IndexDocuments::new(
            &mut wtxn,
            &index,
            &indexer_config,
            IndexDocumentsConfig {
                update_method: IndexDocumentsMethod::ReplaceDocuments,
                ..Default::default()
            },
            |indexing_step| log::debug!("update: {:?}", indexing_step),
        )?;

        let (builder, user_result) = builder.add_documents(reader)?;
        log::info!("{} documents found.", user_result?);
        builder.execute()?;
        wtxn.commit()?;
        log::info!("All documents successfully imported.");
    }

    // 4. Import the tasks.
    for ret in dump_reader.tasks()? {
        let (task, file) = ret?;
        index_scheduler.register_dumped_task(task, file)?;
    }

    Ok(())
}
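
/// Registers the shared application state (index scheduler, auth controller,
/// analytics) and the payload configuration (JSON content-type checks, size
/// limits and error handlers) on the actix-web service config.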
pub fn configure_data(
    config: &mut web::ServiceConfig,
    index_scheduler: Data<IndexScheduler>,
    auth: AuthController,
    opt: &Opt,
    analytics: Arc<dyn Analytics>,
) {
    let http_payload_size_limit = opt.http_payload_size_limit.get_bytes() as usize;
    config
        .app_data(index_scheduler)
        .app_data(auth)
        .app_data(web::Data::from(analytics))
        .app_data(
            web::JsonConfig::default()
                .content_type(|mime| mime == mime::APPLICATION_JSON)
                .error_handler(|err, req: &HttpRequest| match err {
                    JsonPayloadError::ContentType => match req.headers().get(CONTENT_TYPE) {
                        Some(content_type) => MeilisearchHttpError::InvalidContentType(
                            content_type.to_str().unwrap_or("unknown").to_string(),
                            vec![mime::APPLICATION_JSON.to_string()],
                        )
                        .into(),
                        None => MeilisearchHttpError::MissingContentType(vec![
                            mime::APPLICATION_JSON.to_string(),
                        ])
                        .into(),
                    },
                    err => PayloadError::from(err).into(),
                }),
        )
        .app_data(PayloadConfig::new(http_payload_size_limit))
        .app_data(
            web::QueryConfig::default().error_handler(|err, _req| PayloadError::from(err).into()),
        );
}
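
/// Serves the static mini-dashboard assets, redirecting `index.html` to `/`;
/// when the frontend is disabled, `/` only reports that Meilisearch is running.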
#[cfg(feature = "mini-dashboard")]
|
|
|
|
pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
|
2021-06-23 13:55:16 +02:00
|
|
|
use actix_web::HttpResponse;
|
2022-01-21 21:44:17 +01:00
|
|
|
use static_files::Resource;
|
2021-04-21 13:49:21 +02:00
|
|
|
|
2021-06-23 14:48:33 +02:00
|
|
|
mod generated {
|
2021-06-23 13:21:48 +02:00
|
|
|
include!(concat!(env!("OUT_DIR"), "/generated.rs"));
|
|
|
|
}
|
2021-04-21 13:49:21 +02:00
|
|
|
|
2021-06-23 13:21:48 +02:00
|
|
|
if enable_frontend {
|
2021-06-23 14:48:33 +02:00
|
|
|
let generated = generated::generate();
|
2021-06-24 16:25:52 +02:00
|
|
|
// Generate routes for mini-dashboard assets
|
|
|
|
for (path, resource) in generated.into_iter() {
|
|
|
|
let Resource {
|
|
|
|
mime_type, data, ..
|
|
|
|
} = resource;
|
|
|
|
// Redirect index.html to /
|
|
|
|
if path == "index.html" {
|
2022-02-26 00:28:55 +01:00
|
|
|
config.service(web::resource("/").route(web::get().to(move || async move {
|
|
|
|
HttpResponse::Ok().content_type(mime_type).body(data)
|
|
|
|
})));
|
2021-06-24 16:25:52 +02:00
|
|
|
} else {
|
2022-02-26 00:28:55 +01:00
|
|
|
config.service(web::resource(path).route(web::get().to(move || async move {
|
|
|
|
HttpResponse::Ok().content_type(mime_type).body(data)
|
|
|
|
})));
|
2021-06-23 13:21:48 +02:00
|
|
|
}
|
2021-06-24 16:25:52 +02:00
|
|
|
}
|
2021-06-23 13:21:48 +02:00
|
|
|
} else {
|
2021-06-24 19:02:28 +02:00
|
|
|
config.service(web::resource("/").route(web::get().to(routes::running)));
|
2021-06-23 13:21:48 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
#[cfg(not(feature = "mini-dashboard"))]
|
|
|
|
pub fn dashboard(config: &mut web::ServiceConfig, _enable_frontend: bool) {
|
2021-06-24 19:02:28 +02:00
|
|
|
config.service(web::resource("/").route(web::get().to(routes::running)));
|
2021-06-23 13:21:48 +02:00
|
|
|
}
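
/// Exposes the `GET /metrics` route when the `metrics` feature is compiled in
/// and the route is enabled at runtime.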
#[cfg(feature = "metrics")]
|
2022-08-17 13:44:55 +02:00
|
|
|
pub fn configure_metrics_route(config: &mut web::ServiceConfig, enable_metrics_route: bool) {
|
|
|
|
if enable_metrics_route {
|
2022-08-29 12:36:54 +02:00
|
|
|
config.service(
|
|
|
|
web::resource("/metrics").route(web::get().to(crate::route_metrics::get_metrics)),
|
|
|
|
);
|
2022-08-17 13:44:55 +02:00
|
|
|
}
|
|
|
|
}
|