2022-01-12 15:35:33 +01:00
|
|
|
use meilisearch_auth::SearchRules;
|
2021-05-10 20:25:09 +02:00
|
|
|
use std::collections::BTreeMap;
|
2021-09-30 11:29:27 +02:00
|
|
|
use std::fmt;
|
2021-12-02 16:03:26 +01:00
|
|
|
use std::io::Cursor;
|
2021-09-14 18:39:02 +02:00
|
|
|
use std::path::{Path, PathBuf};
|
2022-06-09 01:56:01 +02:00
|
|
|
use std::str::FromStr;
|
2021-03-06 12:57:56 +01:00
|
|
|
use std::sync::Arc;
|
|
|
|
use std::time::Duration;
|
2021-03-04 12:03:06 +01:00
|
|
|
|
2021-09-14 18:39:02 +02:00
|
|
|
use actix_web::error::PayloadError;
|
|
|
|
use bytes::Bytes;
|
|
|
|
use futures::Stream;
|
2021-12-02 16:03:26 +01:00
|
|
|
use futures::StreamExt;
|
2021-09-14 18:39:02 +02:00
|
|
|
use milli::update::IndexDocumentsMethod;
|
2021-03-15 18:11:10 +01:00
|
|
|
use serde::{Deserialize, Serialize};
|
2022-02-14 15:32:41 +01:00
|
|
|
use time::OffsetDateTime;
|
2022-05-19 14:44:24 +02:00
|
|
|
use tokio::sync::RwLock;
|
2021-09-24 11:53:11 +02:00
|
|
|
use tokio::task::spawn_blocking;
|
2021-03-06 12:57:56 +01:00
|
|
|
use tokio::time::sleep;
|
2021-03-18 09:09:26 +01:00
|
|
|
use uuid::Uuid;
|
2021-01-16 15:09:48 +01:00
|
|
|
|
2021-12-02 16:03:26 +01:00
|
|
|
use crate::document_formats::{read_csv, read_json, read_ndjson};
|
2022-05-19 20:18:43 +02:00
|
|
|
use crate::dump::{self, load_dump, DumpHandler};
|
2021-09-28 22:22:59 +02:00
|
|
|
use crate::index::{
|
|
|
|
Checked, Document, IndexMeta, IndexStats, SearchQuery, SearchResult, Settings, Unchecked,
|
|
|
|
};
|
2022-06-09 01:56:01 +02:00
|
|
|
use crate::index_resolver::error::IndexResolverError;
|
2022-01-19 11:21:19 +01:00
|
|
|
use crate::options::{IndexerOpts, SchedulerConfig};
|
2021-12-02 16:03:26 +01:00
|
|
|
use crate::snapshot::{load_snapshot, SnapshotService};
|
|
|
|
use crate::tasks::error::TaskError;
|
|
|
|
use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskId};
|
2022-05-23 16:30:06 +02:00
|
|
|
use crate::tasks::{
|
|
|
|
BatchHandler, EmptyBatchHandler, Scheduler, SnapshotHandler, TaskFilter, TaskStore,
|
|
|
|
};
|
2021-06-23 14:48:33 +02:00
|
|
|
use error::Result;
|
2021-04-01 16:44:42 +02:00
|
|
|
|
2021-12-02 16:03:26 +01:00
|
|
|
use self::error::IndexControllerError;
|
|
|
|
use crate::index_resolver::index_store::{IndexStore, MapIndexStore};
|
|
|
|
use crate::index_resolver::meta_store::{HeedMetaStore, IndexMetaStore};
|
2022-05-17 16:08:23 +02:00
|
|
|
pub use crate::index_resolver::IndexUid;
|
|
|
|
use crate::index_resolver::{create_index_resolver, IndexResolver};
|
2021-12-02 16:03:26 +01:00
|
|
|
use crate::update_file_store::UpdateFileStore;
|
2021-05-27 14:30:20 +02:00
|
|
|
|
2021-06-15 17:39:07 +02:00
|
|
|
pub mod error;
|
2021-10-30 10:15:00 +02:00
|
|
|
pub mod versioning;
|
2021-09-14 18:39:02 +02:00
|
|
|
|
2021-10-06 13:01:02 +02:00
|
|
|
/// Concrete implementation of the IndexController, exposed by meilisearch-lib
pub type MeiliSearch = IndexController<HeedMetaStore, MapIndexStore>;

/// Boxed byte stream used to feed raw document payloads into the controller.
/// `Unpin + Send + Sync + 'static` so it can cross task boundaries.
pub type Payload = Box<
    dyn Stream<Item = std::result::Result<Bytes, PayloadError>> + Send + Sync + 'static + Unpin,
>;
|
2021-02-01 19:51:47 +01:00
|
|
|
|
2022-03-16 13:45:58 +01:00
|
|
|
/// Opens (or creates) the heed environment that stores the controller metadata.
///
/// `size` is the memory-map size in bytes. The environment allows up to 20
/// named databases, matching the stores created by this crate.
pub fn open_meta_env(path: &Path, size: usize) -> milli::heed::Result<milli::heed::Env> {
    let mut env_options = milli::heed::EnvOpenOptions::new();
    env_options.max_dbs(20);
    env_options.map_size(size);
    env_options.open(path)
}
|
|
|
|
|
2021-02-03 17:44:20 +01:00
|
|
|
/// Public description of an index: its uid plus the per-index metadata.
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct IndexMetadata {
    // Internal identifier, never serialized in API responses.
    #[serde(skip)]
    pub uuid: Uuid,
    // Human-readable unique identifier of the index.
    pub uid: String,
    // The `IndexMeta` fields are inlined into this object when serialized.
    #[serde(flatten)]
    pub meta: IndexMeta,
}
|
|
|
|
|
2021-02-09 16:08:13 +01:00
|
|
|
/// Settings used when creating or updating an index.
#[derive(Clone, Debug)]
pub struct IndexSettings {
    // Optional uid; `None` lets the caller decide / defer the naming.
    pub uid: Option<String>,
    // Optional primary key for documents of the index.
    pub primary_key: Option<String>,
}
|
2021-03-04 12:03:06 +01:00
|
|
|
|
2021-12-02 16:03:26 +01:00
|
|
|
/// Orchestrates indexes, tasks and update files. Generic over the index
/// meta store (`U`) and the index store (`I`).
pub struct IndexController<U, I> {
    // Resolves index uids to concrete index handles.
    pub index_resolver: Arc<IndexResolver<U, I>>,
    // Shared scheduler driving task batches; guarded by an async RwLock.
    scheduler: Arc<RwLock<Scheduler>>,
    // Persistent store of registered tasks.
    task_store: TaskStore,
    // Store for raw document payload files awaiting processing.
    pub update_file_store: UpdateFileStore,
}
|
|
|
|
|
|
|
|
/// Need a custom implementation for clone because deriving require that U and I are clone.
|
|
|
|
impl<U, I> Clone for IndexController<U, I> {
|
|
|
|
fn clone(&self) -> Self {
|
|
|
|
Self {
|
|
|
|
index_resolver: self.index_resolver.clone(),
|
2022-01-19 11:21:19 +01:00
|
|
|
scheduler: self.scheduler.clone(),
|
2021-12-02 16:03:26 +01:00
|
|
|
update_file_store: self.update_file_store.clone(),
|
2022-01-19 11:21:19 +01:00
|
|
|
task_store: self.task_store.clone(),
|
2021-12-02 16:03:26 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-22 15:07:04 +02:00
|
|
|
/// Supported payload formats for document additions.
#[derive(Debug)]
pub enum DocumentAdditionFormat {
    Json,
    Csv,
    Ndjson,
}
|
|
|
|
|
2021-09-30 11:29:27 +02:00
|
|
|
impl fmt::Display for DocumentAdditionFormat {
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
|
|
match self {
|
|
|
|
DocumentAdditionFormat::Json => write!(f, "json"),
|
|
|
|
DocumentAdditionFormat::Ndjson => write!(f, "ndjson"),
|
|
|
|
DocumentAdditionFormat::Csv => write!(f, "csv"),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-06-23 12:18:34 +02:00
|
|
|
/// Aggregated statistics over all (authorized) indexes.
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Stats {
    // Sum of the sizes of all indexes, in bytes.
    pub database_size: u64,
    // Most recent `updated_at` across indexes; `None` when no index exists.
    #[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
    pub last_update: Option<OffsetDateTime>,
    // Per-index statistics, keyed by index uid (sorted map for stable output).
    pub indexes: BTreeMap<String, IndexStats>,
}
|
|
|
|
|
2021-09-28 22:22:59 +02:00
|
|
|
/// High-level update operations accepted by `IndexController::register_update`,
/// later translated into `TaskContent` entries.
#[allow(clippy::large_enum_variant)]
#[derive(derivative::Derivative)]
#[derivative(Debug)]
pub enum Update {
    // Delete the documents with the given external ids.
    DeleteDocuments(Vec<String>),
    // Remove every document from the index.
    ClearDocuments,
    // Apply (or reset) index settings.
    Settings {
        settings: Settings<Unchecked>,
        /// Indicates whether the update was a deletion
        is_deletion: bool,
        allow_index_creation: bool,
    },
    // Add or replace documents from a raw payload stream.
    DocumentAddition {
        // Payload bytes are skipped in Debug output (can be huge / binary).
        #[derivative(Debug = "ignore")]
        payload: Payload,
        primary_key: Option<String>,
        method: IndexDocumentsMethod,
        format: DocumentAdditionFormat,
        allow_index_creation: bool,
    },
    // Delete the whole index.
    DeleteIndex,
    // Create the index, optionally with a primary key.
    CreateIndex {
        primary_key: Option<String>,
    },
    // Update index-level metadata (primary key).
    UpdateIndex {
        primary_key: Option<String>,
    },
}
|
|
|
|
|
2021-09-21 13:23:22 +02:00
|
|
|
/// Builder for `MeiliSearch` / `IndexController`. All fields default to
/// `None`/`false`; `build` validates that the mandatory ones were set.
#[derive(Default, Debug)]
pub struct IndexControllerBuilder {
    // Mandatory: maximum size of each index, in bytes.
    max_index_size: Option<usize>,
    // Mandatory: maximum size of the task store, in bytes.
    max_task_store_size: Option<usize>,
    // Directory where scheduled snapshots are written.
    snapshot_dir: Option<PathBuf>,
    // Snapshot to load the database from at startup, if any.
    import_snapshot: Option<PathBuf>,
    // Interval between two scheduled snapshots.
    snapshot_interval: Option<Duration>,
    ignore_snapshot_if_db_exists: bool,
    ignore_missing_snapshot: bool,
    // Whether to run the periodic snapshot service.
    schedule_snapshot: bool,
    // Dump to load the database from at startup, if any.
    dump_src: Option<PathBuf>,
    // Mandatory: directory where dumps are written.
    dump_dst: Option<PathBuf>,
    ignore_dump_if_db_exists: bool,
    ignore_missing_dump: bool,
}
|
2021-03-17 12:01:56 +01:00
|
|
|
|
2021-09-21 13:23:22 +02:00
|
|
|
impl IndexControllerBuilder {
    /// Consumes the builder and assembles a running `MeiliSearch` instance at
    /// `db_path`: optionally restores a snapshot or dump, opens the metadata
    /// environment, wires resolver/task store/scheduler, and (optionally)
    /// spawns the periodic snapshot service on the local task set.
    ///
    /// # Errors
    /// Fails when a mandatory builder field (`max_index_size`,
    /// `max_task_store_size`, `dump_dst`) is missing, or on any I/O /
    /// store-initialization error.
    pub fn build(
        self,
        db_path: impl AsRef<Path>,
        indexer_options: IndexerOpts,
        scheduler_config: SchedulerConfig,
    ) -> anyhow::Result<MeiliSearch> {
        let index_size = self
            .max_index_size
            .ok_or_else(|| anyhow::anyhow!("Missing index size"))?;
        let task_store_size = self
            .max_task_store_size
            .ok_or_else(|| anyhow::anyhow!("Missing update database size"))?;

        // Startup data source: snapshot takes precedence over dump; otherwise
        // verify the version of a pre-existing database.
        if let Some(ref path) = self.import_snapshot {
            log::info!("Loading from snapshot {:?}", path);
            load_snapshot(
                db_path.as_ref(),
                path,
                self.ignore_snapshot_if_db_exists,
                self.ignore_missing_snapshot,
            )?;
        } else if let Some(ref src_path) = self.dump_src {
            load_dump(
                db_path.as_ref(),
                src_path,
                self.ignore_dump_if_db_exists,
                self.ignore_missing_dump,
                index_size,
                task_store_size,
                &indexer_options,
            )?;
        } else if db_path.as_ref().exists() {
            // Directory could be pre-created without any database in.
            let db_is_empty = db_path.as_ref().read_dir()?.next().is_none();
            if !db_is_empty {
                versioning::check_version_file(db_path.as_ref())?;
            }
        }

        std::fs::create_dir_all(db_path.as_ref())?;

        // Shared heed environment holding task/index metadata.
        let meta_env = Arc::new(open_meta_env(db_path.as_ref(), task_store_size)?);

        let update_file_store = UpdateFileStore::new(&db_path)?;
        // Create or overwrite the version file for this DB
        versioning::create_version_file(db_path.as_ref())?;

        let index_resolver = Arc::new(create_index_resolver(
            &db_path,
            index_size,
            &indexer_options,
            meta_env.clone(),
            update_file_store.clone(),
        )?);

        let dump_path = self
            .dump_dst
            .ok_or_else(|| anyhow::anyhow!("Missing dump directory path"))?;

        let dump_handler = Arc::new(DumpHandler::new(
            dump_path,
            db_path.as_ref().into(),
            update_file_store.clone(),
            task_store_size,
            index_size,
            meta_env.clone(),
            index_resolver.clone(),
        ));
        let task_store = TaskStore::new(meta_env)?;

        // register all the batch handlers for use with the scheduler.
        let handlers: Vec<Arc<dyn BatchHandler + Sync + Send + 'static>> = vec![
            index_resolver.clone(),
            dump_handler,
            Arc::new(SnapshotHandler),
            // dummy handler to catch all empty batches
            Arc::new(EmptyBatchHandler),
        ];
        let scheduler = Scheduler::new(task_store.clone(), handlers, scheduler_config)?;

        if self.schedule_snapshot {
            let snapshot_period = self
                .snapshot_interval
                .ok_or_else(|| anyhow::anyhow!("Snapshot interval not provided."))?;
            let snapshot_path = self
                .snapshot_dir
                .ok_or_else(|| anyhow::anyhow!("Snapshot path not provided."))?;

            let snapshot_service = SnapshotService {
                db_path: db_path.as_ref().to_path_buf(),
                snapshot_period,
                snapshot_path,
                index_size,
                meta_env_size: task_store_size,
                scheduler: scheduler.clone(),
            };

            // NOTE(review): spawn_local — the snapshot service is expected to
            // run on a LocalSet; confirm against the caller's runtime setup.
            tokio::task::spawn_local(snapshot_service.run());
        }

        Ok(IndexController {
            index_resolver,
            scheduler,
            update_file_store,
            task_store,
        })
    }

    /// Set the index controller builder's max update store size.
    pub fn set_max_task_store_size(&mut self, max_update_store_size: usize) -> &mut Self {
        self.max_task_store_size.replace(max_update_store_size);
        self
    }

    /// Set the index controller builder's max index size.
    pub fn set_max_index_size(&mut self, size: usize) -> &mut Self {
        self.max_index_size.replace(size);
        self
    }

    /// Set the index controller builder's snapshot path.
    pub fn set_snapshot_dir(&mut self, snapshot_dir: PathBuf) -> &mut Self {
        self.snapshot_dir.replace(snapshot_dir);
        self
    }

    /// Set the index controller builder's ignore snapshot if db exists.
    pub fn set_ignore_snapshot_if_db_exists(
        &mut self,
        ignore_snapshot_if_db_exists: bool,
    ) -> &mut Self {
        self.ignore_snapshot_if_db_exists = ignore_snapshot_if_db_exists;
        self
    }

    /// Set the index controller builder's ignore missing snapshot.
    pub fn set_ignore_missing_snapshot(&mut self, ignore_missing_snapshot: bool) -> &mut Self {
        self.ignore_missing_snapshot = ignore_missing_snapshot;
        self
    }

    /// Set the index controller builder's import snapshot.
    pub fn set_import_snapshot(&mut self, import_snapshot: PathBuf) -> &mut Self {
        self.import_snapshot.replace(import_snapshot);
        self
    }

    /// Set the index controller builder's snapshot interval sec.
    pub fn set_snapshot_interval(&mut self, snapshot_interval: Duration) -> &mut Self {
        self.snapshot_interval = Some(snapshot_interval);
        self
    }

    /// Set the index controller builder's schedule snapshot.
    pub fn set_schedule_snapshot(&mut self) -> &mut Self {
        self.schedule_snapshot = true;
        self
    }

    /// Set the index controller builder's dump src.
    pub fn set_dump_src(&mut self, dump_src: PathBuf) -> &mut Self {
        self.dump_src.replace(dump_src);
        self
    }

    /// Set the index controller builder's dump dst.
    pub fn set_dump_dst(&mut self, dump_dst: PathBuf) -> &mut Self {
        self.dump_dst.replace(dump_dst);
        self
    }

    /// Set the index controller builder's ignore dump if db exists.
    pub fn set_ignore_dump_if_db_exists(&mut self, ignore_dump_if_db_exists: bool) -> &mut Self {
        self.ignore_dump_if_db_exists = ignore_dump_if_db_exists;
        self
    }

    /// Set the index controller builder's ignore missing dump.
    pub fn set_ignore_missing_dump(&mut self, ignore_missing_dump: bool) -> &mut Self {
        self.ignore_missing_dump = ignore_missing_dump;
        self
    }
}
|
|
|
|
|
2021-12-02 16:03:26 +01:00
|
|
|
impl<U, I> IndexController<U, I>
where
    U: IndexMetaStore,
    I: IndexStore,
{
    /// Returns a fresh builder with every option unset.
    pub fn builder() -> IndexControllerBuilder {
        IndexControllerBuilder::default()
    }

    /// Translates an `Update` for index `uid` into a `TaskContent`, registers
    /// it in the task store, wakes up the scheduler, and returns the task.
    ///
    /// For `DocumentAddition` the whole payload is buffered in memory, then
    /// parsed and persisted to the update file store on a blocking thread.
    /// An empty payload is rejected with `MissingPayload`.
    pub async fn register_update(&self, uid: String, update: Update) -> Result<Task> {
        let index_uid = IndexUid::from_str(&uid).map_err(IndexResolverError::from)?;
        let content = match update {
            Update::DeleteDocuments(ids) => TaskContent::DocumentDeletion {
                index_uid,
                deletion: DocumentDeletion::Ids(ids),
            },
            Update::ClearDocuments => TaskContent::DocumentDeletion {
                index_uid,
                deletion: DocumentDeletion::Clear,
            },
            Update::Settings {
                settings,
                is_deletion,
                allow_index_creation,
            } => TaskContent::SettingsUpdate {
                settings,
                is_deletion,
                allow_index_creation,
                index_uid,
            },
            Update::DocumentAddition {
                mut payload,
                primary_key,
                format,
                method,
                allow_index_creation,
            } => {
                // Drain the whole payload stream into memory first; any stream
                // error aborts the registration.
                let mut buffer = Vec::new();
                while let Some(bytes) = payload.next().await {
                    let bytes = bytes?;
                    buffer.extend_from_slice(&bytes);
                }
                let (content_uuid, mut update_file) = self.update_file_store.new_update()?;
                // Parsing/persisting is CPU/disk bound: run it off the async
                // executor.
                let documents_count = tokio::task::spawn_blocking(move || -> Result<_> {
                    // check if the payload is empty, and return an error
                    if buffer.is_empty() {
                        return Err(IndexControllerError::MissingPayload(format));
                    }

                    let reader = Cursor::new(buffer);
                    let count = match format {
                        DocumentAdditionFormat::Json => read_json(reader, &mut *update_file)?,
                        DocumentAdditionFormat::Csv => read_csv(reader, &mut *update_file)?,
                        DocumentAdditionFormat::Ndjson => read_ndjson(reader, &mut *update_file)?,
                    };

                    update_file.persist()?;

                    Ok(count)
                })
                .await??;

                TaskContent::DocumentAddition {
                    content_uuid,
                    merge_strategy: method,
                    primary_key,
                    documents_count,
                    allow_index_creation,
                    index_uid,
                }
            }
            Update::DeleteIndex => TaskContent::IndexDeletion { index_uid },
            Update::CreateIndex { primary_key } => TaskContent::IndexCreation {
                primary_key,
                index_uid,
            },
            Update::UpdateIndex { primary_key } => TaskContent::IndexUpdate {
                primary_key,
                index_uid,
            },
        };

        let task = self.task_store.register(content).await?;
        // Nudge the scheduler so the new task gets processed promptly.
        self.scheduler.read().await.notify();

        Ok(task)
    }

    /// Registers a dump task with a freshly generated uid and wakes up the
    /// scheduler.
    pub async fn register_dump_task(&self) -> Result<Task> {
        let uid = dump::generate_uid();
        let content = TaskContent::Dump { uid };
        let task = self.task_store.register(content).await?;
        self.scheduler.read().await.notify();
        Ok(task)
    }

    /// Fetches a task by id, optionally restricted by `filter`.
    pub async fn get_task(&self, id: TaskId, filter: Option<TaskFilter>) -> Result<Task> {
        let task = self.scheduler.read().await.get_task(id, filter).await?;
        Ok(task)
    }

    /// Fetches a task scoped to `index_uid`. Tasks older than the index's
    /// creation task are reported as unexisting.
    pub async fn get_index_task(&self, index_uid: String, task_id: TaskId) -> Result<Task> {
        let creation_task_id = self
            .index_resolver
            .get_index_creation_task_id(index_uid.clone())
            .await?;
        if task_id < creation_task_id {
            return Err(TaskError::UnexistingTask(task_id).into());
        }

        let mut filter = TaskFilter::default();
        filter.filter_index(index_uid);
        let task = self
            .scheduler
            .read()
            .await
            .get_task(task_id, Some(filter))
            .await?;

        Ok(task)
    }

    /// Lists tasks, optionally filtered, paginated by `offset`/`limit`.
    pub async fn list_tasks(
        &self,
        filter: Option<TaskFilter>,
        limit: Option<usize>,
        offset: Option<TaskId>,
    ) -> Result<Vec<Task>> {
        let tasks = self
            .scheduler
            .read()
            .await
            .list_tasks(offset, filter, limit)
            .await?;

        Ok(tasks)
    }

    /// Lists the tasks of a single index. The offset is shifted by the
    /// index's creation task id so pagination starts at the index's history.
    pub async fn list_index_task(
        &self,
        index_uid: String,
        limit: Option<usize>,
        offset: Option<TaskId>,
    ) -> Result<Vec<Task>> {
        let task_id = self
            .index_resolver
            .get_index_creation_task_id(index_uid.clone())
            .await?;

        let mut filter = TaskFilter::default();
        filter.filter_index(index_uid);

        let tasks = self
            .scheduler
            .read()
            .await
            .list_tasks(
                Some(offset.unwrap_or_default() + task_id),
                Some(filter),
                limit,
            )
            .await?;

        Ok(tasks)
    }

    /// Returns the metadata of every index known to the resolver.
    pub async fn list_indexes(&self) -> Result<Vec<IndexMetadata>> {
        let indexes = self.index_resolver.list().await?;
        let mut ret = Vec::new();
        for (uid, index) in indexes {
            let meta = index.meta()?;
            let meta = IndexMetadata {
                uuid: index.uuid(),
                uid,
                meta,
            };
            ret.push(meta);
        }

        Ok(ret)
    }

    /// Returns the checked settings of index `uid`.
    pub async fn settings(&self, uid: String) -> Result<Settings<Checked>> {
        let index = self.index_resolver.get_index(uid).await?;
        // Settings retrieval hits the index storage: run it on a blocking
        // thread.
        let settings = spawn_blocking(move || index.settings()).await??;
        Ok(settings)
    }

    /// Return the total number of documents contained in the index + the selected documents.
    pub async fn documents(
        &self,
        uid: String,
        offset: usize,
        limit: usize,
        attributes_to_retrieve: Option<Vec<String>>,
    ) -> Result<(u64, Vec<Document>)> {
        let index = self.index_resolver.get_index(uid).await?;
        let result =
            spawn_blocking(move || index.retrieve_documents(offset, limit, attributes_to_retrieve))
                .await??;
        Ok(result)
    }

    /// Fetches a single document by external id from index `uid`.
    pub async fn document(
        &self,
        uid: String,
        doc_id: String,
        attributes_to_retrieve: Option<Vec<String>>,
    ) -> Result<Document> {
        let index = self.index_resolver.get_index(uid).await?;
        let document =
            spawn_blocking(move || index.retrieve_document(doc_id, attributes_to_retrieve))
                .await??;
        Ok(document)
    }

    /// Runs `query` against index `uid` on a blocking thread.
    pub async fn search(&self, uid: String, query: SearchQuery) -> Result<SearchResult> {
        let index = self.index_resolver.get_index(uid).await?;
        let result = spawn_blocking(move || index.perform_search(query)).await??;
        Ok(result)
    }

    /// Returns the metadata of index `uid`.
    pub async fn get_index(&self, uid: String) -> Result<IndexMetadata> {
        let index = self.index_resolver.get_index(uid.clone()).await?;
        let uuid = index.uuid();
        let meta = spawn_blocking(move || index.meta()).await??;
        let meta = IndexMetadata { uuid, uid, meta };
        Ok(meta)
    }

    /// Returns the stats of index `uid`, flagging whether the task currently
    /// being processed (if any) targets this index.
    pub async fn get_index_stats(&self, uid: String) -> Result<IndexStats> {
        let processing_tasks = self.scheduler.read().await.get_processing_tasks().await?;
        // Check if the currently indexing update is from our index.
        let is_indexing = processing_tasks
            .first()
            .map_or(false, |task| task.index_uid().map_or(false, |u| u == uid));

        let index = self.index_resolver.get_index(uid).await?;
        let mut stats = spawn_blocking(move || index.stats()).await??;
        stats.is_indexing = Some(is_indexing);

        Ok(stats)
    }

    /// Aggregates stats over every index the caller's `search_rules`
    /// authorize: total database size, latest update time, per-index stats.
    pub async fn get_all_stats(&self, search_rules: &SearchRules) -> Result<Stats> {
        let mut last_task: Option<OffsetDateTime> = None;
        let mut indexes = BTreeMap::new();
        let mut database_size = 0;
        let processing_tasks = self.scheduler.read().await.get_processing_tasks().await?;

        for (index_uid, index) in self.index_resolver.list().await? {
            // Skip indexes the API key is not allowed to see.
            if !search_rules.is_index_authorized(&index_uid) {
                continue;
            }

            let (mut stats, meta) =
                spawn_blocking::<_, Result<(IndexStats, IndexMeta)>>(move || {
                    Ok((index.stats()?, index.meta()?))
                })
                .await??;

            database_size += stats.size;

            // Keep the most recent update timestamp across all indexes.
            last_task = last_task.map_or(Some(meta.updated_at), |last| {
                Some(last.max(meta.updated_at))
            });

            // Check if the currently indexing update is from our index.
            stats.is_indexing = processing_tasks
                .first()
                .and_then(|p| p.index_uid().map(|u| u == index_uid))
                .or(Some(false));

            indexes.insert(index_uid, stats);
        }

        Ok(Stats {
            database_size,
            last_update: last_task,
            indexes,
        })
    }
}
|
2021-03-06 12:57:56 +01:00
|
|
|
|
|
|
|
pub async fn get_arc_ownership_blocking<T>(mut item: Arc<T>) -> T {
|
|
|
|
loop {
|
|
|
|
match Arc::try_unwrap(item) {
|
|
|
|
Ok(item) => return item,
|
|
|
|
Err(item_arc) => {
|
|
|
|
item = item_arc;
|
|
|
|
sleep(Duration::from_millis(100)).await;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-10-06 13:01:02 +02:00
|
|
|
|
|
|
|
#[cfg(test)]
mod test {
    use futures::future::ok;
    use mockall::predicate::eq;
    use nelson::Mocker;

    use crate::index::error::Result as IndexResult;
    use crate::index::Index;
    use crate::index::{
        DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
    };
    use crate::index_resolver::index_store::MockIndexStore;
    use crate::index_resolver::meta_store::MockIndexMetaStore;
    use crate::index_resolver::IndexResolver;

    use super::*;

    impl IndexController<MockIndexMetaStore, MockIndexStore> {
        /// Builds a controller from pre-constructed (usually mocked) parts,
        /// bypassing `IndexControllerBuilder::build`.
        pub fn mock(
            index_resolver: Arc<IndexResolver<MockIndexMetaStore, MockIndexStore>>,
            task_store: TaskStore,
            update_file_store: UpdateFileStore,
            scheduler: Arc<RwLock<Scheduler>>,
        ) -> Self {
            IndexController {
                index_resolver,
                task_store,
                update_file_store,
                scheduler,
            }
        }
    }

    // End-to-end check that `IndexController::search` resolves the index by
    // uid, forwards the query to `Index::perform_search`, and returns its
    // result unchanged.
    #[actix_rt::test]
    async fn test_search_simple() {
        let index_uid = "test";
        let index_uuid = Uuid::new_v4();
        // The query the controller is expected to forward verbatim.
        let query = SearchQuery {
            q: Some(String::from("hello world")),
            offset: Some(10),
            limit: 0,
            attributes_to_retrieve: Some(vec!["string".to_owned()].into_iter().collect()),
            attributes_to_crop: None,
            crop_length: 18,
            attributes_to_highlight: None,
            show_matches_position: true,
            filter: None,
            sort: None,
            facets: None,
            highlight_pre_tag: DEFAULT_HIGHLIGHT_PRE_TAG(),
            highlight_post_tag: DEFAULT_HIGHLIGHT_POST_TAG(),
            crop_marker: DEFAULT_CROP_MARKER(),
        };

        // Canned result the mocked index will return.
        let result = SearchResult {
            hits: vec![],
            estimated_total_hits: 29,
            query: "hello world".to_string(),
            limit: 24,
            offset: 0,
            processing_time_ms: 50,
            facet_distribution: None,
        };

        // Meta store mock: resolving `index_uid` yields `index_uuid`.
        let mut uuid_store = MockIndexMetaStore::new();
        uuid_store
            .expect_get()
            .with(eq(index_uid.to_owned()))
            .returning(move |s| {
                Box::pin(ok((
                    s,
                    Some(crate::index_resolver::meta_store::IndexMeta {
                        uuid: index_uuid,
                        creation_task_id: 0,
                    }),
                )))
            });

        // Index store mock: fetching `index_uuid` yields an Index whose
        // `perform_search` must be called exactly once with our query.
        let mut index_store = MockIndexStore::new();
        let result_clone = result.clone();
        let query_clone = query.clone();
        index_store
            .expect_get()
            .with(eq(index_uuid))
            .returning(move |_uuid| {
                let result = result_clone.clone();
                let query = query_clone.clone();
                let mocker = Mocker::default();
                mocker
                    .when::<SearchQuery, IndexResult<SearchResult>>("perform_search")
                    .once()
                    .then(move |q| {
                        assert_eq!(&q, &query);
                        Ok(result.clone())
                    });
                let index = Index::mock(mocker);
                Box::pin(ok(Some(index)))
            });

        // Assemble a controller entirely from mocks.
        let task_store_mocker = nelson::Mocker::default();
        let mocker = Mocker::default();
        let update_file_store = UpdateFileStore::mock(mocker);
        let index_resolver = Arc::new(IndexResolver::new(
            uuid_store,
            index_store,
            update_file_store.clone(),
        ));
        let task_store = TaskStore::mock(task_store_mocker);
        let scheduler = Scheduler::new(
            task_store.clone(),
            vec![index_resolver.clone()],
            SchedulerConfig::default(),
        )
        .unwrap();
        let index_controller =
            IndexController::mock(index_resolver, task_store, update_file_store, scheduler);

        let r = index_controller
            .search(index_uid.to_owned(), query.clone())
            .await
            .unwrap();
        assert_eq!(r, result);
    }
}
|