Merge pull request #441 from meilisearch/issues-0.9.0

Stabilize http endpoint
Clément Renault 2020-02-13 15:57:37 +01:00 committed by GitHub
commit 3845b89a16
66 changed files with 105459 additions and 3016 deletions


@@ -22,4 +22,4 @@ jobs:
       uses: actions-rs/cargo@v1
       with:
         command: test
-        args: --locked
+        args: --locked --release

Cargo.lock (generated): 779 changes. File diff suppressed because it is too large.


@@ -1,21 +0,0 @@
-# This schema has been generated ...
-# The order in which the attributes are declared is important,
-# it specify the attribute xxx...
-identifier = "id"
-
-[attributes.id]
-displayed = true
-
-[attributes.title]
-displayed = true
-indexed = true
-
-[attributes.overview]
-displayed = true
-indexed = true
-
-[attributes.release_date]
-displayed = true
-
-[attributes.poster]
-displayed = true


@@ -0,0 +1,11 @@
+{
+  "identifier": "id",
+  "searchableAttributes": ["title", "overview"],
+  "displayedAttributes": [
+    "id",
+    "title",
+    "overview",
+    "release_date",
+    "poster"
+  ]
+}
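The deleted TOML schema is superseded by the JSON settings file above. As a quick illustration, a minimal sketch of reading such a file with serde; the Settings struct here is a local stand-in whose fields are assumed from the JSON, not the crate's real type:

use serde::Deserialize;

// Local stand-in mirroring the settings JSON above (assumed shape).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Settings {
    identifier: Option<String>,
    searchable_attributes: Option<Vec<String>>,
    displayed_attributes: Option<Vec<String>>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Reads a file like the one introduced in this commit.
    let data = std::fs::read_to_string("settings.json")?;
    let settings: Settings = serde_json::from_str(&data)?;
    println!("{:?}", settings);
    Ok(())
}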


@@ -32,6 +32,7 @@ serde_json = "1.0.41"
 siphasher = "0.3.1"
 slice-group-by = "0.2.6"
 zerocopy = "0.2.8"
+regex = "1.3.1"

 [dev-dependencies]
 assert_matches = "1.3"
@@ -43,7 +44,6 @@ rustyline = { version = "5.0.0", default-features = false }
 structopt = "0.3.2"
 tempfile = "3.1.0"
 termcolor = "1.0.4"
-toml = "0.5.3"

 [[bench]]
 name = "search_benchmark"


@@ -13,7 +13,8 @@ use structopt::StructOpt;
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};

 use meilisearch_core::{Database, Highlight, ProcessedUpdateResult};
-use meilisearch_schema::SchemaAttr;
+use meilisearch_core::settings::Settings;
+use meilisearch_schema::FieldId;

 // #[cfg(target_os = "linux")]
 #[global_allocator]
@@ -32,9 +33,9 @@ struct IndexCommand {
     #[structopt(parse(from_os_str))]
     csv_data_path: PathBuf,

-    /// The path to the schema.
+    /// The path to the settings.
     #[structopt(long, parse(from_os_str))]
-    schema: PathBuf,
+    settings: PathBuf,

     #[structopt(long)]
     update_group_size: Option<usize>,
@@ -119,25 +120,15 @@ fn index_command(command: IndexCommand, database: Database) -> Result<(), Box<dyn Error>> {
     let db = &database;

-    let schema = {
-        let string = fs::read_to_string(&command.schema)?;
-        toml::from_str(&string).unwrap()
+    let settings = {
+        let string = fs::read_to_string(&command.settings)?;
+        let settings: Settings = serde_json::from_str(&string).unwrap();
+        settings.into_update().unwrap()
     };

-    let reader = db.main_read_txn().unwrap();
     let mut update_writer = db.update_write_txn().unwrap();
-    match index.main.schema(&reader)? {
-        Some(current_schema) => {
-            if current_schema != schema {
-                return Err(meilisearch_core::Error::SchemaDiffer.into());
-            }
-            update_writer.abort();
-        }
-        None => {
-            index.schema_update(&mut update_writer, schema)?;
-            update_writer.commit().unwrap();
-        }
-    }
+    index.settings_update(&mut update_writer, settings)?;
+    update_writer.commit().unwrap();

     let mut rdr = if command.csv_data_path.as_os_str() == "-" {
         csv::Reader::from_reader(Box::new(io::stdin()) as Box<dyn Read>)
@@ -368,7 +359,7 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<dyn Error>> {
             };

             let attr = schema
-                .attribute(&filter)
+                .id(filter)
                 .expect("Could not find filtered attribute");

             builder.with_filter(move |document_id| {
@@ -399,11 +390,11 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<dyn Error>> {
             for (name, text) in document.0 {
                 print!("{}: ", name);

-                let attr = schema.attribute(&name).unwrap();
+                let attr = schema.id(&name).unwrap();
                 let highlights = doc
                     .highlights
                     .iter()
-                    .filter(|m| SchemaAttr::new(m.attribute) == attr)
+                    .filter(|m| FieldId::new(m.attribute) == attr)
                     .cloned();
                 let (text, highlights) =
                     crop_text(&text, highlights, command.char_context);
@@ -418,8 +409,8 @@ fn search_command(command: SearchCommand, database: Database) -> Result<(), Box<dyn Error>> {
             let mut matching_attributes = HashSet::new();
             for highlight in doc.highlights {
-                let attr = FieldId::new(highlight.attribute);
-                let name = schema.attribute_name(attr);
+                let attr = FieldId::new(highlight.attribute);
+                let name = schema.name(attr);
                 matching_attributes.insert(name);
             }
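Condensed, the new index_command flow reads the settings JSON and applies it as a regular update; the sketch below reassembles the calls shown in the hunks above into one function (error handling simplified, signature assumed):

use std::fs;
use std::path::Path;

use meilisearch_core::settings::Settings;
use meilisearch_core::{Database, Index};

// Sketch of the new flow: settings come from a JSON file and are applied
// through a settings update, replacing the old TOML schema comparison.
fn apply_settings(db: &Database, index: &Index, path: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let string = fs::read_to_string(path)?;
    let settings: Settings = serde_json::from_str(&string)?;
    let settings = settings.into_update().expect("invalid settings");

    let mut update_writer = db.update_write_txn()?;
    index.settings_update(&mut update_writer, settings)?;
    update_writer.commit()?;
    Ok(())
}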


@@ -14,6 +14,7 @@ use meilisearch_types::DocIndex;
 use sdset::{Set, SetBuf, exponential_search};
 use slice_group_by::{GroupBy, GroupByMut};

+use crate::error::Error;
 use crate::criterion::{Criteria, Context, ContextMut};
 use crate::distinct_map::{BufferedDistinctMap, DistinctMap};
 use crate::raw_document::RawDocument;
@@ -68,8 +69,11 @@ where
         None => return Ok(Vec::new()),
     };

+    let stop_words = main_store.stop_words_fst(reader)?.unwrap_or_default();
+
     let context = QTContext {
         words_set,
+        stop_words,
         synonyms: synonyms_store,
         postings_lists: postings_lists_store,
         prefix_postings_lists: prefix_postings_lists_cache_store,
@@ -161,8 +165,9 @@ where
     debug!("criterion loop took {:.02?}", before_criterion_loop.elapsed());
     debug!("proximity evaluation called {} times", proximity_count.load(Ordering::Relaxed));

+    let schema = main_store.schema(reader)?.ok_or(Error::SchemaMissing)?;
     let iter = raw_documents.into_iter().skip(range.start).take(range.len());
-    let iter = iter.map(|rd| Document::from_raw(rd, &queries_kinds, &arena, searchable_attrs.as_ref()));
+    let iter = iter.map(|rd| Document::from_raw(rd, &queries_kinds, &arena, searchable_attrs.as_ref(), &schema));
     let documents = iter.collect();

     debug!("bucket sort took {:.02?}", before_bucket_sort.elapsed());
@@ -195,8 +200,11 @@ where
         None => return Ok(Vec::new()),
     };

+    let stop_words = main_store.stop_words_fst(reader)?.unwrap_or_default();
+
     let context = QTContext {
         words_set,
+        stop_words,
         synonyms: synonyms_store,
         postings_lists: postings_lists_store,
         prefix_postings_lists: prefix_postings_lists_cache_store,
@@ -330,6 +338,7 @@ where
     // once we classified the documents related to the current
     // automatons we save that as the next valid result
     let mut seen = BufferedDistinctMap::new(&mut distinct_map);
+    let schema = main_store.schema(reader)?.ok_or(Error::SchemaMissing)?;

     let mut documents = Vec::with_capacity(range.len());
     for raw_document in raw_documents.into_iter().skip(distinct_raw_offset) {
@@ -346,7 +355,7 @@ where
         };

         if distinct_accepted && seen.len() > range.start {
-            documents.push(Document::from_raw(raw_document, &queries_kinds, &arena, searchable_attrs.as_ref()));
+            documents.push(Document::from_raw(raw_document, &queries_kinds, &arena, searchable_attrs.as_ref(), &schema));
             if documents.len() == range.len() {
                 break;
             }


@@ -1,6 +1,6 @@
 use std::cmp::{Ordering, Reverse};
 use std::collections::hash_map::{HashMap, Entry};

-use meilisearch_schema::SchemaAttr;
+use meilisearch_schema::IndexedPos;
 use slice_group_by::GroupBy;

 use crate::{RawDocument, MResult};
 use crate::bucket_sort::BareMatch;
@@ -32,7 +32,7 @@ impl Criterion for Exact {
             for bm in group {
                 for di in ctx.postings_lists[bm.postings_list].as_ref() {
-                    let attr = SchemaAttr(di.attribute);
+                    let attr = IndexedPos(di.attribute);
                     let count = match fields_counts.entry(attr) {
                         Entry::Occupied(entry) => *entry.get(),
                         Entry::Vacant(entry) => {
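The loop above caches per-attribute counts with the map entry API; a self-contained illustration of that caching pattern, with plain integers standing in for IndexedPos and the store lookup:

use std::collections::hash_map::{Entry, HashMap};

// Count lookups are cached per attribute: the first access computes the
// value (faked here), later accesses hit the Occupied entry.
fn main() {
    let mut fields_counts: HashMap<u16, u16> = HashMap::new();
    let lookups = [0u16, 1, 0, 2, 1, 0];

    for attr in lookups {
        let count = match fields_counts.entry(attr) {
            Entry::Occupied(entry) => *entry.get(),
            Entry::Vacant(entry) => {
                let fetched = attr * 10 + 1; // stand-in for a store read
                *entry.insert(fetched)
            }
        };
        println!("attr {} -> count {}", attr, count);
    }
}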


@@ -1,7 +1,7 @@
 use std::cmp::Ordering;
 use std::error::Error;
 use std::fmt;

-use meilisearch_schema::{Schema, SchemaAttr};
+use meilisearch_schema::{Schema, FieldId};

 use crate::{RankedMap, RawDocument};
 use super::{Criterion, Context};
@@ -41,7 +41,7 @@ use super::{Criterion, Context};
 /// ```
 pub struct SortByAttr<'a> {
     ranked_map: &'a RankedMap,
-    attr: SchemaAttr,
+    field_id: FieldId,
     reversed: bool,
 }
@@ -68,18 +68,18 @@ impl<'a> SortByAttr<'a> {
         attr_name: &str,
         reversed: bool,
     ) -> Result<SortByAttr<'a>, SortByAttrError> {
-        let attr = match schema.attribute(attr_name) {
-            Some(attr) => attr,
+        let field_id = match schema.id(attr_name) {
+            Some(field_id) => field_id,
             None => return Err(SortByAttrError::AttributeNotFound),
         };

-        if !schema.props(attr).is_ranked() {
+        if !schema.is_ranked(field_id) {
             return Err(SortByAttrError::AttributeNotRegisteredForRanking);
         }

         Ok(SortByAttr {
             ranked_map,
-            attr,
+            field_id,
             reversed,
         })
     }
@@ -91,8 +91,8 @@ impl Criterion for SortByAttr<'_> {
     }

     fn evaluate(&self, _ctx: &Context, lhs: &RawDocument, rhs: &RawDocument) -> Ordering {
-        let lhs = self.ranked_map.get(lhs.id, self.attr);
-        let rhs = self.ranked_map.get(rhs.id, self.attr);
+        let lhs = self.ranked_map.get(lhs.id, self.field_id);
+        let rhs = self.ranked_map.get(rhs.id, self.field_id);

         match (lhs, rhs) {
             (Some(lhs), Some(rhs)) => {
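evaluate compares two optional ranked values, and documents missing the field should sort last either way. A std-only sketch of that comparison shape (the reversed handling is assumed, since the hunk is cut off after the Some/Some arm):

use std::cmp::Ordering;

// Present values compare normally (reversed on demand); missing values
// always sort last.
fn evaluate(lhs: Option<i64>, rhs: Option<i64>, reversed: bool) -> Ordering {
    match (lhs, rhs) {
        (Some(lhs), Some(rhs)) => {
            let order = lhs.cmp(&rhs);
            if reversed { order.reverse() } else { order }
        }
        (Some(_), None) => Ordering::Less,    // lhs ranked, rhs missing
        (None, Some(_)) => Ordering::Greater, // rhs ranked, lhs missing
        (None, None) => Ordering::Equal,
    }
}

fn main() {
    assert_eq!(evaluate(Some(1), Some(2), false), Ordering::Less);
    assert_eq!(evaluate(Some(1), Some(2), true), Ordering::Greater);
    assert_eq!(evaluate(Some(1), None, true), Ordering::Less); // None still last
}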


@@ -357,6 +357,7 @@ mod tests {
     use crate::criterion::{self, CriteriaBuilder};
     use crate::update::{ProcessedUpdateResult, UpdateStatus};
+    use crate::settings::{Settings, SettingsUpdate, UpdateState};
     use crate::{Document, DocumentId};
     use serde::de::IgnoredAny;
     use std::sync::mpsc;
@@ -376,23 +377,31 @@ mod tests {
         database.set_update_callback(Box::new(update_fn));

-        let schema = {
-            let data = r#"
-                identifier = "id"
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-
-                [attributes."description"]
-                displayed = true
-                indexed = true
-            "#;
-            toml::from_str(data).unwrap()
+        let settings_update = SettingsUpdate{
+            identifier: UpdateState::Update("id".to_string()),
+            ..SettingsUpdate::default()
         };

+        let mut writer = db.update_write_txn().unwrap();
+        let update_id = index.settings_update(&mut writer, settings_update).unwrap();
+        writer.commit().unwrap();
+        // block until the transaction is processed
+        let _ = receiver.iter().find(|id| *id == update_id);
+
+        let settings = {
+            let data = r#"
+                {
+                    "searchableAttributes": ["name", "description"],
+                    "displayedAttributes": ["name", "description"]
+                }
+            "#;
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
+        };
+
         let mut update_writer = db.update_write_txn().unwrap();
-        let _update_id = index.schema_update(&mut update_writer, schema).unwrap();
+        let _update_id = index.settings_update(&mut update_writer, settings).unwrap();
         update_writer.commit().unwrap();

         let mut additions = index.documents_addition();
@@ -439,23 +448,31 @@ mod tests {
         database.set_update_callback(Box::new(update_fn));

-        let schema = {
-            let data = r#"
-                identifier = "id"
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-
-                [attributes."description"]
-                displayed = true
-                indexed = true
-            "#;
-            toml::from_str(data).unwrap()
+        let settings_update = SettingsUpdate{
+            identifier: UpdateState::Update("id".to_string()),
+            ..SettingsUpdate::default()
        };

+        let mut writer = db.update_write_txn().unwrap();
+        let update_id = index.settings_update(&mut writer, settings_update).unwrap();
+        writer.commit().unwrap();
+        // block until the transaction is processed
+        let _ = receiver.iter().find(|id| *id == update_id);
+
+        let settings = {
+            let data = r#"
+                {
+                    "searchableAttributes": ["name", "description"],
+                    "displayedAttributes": ["name", "description"]
+                }
+            "#;
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
+        };
+
         let mut update_writer = db.update_write_txn().unwrap();
-        let _update_id = index.schema_update(&mut update_writer, schema).unwrap();
+        let _update_id = index.settings_update(&mut update_writer, settings).unwrap();
         update_writer.commit().unwrap();

         let mut additions = index.documents_addition();
@@ -501,19 +518,31 @@ mod tests {
         database.set_update_callback(Box::new(update_fn));

-        let schema = {
-            let data = r#"
-                identifier = "id"
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-            "#;
-            toml::from_str(data).unwrap()
+        let settings_update = SettingsUpdate{
+            identifier: UpdateState::Update("id".to_string()),
+            ..SettingsUpdate::default()
         };

+        let mut writer = db.update_write_txn().unwrap();
+        let update_id = index.settings_update(&mut writer, settings_update).unwrap();
+        writer.commit().unwrap();
+        // block until the transaction is processed
+        let _ = receiver.iter().find(|id| *id == update_id);
+
+        let settings = {
+            let data = r#"
+                {
+                    "searchableAttributes": ["name"],
+                    "displayedAttributes": ["name"]
+                }
+            "#;
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
+        };
+
         let mut update_writer = db.update_write_txn().unwrap();
-        let _update_id = index.schema_update(&mut update_writer, schema).unwrap();
+        let _update_id = index.settings_update(&mut update_writer, settings).unwrap();
         update_writer.commit().unwrap();

         let mut additions = index.documents_addition();
@@ -552,23 +581,31 @@ mod tests {
         database.set_update_callback(Box::new(update_fn));

-        let schema = {
-            let data = r#"
-                identifier = "id"
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-
-                [attributes."description"]
-                displayed = true
-                indexed = true
-            "#;
-            toml::from_str(data).unwrap()
+        let settings_update = SettingsUpdate{
+            identifier: UpdateState::Update("id".to_string()),
+            ..SettingsUpdate::default()
         };

+        let mut writer = db.update_write_txn().unwrap();
+        let update_id = index.settings_update(&mut writer, settings_update).unwrap();
+        writer.commit().unwrap();
+        // block until the transaction is processed
+        let _ = receiver.iter().find(|id| *id == update_id);
+
+        let settings = {
+            let data = r#"
+                {
+                    "searchableAttributes": ["name", "description"],
+                    "displayedAttributes": ["name", "description"]
+                }
+            "#;
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
+        };
+
         let mut update_writer = db.update_write_txn().unwrap();
-        let _update_id = index.schema_update(&mut update_writer, schema).unwrap();
+        let _update_id = index.settings_update(&mut update_writer, settings).unwrap();
         update_writer.commit().unwrap();

         let mut additions = index.documents_addition();
@@ -592,31 +629,19 @@ mod tests {
         let _update_id = additions.finalize(&mut update_writer).unwrap();
         update_writer.commit().unwrap();

-        let schema = {
+        let settings = {
             let data = r#"
-                identifier = "id"
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-
-                [attributes."description"]
-                displayed = true
-                indexed = true
-
-                [attributes."age"]
-                displayed = true
-                indexed = true
-
-                [attributes."sex"]
-                displayed = true
-                indexed = true
+                {
+                    "searchableAttributes": ["name", "description", "age", "sex"],
+                    "displayedAttributes": ["name", "description", "age", "sex"]
+                }
             "#;
-            toml::from_str(data).unwrap()
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
         };

         let mut writer = db.update_write_txn().unwrap();
-        let update_id = index.schema_update(&mut writer, schema).unwrap();
+        let update_id = index.settings_update(&mut writer, settings).unwrap();
         writer.commit().unwrap();

         // block until the transaction is processed
@@ -670,44 +695,27 @@ mod tests {
         reader.abort();

         // try to introduce attributes in the middle of the schema
-        let schema = {
+        let settings = {
             let data = r#"
-                identifier = "id"
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-
-                [attributes."description"]
-                displayed = true
-                indexed = true
-
-                [attributes."city"]
-                displayed = true
-                indexed = true
-
-                [attributes."age"]
-                displayed = true
-                indexed = true
-
-                [attributes."sex"]
-                displayed = true
-                indexed = true
+                {
+                    "searchableAttributes": ["name", "description", "city", "age", "sex"],
+                    "displayedAttributes": ["name", "description", "city", "age", "sex"]
+                }
             "#;
-            toml::from_str(data).unwrap()
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
         };

         let mut writer = db.update_write_txn().unwrap();
-        let update_id = index.schema_update(&mut writer, schema).unwrap();
+        let update_id = index.settings_update(&mut writer, settings).unwrap();
         writer.commit().unwrap();

         // block until the transaction is processed
         let _ = receiver.iter().find(|id| *id == update_id);

         // check if it has been accepted
         let update_reader = db.update_read_txn().unwrap();
         let result = index.update_status(&update_reader, update_id).unwrap();
-        assert_matches!(result, Some(UpdateStatus::Failed { content }) if content.error.is_some());
+        assert_matches!(result, Some(UpdateStatus::Processed { content }) if content.error.is_none());
     }

     #[test]
@@ -725,23 +733,31 @@ mod tests {
         database.set_update_callback(Box::new(update_fn));

-        let schema = {
-            let data = r#"
-                identifier = "id"
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-
-                [attributes."description"]
-                displayed = true
-                indexed = true
-            "#;
-            toml::from_str(data).unwrap()
+        let settings_update = SettingsUpdate{
+            identifier: UpdateState::Update("id".to_string()),
+            ..SettingsUpdate::default()
         };

         let mut writer = db.update_write_txn().unwrap();
-        let _update_id = index.schema_update(&mut writer, schema).unwrap();
+        let update_id = index.settings_update(&mut writer, settings_update).unwrap();
+        writer.commit().unwrap();
+        // block until the transaction is processed
+        let _ = receiver.iter().find(|id| *id == update_id);
+
+        let settings = {
+            let data = r#"
+                {
+                    "searchableAttributes": ["name", "description"],
+                    "displayedAttributes": ["name", "description"]
+                }
+            "#;
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
+        };
+
+        let mut writer = db.update_write_txn().unwrap();
+        let _update_id = index.settings_update(&mut writer, settings).unwrap();
         writer.commit().unwrap();

         let mut additions = index.documents_addition();
@@ -780,12 +796,12 @@ mod tests {
         assert!(document.is_none());

         let document: Option<IgnoredAny> = index
-            .document(&reader, None, DocumentId(7900334843754999545))
+            .document(&reader, None, DocumentId(7_900_334_843_754_999_545))
             .unwrap();
         assert!(document.is_some());

         let document: Option<IgnoredAny> = index
-            .document(&reader, None, DocumentId(8367468610878465872))
+            .document(&reader, None, DocumentId(8_367_468_610_878_465_872))
             .unwrap();
         assert!(document.is_some());
     }
@@ -805,26 +821,31 @@ mod tests {
         database.set_update_callback(Box::new(update_fn));

-        let schema = {
-            let data = r#"
-                identifier = "id"
-
-                [attributes."id"]
-                displayed = true
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-
-                [attributes."description"]
-                displayed = true
-                indexed = true
-            "#;
-            toml::from_str(data).unwrap()
+        let settings_update = SettingsUpdate{
+            identifier: UpdateState::Update("id".to_string()),
+            ..SettingsUpdate::default()
         };

         let mut writer = db.update_write_txn().unwrap();
-        let _update_id = index.schema_update(&mut writer, schema).unwrap();
+        let update_id = index.settings_update(&mut writer, settings_update).unwrap();
+        writer.commit().unwrap();
+        // block until the transaction is processed
+        let _ = receiver.iter().find(|id| *id == update_id);
+
+        let settings = {
+            let data = r#"
+                {
+                    "searchableAttributes": ["name", "description"],
+                    "displayedAttributes": ["name", "description", "id"]
+                }
+            "#;
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
+        };
+
+        let mut writer = db.update_write_txn().unwrap();
+        let _update_id = index.settings_update(&mut writer, settings).unwrap();
         writer.commit().unwrap();

         let mut additions = index.documents_addition();
@@ -863,12 +884,12 @@ mod tests {
         assert!(document.is_none());

         let document: Option<IgnoredAny> = index
-            .document(&reader, None, DocumentId(7900334843754999545))
+            .document(&reader, None, DocumentId(7_900_334_843_754_999_545))
             .unwrap();
         assert!(document.is_some());

         let document: Option<IgnoredAny> = index
-            .document(&reader, None, DocumentId(8367468610878465872))
+            .document(&reader, None, DocumentId(8_367_468_610_878_465_872))
             .unwrap();
         assert!(document.is_some());
@@ -905,7 +926,7 @@ mod tests {
         let reader = db.main_read_txn().unwrap();

         let document: Option<serde_json::Value> = index
-            .document(&reader, None, DocumentId(7900334843754999545))
+            .document(&reader, None, DocumentId(7_900_334_843_754_999_545))
             .unwrap();

         let new_doc1 = serde_json::json!({
@@ -916,7 +937,7 @@ mod tests {
         assert_eq!(document, Some(new_doc1));

         let document: Option<serde_json::Value> = index
-            .document(&reader, None, DocumentId(8367468610878465872))
+            .document(&reader, None, DocumentId(8_367_468_610_878_465_872))
             .unwrap();

         let new_doc2 = serde_json::json!({
@@ -947,24 +968,31 @@ mod tests {
         database.set_update_callback(Box::new(update_fn));

-        let schema = {
-            let data = r#"
-                identifier = "id"
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-
-                [attributes."description"]
-                displayed = true
-                indexed = true
-            "#;
-            toml::from_str(data).unwrap()
+        let settings_update = SettingsUpdate{
+            identifier: UpdateState::Update("id".to_string()),
+            ..SettingsUpdate::default()
         };

-        // add a schema to the index
         let mut writer = db.update_write_txn().unwrap();
-        let _update_id = index.schema_update(&mut writer, schema).unwrap();
+        let update_id = index.settings_update(&mut writer, settings_update).unwrap();
+        writer.commit().unwrap();
+        // block until the transaction is processed
+        let _ = receiver.iter().find(|id| *id == update_id);
+
+        let settings = {
+            let data = r#"
+                {
+                    "searchableAttributes": ["name", "description"],
+                    "displayedAttributes": ["name", "description"]
+                }
+            "#;
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
+        };
+
+        let mut writer = db.update_write_txn().unwrap();
+        let _update_id = index.settings_update(&mut writer, settings).unwrap();
         writer.commit().unwrap();

         // add documents to the index
@@ -1015,23 +1043,40 @@ mod tests {
         database.set_update_callback(Box::new(update_fn));

-        let schema = {
-            let data = r#"
-                identifier = "id"
-
-                [attributes."name"]
-                displayed = true
-                indexed = true
-
-                [attributes."release_date"]
-                displayed = true
-                ranked = true
-            "#;
-            toml::from_str(data).unwrap()
+        let settings_update = SettingsUpdate{
+            identifier: UpdateState::Update("id".to_string()),
+            ..SettingsUpdate::default()
         };

         let mut writer = db.update_write_txn().unwrap();
-        let _update_id = index.schema_update(&mut writer, schema).unwrap();
+        let update_id = index.settings_update(&mut writer, settings_update).unwrap();
+        writer.commit().unwrap();
+        // block until the transaction is processed
+        let _ = receiver.iter().find(|id| *id == update_id);
+
+        let settings = {
+            let data = r#"
+                {
+                    "rankingRules": [
+                        "_typo",
+                        "_words",
+                        "_proximity",
+                        "_attribute",
+                        "_words_position",
+                        "_exact",
+                        "dsc(release_date)"
+                    ],
+                    "searchableAttributes": ["name", "release_date"],
+                    "displayedAttributes": ["name", "release_date"]
+                }
+            "#;
+            let settings: Settings = serde_json::from_str(data).unwrap();
+            settings.into_update().unwrap()
+        };
+
+        let mut writer = db.update_write_txn().unwrap();
+        let _update_id = index.settings_update(&mut writer, settings).unwrap();
         writer.commit().unwrap();

         let mut additions = index.documents_addition();
@@ -1080,14 +1125,14 @@ mod tests {
         assert_matches!(
             iter.next(),
             Some(Document {
-                id: DocumentId(7900334843754999545),
+                id: DocumentId(7_900_334_843_754_999_545),
                 ..
             })
         );
         assert_matches!(
             iter.next(),
             Some(Document {
-                id: DocumentId(8367468610878465872),
+                id: DocumentId(8_367_468_610_878_465_872),
                 ..
             })
         );
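Every rewritten test follows the same two-step pattern: push a SettingsUpdate that only sets the identifier, wait for it to be processed, then push the JSON settings. A condensed sketch of the pattern, assuming the db, index and update-notification receiver are set up as in the tests (the receiver is assumed to yield update ids):

use std::sync::mpsc::Receiver;

use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState};
use meilisearch_core::{Database, Index};

fn setup_index(db: &Database, index: &Index, receiver: &Receiver<u64>) {
    // Step 1: set the identifier and wait for the update to be processed.
    let settings_update = SettingsUpdate {
        identifier: UpdateState::Update("id".to_string()),
        ..SettingsUpdate::default()
    };
    let mut writer = db.update_write_txn().unwrap();
    let update_id = index.settings_update(&mut writer, settings_update).unwrap();
    writer.commit().unwrap();
    let _ = receiver.iter().find(|id| *id == update_id);

    // Step 2: apply the remaining settings from JSON.
    let data = r#"{ "searchableAttributes": ["name"], "displayedAttributes": ["name"] }"#;
    let settings: Settings = serde_json::from_str(data).unwrap();
    let settings = settings.into_update().unwrap();
    let mut writer = db.update_write_txn().unwrap();
    let _update_id = index.settings_update(&mut writer, settings).unwrap();
    writer.commit().unwrap();
}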


@@ -2,16 +2,22 @@ use crate::serde::{DeserializerError, SerializerError};
 use serde_json::Error as SerdeJsonError;
 use std::{error, fmt, io};

+pub use heed::Error as HeedError;
+pub use fst::Error as FstError;
+pub use bincode::Error as BincodeError;
+
 pub type MResult<T> = Result<T, Error>;

 #[derive(Debug)]
 pub enum Error {
     Io(io::Error),
     IndexAlreadyExists,
-    SchemaDiffer,
+    MissingIdentifier,
     SchemaMissing,
     WordIndexMissing,
     MissingDocumentId,
+    MaxFieldsLimitExceeded,
+    Schema(meilisearch_schema::Error),
     Zlmdb(heed::Error),
     Fst(fst::Error),
     SerdeJson(SerdeJsonError),
@@ -27,14 +33,20 @@ impl From<io::Error> for Error {
     }
 }

-impl From<heed::Error> for Error {
-    fn from(error: heed::Error) -> Error {
+impl From<meilisearch_schema::Error> for Error {
+    fn from(error: meilisearch_schema::Error) -> Error {
+        Error::Schema(error)
+    }
+}
+
+impl From<HeedError> for Error {
+    fn from(error: HeedError) -> Error {
         Error::Zlmdb(error)
     }
 }

-impl From<fst::Error> for Error {
-    fn from(error: fst::Error) -> Error {
+impl From<FstError> for Error {
+    fn from(error: FstError) -> Error {
         Error::Fst(error)
     }
 }
@@ -45,8 +57,8 @@ impl From<SerdeJsonError> for Error {
     }
 }

-impl From<bincode::Error> for Error {
-    fn from(error: bincode::Error) -> Error {
+impl From<BincodeError> for Error {
+    fn from(error: BincodeError) -> Error {
         Error::Bincode(error)
     }
 }
@@ -75,10 +87,12 @@ impl fmt::Display for Error {
         match self {
             Io(e) => write!(f, "{}", e),
             IndexAlreadyExists => write!(f, "index already exists"),
-            SchemaDiffer => write!(f, "schemas differ"),
+            MissingIdentifier => write!(f, "schema cannot be built without identifier"),
             SchemaMissing => write!(f, "this index does not have a schema"),
             WordIndexMissing => write!(f, "this index does not have a word index"),
             MissingDocumentId => write!(f, "document id is missing"),
+            MaxFieldsLimitExceeded => write!(f, "maximum number of fields in a document exceeded"),
+            Schema(e) => write!(f, "schema error; {}", e),
             Zlmdb(e) => write!(f, "heed error; {}", e),
             Fst(e) => write!(f, "fst error; {}", e),
             SerdeJson(e) => write!(f, "serde json error; {}", e),
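Re-exporting the foreign error types and giving each a From impl is what lets callers use the ? operator across error sources. A self-contained miniature of the pattern:

use std::{fmt, io};

// Miniature of the error enum above: each foreign error gets a From impl,
// so `?` converts automatically.
#[derive(Debug)]
enum Error {
    Io(io::Error),
    MissingIdentifier,
}

impl From<io::Error> for Error {
    fn from(error: io::Error) -> Error {
        Error::Io(error)
    }
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Error::Io(e) => write!(f, "{}", e),
            Error::MissingIdentifier => write!(f, "schema cannot be built without identifier"),
        }
    }
}

fn read_settings(path: &str) -> Result<String, Error> {
    let data = std::fs::read_to_string(path)?; // io::Error -> Error::Io via From
    Ok(data)
}

fn main() {
    match read_settings("missing.json") {
        Ok(_) => println!("ok"),
        Err(e) => println!("error: {}", e),
    }
}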


@@ -16,24 +16,27 @@ mod ranked_map;
 mod raw_document;
 mod reordered_attrs;
 mod update;
+pub mod settings;
 pub mod criterion;
 pub mod raw_indexer;
 pub mod serde;
 pub mod store;

 pub use self::database::{BoxUpdateFn, Database, MainT, UpdateT};
-pub use self::error::{Error, MResult};
+pub use self::error::{Error, HeedError, FstError, MResult};
 pub use self::number::{Number, ParseNumberError};
 pub use self::ranked_map::RankedMap;
 pub use self::raw_document::RawDocument;
 pub use self::store::Index;
 pub use self::update::{EnqueuedUpdateResult, ProcessedUpdateResult, UpdateStatus, UpdateType};
 pub use meilisearch_types::{DocIndex, DocumentId, Highlight};
+pub use meilisearch_schema::Schema;
 pub use query_words_mapper::QueryWordsMapper;

 use std::convert::TryFrom;
 use std::collections::HashMap;
 use compact_arena::SmallArena;
+use log::{error, trace};

 use crate::bucket_sort::PostingsListView;
 use crate::levenshtein::prefix_damerau_levenshtein;
@@ -54,6 +57,7 @@ fn highlights_from_raw_document<'a, 'tag, 'txn>(
     queries_kinds: &HashMap<QueryId, &QueryKind>,
     arena: &SmallArena<'tag, PostingsListView<'txn>>,
     searchable_attrs: Option<&ReorderedAttrs>,
+    schema: &Schema,
 ) -> Vec<Highlight>
 {
     let mut highlights = Vec::new();
@@ -80,8 +84,17 @@ fn highlights_from_raw_document<'a, 'tag, 'txn>(
             .and_then(|sa| sa.reverse(di.attribute))
             .unwrap_or(di.attribute);

+        let attribute = match schema.indexed_pos_to_field_id(attribute) {
+            Some(field_id) => field_id.0,
+            None => {
+                error!("Cannot convert indexed_pos {} to field_id", attribute);
+                trace!("Schema is compromized; {:?}", schema);
+                continue
+            }
+        };
+
         let highlight = Highlight {
-            attribute: attribute,
+            attribute,
             char_index: di.char_index,
             char_length: covered_area,
         };
@@ -110,6 +123,7 @@ impl Document {
         queries_kinds: &HashMap<QueryId, &QueryKind>,
         arena: &SmallArena<'tag, PostingsListView<'txn>>,
         searchable_attrs: Option<&ReorderedAttrs>,
+        schema: &Schema,
     ) -> Document
     {
         let highlights = highlights_from_raw_document(
@@ -117,6 +131,7 @@ impl Document {
             queries_kinds,
             arena,
             searchable_attrs,
+            schema,
         );

         Document { id: raw_document.id, highlights }
@@ -128,6 +143,7 @@ impl Document {
         queries_kinds: &HashMap<QueryId, &QueryKind>,
         arena: &SmallArena<'tag, PostingsListView<'txn>>,
         searchable_attrs: Option<&ReorderedAttrs>,
+        schema: &Schema,
     ) -> Document
     {
         use crate::bucket_sort::SimpleMatch;
@@ -137,6 +153,7 @@ impl Document {
             queries_kinds,
             arena,
             searchable_attrs,
+            schema,
         );

         let mut matches = Vec::new();
@@ -145,6 +162,15 @@ impl Document {
             .and_then(|sa| sa.reverse(sm.attribute))
             .unwrap_or(sm.attribute);

+        let attribute = match schema.indexed_pos_to_field_id(attribute) {
+            Some(field_id) => field_id.0,
+            None => {
+                error!("Cannot convert indexed_pos {} to field_id", attribute);
+                trace!("Schema is compromized; {:?}", schema);
+                continue
+            }
+        };
+
         matches.push(SimpleMatch { attribute, ..sm });
     }
     matches.sort_unstable();
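Converting an indexed position back to a field id can fail if the schema is out of sync, so the new code logs and skips the match instead of panicking. A std-only sketch of that skip-on-miss loop, with a HashMap standing in for the schema:

use std::collections::HashMap;

// Positions with no corresponding field are logged and skipped,
// mirroring the `continue` in highlights_from_raw_document above.
fn main() {
    let mut indexed_pos_to_field_id = HashMap::new();
    indexed_pos_to_field_id.insert(0u16, 10u16);
    indexed_pos_to_field_id.insert(1u16, 11u16);

    let matches = [0u16, 1, 7, 0]; // 7 has no field id
    let mut converted = Vec::new();

    for attribute in matches {
        let field_id = match indexed_pos_to_field_id.get(&attribute) {
            Some(field_id) => *field_id,
            None => {
                eprintln!("Cannot convert indexed_pos {} to field_id", attribute);
                continue;
            }
        };
        converted.push(field_id);
    }

    assert_eq!(converted, vec![10, 11, 10]);
}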


@@ -136,7 +136,7 @@ mod tests {
     use std::iter::FromIterator;

     use fst::{IntoStreamer, Set};
-    use meilisearch_schema::SchemaAttr;
+    use meilisearch_schema::IndexedPos;
     use sdset::SetBuf;
     use tempfile::TempDir;
@@ -145,6 +145,7 @@ mod tests {
     use crate::bucket_sort::SimpleMatch;
     use crate::database::Database;
     use crate::store::Index;
+    use meilisearch_schema::Schema;

     fn set_from_stream<'f, I, S>(stream: I) -> Set
     where
@@ -268,18 +269,34 @@ mod tests {
             let mut postings_lists = HashMap::new();
             let mut fields_counts = HashMap::<_, u16>::new();

+            let mut schema = Schema::with_identifier("id");
+
             for (word, indexes) in iter {
+                let mut final_indexes = Vec::new();
+                for index in indexes {
+                    let name = index.attribute.to_string();
+                    schema.insert(&name).unwrap();
+                    let indexed_pos = schema.set_indexed(&name).unwrap().1;
+                    let index = DocIndex {
+                        attribute: indexed_pos.0,
+                        ..*index
+                    };
+                    final_indexes.push(index);
+                }
+
                 let word = word.to_lowercase().into_bytes();
                 words_fst.insert(word.clone());
                 postings_lists
                     .entry(word)
                     .or_insert_with(Vec::new)
-                    .extend_from_slice(indexes);
-                for idx in indexes {
+                    .extend_from_slice(&final_indexes);
+                for idx in final_indexes {
                     fields_counts.insert((idx.document_id, idx.attribute, idx.word_index), 1);
                 }
             }

+            index.main.put_schema(&mut writer, &schema).unwrap();
+
             let words_fst = Set::from_iter(words_fst).unwrap();

             index.main.put_words_fst(&mut writer, &words_fst).unwrap();
@@ -295,14 +312,14 @@ mod tests {
             for ((docid, attr, _), count) in fields_counts {
                 let prev = index
                     .documents_fields_counts
-                    .document_field_count(&mut writer, docid, SchemaAttr(attr))
+                    .document_field_count(&writer, docid, IndexedPos(attr))
                     .unwrap();

                 let prev = prev.unwrap_or(0);

                 index
                     .documents_fields_counts
-                    .put_document_field_count(&mut writer, docid, SchemaAttr(attr), prev + count)
+                    .put_document_field_count(&mut writer, docid, IndexedPos(attr), prev + count)
                     .unwrap();
             }


@@ -114,6 +114,7 @@ pub struct PostingsList {

 pub struct Context {
     pub words_set: fst::Set,
+    pub stop_words: fst::Set,
     pub synonyms: store::Synonyms,
     pub postings_lists: store::PostingsLists,
     pub prefix_postings_lists: store::PrefixPostingsListsCache,
@@ -180,7 +181,8 @@ pub fn create_query_tree(
 ) -> MResult<(Operation, HashMap<QueryId, Range<usize>>)>
 {
     let words = split_query_string(query).map(str::to_lowercase);
-    let words: Vec<_> = words.into_iter().enumerate().collect();
+    let words = words.filter(|w| !ctx.stop_words.contains(w));
+    let words: Vec<_> = words.enumerate().collect();

     let mut mapper = QueryWordsMapper::new(words.iter().map(|(_, w)| w));
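Stop words are now dropped before positions are assigned, so the remaining words are enumerated from zero. A std-only sketch, with a HashSet standing in for the fst::Set:

use std::collections::HashSet;

// Filter stop words first, then enumerate: "the" and "of" never receive
// a position, mirroring the create_query_tree change above.
fn main() {
    let stop_words: HashSet<&str> = ["the", "of"].into_iter().collect();

    let query = "the city of lights";
    let words: Vec<(usize, String)> = query
        .split_whitespace()
        .map(str::to_lowercase)
        .filter(|w| !stop_words.contains(w.as_str()))
        .enumerate()
        .collect();

    assert_eq!(words, vec![(0, "city".to_string()), (1, "lights".to_string())]);
}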


@@ -1,14 +1,14 @@
 use std::io::{Read, Write};

 use hashbrown::HashMap;
-use meilisearch_schema::SchemaAttr;
+use meilisearch_schema::FieldId;
 use serde::{Deserialize, Serialize};

 use crate::{DocumentId, Number};

 #[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
 #[serde(transparent)]
-pub struct RankedMap(HashMap<(DocumentId, SchemaAttr), Number>);
+pub struct RankedMap(HashMap<(DocumentId, FieldId), Number>);

 impl RankedMap {
     pub fn len(&self) -> usize {
@@ -19,16 +19,16 @@ impl RankedMap {
         self.0.is_empty()
     }

-    pub fn insert(&mut self, document: DocumentId, attribute: SchemaAttr, number: Number) {
-        self.0.insert((document, attribute), number);
+    pub fn insert(&mut self, document: DocumentId, field: FieldId, number: Number) {
+        self.0.insert((document, field), number);
     }

-    pub fn remove(&mut self, document: DocumentId, attribute: SchemaAttr) {
-        self.0.remove(&(document, attribute));
+    pub fn remove(&mut self, document: DocumentId, field: FieldId) {
+        self.0.remove(&(document, field));
     }

-    pub fn get(&self, document: DocumentId, attribute: SchemaAttr) -> Option<Number> {
-        self.0.get(&(document, attribute)).cloned()
+    pub fn get(&self, document: DocumentId, field: FieldId) -> Option<Number> {
+        self.0.get(&(document, field)).cloned()
     }

     pub fn read_from_bin<R: Read>(reader: R) -> bincode::Result<RankedMap> {
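RankedMap stays a thin wrapper over a map keyed by (document, field); only the key type changed. A std-only miniature with simple newtypes standing in for DocumentId, FieldId and Number:

use std::collections::HashMap;

// Stand-ins for the crate's DocumentId, FieldId and Number types.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct DocumentId(u64);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct FieldId(u16);

#[derive(Default)]
struct RankedMap(HashMap<(DocumentId, FieldId), i64>);

impl RankedMap {
    fn insert(&mut self, document: DocumentId, field: FieldId, number: i64) {
        self.0.insert((document, field), number);
    }

    fn get(&self, document: DocumentId, field: FieldId) -> Option<i64> {
        self.0.get(&(document, field)).cloned()
    }
}

fn main() {
    let mut map = RankedMap::default();
    map.insert(DocumentId(1), FieldId(0), 2001);
    assert_eq!(map.get(DocumentId(1), FieldId(0)), Some(2001));
    assert_eq!(map.get(DocumentId(2), FieldId(0)), None);
}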


@@ -3,7 +3,7 @@ use std::convert::TryFrom;

 use crate::{DocIndex, DocumentId};
 use deunicode::deunicode_with_tofu;
-use meilisearch_schema::SchemaAttr;
+use meilisearch_schema::IndexedPos;
 use meilisearch_tokenizer::{is_cjk, SeqTokenizer, Token, Tokenizer};
 use sdset::SetBuf;
@@ -37,14 +37,14 @@ impl RawIndexer {
         }
     }

-    pub fn index_text(&mut self, id: DocumentId, attr: SchemaAttr, text: &str) -> usize {
+    pub fn index_text(&mut self, id: DocumentId, indexed_pos: IndexedPos, text: &str) -> usize {
         let mut number_of_words = 0;

         for token in Tokenizer::new(text) {
             let must_continue = index_token(
                 token,
                 id,
-                attr,
+                indexed_pos,
                 self.word_limit,
                 &self.stop_words,
                 &mut self.words_doc_indexes,
@@ -61,7 +61,7 @@ impl RawIndexer {
         number_of_words
     }

-    pub fn index_text_seq<'a, I>(&mut self, id: DocumentId, attr: SchemaAttr, iter: I)
+    pub fn index_text_seq<'a, I>(&mut self, id: DocumentId, indexed_pos: IndexedPos, iter: I)
     where
         I: IntoIterator<Item = &'a str>,
     {
@@ -70,7 +70,7 @@ impl RawIndexer {
             let must_continue = index_token(
                 token,
                 id,
-                attr,
+                indexed_pos,
                 self.word_limit,
                 &self.stop_words,
                 &mut self.words_doc_indexes,
@@ -110,7 +110,7 @@ impl RawIndexer {
 fn index_token(
     token: Token,
     id: DocumentId,
-    attr: SchemaAttr,
+    indexed_pos: IndexedPos,
     word_limit: usize,
     stop_words: &fst::Set,
     words_doc_indexes: &mut BTreeMap<Word, Vec<DocIndex>>,
@@ -127,7 +127,7 @@ fn index_token(
     };

     if !stop_words.contains(&token.word) {
-        match token_to_docindex(id, attr, token) {
+        match token_to_docindex(id, indexed_pos, token) {
             Some(docindex) => {
                 let word = Vec::from(token.word);
@@ -160,14 +160,14 @@ fn index_token(
     true
 }

-fn token_to_docindex(id: DocumentId, attr: SchemaAttr, token: Token) -> Option<DocIndex> {
+fn token_to_docindex(id: DocumentId, indexed_pos: IndexedPos, token: Token) -> Option<DocIndex> {
     let word_index = u16::try_from(token.word_index).ok()?;
     let char_index = u16::try_from(token.char_index).ok()?;
     let char_length = u16::try_from(token.word.chars().count()).ok()?;

     let docindex = DocIndex {
         document_id: id,
-        attribute: attr.0,
+        attribute: indexed_pos.0,
         word_index,
         char_index,
         char_length,
@@ -179,15 +179,16 @@ fn token_to_docindex(id: DocumentId, indexed_pos: IndexedPos, token: Token) -> Option<DocIndex> {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use meilisearch_schema::IndexedPos;

     #[test]
     fn strange_apostrophe() {
         let mut indexer = RawIndexer::new(fst::Set::default());

         let docid = DocumentId(0);
-        let attr = SchemaAttr(0);
+        let indexed_pos = IndexedPos(0);
         let text = "Zut, l’aspirateur, j’ai oublié de l’éteindre !";

-        indexer.index_text(docid, attr, text);
+        indexer.index_text(docid, indexed_pos, text);

         let Indexed {
             words_doc_indexes, ..
@@ -207,9 +208,9 @@ mod tests {
         let mut indexer = RawIndexer::new(fst::Set::default());

         let docid = DocumentId(0);
-        let attr = SchemaAttr(0);
+        let indexed_pos = IndexedPos(0);
         let text = vec!["Zut, l’aspirateur, j’ai oublié de l’éteindre !"];

-        indexer.index_text_seq(docid, attr, text);
+        indexer.index_text_seq(docid, indexed_pos, text);

         let Indexed {
             words_doc_indexes, ..
@@ -232,9 +233,9 @@ mod tests {
         let mut indexer = RawIndexer::new(stop_words);

         let docid = DocumentId(0);
-        let attr = SchemaAttr(0);
+        let indexed_pos = IndexedPos(0);
         let text = "Zut, l’aspirateur, j’ai oublié de l’éteindre !";

-        indexer.index_text(docid, attr, text);
+        indexer.index_text(docid, indexed_pos, text);

         let Indexed {
             words_doc_indexes, ..
@@ -256,9 +257,9 @@ mod tests {
         let mut indexer = RawIndexer::new(fst::Set::default());

         let docid = DocumentId(0);
-        let attr = SchemaAttr(0);
+        let indexed_pos = IndexedPos(0);
         let text = "🇯🇵";

-        indexer.index_text(docid, attr, text);
+        indexer.index_text(docid, indexed_pos, text);

         let Indexed {
             words_doc_indexes, ..


@@ -2,7 +2,7 @@ use std::collections::HashSet;
 use std::io::Cursor;
 use std::{error::Error, fmt};

-use meilisearch_schema::{Schema, SchemaAttr};
+use meilisearch_schema::{Schema, FieldId};
 use serde::{de, forward_to_deserialize_any};
 use serde_json::de::IoRead as SerdeJsonIoRead;
 use serde_json::Deserializer as SerdeJsonDeserializer;
@@ -54,7 +54,7 @@ pub struct Deserializer<'a> {
     pub reader: &'a heed::RoTxn<MainT>,
     pub documents_fields: DocumentsFields,
     pub schema: &'a Schema,
-    pub attributes: Option<&'a HashSet<SchemaAttr>>,
+    pub fields: Option<&'a HashSet<FieldId>>,
 }

 impl<'de, 'a, 'b> de::Deserializer<'de> for &'b mut Deserializer<'a> {
@@ -92,15 +92,17 @@ impl<'de, 'a, 'b> de::Deserializer<'de> for &'b mut Deserializer<'a> {
                 }
             };

-            let is_displayed = self.schema.props(attr).is_displayed();
-            if is_displayed && self.attributes.map_or(true, |f| f.contains(&attr)) {
-                let attribute_name = self.schema.attribute_name(attr);
+            let is_displayed = self.schema.is_displayed(attr);
+            if is_displayed && self.fields.map_or(true, |f| f.contains(&attr)) {
+                if let Some(attribute_name) = self.schema.name(attr) {
+                    let cursor = Cursor::new(value.to_owned());
+                    let ioread = SerdeJsonIoRead::new(cursor);
+                    let value = Value(SerdeJsonDeserializer::new(ioread));

-                let cursor = Cursor::new(value.to_owned());
-                let ioread = SerdeJsonIoRead::new(cursor);
-                let value = Value(SerdeJsonDeserializer::new(ioread));
-
-                Some((attribute_name, value))
+                    Some((attribute_name, value))
+                } else {
+                    None
+                }
             } else {
                 None
             }


@@ -2,7 +2,7 @@ use std::hash::{Hash, Hasher};

 use crate::DocumentId;
 use serde::{ser, Serialize};
-use serde_json::Value;
+use serde_json::{Value, Number};
 use siphasher::sip::SipHasher;

 use super::{ConvertToString, SerializerError};
@@ -18,12 +18,27 @@ where
     document.serialize(serializer)
 }

+fn validate_number(value: &Number) -> Option<String> {
+    if value.is_f64() {
+        return None
+    }
+    Some(value.to_string())
+}
+
+fn validate_string(value: &str) -> Option<String> {
+    if value.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') {
+        Some(value.to_string())
+    } else {
+        None
+    }
+}
+
 pub fn value_to_string(value: &Value) -> Option<String> {
     match value {
         Value::Null => None,
         Value::Bool(_) => None,
-        Value::Number(value) => Some(value.to_string()),
-        Value::String(value) => Some(value.to_string()),
+        Value::Number(value) => validate_number(value),
+        Value::String(value) => validate_string(value),
         Value::Array(_) => None,
         Value::Object(_) => None,
     }
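A standalone sketch mirroring the new id-validation rules: integer ids pass through, floats are rejected, and string ids must contain only alphanumeric characters, hyphens and underscores:

use serde_json::{json, Value};

// Mirrors validate_number/validate_string from the hunk above.
fn value_to_string(value: &Value) -> Option<String> {
    match value {
        Value::Number(n) if !n.is_f64() => Some(n.to_string()),
        Value::String(s)
            if s.chars().all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_') =>
        {
            Some(s.to_string())
        }
        _ => None,
    }
}

fn main() {
    assert_eq!(value_to_string(&json!(42)), Some("42".to_string()));
    assert_eq!(value_to_string(&json!(4.2)), None); // floats rejected
    assert_eq!(value_to_string(&json!("doc_1-a")), Some("doc_1-a".to_string()));
    assert_eq!(value_to_string(&json!("doc 1")), None); // space rejected
}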


@@ -1,4 +1,4 @@
-use meilisearch_schema::SchemaAttr;
+use meilisearch_schema::IndexedPos;
 use serde::ser;
 use serde::Serialize;
@@ -7,7 +7,7 @@ use crate::raw_indexer::RawIndexer;
 use crate::DocumentId;

 pub struct Indexer<'a> {
-    pub attribute: SchemaAttr,
+    pub pos: IndexedPos,
     pub indexer: &'a mut RawIndexer,
     pub document_id: DocumentId,
 }
@@ -85,7 +85,7 @@ impl<'a> ser::Serializer for Indexer<'a> {
     fn serialize_str(self, text: &str) -> Result<Self::Ok, Self::Error> {
         let number_of_words = self
             .indexer
-            .index_text(self.document_id, self.attribute, text);
+            .index_text(self.document_id, self.pos, text);
         Ok(Some(number_of_words))
     }
@@ -104,7 +104,7 @@ impl<'a> ser::Serializer for Indexer<'a> {
         let text = value.serialize(ConvertToString)?;
         let number_of_words = self
             .indexer
-            .index_text(self.document_id, self.attribute, &text);
+            .index_text(self.document_id, self.pos, &text);
         Ok(Some(number_of_words))
     }
@@ -153,7 +153,7 @@ impl<'a> ser::Serializer for Indexer<'a> {
     fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
         let indexer = SeqIndexer {
-            attribute: self.attribute,
+            pos: self.pos,
             document_id: self.document_id,
             indexer: self.indexer,
             texts: Vec::new(),
@@ -164,7 +164,7 @@ impl<'a> ser::Serializer for Indexer<'a> {
     fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
         let indexer = TupleIndexer {
-            attribute: self.attribute,
+            pos: self.pos,
             document_id: self.document_id,
             indexer: self.indexer,
             texts: Vec::new(),
@@ -197,7 +197,7 @@ impl<'a> ser::Serializer for Indexer<'a> {
     fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
         let indexer = MapIndexer {
-            attribute: self.attribute,
+            pos: self.pos,
             document_id: self.document_id,
             indexer: self.indexer,
             texts: Vec::new(),
@@ -212,7 +212,7 @@ impl<'a> ser::Serializer for Indexer<'a> {
         _len: usize,
     ) -> Result<Self::SerializeStruct, Self::Error> {
         let indexer = StructIndexer {
-            attribute: self.attribute,
+            pos: self.pos,
             document_id: self.document_id,
             indexer: self.indexer,
             texts: Vec::new(),
@@ -235,7 +235,7 @@ impl<'a> ser::Serializer for Indexer<'a> {
 }

 pub struct SeqIndexer<'a> {
-    attribute: SchemaAttr,
+    pos: IndexedPos,
     document_id: DocumentId,
     indexer: &'a mut RawIndexer,
     texts: Vec<String>,
@@ -257,13 +257,13 @@ impl<'a> ser::SerializeSeq for SeqIndexer<'a> {
     fn end(self) -> Result<Self::Ok, Self::Error> {
         let texts = self.texts.iter().map(String::as_str);
         self.indexer
-            .index_text_seq(self.document_id, self.attribute, texts);
+            .index_text_seq(self.document_id, self.pos, texts);
         Ok(None)
     }
 }

 pub struct MapIndexer<'a> {
-    attribute: SchemaAttr,
+    pos: IndexedPos,
     document_id: DocumentId,
     indexer: &'a mut RawIndexer,
     texts: Vec<String>,
@@ -294,13 +294,13 @@ impl<'a> ser::SerializeMap for MapIndexer<'a> {
     fn end(self) -> Result<Self::Ok, Self::Error> {
         let texts = self.texts.iter().map(String::as_str);
         self.indexer
-            .index_text_seq(self.document_id, self.attribute, texts);
+            .index_text_seq(self.document_id, self.pos, texts);
         Ok(None)
     }
 }

 pub struct StructIndexer<'a> {
-    attribute: SchemaAttr,
+    pos: IndexedPos,
     document_id: DocumentId,
     indexer: &'a mut RawIndexer,
     texts: Vec<String>,
@@ -328,13 +328,13 @@ impl<'a> ser::SerializeStruct for StructIndexer<'a> {
     fn end(self) -> Result<Self::Ok, Self::Error> {
         let texts = self.texts.iter().map(String::as_str);
         self.indexer
-            .index_text_seq(self.document_id, self.attribute, texts);
+            .index_text_seq(self.document_id, self.pos, texts);
         Ok(None)
     }
 }

 pub struct TupleIndexer<'a> {
-    attribute: SchemaAttr,
+    pos: IndexedPos,
     document_id: DocumentId,
     indexer: &'a mut RawIndexer,
     texts: Vec<String>,
@@ -356,7 +356,7 @@ impl<'a> ser::SerializeTuple for TupleIndexer<'a> {
     fn end(self) -> Result<Self::Ok, Self::Error> {
         let texts = self.texts.iter().map(String::as_str);
         self.indexer
-            .index_text_seq(self.document_id, self.attribute, texts);
+            .index_text_seq(self.document_id, self.pos, texts);
         Ok(None)
     }
 }


@ -20,12 +20,13 @@ pub use self::convert_to_string::ConvertToString;
pub use self::deserializer::{Deserializer, DeserializerError}; pub use self::deserializer::{Deserializer, DeserializerError};
pub use self::extract_document_id::{compute_document_id, extract_document_id, value_to_string}; pub use self::extract_document_id::{compute_document_id, extract_document_id, value_to_string};
pub use self::indexer::Indexer; pub use self::indexer::Indexer;
pub use self::serializer::{serialize_value, Serializer}; pub use self::serializer::{serialize_value, serialize_value_with_id, Serializer};
use std::{error::Error, fmt}; use std::{error::Error, fmt};
use serde::ser; use serde::ser;
use serde_json::Error as SerdeJsonError; use serde_json::Error as SerdeJsonError;
use meilisearch_schema::Error as SchemaError;
use crate::ParseNumberError; use crate::ParseNumberError;
@ -36,6 +37,7 @@ pub enum SerializerError {
Zlmdb(heed::Error), Zlmdb(heed::Error),
SerdeJson(SerdeJsonError), SerdeJson(SerdeJsonError),
ParseNumber(ParseNumberError), ParseNumber(ParseNumberError),
Schema(SchemaError),
UnserializableType { type_name: &'static str }, UnserializableType { type_name: &'static str },
UnindexableType { type_name: &'static str }, UnindexableType { type_name: &'static str },
UnrankableType { type_name: &'static str }, UnrankableType { type_name: &'static str },
@ -55,13 +57,14 @@ impl fmt::Display for SerializerError {
f.write_str("serialized document does not have an id according to the schema") f.write_str("serialized document does not have an id according to the schema")
} }
SerializerError::InvalidDocumentIdType => { SerializerError::InvalidDocumentIdType => {
f.write_str("document identifier can only be of type string or number") f.write_str("documents identifiers can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).")
} }
SerializerError::Zlmdb(e) => write!(f, "heed related error: {}", e), SerializerError::Zlmdb(e) => write!(f, "heed related error: {}", e),
SerializerError::SerdeJson(e) => write!(f, "serde json error: {}", e), SerializerError::SerdeJson(e) => write!(f, "serde json error: {}", e),
SerializerError::ParseNumber(e) => { SerializerError::ParseNumber(e) => {
write!(f, "error while trying to parse a number: {}", e) write!(f, "error while trying to parse a number: {}", e)
} }
SerializerError::Schema(e) => write!(f, "failed to update schema: {}", e),
SerializerError::UnserializableType { type_name } => { SerializerError::UnserializableType { type_name } => {
write!(f, "{} is not a serializable type", type_name) write!(f, "{} is not a serializable type", type_name)
} }
@ -101,3 +104,9 @@ impl From<ParseNumberError> for SerializerError {
SerializerError::ParseNumber(error) SerializerError::ParseNumber(error)
} }
} }
impl From<SchemaError> for SerializerError {
fn from(error: SchemaError) -> SerializerError {
SerializerError::Schema(error)
}
}
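
The reworded InvalidDocumentIdType message above encodes the accepted shape of string identifiers. A self-contained check mirroring that constraint (illustrative only; the crate's actual validation may differ):

// mirrors the constraint stated by the error message above
fn is_valid_document_id(id: &str) -> bool {
    !id.is_empty()
        && id
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
}

fn main() {
    assert!(is_valid_document_id("movie_1337"));
    assert!(!is_valid_document_id("héllo"));
    assert!(!is_valid_document_id("two words"));
}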

View File

@ -1,4 +1,4 @@
use meilisearch_schema::{Schema, SchemaAttr, SchemaProps}; use meilisearch_schema::{Schema, FieldId};
use serde::ser; use serde::ser;
use crate::database::MainT; use crate::database::MainT;
@ -10,7 +10,7 @@ use super::{ConvertToNumber, ConvertToString, Indexer, SerializerError};
pub struct Serializer<'a, 'b> { pub struct Serializer<'a, 'b> {
pub txn: &'a mut heed::RwTxn<'b, MainT>, pub txn: &'a mut heed::RwTxn<'b, MainT>,
pub schema: &'a Schema, pub schema: &'a mut Schema,
pub document_store: DocumentsFields, pub document_store: DocumentsFields,
pub document_fields_counts: DocumentsFieldsCounts, pub document_fields_counts: DocumentsFieldsCounts,
pub indexer: &'a mut RawIndexer, pub indexer: &'a mut RawIndexer,
@ -193,7 +193,7 @@ impl<'a, 'b> ser::Serializer for Serializer<'a, 'b> {
pub struct MapSerializer<'a, 'b> { pub struct MapSerializer<'a, 'b> {
txn: &'a mut heed::RwTxn<'b, MainT>, txn: &'a mut heed::RwTxn<'b, MainT>,
schema: &'a Schema, schema: &'a mut Schema,
document_id: DocumentId, document_id: DocumentId,
document_store: DocumentsFields, document_store: DocumentsFields,
document_fields_counts: DocumentsFieldsCounts, document_fields_counts: DocumentsFieldsCounts,
@ -233,20 +233,17 @@ impl<'a, 'b> ser::SerializeMap for MapSerializer<'a, 'b> {
V: ser::Serialize, V: ser::Serialize,
{ {
let key = key.serialize(ConvertToString)?; let key = key.serialize(ConvertToString)?;
match self.schema.attribute(&key) { serialize_value(
Some(attribute) => serialize_value( self.txn,
self.txn, key.as_str(),
attribute, self.schema,
self.schema.props(attribute), self.document_id,
self.document_id, self.document_store,
self.document_store, self.document_fields_counts,
self.document_fields_counts, self.indexer,
self.indexer, self.ranked_map,
self.ranked_map, value,
value, )
),
None => Ok(()),
}
} }
fn end(self) -> Result<Self::Ok, Self::Error> { fn end(self) -> Result<Self::Ok, Self::Error> {
@ -256,7 +253,7 @@ impl<'a, 'b> ser::SerializeMap for MapSerializer<'a, 'b> {
pub struct StructSerializer<'a, 'b> { pub struct StructSerializer<'a, 'b> {
txn: &'a mut heed::RwTxn<'b, MainT>, txn: &'a mut heed::RwTxn<'b, MainT>,
schema: &'a Schema, schema: &'a mut Schema,
document_id: DocumentId, document_id: DocumentId,
document_store: DocumentsFields, document_store: DocumentsFields,
document_fields_counts: DocumentsFieldsCounts, document_fields_counts: DocumentsFieldsCounts,
@ -276,20 +273,17 @@ impl<'a, 'b> ser::SerializeStruct for StructSerializer<'a, 'b> {
where where
T: ser::Serialize, T: ser::Serialize,
{ {
match self.schema.attribute(key) { serialize_value(
Some(attribute) => serialize_value( self.txn,
self.txn, key,
attribute, self.schema,
self.schema.props(attribute), self.document_id,
self.document_id, self.document_store,
self.document_store, self.document_fields_counts,
self.document_fields_counts, self.indexer,
self.indexer, self.ranked_map,
self.ranked_map, value,
value, )
),
None => Ok(()),
}
} }
fn end(self) -> Result<Self::Ok, Self::Error> { fn end(self) -> Result<Self::Ok, Self::Error> {
@ -297,10 +291,38 @@ impl<'a, 'b> ser::SerializeStruct for StructSerializer<'a, 'b> {
} }
} }
pub fn serialize_value<T: ?Sized>( pub fn serialize_value<'a, T: ?Sized>(
txn: &mut heed::RwTxn<MainT>, txn: &mut heed::RwTxn<MainT>,
attribute: SchemaAttr, attribute: &str,
props: SchemaProps, schema: &'a mut Schema,
document_id: DocumentId,
document_store: DocumentsFields,
documents_fields_counts: DocumentsFieldsCounts,
indexer: &mut RawIndexer,
ranked_map: &mut RankedMap,
value: &T,
) -> Result<(), SerializerError>
where
T: ser::Serialize,
{
let field_id = schema.insert_and_index(&attribute)?;
serialize_value_with_id(
txn,
field_id,
schema,
document_id,
document_store,
documents_fields_counts,
indexer,
ranked_map,
value,
)
}
pub fn serialize_value_with_id<'a, T: ?Sized>(
txn: &mut heed::RwTxn<MainT>,
field_id: FieldId,
schema: &'a Schema,
document_id: DocumentId, document_id: DocumentId,
document_store: DocumentsFields, document_store: DocumentsFields,
documents_fields_counts: DocumentsFieldsCounts, documents_fields_counts: DocumentsFieldsCounts,
@ -312,11 +334,11 @@ where
T: ser::Serialize, T: ser::Serialize,
{ {
let serialized = serde_json::to_vec(value)?; let serialized = serde_json::to_vec(value)?;
document_store.put_document_field(txn, document_id, attribute, &serialized)?; document_store.put_document_field(txn, document_id, field_id, &serialized)?;
if props.is_indexed() { if let Some(indexed_pos) = schema.is_indexed(field_id) {
let indexer = Indexer { let indexer = Indexer {
attribute, pos: *indexed_pos,
indexer, indexer,
document_id, document_id,
}; };
@ -324,15 +346,15 @@ where
documents_fields_counts.put_document_field_count( documents_fields_counts.put_document_field_count(
txn, txn,
document_id, document_id,
attribute, *indexed_pos,
number_of_words as u16, number_of_words as u16,
)?; )?;
} }
} }
if props.is_ranked() { if schema.is_ranked(field_id) {
let number = value.serialize(ConvertToNumber)?; let number = value.serialize(ConvertToNumber)?;
ranked_map.insert(document_id, attribute, number); ranked_map.insert(document_id, field_id, number);
} }
Ok(()) Ok(())
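
The key behavioral change in this serializer: fields missing from the schema are no longer silently skipped; serialize_value registers them on the fly through Schema::insert_and_index and only then delegates to serialize_value_with_id. A minimal sketch of that insert-or-reuse idea (HashMap-based stand-in; the real Schema also tracks indexed, displayed and ranked state):

use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq)]
struct FieldId(u16);

#[derive(Default)]
struct MiniSchema {
    fields: HashMap<String, FieldId>,
}

impl MiniSchema {
    // return the existing id, or register the field and hand out a fresh one
    fn insert_and_index(&mut self, name: &str) -> FieldId {
        if let Some(id) = self.fields.get(name) {
            return *id;
        }
        let id = FieldId(self.fields.len() as u16);
        self.fields.insert(name.to_string(), id);
        id
    }
}

fn main() {
    let mut schema = MiniSchema::default();
    let title = schema.insert_and_index("title");
    let overview = schema.insert_and_index("overview");
    assert_ne!(title, overview);
    assert_eq!(schema.insert_and_index("title"), title); // idempotent
}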

View File

@ -0,0 +1,180 @@
use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::str::FromStr;
use std::iter::IntoIterator;
use serde::{Deserialize, Deserializer, Serialize};
use once_cell::sync::Lazy;
static RANKING_RULE_REGEX: Lazy<regex::Regex> =
Lazy::new(|| regex::Regex::new(r"(asc|dsc)\(([a-zA-Z0-9-_]*)\)").unwrap());
#[derive(Default, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Settings {
#[serde(default, deserialize_with = "deserialize_some")]
pub ranking_rules: Option<Option<Vec<String>>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub ranking_distinct: Option<Option<String>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub searchable_attributes: Option<Option<Vec<String>>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub displayed_attributes: Option<Option<HashSet<String>>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub stop_words: Option<Option<BTreeSet<String>>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
#[serde(default, deserialize_with = "deserialize_some")]
pub index_new_fields: Option<Option<bool>>,
}
// Any value that is present is deserialized to Some, including null;
// this distinguishes a field explicitly set to null from an absent field.
fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where T: Deserialize<'de>,
D: Deserializer<'de>
{
Deserialize::deserialize(deserializer).map(Some)
}
impl Settings {
pub fn into_update(&self) -> Result<SettingsUpdate, RankingRuleConversionError> {
let settings = self.clone();
let ranking_rules = match settings.ranking_rules {
Some(Some(rules)) => UpdateState::Update(RankingRule::from_iter(rules.iter())?),
Some(None) => UpdateState::Clear,
None => UpdateState::Nothing,
};
Ok(SettingsUpdate {
ranking_rules,
ranking_distinct: settings.ranking_distinct.into(),
identifier: UpdateState::Nothing,
searchable_attributes: settings.searchable_attributes.into(),
displayed_attributes: settings.displayed_attributes.into(),
stop_words: settings.stop_words.into(),
synonyms: settings.synonyms.into(),
index_new_fields: settings.index_new_fields.into(),
})
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UpdateState<T> {
Update(T),
Clear,
Nothing,
}
impl<T> From<Option<Option<T>>> for UpdateState<T> {
fn from(opt: Option<Option<T>>) -> UpdateState<T> {
match opt {
Some(Some(t)) => UpdateState::Update(t),
Some(None) => UpdateState::Clear,
None => UpdateState::Nothing,
}
}
}
#[derive(Debug, Clone)]
pub struct RankingRuleConversionError;
impl std::fmt::Display for RankingRuleConversionError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(f, "impossible to convert into RankingRule")
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RankingRule {
Typo,
Words,
Proximity,
Attribute,
WordsPosition,
Exact,
Asc(String),
Dsc(String),
}
impl ToString for RankingRule {
fn to_string(&self) -> String {
match self {
RankingRule::Typo => "_typo".to_string(),
RankingRule::Words => "_words".to_string(),
RankingRule::Proximity => "_proximity".to_string(),
RankingRule::Attribute => "_attribute".to_string(),
RankingRule::WordsPosition => "_words_position".to_string(),
RankingRule::Exact => "_exact".to_string(),
RankingRule::Asc(field) => format!("asc({})", field),
RankingRule::Dsc(field) => format!("dsc({})", field),
}
}
}
impl FromStr for RankingRule {
type Err = RankingRuleConversionError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let rule = match s {
"_typo" => RankingRule::Typo,
"_words" => RankingRule::Words,
"_proximity" => RankingRule::Proximity,
"_attribute" => RankingRule::Attribute,
"_words_position" => RankingRule::WordsPosition,
"_exact" => RankingRule::Exact,
_ => {
let captures = RANKING_RULE_REGEX.captures(s).ok_or(RankingRuleConversionError)?;
match (captures.get(1).map(|m| m.as_str()), captures.get(2)) {
(Some("asc"), Some(field)) => RankingRule::Asc(field.as_str().to_string()),
(Some("dsc"), Some(field)) => RankingRule::Dsc(field.as_str().to_string()),
_ => return Err(RankingRuleConversionError)
}
}
};
Ok(rule)
}
}
impl RankingRule {
pub fn field(&self) -> Option<&str> {
match self {
RankingRule::Asc(field) | RankingRule::Dsc(field) => Some(field),
_ => None,
}
}
pub fn from_iter(rules: impl IntoIterator<Item = impl AsRef<str>>) -> Result<Vec<RankingRule>, RankingRuleConversionError> {
rules.into_iter()
.map(|s| RankingRule::from_str(s.as_ref()))
.collect()
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SettingsUpdate {
pub ranking_rules: UpdateState<Vec<RankingRule>>,
pub ranking_distinct: UpdateState<String>,
pub identifier: UpdateState<String>,
pub searchable_attributes: UpdateState<Vec<String>>,
pub displayed_attributes: UpdateState<HashSet<String>>,
pub stop_words: UpdateState<BTreeSet<String>>,
pub synonyms: UpdateState<BTreeMap<String, Vec<String>>>,
pub index_new_fields: UpdateState<bool>,
}
impl Default for SettingsUpdate {
fn default() -> Self {
Self {
ranking_rules: UpdateState::Nothing,
ranking_distinct: UpdateState::Nothing,
identifier: UpdateState::Nothing,
searchable_attributes: UpdateState::Nothing,
displayed_attributes: UpdateState::Nothing,
stop_words: UpdateState::Nothing,
synonyms: UpdateState::Nothing,
index_new_fields: UpdateState::Nothing,
}
}
}
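
The Option<Option<T>> fields together with deserialize_some give every setting three states: absent (leave untouched), null (clear), value (update), which into_update maps onto UpdateState::{Nothing, Clear, Update}. A self-contained sketch, assuming only serde and serde_json; the struct here is illustrative, not the crate's:

use serde::{Deserialize, Deserializer};

#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct Example {
    #[serde(default, deserialize_with = "deserialize_some")]
    ranking_distinct: Option<Option<String>>,
}

fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where
    T: Deserialize<'de>,
    D: Deserializer<'de>,
{
    Deserialize::deserialize(deserializer).map(Some)
}

fn main() {
    // absent field -> None: the setting is left untouched
    let a: Example = serde_json::from_str("{}").unwrap();
    assert_eq!(a.ranking_distinct, None);

    // explicit null -> Some(None): the setting is cleared
    let b: Example = serde_json::from_str(r#"{"rankingDistinct":null}"#).unwrap();
    assert_eq!(b.ranking_distinct, Some(None));

    // value -> Some(Some(v)): the setting is updated
    let c: Example = serde_json::from_str(r#"{"rankingDistinct":"movie_id"}"#).unwrap();
    assert_eq!(c.ranking_distinct, Some(Some("movie_id".to_string())));
}

Ranking rules round-trip the same way through the FromStr/ToString impls above: "asc(price)".parse::<RankingRule>() yields RankingRule::Asc("price".to_string()), and anything the regex rejects surfaces as a RankingRuleConversionError.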

View File

@ -1,14 +1,14 @@
use heed::types::{ByteSlice, OwnedType}; use heed::types::{ByteSlice, OwnedType};
use crate::database::MainT; use crate::database::MainT;
use heed::Result as ZResult; use heed::Result as ZResult;
use meilisearch_schema::SchemaAttr; use meilisearch_schema::FieldId;
use super::DocumentAttrKey; use super::DocumentFieldStoredKey;
use crate::DocumentId; use crate::DocumentId;
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub struct DocumentsFields { pub struct DocumentsFields {
pub(crate) documents_fields: heed::Database<OwnedType<DocumentAttrKey>, ByteSlice>, pub(crate) documents_fields: heed::Database<OwnedType<DocumentFieldStoredKey>, ByteSlice>,
} }
impl DocumentsFields { impl DocumentsFields {
@ -16,10 +16,10 @@ impl DocumentsFields {
self, self,
writer: &mut heed::RwTxn<MainT>, writer: &mut heed::RwTxn<MainT>,
document_id: DocumentId, document_id: DocumentId,
attribute: SchemaAttr, field: FieldId,
value: &[u8], value: &[u8],
) -> ZResult<()> { ) -> ZResult<()> {
let key = DocumentAttrKey::new(document_id, attribute); let key = DocumentFieldStoredKey::new(document_id, field);
self.documents_fields.put(writer, &key, value) self.documents_fields.put(writer, &key, value)
} }
@ -28,8 +28,8 @@ impl DocumentsFields {
writer: &mut heed::RwTxn<MainT>, writer: &mut heed::RwTxn<MainT>,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<usize> { ) -> ZResult<usize> {
let start = DocumentAttrKey::new(document_id, SchemaAttr::min()); let start = DocumentFieldStoredKey::new(document_id, FieldId::min());
let end = DocumentAttrKey::new(document_id, SchemaAttr::max()); let end = DocumentFieldStoredKey::new(document_id, FieldId::max());
self.documents_fields.delete_range(writer, &(start..=end)) self.documents_fields.delete_range(writer, &(start..=end))
} }
@ -41,9 +41,9 @@ impl DocumentsFields {
self, self,
reader: &'txn heed::RoTxn<MainT>, reader: &'txn heed::RoTxn<MainT>,
document_id: DocumentId, document_id: DocumentId,
attribute: SchemaAttr, field: FieldId,
) -> ZResult<Option<&'txn [u8]>> { ) -> ZResult<Option<&'txn [u8]>> {
let key = DocumentAttrKey::new(document_id, attribute); let key = DocumentFieldStoredKey::new(document_id, field);
self.documents_fields.get(reader, &key) self.documents_fields.get(reader, &key)
} }
@ -52,25 +52,25 @@ impl DocumentsFields {
reader: &'txn heed::RoTxn<MainT>, reader: &'txn heed::RoTxn<MainT>,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<DocumentFieldsIter<'txn>> { ) -> ZResult<DocumentFieldsIter<'txn>> {
let start = DocumentAttrKey::new(document_id, SchemaAttr::min()); let start = DocumentFieldStoredKey::new(document_id, FieldId::min());
let end = DocumentAttrKey::new(document_id, SchemaAttr::max()); let end = DocumentFieldStoredKey::new(document_id, FieldId::max());
let iter = self.documents_fields.range(reader, &(start..=end))?; let iter = self.documents_fields.range(reader, &(start..=end))?;
Ok(DocumentFieldsIter { iter }) Ok(DocumentFieldsIter { iter })
} }
} }
pub struct DocumentFieldsIter<'txn> { pub struct DocumentFieldsIter<'txn> {
iter: heed::RoRange<'txn, OwnedType<DocumentAttrKey>, ByteSlice>, iter: heed::RoRange<'txn, OwnedType<DocumentFieldStoredKey>, ByteSlice>,
} }
impl<'txn> Iterator for DocumentFieldsIter<'txn> { impl<'txn> Iterator for DocumentFieldsIter<'txn> {
type Item = ZResult<(SchemaAttr, &'txn [u8])>; type Item = ZResult<(FieldId, &'txn [u8])>;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
match self.iter.next() { match self.iter.next() {
Some(Ok((key, bytes))) => { Some(Ok((key, bytes))) => {
let attr = SchemaAttr(key.attr.get()); let field_id = FieldId(key.field_id.get());
Some(Ok((attr, bytes))) Some(Ok((field_id, bytes)))
} }
Some(Err(e)) => Some(Err(e)), Some(Err(e)) => Some(Err(e)),
None => None, None => None,
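
Why DocumentFieldStoredKey can drive range queries like the start..=end above: its fields are stored big-endian, so LMDB's byte-wise key comparison agrees with numeric order. A standalone illustration (std only):

fn main() {
    // big-endian bytes compare like the numbers they encode...
    assert!(1u64.to_be_bytes() < 2u64.to_be_bytes());
    assert!(255u64.to_be_bytes() < 256u64.to_be_bytes());

    // ...which does not hold for little-endian encoding
    assert!(255u64.to_le_bytes() > 256u64.to_le_bytes());
}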

View File

@ -1,13 +1,13 @@
use super::DocumentAttrKey; use super::DocumentFieldIndexedKey;
use crate::database::MainT; use crate::database::MainT;
use crate::DocumentId; use crate::DocumentId;
use heed::types::OwnedType; use heed::types::OwnedType;
use heed::Result as ZResult; use heed::Result as ZResult;
use meilisearch_schema::SchemaAttr; use meilisearch_schema::IndexedPos;
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub struct DocumentsFieldsCounts { pub struct DocumentsFieldsCounts {
pub(crate) documents_fields_counts: heed::Database<OwnedType<DocumentAttrKey>, OwnedType<u16>>, pub(crate) documents_fields_counts: heed::Database<OwnedType<DocumentFieldIndexedKey>, OwnedType<u16>>,
} }
impl DocumentsFieldsCounts { impl DocumentsFieldsCounts {
@ -15,10 +15,10 @@ impl DocumentsFieldsCounts {
self, self,
writer: &mut heed::RwTxn<MainT>, writer: &mut heed::RwTxn<MainT>,
document_id: DocumentId, document_id: DocumentId,
attribute: SchemaAttr, attribute: IndexedPos,
value: u16, value: u16,
) -> ZResult<()> { ) -> ZResult<()> {
let key = DocumentAttrKey::new(document_id, attribute); let key = DocumentFieldIndexedKey::new(document_id, attribute);
self.documents_fields_counts.put(writer, &key, &value) self.documents_fields_counts.put(writer, &key, &value)
} }
@ -27,10 +27,9 @@ impl DocumentsFieldsCounts {
writer: &mut heed::RwTxn<MainT>, writer: &mut heed::RwTxn<MainT>,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<usize> { ) -> ZResult<usize> {
let start = DocumentAttrKey::new(document_id, SchemaAttr::min()); let start = DocumentFieldIndexedKey::new(document_id, IndexedPos::min());
let end = DocumentAttrKey::new(document_id, SchemaAttr::max()); let end = DocumentFieldIndexedKey::new(document_id, IndexedPos::max());
self.documents_fields_counts self.documents_fields_counts.delete_range(writer, &(start..=end))
.delete_range(writer, &(start..=end))
} }
pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> { pub fn clear(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<()> {
@ -41,9 +40,9 @@ impl DocumentsFieldsCounts {
self, self,
reader: &heed::RoTxn<MainT>, reader: &heed::RoTxn<MainT>,
document_id: DocumentId, document_id: DocumentId,
attribute: SchemaAttr, attribute: IndexedPos,
) -> ZResult<Option<u16>> { ) -> ZResult<Option<u16>> {
let key = DocumentAttrKey::new(document_id, attribute); let key = DocumentFieldIndexedKey::new(document_id, attribute);
match self.documents_fields_counts.get(reader, &key)? { match self.documents_fields_counts.get(reader, &key)? {
Some(count) => Ok(Some(count)), Some(count) => Ok(Some(count)),
None => Ok(None), None => Ok(None),
@ -55,8 +54,8 @@ impl DocumentsFieldsCounts {
reader: &'txn heed::RoTxn<MainT>, reader: &'txn heed::RoTxn<MainT>,
document_id: DocumentId, document_id: DocumentId,
) -> ZResult<DocumentFieldsCountsIter<'txn>> { ) -> ZResult<DocumentFieldsCountsIter<'txn>> {
let start = DocumentAttrKey::new(document_id, SchemaAttr::min()); let start = DocumentFieldIndexedKey::new(document_id, IndexedPos::min());
let end = DocumentAttrKey::new(document_id, SchemaAttr::max()); let end = DocumentFieldIndexedKey::new(document_id, IndexedPos::max());
let iter = self.documents_fields_counts.range(reader, &(start..=end))?; let iter = self.documents_fields_counts.range(reader, &(start..=end))?;
Ok(DocumentFieldsCountsIter { iter }) Ok(DocumentFieldsCountsIter { iter })
} }
@ -79,17 +78,17 @@ impl DocumentsFieldsCounts {
} }
pub struct DocumentFieldsCountsIter<'txn> { pub struct DocumentFieldsCountsIter<'txn> {
iter: heed::RoRange<'txn, OwnedType<DocumentAttrKey>, OwnedType<u16>>, iter: heed::RoRange<'txn, OwnedType<DocumentFieldIndexedKey>, OwnedType<u16>>,
} }
impl Iterator for DocumentFieldsCountsIter<'_> { impl Iterator for DocumentFieldsCountsIter<'_> {
type Item = ZResult<(SchemaAttr, u16)>; type Item = ZResult<(IndexedPos, u16)>;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
match self.iter.next() { match self.iter.next() {
Some(Ok((key, count))) => { Some(Ok((key, count))) => {
let attr = SchemaAttr(key.attr.get()); let indexed_pos = IndexedPos(key.indexed_pos.get());
Some(Ok((attr, count))) Some(Ok((indexed_pos, count)))
} }
Some(Err(e)) => Some(Err(e)), Some(Err(e)) => Some(Err(e)),
None => None, None => None,
@ -99,7 +98,7 @@ impl Iterator for DocumentFieldsCountsIter<'_> {
pub struct DocumentsIdsIter<'txn> { pub struct DocumentsIdsIter<'txn> {
last_seen_id: Option<DocumentId>, last_seen_id: Option<DocumentId>,
iter: heed::RoIter<'txn, OwnedType<DocumentAttrKey>, OwnedType<u16>>, iter: heed::RoIter<'txn, OwnedType<DocumentFieldIndexedKey>, OwnedType<u16>>,
} }
impl Iterator for DocumentsIdsIter<'_> { impl Iterator for DocumentsIdsIter<'_> {
@ -123,18 +122,18 @@ impl Iterator for DocumentsIdsIter<'_> {
} }
pub struct AllDocumentsFieldsCountsIter<'txn> { pub struct AllDocumentsFieldsCountsIter<'txn> {
iter: heed::RoIter<'txn, OwnedType<DocumentAttrKey>, OwnedType<u16>>, iter: heed::RoIter<'txn, OwnedType<DocumentFieldIndexedKey>, OwnedType<u16>>,
} }
impl Iterator for AllDocumentsFieldsCountsIter<'_> { impl Iterator for AllDocumentsFieldsCountsIter<'_> {
type Item = ZResult<(DocumentId, SchemaAttr, u16)>; type Item = ZResult<(DocumentId, IndexedPos, u16)>;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
match self.iter.next() { match self.iter.next() {
Some(Ok((key, count))) => { Some(Ok((key, count))) => {
let docid = DocumentId(key.docid.get()); let docid = DocumentId(key.docid.get());
let attr = SchemaAttr(key.attr.get()); let indexed_pos = IndexedPos(key.indexed_pos.get());
Some(Ok((docid, attr, count))) Some(Ok((docid, indexed_pos, count)))
} }
Some(Err(e)) => Some(Err(e)), Some(Err(e)) => Some(Err(e)),
None => None, None => None,
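
DocumentsIdsIter above walks the same (docid, indexed_pos) keys and must emit each document id once; its last_seen_id field implements the usual emit-on-change scan over sorted keys. A standalone sketch of that pattern:

fn unique_ids(sorted_keys: &[(u64, u16)]) -> Vec<u64> {
    let mut last_seen_id = None;
    let mut out = Vec::new();
    for &(docid, _indexed_pos) in sorted_keys {
        if last_seen_id != Some(docid) {
            last_seen_id = Some(docid);
            out.push(docid);
        }
    }
    out
}

fn main() {
    let keys = [(1, 0), (1, 1), (2, 0), (4, 0), (4, 3)];
    assert_eq!(unique_ids(&keys), vec![1, 2, 4]);
}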

View File

@ -1,21 +1,26 @@
use crate::database::MainT; use std::sync::Arc;
use crate::RankedMap; use std::collections::HashMap;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str}; use heed::types::{ByteSlice, OwnedType, SerdeBincode, Str};
use heed::Result as ZResult; use heed::Result as ZResult;
use meilisearch_schema::Schema; use meilisearch_schema::Schema;
use std::collections::HashMap;
use std::sync::Arc; use crate::database::MainT;
use crate::RankedMap;
use crate::settings::RankingRule;
const CREATED_AT_KEY: &str = "created-at"; const CREATED_AT_KEY: &str = "created-at";
const CUSTOMS_KEY: &str = "customs-key"; const RANKING_RULES_KEY: &str = "ranking-rules";
const RANKING_DISTINCT_KEY: &str = "ranking-distinct";
const STOP_WORDS_KEY: &str = "stop-words";
const SYNONYMS_KEY: &str = "synonyms";
const CUSTOMS_KEY: &str = "customs";
const FIELDS_FREQUENCY_KEY: &str = "fields-frequency"; const FIELDS_FREQUENCY_KEY: &str = "fields-frequency";
const NAME_KEY: &str = "name"; const NAME_KEY: &str = "name";
const NUMBER_OF_DOCUMENTS_KEY: &str = "number-of-documents"; const NUMBER_OF_DOCUMENTS_KEY: &str = "number-of-documents";
const RANKED_MAP_KEY: &str = "ranked-map"; const RANKED_MAP_KEY: &str = "ranked-map";
const SCHEMA_KEY: &str = "schema"; const SCHEMA_KEY: &str = "schema";
const STOP_WORDS_KEY: &str = "stop-words";
const SYNONYMS_KEY: &str = "synonyms";
const UPDATED_AT_KEY: &str = "updated-at"; const UPDATED_AT_KEY: &str = "updated-at";
const WORDS_KEY: &str = "words"; const WORDS_KEY: &str = "words";
@ -91,23 +96,23 @@ impl Main {
} }
pub fn put_schema(self, writer: &mut heed::RwTxn<MainT>, schema: &Schema) -> ZResult<()> { pub fn put_schema(self, writer: &mut heed::RwTxn<MainT>, schema: &Schema) -> ZResult<()> {
self.main self.main.put::<_, Str, SerdeBincode<Schema>>(writer, SCHEMA_KEY, schema)
.put::<_, Str, SerdeBincode<Schema>>(writer, SCHEMA_KEY, schema)
} }
pub fn schema(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<Schema>> { pub fn schema(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<Schema>> {
self.main self.main.get::<_, Str, SerdeBincode<Schema>>(reader, SCHEMA_KEY)
.get::<_, Str, SerdeBincode<Schema>>(reader, SCHEMA_KEY) }
pub fn delete_schema(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {
self.main.delete::<_, Str>(writer, SCHEMA_KEY)
} }
pub fn put_ranked_map(self, writer: &mut heed::RwTxn<MainT>, ranked_map: &RankedMap) -> ZResult<()> { pub fn put_ranked_map(self, writer: &mut heed::RwTxn<MainT>, ranked_map: &RankedMap) -> ZResult<()> {
self.main self.main.put::<_, Str, SerdeBincode<RankedMap>>(writer, RANKED_MAP_KEY, &ranked_map)
.put::<_, Str, SerdeBincode<RankedMap>>(writer, RANKED_MAP_KEY, &ranked_map)
} }
pub fn ranked_map(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<RankedMap>> { pub fn ranked_map(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<RankedMap>> {
self.main self.main.get::<_, Str, SerdeBincode<RankedMap>>(reader, RANKED_MAP_KEY)
.get::<_, Str, SerdeBincode<RankedMap>>(reader, RANKED_MAP_KEY)
} }
pub fn put_synonyms_fst(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set) -> ZResult<()> { pub fn put_synonyms_fst(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set) -> ZResult<()> {
@ -129,8 +134,7 @@ impl Main {
pub fn put_stop_words_fst(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set) -> ZResult<()> { pub fn put_stop_words_fst(self, writer: &mut heed::RwTxn<MainT>, fst: &fst::Set) -> ZResult<()> {
let bytes = fst.as_fst().as_bytes(); let bytes = fst.as_fst().as_bytes();
self.main self.main.put::<_, Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes)
.put::<_, Str, ByteSlice>(writer, STOP_WORDS_KEY, bytes)
} }
pub fn stop_words_fst(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<fst::Set>> { pub fn stop_words_fst(self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<fst::Set>> {
@ -184,6 +188,33 @@ impl Main {
} }
} }
pub fn ranking_rules(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<Vec<RankingRule>>> {
self.main.get::<_, Str, SerdeBincode<Vec<RankingRule>>>(reader, RANKING_RULES_KEY)
}
pub fn put_ranking_rules(self, writer: &mut heed::RwTxn<MainT>, value: &[RankingRule]) -> ZResult<()> {
self.main.put::<_, Str, SerdeBincode<Vec<RankingRule>>>(writer, RANKING_RULES_KEY, &value.to_vec())
}
pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {
self.main.delete::<_, Str>(writer, RANKING_RULES_KEY)
}
pub fn ranking_distinct(&self, reader: &heed::RoTxn<MainT>) -> ZResult<Option<String>> {
let value = self.main.get::<_, Str, Str>(reader, RANKING_DISTINCT_KEY)?;
Ok(value.map(ToOwned::to_owned))
}
pub fn put_ranking_distinct(self, writer: &mut heed::RwTxn<MainT>, value: &str) -> ZResult<()> {
self.main.put::<_, Str, Str>(writer, RANKING_DISTINCT_KEY, value)
}
pub fn delete_ranking_distinct(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {
self.main.delete::<_, Str>(writer, RANKING_DISTINCT_KEY)
}
pub fn put_customs(self, writer: &mut heed::RwTxn<MainT>, customs: &[u8]) -> ZResult<()> { pub fn put_customs(self, writer: &mut heed::RwTxn<MainT>, customs: &[u8]) -> ZResult<()> {
self.main self.main
.put::<_, Str, ByteSlice>(writer, CUSTOMS_KEY, customs) .put::<_, Str, ByteSlice>(writer, CUSTOMS_KEY, customs)
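
All of the new typed entries above (ranking-rules as SerdeBincode<Vec<RankingRule>>, ranking-distinct as Str) live in the same untyped main database, keyed by the string constants. A sketch of what the SerdeBincode codec does on the value side, assuming the bincode crate (which heed's SerdeBincode wraps); the trimmed-down enum is illustrative:

use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum RankingRule {
    Typo,
    Asc(String),
}

fn main() {
    // values stored under keys like "ranking-rules" are bincode blobs
    let rules = vec![RankingRule::Typo, RankingRule::Asc("price".into())];
    let bytes = bincode::serialize(&rules).unwrap();
    let back: Vec<RankingRule> = bincode::deserialize(&bytes).unwrap();
    assert_eq!(rules, back);
}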

View File

@ -29,15 +29,16 @@ use std::{mem, ptr};
use heed::Result as ZResult; use heed::Result as ZResult;
use heed::{BytesEncode, BytesDecode}; use heed::{BytesEncode, BytesDecode};
use meilisearch_schema::{Schema, SchemaAttr}; use meilisearch_schema::{IndexedPos, FieldId};
use sdset::{Set, SetBuf}; use sdset::{Set, SetBuf};
use serde::de::{self, Deserialize}; use serde::de::{self, Deserialize};
use zerocopy::{AsBytes, FromBytes}; use zerocopy::{AsBytes, FromBytes};
use crate::criterion::Criteria; use crate::criterion::Criteria;
use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::database::{MainT, UpdateT}; use crate::database::{MainT, UpdateT};
use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::serde::Deserializer; use crate::serde::Deserializer;
use crate::settings::SettingsUpdate;
use crate::{query_builder::QueryBuilder, update, DocIndex, DocumentId, Error, MResult}; use crate::{query_builder::QueryBuilder, update, DocIndex, DocumentId, Error, MResult};
type BEU64 = zerocopy::U64<byteorder::BigEndian>; type BEU64 = zerocopy::U64<byteorder::BigEndian>;
@ -45,16 +46,32 @@ type BEU16 = zerocopy::U16<byteorder::BigEndian>;
#[derive(Debug, Copy, Clone, AsBytes, FromBytes)] #[derive(Debug, Copy, Clone, AsBytes, FromBytes)]
#[repr(C)] #[repr(C)]
pub struct DocumentAttrKey { pub struct DocumentFieldIndexedKey {
docid: BEU64, docid: BEU64,
attr: BEU16, indexed_pos: BEU16,
} }
impl DocumentAttrKey { impl DocumentFieldIndexedKey {
fn new(docid: DocumentId, attr: SchemaAttr) -> DocumentAttrKey { fn new(docid: DocumentId, indexed_pos: IndexedPos) -> DocumentFieldIndexedKey {
DocumentAttrKey { DocumentFieldIndexedKey {
docid: BEU64::new(docid.0), docid: BEU64::new(docid.0),
attr: BEU16::new(attr.0), indexed_pos: BEU16::new(indexed_pos.0),
}
}
}
#[derive(Debug, Copy, Clone, AsBytes, FromBytes)]
#[repr(C)]
pub struct DocumentFieldStoredKey {
docid: BEU64,
field_id: BEU16,
}
impl DocumentFieldStoredKey {
fn new(docid: DocumentId, field_id: FieldId) -> DocumentFieldStoredKey {
DocumentFieldStoredKey {
docid: BEU64::new(docid.0),
field_id: BEU16::new(field_id.0),
} }
} }
} }
@ -207,10 +224,7 @@ impl Index {
let schema = schema.ok_or(Error::SchemaMissing)?; let schema = schema.ok_or(Error::SchemaMissing)?;
let attributes = match attributes { let attributes = match attributes {
Some(attributes) => attributes Some(attributes) => Some(attributes.iter().filter_map(|name| schema.id(*name)).collect()),
.iter()
.map(|name| schema.attribute(name))
.collect(),
None => None, None => None,
}; };
@ -219,7 +233,7 @@ impl Index {
reader, reader,
documents_fields: self.documents_fields, documents_fields: self.documents_fields,
schema: &schema, schema: &schema,
attributes: attributes.as_ref(), fields: attributes.as_ref(),
}; };
Ok(Option::<T>::deserialize(&mut deserializer)?) Ok(Option::<T>::deserialize(&mut deserializer)?)
@ -229,7 +243,7 @@ impl Index {
&self, &self,
reader: &heed::RoTxn<MainT>, reader: &heed::RoTxn<MainT>,
document_id: DocumentId, document_id: DocumentId,
attribute: SchemaAttr, attribute: FieldId,
) -> MResult<Option<T>> { ) -> MResult<Option<T>> {
let bytes = self let bytes = self
.documents_fields .documents_fields
@ -240,16 +254,16 @@ impl Index {
} }
} }
pub fn schema_update(&self, writer: &mut heed::RwTxn<UpdateT>, schema: Schema) -> MResult<u64> {
let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
update::push_schema_update(writer, self, schema)
}
pub fn customs_update(&self, writer: &mut heed::RwTxn<UpdateT>, customs: Vec<u8>) -> ZResult<u64> { pub fn customs_update(&self, writer: &mut heed::RwTxn<UpdateT>, customs: Vec<u8>) -> ZResult<u64> {
let _ = self.updates_notifier.send(UpdateEvent::NewUpdate); let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
update::push_customs_update(writer, self.updates, self.updates_results, customs) update::push_customs_update(writer, self.updates, self.updates_results, customs)
} }
pub fn settings_update(&self, writer: &mut heed::RwTxn<UpdateT>, update: SettingsUpdate) -> ZResult<u64> {
let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
update::push_settings_update(writer, self.updates, self.updates_results, update)
}
pub fn documents_addition<D>(&self) -> update::DocumentsAddition<D> { pub fn documents_addition<D>(&self) -> update::DocumentsAddition<D> {
update::DocumentsAddition::new( update::DocumentsAddition::new(
self.updates, self.updates,
@ -279,30 +293,6 @@ impl Index {
update::push_clear_all(writer, self.updates, self.updates_results) update::push_clear_all(writer, self.updates, self.updates_results)
} }
pub fn synonyms_update(&self) -> update::SynonymsUpdate {
update::SynonymsUpdate::new(
self.updates,
self.updates_results,
self.updates_notifier.clone(),
)
}
pub fn stop_words_addition(&self) -> update::StopWordsAddition {
update::StopWordsAddition::new(
self.updates,
self.updates_results,
self.updates_notifier.clone(),
)
}
pub fn stop_words_deletion(&self) -> update::StopWordsDeletion {
update::StopWordsDeletion::new(
self.updates,
self.updates_results,
self.updates_notifier.clone(),
)
}
pub fn current_update_id(&self, reader: &heed::RoTxn<UpdateT>) -> MResult<Option<u64>> { pub fn current_update_id(&self, reader: &heed::RoTxn<UpdateT>) -> MResult<Option<u64>> {
match self.updates.last_update(reader)? { match self.updates.last_update(reader)? {
Some((id, _)) => Ok(Some(id)), Some((id, _)) => Ok(Some(id)),
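
Note the semantics change in Index::document above: requested attribute names unknown to the schema are now silently skipped (filter_map over Schema::id) rather than looked up per name. An illustrative stand-in for that lookup (names and table are hypothetical):

fn main() {
    // stand-in for the schema's name -> FieldId table
    let schema = [("title", 1u16), ("overview", 2u16)];
    let id = |name: &str| {
        schema.iter().find(|(n, _)| *n == name).map(|(_, i)| *i)
    };

    let requested = ["title", "poster"];
    let fields: Vec<u16> = requested.iter().copied().filter_map(|name| id(name)).collect();
    assert_eq!(fields, vec![1]); // unknown "poster" is silently skipped
}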

View File

@ -19,7 +19,7 @@ pub struct PrefixKey {
impl PrefixKey { impl PrefixKey {
pub fn new(prefix: [u8; 4], index: u64, docid: u64) -> PrefixKey { pub fn new(prefix: [u8; 4], index: u64, docid: u64) -> PrefixKey {
PrefixKey { PrefixKey {
prefix: prefix, prefix,
index: BEU64::new(index), index: BEU64::new(index),
docid: BEU64::new(docid), docid: BEU64::new(docid),
} }

View File

@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use crate::database::{MainT, UpdateT}; use crate::database::{MainT, UpdateT};
use crate::database::{UpdateEvent, UpdateEventsEmitter}; use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::raw_indexer::RawIndexer; use crate::raw_indexer::RawIndexer;
use crate::serde::{extract_document_id, serialize_value, Deserializer, Serializer}; use crate::serde::{extract_document_id, serialize_value_with_id, Deserializer, Serializer};
use crate::store; use crate::store;
use crate::update::{apply_documents_deletion, compute_short_prefixes, next_update_id, Update}; use crate::update::{apply_documents_deletion, compute_short_prefixes, next_update_id, Update};
use crate::{Error, MResult, RankedMap}; use crate::{Error, MResult, RankedMap};
@ -109,16 +109,16 @@ pub fn apply_documents_addition<'a, 'b>(
) -> MResult<()> { ) -> MResult<()> {
let mut documents_additions = HashMap::new(); let mut documents_additions = HashMap::new();
let schema = match index.main.schema(writer)? { let mut schema = match index.main.schema(writer)? {
Some(schema) => schema, Some(schema) => schema,
None => return Err(Error::SchemaMissing), None => return Err(Error::SchemaMissing),
}; };
let identifier = schema.identifier_name(); let identifier = schema.identifier();
// 1. store documents ids for future deletion // 1. store documents ids for future deletion
for document in addition { for document in addition {
let document_id = match extract_document_id(identifier, &document)? { let document_id = match extract_document_id(&identifier, &document)? {
Some(id) => id, Some(id) => id,
None => return Err(Error::MissingDocumentId), None => return Err(Error::MissingDocumentId),
}; };
@ -147,7 +147,7 @@ pub fn apply_documents_addition<'a, 'b>(
for (document_id, document) in documents_additions { for (document_id, document) in documents_additions {
let serializer = Serializer { let serializer = Serializer {
txn: writer, txn: writer,
schema: &schema, schema: &mut schema,
document_store: index.documents_fields, document_store: index.documents_fields,
document_fields_counts: index.documents_fields_counts, document_fields_counts: index.documents_fields_counts,
indexer: &mut indexer, indexer: &mut indexer,
@ -166,7 +166,7 @@ pub fn apply_documents_addition<'a, 'b>(
indexer, indexer,
)?; )?;
compute_short_prefixes(writer, index)?; index.main.put_schema(writer, &schema)?;
Ok(()) Ok(())
} }
@ -178,16 +178,16 @@ pub fn apply_documents_partial_addition<'a, 'b>(
) -> MResult<()> { ) -> MResult<()> {
let mut documents_additions = HashMap::new(); let mut documents_additions = HashMap::new();
let schema = match index.main.schema(writer)? { let mut schema = match index.main.schema(writer)? {
Some(schema) => schema, Some(schema) => schema,
None => return Err(Error::SchemaMissing), None => return Err(Error::SchemaMissing),
}; };
let identifier = schema.identifier_name(); let identifier = schema.identifier();
// 1. store documents ids for future deletion // 1. store documents ids for future deletion
for mut document in addition { for mut document in addition {
let document_id = match extract_document_id(identifier, &document)? { let document_id = match extract_document_id(&identifier, &document)? {
Some(id) => id, Some(id) => id,
None => return Err(Error::MissingDocumentId), None => return Err(Error::MissingDocumentId),
}; };
@ -197,7 +197,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
reader: writer, reader: writer,
documents_fields: index.documents_fields, documents_fields: index.documents_fields,
schema: &schema, schema: &schema,
attributes: None, fields: None,
}; };
// retrieve the old document and // retrieve the old document and
@ -233,7 +233,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
for (document_id, document) in documents_additions { for (document_id, document) in documents_additions {
let serializer = Serializer { let serializer = Serializer {
txn: writer, txn: writer,
schema: &schema, schema: &mut schema,
document_store: index.documents_fields, document_store: index.documents_fields,
document_fields_counts: index.documents_fields_counts, document_fields_counts: index.documents_fields_counts,
indexer: &mut indexer, indexer: &mut indexer,
@ -252,7 +252,7 @@ pub fn apply_documents_partial_addition<'a, 'b>(
indexer, indexer,
)?; )?;
compute_short_prefixes(writer, index)?; index.main.put_schema(writer, &schema)?;
Ok(()) Ok(())
} }
@ -292,22 +292,22 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
for document_id in documents_ids { for document_id in documents_ids {
for result in index.documents_fields.document_fields(writer, *document_id)? { for result in index.documents_fields.document_fields(writer, *document_id)? {
let (attr, bytes) = result?; let (field_id, bytes) = result?;
let value: serde_json::Value = serde_json::from_slice(bytes)?; let value: serde_json::Value = serde_json::from_slice(bytes)?;
ram_store.insert((document_id, attr), value); ram_store.insert((document_id, field_id), value);
} }
for ((docid, attr), value) in ram_store.drain() { for ((docid, field_id), value) in ram_store.drain() {
serialize_value( serialize_value_with_id(
writer, writer,
attr, field_id,
schema.props(attr), &schema,
*docid, *docid,
index.documents_fields, index.documents_fields,
index.documents_fields_counts, index.documents_fields_counts,
&mut indexer, &mut indexer,
&mut ranked_map, &mut ranked_map,
&value, &value
)?; )?;
} }
} }
@ -322,7 +322,7 @@ pub fn reindex_all_documents(writer: &mut heed::RwTxn<MainT>, index: &store::Ind
)?; )?;
} }
compute_short_prefixes(writer, index)?; index.main.put_schema(writer, &schema)?;
Ok(()) Ok(())
} }

View File

@ -40,8 +40,8 @@ impl DocumentsDeletion {
where where
D: serde::Serialize, D: serde::Serialize,
{ {
let identifier = schema.identifier_name(); let identifier = schema.identifier();
let document_id = match extract_document_id(identifier, &document)? { let document_id = match extract_document_id(&identifier, &document)? {
Some(id) => id, Some(id) => id,
None => return Err(Error::MissingDocumentId), None => return Err(Error::MissingDocumentId),
}; };
@ -101,23 +101,12 @@ pub fn apply_documents_deletion(
}; };
// collect the ranked attributes according to the schema // collect the ranked attributes according to the schema
let ranked_attrs: Vec<_> = schema let ranked_fields = schema.ranked();
.iter()
.filter_map(
|(_, attr, prop)| {
if prop.is_ranked() {
Some(attr)
} else {
None
}
},
)
.collect();
let mut words_document_ids = HashMap::new(); let mut words_document_ids = HashMap::new();
for id in idset { for id in idset {
// remove all the ranked attributes from the ranked_map // remove all the ranked attributes from the ranked_map
for ranked_attr in &ranked_attrs { for ranked_attr in ranked_fields {
ranked_map.remove(id, *ranked_attr); ranked_map.remove(id, *ranked_attr);
} }
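
For each deleted document, every (document, ranked field) entry leaves the ranked map; schema.ranked() now hands back the ranked FieldIds directly instead of re-deriving them by filtering schema properties. A standalone sketch of the removal loop (HashMap stand-in for RankedMap):

use std::collections::HashMap;

fn main() {
    // stand-in for RankedMap: (document id, ranked field id) -> ranked number
    let mut ranked_map: HashMap<(u64, u16), i64> = HashMap::new();
    ranked_map.insert((7, 0), 1994);
    ranked_map.insert((7, 3), 42);
    ranked_map.insert((8, 0), 2001);

    // stand-in for schema.ranked(): the ranked field ids
    let ranked_fields = [0u16, 3];
    // deleting document 7 removes every (7, field) entry
    for field in ranked_fields {
        ranked_map.remove(&(7, field));
    }
    assert_eq!(ranked_map.len(), 1); // only document 8 keeps an entry
}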

View File

@ -2,10 +2,7 @@ mod clear_all;
mod customs_update; mod customs_update;
mod documents_addition; mod documents_addition;
mod documents_deletion; mod documents_deletion;
mod schema_update; mod settings_update;
mod stop_words_addition;
mod stop_words_deletion;
mod synonyms_update;
pub use self::clear_all::{apply_clear_all, push_clear_all}; pub use self::clear_all::{apply_clear_all, push_clear_all};
pub use self::customs_update::{apply_customs_update, push_customs_update}; pub use self::customs_update::{apply_customs_update, push_customs_update};
@ -13,13 +10,10 @@ pub use self::documents_addition::{
apply_documents_addition, apply_documents_partial_addition, DocumentsAddition, apply_documents_addition, apply_documents_partial_addition, DocumentsAddition,
}; };
pub use self::documents_deletion::{apply_documents_deletion, DocumentsDeletion}; pub use self::documents_deletion::{apply_documents_deletion, DocumentsDeletion};
pub use self::schema_update::{apply_schema_update, push_schema_update}; pub use self::settings_update::{apply_settings_update, push_settings_update};
pub use self::stop_words_addition::{apply_stop_words_addition, StopWordsAddition};
pub use self::stop_words_deletion::{apply_stop_words_deletion, StopWordsDeletion};
pub use self::synonyms_update::{apply_synonyms_update, SynonymsUpdate};
use std::cmp; use std::cmp;
use std::collections::{BTreeMap, BTreeSet, HashMap}; use std::collections::HashMap;
use std::time::Instant; use std::time::Instant;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
@ -31,7 +25,7 @@ use sdset::Set;
use crate::{store, DocumentId, MResult}; use crate::{store, DocumentId, MResult};
use crate::database::{MainT, UpdateT}; use crate::database::{MainT, UpdateT};
use meilisearch_schema::Schema; use crate::settings::SettingsUpdate;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Update { pub struct Update {
@ -47,13 +41,6 @@ impl Update {
} }
} }
fn schema(data: Schema) -> Update {
Update {
data: UpdateData::Schema(data),
enqueued_at: Utc::now(),
}
}
fn customs(data: Vec<u8>) -> Update { fn customs(data: Vec<u8>) -> Update {
Update { Update {
data: UpdateData::Customs(data), data: UpdateData::Customs(data),
@ -82,23 +69,9 @@ impl Update {
} }
} }
fn synonyms_update(data: BTreeMap<String, Vec<String>>) -> Update { fn settings(data: SettingsUpdate) -> Update {
Update { Update {
data: UpdateData::SynonymsUpdate(data), data: UpdateData::Settings(data),
enqueued_at: Utc::now(),
}
}
fn stop_words_addition(data: BTreeSet<String>) -> Update {
Update {
data: UpdateData::StopWordsAddition(data),
enqueued_at: Utc::now(),
}
}
fn stop_words_deletion(data: BTreeSet<String>) -> Update {
Update {
data: UpdateData::StopWordsDeletion(data),
enqueued_at: Utc::now(), enqueued_at: Utc::now(),
} }
} }
@ -107,21 +80,17 @@ impl Update {
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub enum UpdateData { pub enum UpdateData {
ClearAll, ClearAll,
Schema(Schema),
Customs(Vec<u8>), Customs(Vec<u8>),
DocumentsAddition(Vec<HashMap<String, serde_json::Value>>), DocumentsAddition(Vec<HashMap<String, serde_json::Value>>),
DocumentsPartial(Vec<HashMap<String, serde_json::Value>>), DocumentsPartial(Vec<HashMap<String, serde_json::Value>>),
DocumentsDeletion(Vec<DocumentId>), DocumentsDeletion(Vec<DocumentId>),
SynonymsUpdate(BTreeMap<String, Vec<String>>), Settings(SettingsUpdate)
StopWordsAddition(BTreeSet<String>),
StopWordsDeletion(BTreeSet<String>),
} }
impl UpdateData { impl UpdateData {
pub fn update_type(&self) -> UpdateType { pub fn update_type(&self) -> UpdateType {
match self { match self {
UpdateData::ClearAll => UpdateType::ClearAll, UpdateData::ClearAll => UpdateType::ClearAll,
UpdateData::Schema(_) => UpdateType::Schema,
UpdateData::Customs(_) => UpdateType::Customs, UpdateData::Customs(_) => UpdateType::Customs,
UpdateData::DocumentsAddition(addition) => UpdateType::DocumentsAddition { UpdateData::DocumentsAddition(addition) => UpdateType::DocumentsAddition {
number: addition.len(), number: addition.len(),
@ -132,14 +101,8 @@ impl UpdateData {
UpdateData::DocumentsDeletion(deletion) => UpdateType::DocumentsDeletion { UpdateData::DocumentsDeletion(deletion) => UpdateType::DocumentsDeletion {
number: deletion.len(), number: deletion.len(),
}, },
UpdateData::SynonymsUpdate(addition) => UpdateType::SynonymsUpdate { UpdateData::Settings(update) => UpdateType::Settings {
number: addition.len(), settings: update.clone(),
},
UpdateData::StopWordsAddition(addition) => UpdateType::StopWordsAddition {
number: addition.len(),
},
UpdateData::StopWordsDeletion(deletion) => UpdateType::StopWordsDeletion {
number: deletion.len(),
}, },
} }
} }
@ -149,14 +112,11 @@ impl UpdateData {
#[serde(tag = "name")] #[serde(tag = "name")]
pub enum UpdateType { pub enum UpdateType {
ClearAll, ClearAll,
Schema,
Customs, Customs,
DocumentsAddition { number: usize }, DocumentsAddition { number: usize },
DocumentsPartial { number: usize }, DocumentsPartial { number: usize },
DocumentsDeletion { number: usize }, DocumentsDeletion { number: usize },
SynonymsUpdate { number: usize }, Settings { settings: SettingsUpdate },
StopWordsAddition { number: usize },
StopWordsDeletion { number: usize },
} }
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -261,14 +221,6 @@ pub fn update_task<'a, 'b>(
(update_type, result, start.elapsed()) (update_type, result, start.elapsed())
} }
UpdateData::Schema(schema) => {
let start = Instant::now();
let update_type = UpdateType::Schema;
let result = apply_schema_update(writer, &schema, index);
(update_type, result, start.elapsed())
}
UpdateData::Customs(customs) => { UpdateData::Customs(customs) => {
let start = Instant::now(); let start = Instant::now();
@ -310,37 +262,18 @@ pub fn update_task<'a, 'b>(
(update_type, result, start.elapsed()) (update_type, result, start.elapsed())
} }
UpdateData::SynonymsUpdate(synonyms) => { UpdateData::Settings(settings) => {
let start = Instant::now(); let start = Instant::now();
let update_type = UpdateType::SynonymsUpdate { let update_type = UpdateType::Settings {
number: synonyms.len(), settings: settings.clone(),
}; };
let result = apply_synonyms_update(writer, index.main, index.synonyms, synonyms); let result = apply_settings_update(
writer,
(update_type, result, start.elapsed()) index,
} settings,
UpdateData::StopWordsAddition(stop_words) => { );
let start = Instant::now();
let update_type = UpdateType::StopWordsAddition {
number: stop_words.len(),
};
let result =
apply_stop_words_addition(writer, index.main, index.postings_lists, stop_words);
(update_type, result, start.elapsed())
}
UpdateData::StopWordsDeletion(stop_words) => {
let start = Instant::now();
let update_type = UpdateType::StopWordsDeletion {
number: stop_words.len(),
};
let result = apply_stop_words_deletion(writer, index, stop_words);
(update_type, result, start.elapsed()) (update_type, result, start.elapsed())
} }
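
UpdateType is serialized internally tagged (#[serde(tag = "name")] above), so consumers of the update status API see the variant name inline with its payload. A self-contained sketch with a trimmed-down version of the enum:

use serde::Serialize;

#[derive(Serialize)]
#[serde(tag = "name")]
enum UpdateType {
    ClearAll,
    DocumentsAddition { number: usize },
}

fn main() {
    let addition = UpdateType::DocumentsAddition { number: 19 };
    assert_eq!(
        serde_json::to_string(&addition).unwrap(),
        r#"{"name":"DocumentsAddition","number":19}"#
    );
    assert_eq!(
        serde_json::to_string(&UpdateType::ClearAll).unwrap(),
        r#"{"name":"ClearAll"}"#
    );
}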

View File

@ -1,64 +0,0 @@
use meilisearch_schema::{Diff, Schema};
use crate::database::{MainT, UpdateT};
use crate::update::documents_addition::reindex_all_documents;
use crate::update::{next_update_id, Update};
use crate::{error::UnsupportedOperation, store, MResult};
pub fn apply_schema_update(
writer: &mut heed::RwTxn<MainT>,
new_schema: &Schema,
index: &store::Index,
) -> MResult<()> {
use UnsupportedOperation::{
CanOnlyIntroduceNewSchemaAttributesAtEnd, CannotRemoveSchemaAttribute,
CannotReorderSchemaAttribute, CannotUpdateSchemaIdentifier,
};
let mut need_full_reindexing = false;
if let Some(old_schema) = index.main.schema(writer)? {
for diff in meilisearch_schema::diff(&old_schema, new_schema) {
match diff {
Diff::IdentChange { .. } => return Err(CannotUpdateSchemaIdentifier.into()),
Diff::AttrMove { .. } => return Err(CannotReorderSchemaAttribute.into()),
Diff::AttrPropsChange { old, new, .. } => {
if new.indexed != old.indexed {
need_full_reindexing = true;
}
if new.ranked != old.ranked {
need_full_reindexing = true;
}
}
Diff::NewAttr { pos, .. } => {
// new attribute not at the end of the schema
if pos < old_schema.number_of_attributes() {
return Err(CanOnlyIntroduceNewSchemaAttributesAtEnd.into());
}
}
Diff::RemovedAttr { .. } => return Err(CannotRemoveSchemaAttribute.into()),
}
}
}
index.main.put_schema(writer, new_schema)?;
if need_full_reindexing {
reindex_all_documents(writer, index)?
}
Ok(())
}
pub fn push_schema_update(
writer: &mut heed::RwTxn<UpdateT>,
index: &store::Index,
schema: Schema,
) -> MResult<u64> {
let last_update_id = next_update_id(writer, index.updates, index.updates_results)?;
let update = Update::schema(schema);
index.updates.put_update(writer, last_update_id, &update)?;
Ok(last_update_id)
}

View File

@ -0,0 +1,297 @@
use std::collections::{BTreeMap, BTreeSet};
use heed::Result as ZResult;
use fst::{set::OpBuilder, SetBuilder};
use sdset::SetBuf;
use meilisearch_schema::Schema;
use crate::database::{MainT, UpdateT};
use crate::settings::{UpdateState, SettingsUpdate, RankingRule};
use crate::update::documents_addition::reindex_all_documents;
use crate::update::{next_update_id, Update};
use crate::{store, MResult, Error};
pub fn push_settings_update(
writer: &mut heed::RwTxn<UpdateT>,
updates_store: store::Updates,
updates_results_store: store::UpdatesResults,
settings: SettingsUpdate,
) -> ZResult<u64> {
let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;
let update = Update::settings(settings);
updates_store.put_update(writer, last_update_id, &update)?;
Ok(last_update_id)
}
pub fn apply_settings_update(
writer: &mut heed::RwTxn<MainT>,
index: &store::Index,
settings: SettingsUpdate,
) -> MResult<()> {
let mut must_reindex = false;
let mut schema = match index.main.schema(writer)? {
Some(schema) => schema,
None => {
match settings.identifier.clone() {
UpdateState::Update(id) => Schema::with_identifier(&id),
_ => return Err(Error::MissingIdentifier)
}
}
};
match settings.ranking_rules {
UpdateState::Update(v) => {
let ranked_fields: Vec<&str> = v.iter().filter_map(RankingRule::field).collect();
schema.update_ranked(ranked_fields)?;
index.main.put_ranking_rules(writer, &v)?;
must_reindex = true;
},
UpdateState::Clear => {
let clear: Vec<&str> = Vec::new();
schema.update_ranked(clear)?;
index.main.delete_ranking_rules(writer)?;
must_reindex = true;
},
UpdateState::Nothing => (),
}
match settings.ranking_distinct {
UpdateState::Update(v) => {
index.main.put_ranking_distinct(writer, &v)?;
},
UpdateState::Clear => {
index.main.delete_ranking_distinct(writer)?;
},
UpdateState::Nothing => (),
}
match settings.index_new_fields {
UpdateState::Update(v) => {
schema.set_index_new_fields(v);
},
UpdateState::Clear => {
schema.set_index_new_fields(true);
},
UpdateState::Nothing => (),
}
match settings.searchable_attributes.clone() {
UpdateState::Update(v) => {
schema.update_indexed(v)?;
must_reindex = true;
},
UpdateState::Clear => {
let clear: Vec<&str> = Vec::new();
schema.update_indexed(clear)?;
must_reindex = true;
},
UpdateState::Nothing => (),
}
match settings.displayed_attributes.clone() {
UpdateState::Update(v) => schema.update_displayed(v)?,
UpdateState::Clear => {
let clear: Vec<&str> = Vec::new();
schema.update_displayed(clear)?;
},
UpdateState::Nothing => (),
}
index.main.put_schema(writer, &schema)?;
match settings.stop_words {
UpdateState::Update(stop_words) => {
if apply_stop_words_update(writer, index, stop_words)? {
must_reindex = true;
}
},
UpdateState::Clear => {
if apply_stop_words_update(writer, index, BTreeSet::new())? {
must_reindex = true;
}
},
UpdateState::Nothing => (),
}
match settings.synonyms {
UpdateState::Update(synonyms) => apply_synonyms_update(writer, index, synonyms)?,
UpdateState::Clear => apply_synonyms_update(writer, index, BTreeMap::new())?,
UpdateState::Nothing => (),
}
if must_reindex {
reindex_all_documents(writer, index)?;
}
Ok(())
}
pub fn apply_stop_words_update(
writer: &mut heed::RwTxn<MainT>,
index: &store::Index,
stop_words: BTreeSet<String>,
) -> MResult<bool> {
let old_stop_words: BTreeSet<String> = index.main
.stop_words_fst(writer)?
.unwrap_or_default()
.stream()
.into_strs()
.unwrap()
.into_iter()
.collect();
let deletion: BTreeSet<String> = old_stop_words.difference(&stop_words).cloned().collect();
let addition: BTreeSet<String> = stop_words.difference(&old_stop_words).cloned().collect();
if !addition.is_empty() {
apply_stop_words_addition(
writer,
index,
addition
)?;
}
if !deletion.is_empty() {
apply_stop_words_deletion(
writer,
index,
deletion
)?;
return Ok(true)
}
let stop_words_fst = fst::Set::from_iter(stop_words)?;
index.main.put_stop_words_fst(writer, &stop_words_fst)?;
Ok(false)
}
fn apply_stop_words_addition(
writer: &mut heed::RwTxn<MainT>,
index: &store::Index,
addition: BTreeSet<String>,
) -> MResult<()> {
let main_store = index.main;
let postings_lists_store = index.postings_lists;
let mut stop_words_builder = SetBuilder::memory();
for word in addition {
stop_words_builder.insert(&word).unwrap();
// we remove every posting list associated to a new stop word
postings_lists_store.del_postings_list(writer, word.as_bytes())?;
}
// create the new delta stop words fst
let delta_stop_words = stop_words_builder
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap();
// we also need to remove all the stop words from the main fst
if let Some(word_fst) = main_store.words_fst(writer)? {
let op = OpBuilder::new()
.add(&word_fst)
.add(&delta_stop_words)
.difference();
let mut word_fst_builder = SetBuilder::memory();
word_fst_builder.extend_stream(op).unwrap();
let word_fst = word_fst_builder
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap();
main_store.put_words_fst(writer, &word_fst)?;
}
// now we add all of these stop words to the stop words fst in the main store
let stop_words_fst = main_store.stop_words_fst(writer)?.unwrap_or_default();
let op = OpBuilder::new()
.add(&stop_words_fst)
.add(&delta_stop_words)
.r#union();
let mut stop_words_builder = SetBuilder::memory();
stop_words_builder.extend_stream(op).unwrap();
let stop_words_fst = stop_words_builder
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap();
main_store.put_stop_words_fst(writer, &stop_words_fst)?;
Ok(())
}
fn apply_stop_words_deletion(
writer: &mut heed::RwTxn<MainT>,
index: &store::Index,
deletion: BTreeSet<String>,
) -> MResult<()> {
let mut stop_words_builder = SetBuilder::memory();
for word in deletion {
stop_words_builder.insert(&word).unwrap();
}
// create the new delta stop words fst
let delta_stop_words = stop_words_builder
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap();
// now we delete all of these stop words from the main store
let stop_words_fst = index.main.stop_words_fst(writer)?.unwrap_or_default();
let op = OpBuilder::new()
.add(&stop_words_fst)
.add(&delta_stop_words)
.difference();
let mut stop_words_builder = SetBuilder::memory();
stop_words_builder.extend_stream(op).unwrap();
let stop_words_fst = stop_words_builder
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap();
Ok(index.main.put_stop_words_fst(writer, &stop_words_fst)?)
}
pub fn apply_synonyms_update(
writer: &mut heed::RwTxn<MainT>,
index: &store::Index,
synonyms: BTreeMap<String, Vec<String>>,
) -> MResult<()> {
let main_store = index.main;
let synonyms_store = index.synonyms;
let mut synonyms_builder = SetBuilder::memory();
synonyms_store.clear(writer)?;
for (word, alternatives) in synonyms.clone() {
synonyms_builder.insert(&word).unwrap();
let alternatives = {
let alternatives = SetBuf::from_dirty(alternatives);
let mut alternatives_builder = SetBuilder::memory();
alternatives_builder.extend_iter(alternatives).unwrap();
let bytes = alternatives_builder.into_inner().unwrap();
fst::Set::from_bytes(bytes).unwrap()
};
synonyms_store.put_synonyms(writer, word.as_bytes(), &alternatives)?;
}
let synonyms_set = synonyms_builder
.into_inner()
.and_then(fst::Set::from_bytes)
.unwrap();
main_store.put_synonyms_fst(writer, &synonyms_set)?;
Ok(())
}
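
apply_stop_words_update only forces a reindex when words were removed (their posting lists must be rebuilt); pure additions just prune the affected posting lists. A standalone sketch of the two difference sets it computes:

use std::collections::BTreeSet;

fn main() {
    let old: BTreeSet<&str> = ["a", "the", "of"].into_iter().collect();
    let new: BTreeSet<&str> = ["a", "the", "les"].into_iter().collect();

    // words that stop being stop words: their postings are gone, reindex needed
    let deletion: BTreeSet<&str> = old.difference(&new).copied().collect();
    // words that become stop words: just drop their posting lists
    let addition: BTreeSet<&str> = new.difference(&old).copied().collect();

    assert!(deletion.contains("of"));
    assert!(addition.contains("les"));
}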

View File

@@ -1,118 +0,0 @@
use std::collections::BTreeSet;

use fst::{set::OpBuilder, SetBuilder};

use crate::database::{MainT, UpdateT};
use crate::automaton::normalize_str;
use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::update::{next_update_id, Update};
use crate::{store, MResult};

pub struct StopWordsAddition {
    updates_store: store::Updates,
    updates_results_store: store::UpdatesResults,
    updates_notifier: UpdateEventsEmitter,
    stop_words: BTreeSet<String>,
}

impl StopWordsAddition {
    pub fn new(
        updates_store: store::Updates,
        updates_results_store: store::UpdatesResults,
        updates_notifier: UpdateEventsEmitter,
    ) -> StopWordsAddition {
        StopWordsAddition {
            updates_store,
            updates_results_store,
            updates_notifier,
            stop_words: BTreeSet::new(),
        }
    }

    pub fn add_stop_word<S: AsRef<str>>(&mut self, stop_word: S) {
        let stop_word = normalize_str(stop_word.as_ref());
        self.stop_words.insert(stop_word);
    }

    pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
        let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
        let update_id = push_stop_words_addition(
            writer,
            self.updates_store,
            self.updates_results_store,
            self.stop_words,
        )?;
        Ok(update_id)
    }
}

pub fn push_stop_words_addition(
    writer: &mut heed::RwTxn<UpdateT>,
    updates_store: store::Updates,
    updates_results_store: store::UpdatesResults,
    addition: BTreeSet<String>,
) -> MResult<u64> {
    let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;

    let update = Update::stop_words_addition(addition);
    updates_store.put_update(writer, last_update_id, &update)?;

    Ok(last_update_id)
}

pub fn apply_stop_words_addition(
    writer: &mut heed::RwTxn<MainT>,
    main_store: store::Main,
    postings_lists_store: store::PostingsLists,
    addition: BTreeSet<String>,
) -> MResult<()> {
    let mut stop_words_builder = SetBuilder::memory();

    for word in addition {
        stop_words_builder.insert(&word).unwrap();
        // we remove every posting list associated to a new stop word
        postings_lists_store.del_postings_list(writer, word.as_bytes())?;
    }

    // create the new delta stop words fst
    let delta_stop_words = stop_words_builder
        .into_inner()
        .and_then(fst::Set::from_bytes)
        .unwrap();

    // we also need to remove all the stop words from the main fst
    if let Some(word_fst) = main_store.words_fst(writer)? {
        let op = OpBuilder::new()
            .add(&word_fst)
            .add(&delta_stop_words)
            .difference();

        let mut word_fst_builder = SetBuilder::memory();
        word_fst_builder.extend_stream(op).unwrap();
        let word_fst = word_fst_builder
            .into_inner()
            .and_then(fst::Set::from_bytes)
            .unwrap();

        main_store.put_words_fst(writer, &word_fst)?;
    }

    // now we add all of these stop words from the main store
    let stop_words_fst = main_store.stop_words_fst(writer)?.unwrap_or_default();

    let op = OpBuilder::new()
        .add(&stop_words_fst)
        .add(&delta_stop_words)
        .r#union();

    let mut stop_words_builder = SetBuilder::memory();
    stop_words_builder.extend_stream(op).unwrap();
    let stop_words_fst = stop_words_builder
        .into_inner()
        .and_then(fst::Set::from_bytes)
        .unwrap();

    main_store.put_stop_words_fst(writer, &stop_words_fst)?;

    Ok(())
}

View File

@@ -1,107 +0,0 @@
use std::collections::BTreeSet;

use fst::{set::OpBuilder, SetBuilder};

use crate::database::{MainT, UpdateT};
use crate::automaton::normalize_str;
use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::update::documents_addition::reindex_all_documents;
use crate::update::{next_update_id, Update};
use crate::{store, MResult};

pub struct StopWordsDeletion {
    updates_store: store::Updates,
    updates_results_store: store::UpdatesResults,
    updates_notifier: UpdateEventsEmitter,
    stop_words: BTreeSet<String>,
}

impl StopWordsDeletion {
    pub fn new(
        updates_store: store::Updates,
        updates_results_store: store::UpdatesResults,
        updates_notifier: UpdateEventsEmitter,
    ) -> StopWordsDeletion {
        StopWordsDeletion {
            updates_store,
            updates_results_store,
            updates_notifier,
            stop_words: BTreeSet::new(),
        }
    }

    pub fn delete_stop_word<S: AsRef<str>>(&mut self, stop_word: S) {
        let stop_word = normalize_str(stop_word.as_ref());
        self.stop_words.insert(stop_word);
    }

    pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
        let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
        let update_id = push_stop_words_deletion(
            writer,
            self.updates_store,
            self.updates_results_store,
            self.stop_words,
        )?;
        Ok(update_id)
    }
}

pub fn push_stop_words_deletion(
    writer: &mut heed::RwTxn<UpdateT>,
    updates_store: store::Updates,
    updates_results_store: store::UpdatesResults,
    deletion: BTreeSet<String>,
) -> MResult<u64> {
    let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;

    let update = Update::stop_words_deletion(deletion);
    updates_store.put_update(writer, last_update_id, &update)?;

    Ok(last_update_id)
}

pub fn apply_stop_words_deletion(
    writer: &mut heed::RwTxn<MainT>,
    index: &store::Index,
    deletion: BTreeSet<String>,
) -> MResult<()> {
    let mut stop_words_builder = SetBuilder::memory();

    for word in deletion {
        stop_words_builder.insert(&word).unwrap();
    }

    // create the new delta stop words fst
    let delta_stop_words = stop_words_builder
        .into_inner()
        .and_then(fst::Set::from_bytes)
        .unwrap();

    // now we delete all of these stop words from the main store
    let stop_words_fst = index.main.stop_words_fst(writer)?.unwrap_or_default();

    let op = OpBuilder::new()
        .add(&stop_words_fst)
        .add(&delta_stop_words)
        .difference();

    let mut stop_words_builder = SetBuilder::memory();
    stop_words_builder.extend_stream(op).unwrap();
    let stop_words_fst = stop_words_builder
        .into_inner()
        .and_then(fst::Set::from_bytes)
        .unwrap();

    index.main.put_stop_words_fst(writer, &stop_words_fst)?;

    // now that we have setup the stop words
    // lets reindex everything...
    if let Ok(number) = index.main.number_of_documents(writer) {
        if number > 0 {
            reindex_all_documents(writer, index)?;
        }
    }

    Ok(())
}

View File

@@ -1,103 +0,0 @@
use std::collections::BTreeMap;

use fst::SetBuilder;
use sdset::SetBuf;

use crate::database::{MainT, UpdateT};
use crate::automaton::normalize_str;
use crate::database::{UpdateEvent, UpdateEventsEmitter};
use crate::update::{next_update_id, Update};
use crate::{store, MResult};

pub struct SynonymsUpdate {
    updates_store: store::Updates,
    updates_results_store: store::UpdatesResults,
    updates_notifier: UpdateEventsEmitter,
    synonyms: BTreeMap<String, Vec<String>>,
}

impl SynonymsUpdate {
    pub fn new(
        updates_store: store::Updates,
        updates_results_store: store::UpdatesResults,
        updates_notifier: UpdateEventsEmitter,
    ) -> SynonymsUpdate {
        SynonymsUpdate {
            updates_store,
            updates_results_store,
            updates_notifier,
            synonyms: BTreeMap::new(),
        }
    }

    pub fn add_synonym<S, T, I>(&mut self, synonym: S, alternatives: I)
    where
        S: AsRef<str>,
        T: AsRef<str>,
        I: IntoIterator<Item = T>,
    {
        let synonym = normalize_str(synonym.as_ref());
        let alternatives = alternatives.into_iter().map(|s| s.as_ref().to_lowercase());
        self.synonyms
            .entry(synonym)
            .or_insert_with(Vec::new)
            .extend(alternatives);
    }

    pub fn finalize(self, writer: &mut heed::RwTxn<UpdateT>) -> MResult<u64> {
        let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
        let update_id = push_synonyms_update(
            writer,
            self.updates_store,
            self.updates_results_store,
            self.synonyms,
        )?;
        Ok(update_id)
    }
}

pub fn push_synonyms_update(
    writer: &mut heed::RwTxn<UpdateT>,
    updates_store: store::Updates,
    updates_results_store: store::UpdatesResults,
    addition: BTreeMap<String, Vec<String>>,
) -> MResult<u64> {
    let last_update_id = next_update_id(writer, updates_store, updates_results_store)?;

    let update = Update::synonyms_update(addition);
    updates_store.put_update(writer, last_update_id, &update)?;

    Ok(last_update_id)
}

pub fn apply_synonyms_update(
    writer: &mut heed::RwTxn<MainT>,
    main_store: store::Main,
    synonyms_store: store::Synonyms,
    addition: BTreeMap<String, Vec<String>>,
) -> MResult<()> {
    let mut synonyms_builder = SetBuilder::memory();
    synonyms_store.clear(writer)?;

    for (word, alternatives) in addition {
        synonyms_builder.insert(&word).unwrap();

        let alternatives = {
            let alternatives = SetBuf::from_dirty(alternatives);
            let mut alternatives_builder = SetBuilder::memory();
            alternatives_builder.extend_iter(alternatives).unwrap();
            let bytes = alternatives_builder.into_inner().unwrap();
            fst::Set::from_bytes(bytes).unwrap()
        };

        synonyms_store.put_synonyms(writer, word.as_bytes(), &alternatives)?;
    }

    let synonyms = synonyms_builder
        .into_inner()
        .and_then(fst::Set::from_bytes)
        .unwrap();

    main_store.put_synonyms_fst(writer, &synonyms)?;

    Ok(())
}

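One detail of the synonyms code worth calling out (both in the deleted file above and in the new apply_synonyms_update): SetBuf::from_dirty is what makes user-provided alternatives safe to feed into an fst builder, because fst insertion requires sorted, deduplicated input. A tiny sketch of that behaviour (sdset crate, made-up data):

use sdset::SetBuf;

fn main() {
    let alternatives = vec!["movie".to_string(), "film".to_string(), "movie".to_string()];
    let set = SetBuf::from_dirty(alternatives);

    // sorted and deduplicated, ready for an fst SetBuilder
    assert_eq!(set.as_slice().to_vec(), vec!["film".to_string(), "movie".to_string()]);
}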
View File

@@ -14,17 +14,20 @@ name = "meilisearch"
 path = "src/main.rs"

 [dependencies]
-bincode = "1.2.0"
+async-std = { version = "1.0.1", features = ["attributes"] }
 chrono = { version = "0.4.9", features = ["serde"] }
 crossbeam-channel = "0.4.0"
 env_logger = "0.7.1"
+futures = "0.3.1"
 heed = "0.6.1"
 http = "0.1.19"
+http-service = "0.4.0"
 indexmap = { version = "1.3.0", features = ["serde-1"] }
 log = "0.4.8"
 main_error = "0.1.0"
 meilisearch-core = { path = "../meilisearch-core", version = "0.8.4" }
 meilisearch-schema = { path = "../meilisearch-schema", version = "0.8.4" }
+mime = "0.3.16"
 pretty-bytes = "0.2.2"
 rand = "0.7.2"
 rayon = "1.2.0"
@@ -34,30 +37,18 @@ serde_qs = "0.5.1"
 siphasher = "0.3.1"
 structopt = "0.3.3"
 sysinfo = "0.9.5"
+tide = "0.6.0"
 ureq = { version = "0.11.2", features = ["tls"], default-features = false }
 walkdir = "2.2.9"
 whoami = "0.6"

-[dependencies.async-compression]
-default-features = false
-features = ["stream", "gzip", "zlib", "brotli", "zstd"]
-version = "=0.1.0-alpha.7"
-
-[dependencies.tide]
-git = "https://github.com/rustasync/tide"
-rev = "e77709370bb24cf776fe6da902467c35131535b1"
-
-[dependencies.tide-log]
-git = "https://github.com/rustasync/tide"
-rev = "e77709370bb24cf776fe6da902467c35131535b1"
-
-[dependencies.tide-slog]
-git = "https://github.com/rustasync/tide"
-rev = "e77709370bb24cf776fe6da902467c35131535b1"
-
-[dependencies.tide-compression]
-git = "https://github.com/rustasync/tide"
-rev = "e77709370bb24cf776fe6da902467c35131535b1"
+[dev-dependencies]
+http-service-mock = "0.4.0"
+tempdir = "0.3.7"
+
+[dev-dependencies.assert-json-diff]
+git = "https://github.com/qdequele/assert-json-diff"
+branch = "master"

 [build-dependencies]
 vergen = "3.0.4"

View File

@@ -5,7 +5,7 @@ use std::sync::Arc;
 use chrono::{DateTime, Utc};
 use heed::types::{SerdeBincode, Str};
 use log::error;
-use meilisearch_core::{Database, MainT, UpdateT, Error as MError, MResult};
+use meilisearch_core::{Database, Error as MError, MResult, MainT, UpdateT};
 use sysinfo::Pid;

 use crate::option::Opt;
@@ -84,13 +84,17 @@ impl DataInner {
         let mut fields_frequency = HashMap::<_, usize>::new();
         for result in all_documents_fields {
             let (_, attr, _) = result?;
-            *fields_frequency.entry(attr).or_default() += 1;
+            if let Some(field_id) = schema.indexed_pos_to_field_id(attr) {
+                *fields_frequency.entry(field_id).or_default() += 1;
+            }
         }

         // convert attributes to their names
         let frequency: HashMap<_, _> = fields_frequency
             .into_iter()
-            .map(|(a, c)| (schema.attribute_name(a).to_owned(), c))
+            .filter_map(|(a, c)| {
+                schema.name(a).map(|name| (name.to_string(), c))
+            })
             .collect();

         index
@@ -106,7 +110,7 @@ impl Data {
         let api_key = opt.api_key.clone();
         let server_pid = sysinfo::get_current_pid().unwrap();

-        let db = Arc::new(Database::open_or_create(opt.db_path.clone()).unwrap());
+        let db = Arc::new(Database::open_or_create(opt.db_path).unwrap());

         let inner_data = DataInner {
             db: db.clone(),

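The DataInner change above is the schema migration in miniature: indexed positions are first resolved to field ids, and ids that no longer resolve to a name are skipped instead of unwrapped. A self-contained analogue (a plain HashMap stands in for the schema; none of this is the meilisearch-schema API):

use std::collections::HashMap;

fn main() {
    let names: HashMap<u16, &str> = vec![(0, "title"), (1, "overview")].into_iter().collect();
    let observed = vec![0u16, 1, 0, 7]; // field 7 has no name anymore

    let mut fields_frequency = HashMap::<u16, usize>::new();
    for id in observed {
        *fields_frequency.entry(id).or_default() += 1;
    }

    // convert ids to names, dropping unknown ids on the way
    let frequency: HashMap<String, usize> = fields_frequency
        .into_iter()
        .filter_map(|(id, count)| names.get(&id).map(|name| (name.to_string(), count)))
        .collect();

    assert_eq!(frequency.get("title"), Some(&2));
    assert_eq!(frequency.len(), 2);
}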
View File

@@ -2,10 +2,13 @@ use std::fmt::Display;

 use http::status::StatusCode;
 use log::{error, warn};
+use meilisearch_core::{FstError, HeedError};
 use serde::{Deserialize, Serialize};
-use tide::response::IntoResponse;
+use tide::IntoResponse;
 use tide::Response;

+use crate::helpers::meilisearch::Error as SearchError;
+
 pub type SResult<T> = Result<T, ResponseError>;

 pub enum ResponseError {
@@ -120,7 +123,56 @@ struct ErrorMessage {

 fn error(message: String, status: StatusCode) -> Response {
     let message = ErrorMessage { message };
-    tide::response::json(message)
-        .with_status(status)
-        .into_response()
+    tide::Response::new(status.as_u16())
+        .body_json(&message)
+        .unwrap()
 }
+
+impl From<serde_json::Error> for ResponseError {
+    fn from(err: serde_json::Error) -> ResponseError {
+        ResponseError::internal(err)
+    }
+}
+
+impl From<meilisearch_core::Error> for ResponseError {
+    fn from(err: meilisearch_core::Error) -> ResponseError {
+        ResponseError::internal(err)
+    }
+}
+
+impl From<HeedError> for ResponseError {
+    fn from(err: HeedError) -> ResponseError {
+        ResponseError::internal(err)
+    }
+}
+
+impl From<FstError> for ResponseError {
+    fn from(err: FstError) -> ResponseError {
+        ResponseError::internal(err)
+    }
+}
+
+impl From<SearchError> for ResponseError {
+    fn from(err: SearchError) -> ResponseError {
+        ResponseError::internal(err)
+    }
+}
+
+impl From<meilisearch_core::settings::RankingRuleConversionError> for ResponseError {
+    fn from(err: meilisearch_core::settings::RankingRuleConversionError) -> ResponseError {
+        ResponseError::internal(err)
+    }
+}
+
+pub trait IntoInternalError<T> {
+    fn into_internal_error(self) -> SResult<T>;
+}
+
+impl<T> IntoInternalError<T> for Option<T> {
+    fn into_internal_error(self) -> SResult<T> {
+        match self {
+            Some(value) => Ok(value),
+            None => Err(ResponseError::internal("Heed cannot find requested value")),
+        }
+    }
+}

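The batch of From impls above is what removes all the .map_err(ResponseError::internal) noise from the route handlers later in this diff: once every source error converts into ResponseError, the ? operator does the plumbing. A self-contained analogue of the pattern with a toy error type (std only, not this crate's types):

#[derive(Debug)]
struct ResponseError(String);

impl From<std::num::ParseIntError> for ResponseError {
    fn from(err: std::num::ParseIntError) -> ResponseError {
        ResponseError(err.to_string())
    }
}

type SResult<T> = Result<T, ResponseError>;

fn parse_limit(raw: &str) -> SResult<usize> {
    // ParseIntError -> ResponseError happens implicitly through From
    let limit: usize = raw.parse()?;
    Ok(limit)
}

fn main() {
    assert!(parse_limit("20").is_ok());
    assert!(parse_limit("twenty").is_err());
}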
View File

@@ -1,13 +1,3 @@
-use crate::routes::setting::{RankingOrdering, Setting};
-use indexmap::IndexMap;
-use log::{error, warn};
-use meilisearch_core::criterion::*;
-use meilisearch_core::Highlight;
-use meilisearch_core::{Index, RankedMap};
-use meilisearch_core::MainT;
-use meilisearch_schema::{Schema, SchemaAttr};
-use serde::{Deserialize, Serialize};
-use serde_json::Value;
 use std::cmp::Ordering;
 use std::collections::{HashMap, HashSet};
 use std::convert::From;
@@ -15,6 +5,15 @@ use std::error;
 use std::fmt;
 use std::time::{Duration, Instant};

+use indexmap::IndexMap;
+use log::error;
+use meilisearch_core::criterion::*;
+use meilisearch_core::settings::RankingRule;
+use meilisearch_core::{Highlight, Index, MainT, RankedMap};
+use meilisearch_schema::{FieldId, Schema};
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
 #[derive(Debug)]
 pub enum Error {
     SearchDocuments(String),
@@ -64,6 +63,12 @@ impl From<meilisearch_core::Error> for Error {
     }
 }

+impl From<heed::Error> for Error {
+    fn from(error: heed::Error) -> Self {
+        Error::Internal(error.to_string())
+    }
+}
+
 pub trait IndexSearchExt {
     fn new_search(&self, query: String) -> SearchBuilder;
 }
@@ -77,7 +82,6 @@ impl IndexSearchExt for Index {
             limit: 20,
             attributes_to_crop: None,
             attributes_to_retrieve: None,
-            attributes_to_search_in: None,
             attributes_to_highlight: None,
             filters: None,
             timeout: Duration::from_millis(30),
@@ -93,7 +97,6 @@ pub struct SearchBuilder<'a> {
     limit: usize,
     attributes_to_crop: Option<HashMap<String, usize>>,
    attributes_to_retrieve: Option<HashSet<String>>,
-    attributes_to_search_in: Option<HashSet<String>>,
     attributes_to_highlight: Option<HashSet<String>>,
     filters: Option<String>,
     timeout: Duration,
@@ -127,17 +130,6 @@ impl<'a> SearchBuilder<'a> {
         self
     }

-    pub fn attributes_to_search_in(&mut self, value: HashSet<String>) -> &SearchBuilder {
-        self.attributes_to_search_in = Some(value);
-        self
-    }
-
-    pub fn add_attribute_to_search_in(&mut self, value: String) -> &SearchBuilder {
-        let attributes_to_search_in = self.attributes_to_search_in.get_or_insert(HashSet::new());
-        attributes_to_search_in.insert(value);
-        self
-    }
-
     pub fn attributes_to_highlight(&mut self, value: HashSet<String>) -> &SearchBuilder {
         self.attributes_to_highlight = Some(value);
         self
@@ -176,13 +168,6 @@ impl<'a> SearchBuilder<'a> {
             None => self.index.query_builder(),
         };

-        // Filter searchable fields
-        if let Some(fields) = &self.attributes_to_search_in {
-            for attribute in fields.iter().filter_map(|f| schema.attribute(f)) {
-                query_builder.add_searchable_attribute(attribute.0);
-            }
-        }
-
         if let Some(filters) = &self.filters {
             let mut split = filters.split(':');
             match (split.next(), split.next()) {
@@ -192,7 +177,7 @@ impl<'a> SearchBuilder<'a> {
                     let ref_index = &self.index;
                     let value = value.trim().to_lowercase();

-                    let attr = match schema.attribute(attr) {
+                    let attr = match schema.id(attr) {
                         Some(attr) => attr,
                         None => return Err(Error::UnknownFilteredAttribute),
                     };
@@ -221,7 +206,8 @@ impl<'a> SearchBuilder<'a> {
         query_builder.with_fetch_timeout(self.timeout);

         let start = Instant::now();
-        let docs = query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
+        let docs =
+            query_builder.query(reader, &self.query, self.offset..(self.offset + self.limit));
         let time_ms = start.elapsed().as_millis() as usize;

         let mut hits = Vec::with_capacity(self.limit);
@@ -260,10 +246,8 @@ impl<'a> SearchBuilder<'a> {
             // Transform to readable matches
             let matches = calculate_matches(matches, self.attributes_to_retrieve.clone(), &schema);

-            if !self.matches {
-                if let Some(attributes_to_highlight) = &self.attributes_to_highlight {
-                    formatted = calculate_highlights(&formatted, &matches, attributes_to_highlight);
-                }
+            if let Some(attributes_to_highlight) = &self.attributes_to_highlight {
+                formatted = calculate_highlights(&formatted, &matches, attributes_to_highlight);
             }

             let matches_info = if self.matches { Some(matches) } else { None };
@@ -294,75 +278,34 @@ impl<'a> SearchBuilder<'a> {
         ranked_map: &'a RankedMap,
         schema: &Schema,
     ) -> Result<Option<Criteria<'a>>, Error> {
-        let current_settings = match self.index.main.customs(reader).unwrap() {
-            Some(bytes) => bincode::deserialize(bytes).unwrap(),
-            None => Setting::default(),
-        };
-
-        let ranking_rules = &current_settings.ranking_rules;
-        let ranking_order = &current_settings.ranking_order;
+        let ranking_rules = self.index.main.ranking_rules(reader)?;

         if let Some(ranking_rules) = ranking_rules {
             let mut builder = CriteriaBuilder::with_capacity(7 + ranking_rules.len());
-            if let Some(ranking_rules_order) = ranking_order {
-                for rule in ranking_rules_order {
-                    match rule.as_str() {
-                        "_typo" => builder.push(Typo),
-                        "_words" => builder.push(Words),
-                        "_proximity" => builder.push(Proximity),
-                        "_attribute" => builder.push(Attribute),
-                        "_words_position" => builder.push(WordsPosition),
-                        "_exact" => builder.push(Exact),
-                        _ => {
-                            let order = match ranking_rules.get(rule.as_str()) {
-                                Some(o) => o,
-                                None => continue,
-                            };
-
-                            let custom_ranking = match order {
-                                RankingOrdering::Asc => {
-                                    SortByAttr::lower_is_better(&ranked_map, &schema, &rule)
-                                        .unwrap()
-                                }
-                                RankingOrdering::Dsc => {
-                                    SortByAttr::higher_is_better(&ranked_map, &schema, &rule)
-                                        .unwrap()
-                                }
-                            };
-
-                            builder.push(custom_ranking);
-                        }
-                    }
-                }
-                builder.push(DocumentId);
-                return Ok(Some(builder.build()));
-            } else {
-                builder.push(Typo);
-                builder.push(Words);
-                builder.push(Proximity);
-                builder.push(Attribute);
-                builder.push(WordsPosition);
-                builder.push(Exact);
-                for (rule, order) in ranking_rules.iter() {
-                    let custom_ranking = match order {
-                        RankingOrdering::Asc => {
-                            SortByAttr::lower_is_better(&ranked_map, &schema, &rule)
-                        }
-                        RankingOrdering::Dsc => {
-                            SortByAttr::higher_is_better(&ranked_map, &schema, &rule)
-                        }
-                    };
-                    if let Ok(custom_ranking) = custom_ranking {
-                        builder.push(custom_ranking);
-                    } else {
-                        // TODO push this warning to a log tree
-                        warn!("Custom ranking cannot be added; Attribute {} not registered for ranking", rule)
-                    }
-                }
-                builder.push(DocumentId);
-                return Ok(Some(builder.build()));
-            }
+            for rule in ranking_rules {
+                match rule {
+                    RankingRule::Typo => builder.push(Typo),
+                    RankingRule::Words => builder.push(Words),
+                    RankingRule::Proximity => builder.push(Proximity),
+                    RankingRule::Attribute => builder.push(Attribute),
+                    RankingRule::WordsPosition => builder.push(WordsPosition),
+                    RankingRule::Exact => builder.push(Exact),
+                    RankingRule::Asc(field) => {
+                        match SortByAttr::lower_is_better(&ranked_map, &schema, &field) {
+                            Ok(rule) => builder.push(rule),
+                            Err(err) => error!("Error during criteria builder; {:?}", err),
+                        }
+                    }
+                    RankingRule::Dsc(field) => {
+                        match SortByAttr::higher_is_better(&ranked_map, &schema, &field) {
+                            Ok(rule) => builder.push(rule),
+                            Err(err) => error!("Error during criteria builder; {:?}", err),
+                        }
+                    }
+                }
+            }
+            builder.push(DocumentId);
+            return Ok(Some(builder.build()));
         }

         Ok(None)
@@ -406,8 +349,6 @@ pub struct SearchResult {
     pub limit: usize,
     pub processing_time_ms: usize,
     pub query: String,
-    // pub parsed_query: String,
-    // pub params: Option<String>,
 }

 fn crop_text(
@@ -441,14 +382,14 @@ fn crop_document(
     matches.sort_unstable_by_key(|m| (m.char_index, m.char_length));

     for (field, length) in fields {
-        let attribute = match schema.attribute(field) {
+        let attribute = match schema.id(field) {
             Some(attribute) => attribute,
             None => continue,
         };

         let selected_matches = matches
             .iter()
-            .filter(|m| SchemaAttr::new(m.attribute) == attribute)
+            .filter(|m| FieldId::new(m.attribute) == attribute)
             .cloned();

         if let Some(Value::String(ref mut original_text)) = document.get_mut(field) {
@@ -457,7 +398,7 @@ fn crop_document(
             *original_text = cropped_text;

-            matches.retain(|m| SchemaAttr::new(m.attribute) != attribute);
+            matches.retain(|m| FieldId::new(m.attribute) != attribute);
             matches.extend_from_slice(&cropped_matches);
         }
     }
@@ -470,26 +411,28 @@ fn calculate_matches(
 ) -> MatchesInfos {
     let mut matches_result: HashMap<String, Vec<MatchPosition>> = HashMap::new();
     for m in matches.iter() {
-        let attribute = schema
-            .attribute_name(SchemaAttr::new(m.attribute))
-            .to_string();
-        if let Some(attributes_to_retrieve) = attributes_to_retrieve.clone() {
-            if !attributes_to_retrieve.contains(attribute.as_str()) {
-                continue;
-            }
-        };
-        if let Some(pos) = matches_result.get_mut(&attribute) {
-            pos.push(MatchPosition {
-                start: m.char_index as usize,
-                length: m.char_length as usize,
-            });
-        } else {
-            let mut positions = Vec::new();
-            positions.push(MatchPosition {
-                start: m.char_index as usize,
-                length: m.char_length as usize,
-            });
-            matches_result.insert(attribute, positions);
-        }
+        if let Some(attribute) = schema.name(FieldId::new(m.attribute)) {
+            if let Some(attributes_to_retrieve) = attributes_to_retrieve.clone() {
+                if !attributes_to_retrieve.contains(attribute) {
+                    continue;
+                }
+            }
+            if !schema.displayed_name().contains(attribute) {
+                continue;
+            }
+            if let Some(pos) = matches_result.get_mut(attribute) {
+                pos.push(MatchPosition {
+                    start: m.char_index as usize,
+                    length: m.char_length as usize,
+                });
+            } else {
+                let mut positions = Vec::new();
+                positions.push(MatchPosition {
+                    start: m.char_index as usize,
+                    length: m.char_length as usize,
+                });
+                matches_result.insert(attribute.to_string(), positions);
+            }
+        }
     }
     for (_, val) in matches_result.iter_mut() {

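The get_criteria rewrite above replaces stringly-typed rules ("_typo", "_words", custom names looked up in a map) with an ordered RankingRule enum, so a rule is either recognized or rejected up front rather than silently skipped. A self-contained analogue of that shape (toy enum; the real one lives in meilisearch_core::settings):

#[derive(Debug)]
enum RankingRule {
    Typo,
    Words,
    Asc(String),
    Dsc(String),
}

fn describe(rules: &[RankingRule]) -> Vec<String> {
    rules
        .iter()
        .map(|rule| match rule {
            RankingRule::Typo => "typo".to_string(),
            RankingRule::Words => "words".to_string(),
            RankingRule::Asc(field) => format!("asc({})", field),
            RankingRule::Dsc(field) => format!("dsc({})", field),
        })
        .collect()
}

fn main() {
    let rules = vec![
        RankingRule::Typo,
        RankingRule::Words,
        RankingRule::Asc("release_date".into()),
        RankingRule::Dsc("popularity".into()),
    ];
    assert_eq!(
        describe(&rules),
        vec!["typo", "words", "asc(release_date)", "dsc(popularity)"]
    );
}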
View File

@@ -4,38 +4,40 @@ use crate::Data;
 use chrono::Utc;
 use heed::types::{SerdeBincode, Str};
 use meilisearch_core::Index;
-use tide::Context;
+use tide::Request;

-pub trait ContextExt {
+pub trait RequestExt {
     fn is_allowed(&self, acl: ACL) -> SResult<()>;
-    fn header(&self, name: &str) -> Result<String, ResponseError>;
-    fn url_param(&self, name: &str) -> Result<String, ResponseError>;
-    fn index(&self) -> Result<Index, ResponseError>;
-    fn identifier(&self) -> Result<String, ResponseError>;
+    fn header(&self, name: &str) -> SResult<String>;
+    fn url_param(&self, name: &str) -> SResult<String>;
+    fn index(&self) -> SResult<Index>;
+    fn identifier(&self) -> SResult<String>;
 }

-impl ContextExt for Context<Data> {
+impl RequestExt for Request<Data> {
     fn is_allowed(&self, acl: ACL) -> SResult<()> {
         let api_key = match &self.state().api_key {
             Some(api_key) => api_key,
             None => return Ok(()),
         };

-        let user_api_key = self.header("X-Meili-API-Key")?;
+        let user_api_key = self
+            .header("X-Meili-API-Key")
+            .ok_or(ResponseError::missing_header("X-Meili-API-Key"))?;
         if user_api_key == *api_key {
             return Ok(());
         }
         let request_index: Option<String> = None; //self.param::<String>("index").ok();

         let db = &self.state().db;
-        let reader = db.main_read_txn().map_err(ResponseError::internal)?;
+        let reader = db.main_read_txn()?;

         let token_key = format!("{}{}", TOKEN_PREFIX_KEY, user_api_key);

         let token_config = db
             .common_store()
-            .get::<_, Str, SerdeBincode<Token>>(&reader, &token_key)
-            .map_err(ResponseError::internal)?
+            .get::<_, Str, SerdeBincode<Token>>(&reader, &token_key)?
             .ok_or(ResponseError::invalid_token(format!(
                 "Api key does not exist: {}",
                 user_api_key
@@ -72,7 +74,7 @@ impl ContextExt for Context<Data> {
         Ok(())
     }

-    fn header(&self, name: &str) -> Result<String, ResponseError> {
+    fn header(&self, name: &str) -> SResult<String> {
         let header = self
             .headers()
             .get(name)
@@ -83,14 +85,14 @@ impl ContextExt for Context<Data> {
         Ok(header)
     }

-    fn url_param(&self, name: &str) -> Result<String, ResponseError> {
+    fn url_param(&self, name: &str) -> SResult<String> {
         let param = self
             .param::<String>(name)
-            .map_err(|e| ResponseError::bad_parameter(name, e))?;
+            .map_err(|_| ResponseError::bad_parameter("identifier", name))?;
         Ok(param)
     }

-    fn index(&self) -> Result<Index, ResponseError> {
+    fn index(&self) -> SResult<Index> {
         let index_uid = self.url_param("index")?;
         let index = self
             .state()
@@ -100,10 +102,10 @@ impl ContextExt for Context<Data> {
         Ok(index)
     }

-    fn identifier(&self) -> Result<String, ResponseError> {
+    fn identifier(&self) -> SResult<String> {
         let name = self
             .param::<String>("identifier")
-            .map_err(|e| ResponseError::bad_parameter("identifier", e))?;
+            .map_err(|_| ResponseError::bad_parameter("identifier", "identifier"))?;

         Ok(name)
     }

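The ContextExt to RequestExt rename above keeps the same trick: helper methods are bolted onto tide's request type through an extension trait, so every handler can call ctx.index(), ctx.header(...), and so on. A self-contained analogue with a toy request type (not tide's API):

struct Request {
    api_key: Option<String>,
}

trait RequestExt {
    fn header(&self, name: &str) -> Result<String, String>;
}

impl RequestExt for Request {
    fn header(&self, name: &str) -> Result<String, String> {
        match (name, &self.api_key) {
            ("X-Meili-API-Key", Some(key)) => Ok(key.clone()),
            _ => Err(format!("missing header: {}", name)),
        }
    }
}

fn main() {
    let req = Request { api_key: Some("secret".into()) };
    assert_eq!(req.header("X-Meili-API-Key").unwrap(), "secret");
}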
View File

@@ -1,3 +1,5 @@
+#![allow(clippy::or_fun_call)]
+
 pub mod data;
 pub mod error;
 pub mod helpers;

View File

@@ -1,12 +1,11 @@
 use std::env::VarError::NotPresent;
 use std::{env, thread};

-use http::header::HeaderValue;
+use async_std::task;
 use log::info;
 use main_error::MainError;
 use structopt::StructOpt;
-use tide::middleware::{CorsMiddleware, CorsOrigin};
-use tide_log::RequestLogger;
+use tide::middleware::{Cors, RequestLogger};

 use meilisearch_http::data::Data;
 use meilisearch_http::option::Opt;
@@ -26,7 +25,7 @@ pub fn main() -> Result<(), MainError> {
     let data = Data::new(opt.clone());

     if env::var("MEILI_NO_ANALYTICS") == Err(NotPresent) {
-        thread::spawn(|| analytics::analytics_sender());
+        thread::spawn(analytics::analytics_sender);
     }

     let data_cloned = data.clone();
@@ -34,21 +33,15 @@ pub fn main() -> Result<(), MainError> {
         index_update_callback(name, &data_cloned, status);
     }));

-    let mut app = tide::App::with_state(data);
-
-    app.middleware(
-        CorsMiddleware::new()
-            .allow_origin(CorsOrigin::from("*"))
-            .allow_methods(HeaderValue::from_static("GET, POST, OPTIONS")),
-    );
+    let mut app = tide::with_state(data);
+    app.middleware(Cors::new());
     app.middleware(RequestLogger::new());
-    app.middleware(tide_compression::Compression::new());
-    app.middleware(tide_compression::Decompression::new());

     routes::load_routes(&mut app);

     info!("Server HTTP enabled");
-    app.run(opt.http_addr)?;
+
+    task::block_on(app.listen(opt.http_addr))?;

     Ok(())
 }

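The new main above no longer relies on tide's own runtime: the app is built synchronously and the async listener is driven by async-std. A minimal sketch of that startup shape (assumes the async-std 1.x dependency added earlier in this diff; the tide app itself is elided):

use async_std::task;

fn main() -> std::io::Result<()> {
    task::block_on(async {
        // in the real server: app.listen(opt.http_addr).await
        Ok(())
    })
}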
View File

@@ -1,3 +1,2 @@
-pub mod schema;
 pub mod token;
 pub mod update_operation;

View File

@@ -1,118 +0,0 @@
use std::collections::HashSet;

use indexmap::IndexMap;
use meilisearch_schema::{Schema, SchemaBuilder, SchemaProps};
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum FieldProperties {
    Identifier,
    Indexed,
    Displayed,
    Ranked,
}

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct SchemaBody(IndexMap<String, HashSet<FieldProperties>>);

impl From<Schema> for SchemaBody {
    fn from(value: Schema) -> SchemaBody {
        let mut map = IndexMap::new();
        for (name, _attr, props) in value.iter() {
            let old_properties = map.entry(name.to_owned()).or_insert(HashSet::new());
            if props.is_indexed() {
                old_properties.insert(FieldProperties::Indexed);
            }
            if props.is_displayed() {
                old_properties.insert(FieldProperties::Displayed);
            }
            if props.is_ranked() {
                old_properties.insert(FieldProperties::Ranked);
            }
        }
        let old_properties = map
            .entry(value.identifier_name().to_string())
            .or_insert(HashSet::new());
        old_properties.insert(FieldProperties::Identifier);
        old_properties.insert(FieldProperties::Displayed);
        SchemaBody(map)
    }
}

impl Into<Schema> for SchemaBody {
    fn into(self) -> Schema {
        let mut identifier = "documentId".to_string();
        let mut attributes = IndexMap::new();
        for (field, properties) in self.0 {
            let mut indexed = false;
            let mut displayed = false;
            let mut ranked = false;
            for property in properties {
                match property {
                    FieldProperties::Indexed => indexed = true,
                    FieldProperties::Displayed => displayed = true,
                    FieldProperties::Ranked => ranked = true,
                    FieldProperties::Identifier => identifier = field.clone(),
                }
            }
            attributes.insert(
                field,
                SchemaProps {
                    indexed,
                    displayed,
                    ranked,
                },
            );
        }

        let mut builder = SchemaBuilder::with_identifier(identifier);
        for (field, props) in attributes {
            builder.new_attribute(field, props);
        }
        builder.build()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_schema_body_conversion() {
        let schema_body = r#"
        {
            "id": ["identifier", "indexed", "displayed"],
            "title": ["indexed", "displayed"],
            "date": ["displayed"]
        }
        "#;

        let schema_builder = r#"
        {
            "identifier": "id",
            "attributes": {
                "id": {
                    "indexed": true,
                    "displayed": true
                },
                "title": {
                    "indexed": true,
                    "displayed": true
                },
                "date": {
                    "displayed": true
                }
            }
        }
        "#;

        let schema_body: SchemaBody = serde_json::from_str(schema_body).unwrap();
        let schema_builder: SchemaBuilder = serde_json::from_str(schema_builder).unwrap();

        let schema_from_body: Schema = schema_body.into();
        let schema_from_builder: Schema = schema_builder.build();

        assert_eq!(schema_from_body, schema_from_builder);
    }
}

View File

@@ -1,19 +1,17 @@
 use std::collections::{BTreeSet, HashSet};

-use http::StatusCode;
 use indexmap::IndexMap;
+use meilisearch_core::settings::{SettingsUpdate, UpdateState};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-use tide::querystring::ContextExt as QSContextExt;
-use tide::response::IntoResponse;
-use tide::{Context, Response};
+use tide::{Request, Response};

 use crate::error::{ResponseError, SResult};
-use crate::helpers::tide::ContextExt;
+use crate::helpers::tide::RequestExt;
 use crate::models::token::ACL::*;
 use crate::Data;

-pub async fn get_document(ctx: Context<Data>) -> SResult<Response> {
+pub async fn get_document(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(DocumentsRead)?;

     let index = ctx.index()?;
@@ -22,18 +20,17 @@ pub async fn get_document(ctx: Context<Data>) -> SResult<Response> {
     let document_id = meilisearch_core::serde::compute_document_id(identifier.clone());

     let db = &ctx.state().db;
-    let reader = db.main_read_txn().map_err(ResponseError::internal)?;
+    let reader = db.main_read_txn()?;

     let response = index
-        .document::<IndexMap<String, Value>>(&reader, None, document_id)
-        .map_err(ResponseError::internal)?
+        .document::<IndexMap<String, Value>>(&reader, None, document_id)?
         .ok_or(ResponseError::document_not_found(&identifier))?;

     if response.is_empty() {
         return Err(ResponseError::document_not_found(identifier));
     }

-    Ok(tide::response::json(response))
+    Ok(tide::Response::new(200).body_json(&response)?)
 }

 #[derive(Default, Serialize)]
@@ -42,28 +39,22 @@ pub struct IndexUpdateResponse {
     pub update_id: u64,
 }

-pub async fn delete_document(ctx: Context<Data>) -> SResult<Response> {
+pub async fn delete_document(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(DocumentsWrite)?;

     let index = ctx.index()?;
     let identifier = ctx.identifier()?;
-    let document_id = meilisearch_core::serde::compute_document_id(identifier.clone());
+    let document_id = meilisearch_core::serde::compute_document_id(identifier);

     let db = &ctx.state().db;
-    let mut update_writer = db.update_write_txn().map_err(ResponseError::internal)?;
+    let mut update_writer = db.update_write_txn()?;

     let mut documents_deletion = index.documents_deletion();
     documents_deletion.delete_document_by_id(document_id);

-    let update_id = documents_deletion
-        .finalize(&mut update_writer)
-        .map_err(ResponseError::internal)?;
+    let update_id = documents_deletion.finalize(&mut update_writer)?;

-    update_writer.commit().map_err(ResponseError::internal)?;
+    update_writer.commit()?;

     let response_body = IndexUpdateResponse { update_id };
-    Ok(tide::response::json(response_body)
-        .with_status(StatusCode::ACCEPTED)
-        .into_response())
+    Ok(tide::Response::new(202).body_json(&response_body)?)
 }

 #[derive(Default, Deserialize)]
@@ -74,23 +65,24 @@ struct BrowseQuery {
     attributes_to_retrieve: Option<String>,
 }

-pub async fn get_all_documents(ctx: Context<Data>) -> SResult<Response> {
+pub async fn get_all_documents(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(DocumentsRead)?;
     let index = ctx.index()?;

-    let query: BrowseQuery = ctx.url_query().unwrap_or(BrowseQuery::default());
+    let query: BrowseQuery = ctx.query().unwrap_or_default();

     let offset = query.offset.unwrap_or(0);
     let limit = query.limit.unwrap_or(20);

     let db = &ctx.state().db;
-    let reader = db.main_read_txn().map_err(ResponseError::internal)?;
+    let reader = db.main_read_txn()?;

-    let documents_ids: Result<BTreeSet<_>, _> =
-        match index.documents_fields_counts.documents_ids(&reader) {
-            Ok(documents_ids) => documents_ids.skip(offset).take(limit).collect(),
-            Err(e) => return Err(ResponseError::internal(e)),
-        };
+    let documents_ids: Result<BTreeSet<_>, _> = index
+        .documents_fields_counts
+        .documents_ids(&reader)?
+        .skip(offset)
+        .take(limit)
+        .collect();

     let documents_ids = match documents_ids {
         Ok(documents_ids) => documents_ids,
@@ -114,55 +106,50 @@ pub async fn get_all_documents(ctx: Context<Data>) -> SResult<Response> {
         }
     }

-    Ok(tide::response::json(response_body))
+    Ok(tide::Response::new(200).body_json(&response_body)?)
 }

-fn infered_schema(document: &IndexMap<String, Value>) -> Option<meilisearch_schema::Schema> {
-    use meilisearch_schema::{SchemaBuilder, DISPLAYED, INDEXED};
-
-    let mut identifier = None;
+fn find_identifier(document: &IndexMap<String, Value>) -> Option<String> {
     for key in document.keys() {
-        if identifier.is_none() && key.to_lowercase().contains("id") {
-            identifier = Some(key);
+        if key.to_lowercase().contains("id") {
+            return Some(key.to_string());
         }
     }
-
-    match identifier {
-        Some(identifier) => {
-            let mut builder = SchemaBuilder::with_identifier(identifier);
-            for key in document.keys() {
-                builder.new_attribute(key, DISPLAYED | INDEXED);
-            }
-            Some(builder.build())
-        }
-        None => None,
-    }
+    None
 }

-async fn update_multiple_documents(mut ctx: Context<Data>, is_partial: bool) -> SResult<Response> {
+#[derive(Default, Deserialize)]
+#[serde(deny_unknown_fields)]
+struct UpdateDocumentsQuery {
+    identifier: Option<String>,
+}
+
+async fn update_multiple_documents(mut ctx: Request<Data>, is_partial: bool) -> SResult<Response> {
     ctx.is_allowed(DocumentsWrite)?;
+    let index = ctx.index()?;

     let data: Vec<IndexMap<String, Value>> =
         ctx.body_json().await.map_err(ResponseError::bad_request)?;
-    let index = ctx.index()?;
+    let query: UpdateDocumentsQuery = ctx.query().unwrap_or_default();

     let db = &ctx.state().db;
-    let reader = db.main_read_txn().map_err(ResponseError::internal)?;
-    let mut update_writer = db.update_write_txn().map_err(ResponseError::internal)?;
+    let reader = db.main_read_txn()?;
+    let mut update_writer = db.update_write_txn()?;

-    let current_schema = index
-        .main
-        .schema(&reader)
-        .map_err(ResponseError::internal)?;
+    let current_schema = index.main.schema(&reader)?;
     if current_schema.is_none() {
-        match data.first().and_then(infered_schema) {
-            Some(schema) => {
-                index
-                    .schema_update(&mut update_writer, schema)
-                    .map_err(ResponseError::internal)?;
-            }
-            None => return Err(ResponseError::bad_request("Could not infer a schema")),
-        }
+        let id = match query.identifier {
+            Some(id) => id,
+            None => match data.first().and_then(|docs| find_identifier(docs)) {
+                Some(id) => id,
+                None => return Err(ResponseError::bad_request("Could not infer a schema")),
+            },
+        };
+        let settings_update = SettingsUpdate{
+            identifier: UpdateState::Update(id),
+            ..SettingsUpdate::default()
+        };
+        index.settings_update(&mut update_writer, settings_update)?;
     }

     let mut document_addition = if is_partial {
@@ -175,34 +162,29 @@ async fn update_multiple_documents(mut ctx: Context<Data>, is_partial: bool) ->
         document_addition.update_document(document);
     }

-    let update_id = document_addition
-        .finalize(&mut update_writer)
-        .map_err(ResponseError::internal)?;
-
-    update_writer.commit().map_err(ResponseError::internal)?;
+    let update_id = document_addition.finalize(&mut update_writer)?;
+    update_writer.commit()?;

     let response_body = IndexUpdateResponse { update_id };
-    Ok(tide::response::json(response_body)
-        .with_status(StatusCode::ACCEPTED)
-        .into_response())
+    Ok(tide::Response::new(202).body_json(&response_body)?)
 }

-pub async fn add_or_replace_multiple_documents(ctx: Context<Data>) -> SResult<Response> {
+pub async fn add_or_replace_multiple_documents(ctx: Request<Data>) -> SResult<Response> {
     update_multiple_documents(ctx, false).await
 }

-pub async fn add_or_update_multiple_documents(ctx: Context<Data>) -> SResult<Response> {
+pub async fn add_or_update_multiple_documents(ctx: Request<Data>) -> SResult<Response> {
     update_multiple_documents(ctx, true).await
 }

-pub async fn delete_multiple_documents(mut ctx: Context<Data>) -> SResult<Response> {
+pub async fn delete_multiple_documents(mut ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(DocumentsWrite)?;

     let data: Vec<Value> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
     let index = ctx.index()?;

     let db = &ctx.state().db;
-    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
+    let mut writer = db.update_write_txn()?;

     let mut documents_deletion = index.documents_deletion();
@@ -213,33 +195,25 @@ pub async fn delete_multiple_documents(mut ctx: Context<Data>) -> SResult<Respon
         }
     }

-    let update_id = documents_deletion
-        .finalize(&mut writer)
-        .map_err(ResponseError::internal)?;
+    let update_id = documents_deletion.finalize(&mut writer)?;

-    writer.commit().map_err(ResponseError::internal)?;
+    writer.commit()?;

     let response_body = IndexUpdateResponse { update_id };
-    Ok(tide::response::json(response_body)
-        .with_status(StatusCode::ACCEPTED)
-        .into_response())
+    Ok(tide::Response::new(202).body_json(&response_body)?)
 }

-pub async fn clear_all_documents(ctx: Context<Data>) -> SResult<Response> {
+pub async fn clear_all_documents(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(DocumentsWrite)?;

     let index = ctx.index()?;
     let db = &ctx.state().db;
-    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
+    let mut writer = db.update_write_txn()?;

-    let update_id = index
-        .clear_all(&mut writer)
-        .map_err(ResponseError::internal)?;
-    writer.commit().map_err(ResponseError::internal)?;
+    let update_id = index.clear_all(&mut writer)?;
+    writer.commit()?;

     let response_body = IndexUpdateResponse { update_id };
-    Ok(tide::response::json(response_body)
-        .with_status(StatusCode::ACCEPTED)
-        .into_response())
+    Ok(tide::Response::new(202).body_json(&response_body)?)
 }

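find_identifier above is all that is left of schema inference: the first key containing "id" becomes the identifier, and everything else is handled by settings updates. A self-contained sketch of the heuristic, including how loose it is:

fn find_identifier<'a, I: Iterator<Item = &'a str>>(keys: I) -> Option<String> {
    for key in keys {
        if key.to_lowercase().contains("id") {
            return Some(key.to_string());
        }
    }
    None
}

fn main() {
    let keys = vec!["title", "movie_id", "overview"];
    assert_eq!(find_identifier(keys.into_iter()), Some("movie_id".to_string()));

    // loose on purpose: any key containing "id" matches, even "video"
    assert_eq!(
        find_identifier(vec!["video"].into_iter()),
        Some("video".to_string())
    );
}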
View File

@@ -1,17 +1,17 @@
 use crate::error::{ResponseError, SResult};
-use crate::helpers::tide::ContextExt;
+use crate::helpers::tide::RequestExt;
 use crate::models::token::ACL::*;
 use crate::Data;
 use heed::types::{Str, Unit};
 use serde::Deserialize;
-use tide::Context;
+use tide::{Request, Response};

 const UNHEALTHY_KEY: &str = "_is_unhealthy";

-pub async fn get_health(ctx: Context<Data>) -> SResult<()> {
+pub async fn get_health(ctx: Request<Data>) -> SResult<Response> {
     let db = &ctx.state().db;
-    let reader = db.main_read_txn().map_err(ResponseError::internal)?;
+    let reader = db.main_read_txn()?;

     let common_store = ctx.state().db.common_store();
@@ -19,45 +19,29 @@ pub async fn get_health(ctx: Context<Data>) -> SResult<()> {
         return Err(ResponseError::Maintenance);
     }

-    Ok(())
+    Ok(tide::Response::new(200))
 }

-pub async fn set_healthy(ctx: Context<Data>) -> SResult<()> {
+pub async fn set_healthy(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(Admin)?;

     let db = &ctx.state().db;
-    let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
+    let mut writer = db.main_write_txn()?;
     let common_store = ctx.state().db.common_store();

-    match common_store.delete::<_, Str>(&mut writer, UNHEALTHY_KEY) {
-        Ok(_) => (),
-        Err(e) => return Err(ResponseError::internal(e)),
-    }
-
-    if let Err(e) = writer.commit() {
-        return Err(ResponseError::internal(e));
-    }
-
-    Ok(())
+    common_store.delete::<_, Str>(&mut writer, UNHEALTHY_KEY)?;
+    writer.commit()?;
+
+    Ok(tide::Response::new(200))
 }

-pub async fn set_unhealthy(ctx: Context<Data>) -> SResult<()> {
+pub async fn set_unhealthy(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(Admin)?;

     let db = &ctx.state().db;
-    let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
+    let mut writer = db.main_write_txn()?;
     let common_store = ctx.state().db.common_store();

-    if let Err(e) = common_store.put::<_, Str, Unit>(&mut writer, UNHEALTHY_KEY, &()) {
-        return Err(ResponseError::internal(e));
-    }
-
-    if let Err(e) = writer.commit() {
-        return Err(ResponseError::internal(e));
-    }
-
-    Ok(())
+    common_store.put::<_, Str, Unit>(&mut writer, UNHEALTHY_KEY, &())?;
+    writer.commit()?;
+
+    Ok(tide::Response::new(200))
 }

 #[derive(Deserialize, Clone)]
@@ -65,7 +49,7 @@ struct HealtBody {
     health: bool,
 }

-pub async fn change_healthyness(mut ctx: Context<Data>) -> SResult<()> {
+pub async fn change_healthyness(mut ctx: Request<Data>) -> SResult<Response> {
     let body: HealtBody = ctx.body_json().await.map_err(ResponseError::bad_request)?;

     if body.health {

View File

@ -1,20 +1,15 @@
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use http::StatusCode;
use log::error; use log::error;
use meilisearch_core::ProcessedUpdateResult; use meilisearch_core::ProcessedUpdateResult;
use meilisearch_schema::{Schema, SchemaBuilder}; use meilisearch_schema::Schema;
use rand::seq::SliceRandom; use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use tide::querystring::ContextExt as QSContextExt; use tide::{Request, Response};
use tide::response::IntoResponse;
use tide::{Context, Response};
use crate::error::{ResponseError, SResult}; use crate::error::{IntoInternalError, ResponseError, SResult};
use crate::helpers::tide::ContextExt; use crate::helpers::tide::RequestExt;
use crate::models::schema::SchemaBody;
use crate::models::token::ACL::*; use crate::models::token::ACL::*;
use crate::routes::document::IndexUpdateResponse;
use crate::Data; use crate::Data;
fn generate_uid() -> String { fn generate_uid() -> String {
@ -26,13 +21,13 @@ fn generate_uid() -> String {
.collect() .collect()
} }
pub async fn list_indexes(ctx: Context<Data>) -> SResult<Response> { pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?; ctx.is_allowed(IndexesRead)?;
let indexes_uids = ctx.state().db.indexes_uids(); let indexes_uids = ctx.state().db.indexes_uids();
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let mut response_body = Vec::new(); let mut response_body = Vec::new();
@ -41,27 +36,21 @@ pub async fn list_indexes(ctx: Context<Data>) -> SResult<Response> {
match index { match index {
Some(index) => { Some(index) => {
let name = index let name = index.main.name(&reader)?.into_internal_error()?;
.main let created_at = index.main.created_at(&reader)?.into_internal_error()?;
.name(&reader) let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'name' not found"))?; let identifier = match index.main.schema(&reader) {
let created_at = index Ok(Some(schema)) => Some(schema.identifier().to_owned()),
.main _ => None
.created_at(&reader) };
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'created_at' date not found"))?;
let updated_at = index
.main
.updated_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'updated_at' date not found"))?;
let index_response = IndexResponse { let index_response = IndexResponse {
name, name,
uid: index_uid, uid: index_uid,
created_at, created_at,
updated_at, updated_at,
identifier,
}; };
response_body.push(index_response); response_body.push(index_response);
} }
@ -72,7 +61,7 @@ pub async fn list_indexes(ctx: Context<Data>) -> SResult<Response> {
} }
} }
Ok(tide::response::json(response_body)) Ok(tide::Response::new(200).body_json(&response_body)?)
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
@ -82,49 +71,44 @@ struct IndexResponse {
uid: String, uid: String,
created_at: DateTime<Utc>, created_at: DateTime<Utc>,
updated_at: DateTime<Utc>, updated_at: DateTime<Utc>,
identifier: Option<String>,
} }
pub async fn get_index(ctx: Context<Data>) -> SResult<Response> { pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?; ctx.is_allowed(IndexesRead)?;
let index = ctx.index()?; let index = ctx.index()?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let uid = ctx.url_param("index")?; let uid = ctx.url_param("index")?;
let name = index let name = index.main.name(&reader)?.into_internal_error()?;
.main let created_at = index.main.created_at(&reader)?.into_internal_error()?;
.name(&reader) let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'name' not found"))?; let identifier = match index.main.schema(&reader) {
let created_at = index Ok(Some(schema)) => Some(schema.identifier().to_owned()),
.main _ => None
.created_at(&reader) };
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'created_at' date not found"))?;
let updated_at = index
.main
.updated_at(&reader)
.map_err(ResponseError::internal)?
.ok_or(ResponseError::internal("'updated_at' date not found"))?;
let response_body = IndexResponse { let response_body = IndexResponse {
name, name,
uid, uid,
created_at, created_at,
updated_at, updated_at,
identifier
}; };
Ok(tide::response::json(response_body)) Ok(tide::Response::new(200).body_json(&response_body)?)
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)] #[serde(rename_all = "camelCase", deny_unknown_fields)]
struct IndexCreateRequest { struct IndexCreateRequest {
name: String, name: Option<String>,
uid: Option<String>, uid: Option<String>,
schema: Option<SchemaBody>, identifier: Option<String>,
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
@ -132,14 +116,12 @@ struct IndexCreateRequest {
struct IndexCreateResponse { struct IndexCreateResponse {
name: String, name: String,
uid: String, uid: String,
schema: Option<SchemaBody>,
#[serde(skip_serializing_if = "Option::is_none")]
update_id: Option<u64>,
created_at: DateTime<Utc>, created_at: DateTime<Utc>,
updated_at: DateTime<Utc>, updated_at: DateTime<Utc>,
identifier: Option<String>,
} }
pub async fn create_index(mut ctx: Context<Data>) -> SResult<Response> { pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?; ctx.is_allowed(IndexesWrite)?;
let body = ctx let body = ctx
@ -147,6 +129,12 @@ pub async fn create_index(mut ctx: Context<Data>) -> SResult<Response> {
.await .await
.map_err(ResponseError::bad_request)?; .map_err(ResponseError::bad_request)?;
if let (None, None) = (body.name.clone(), body.uid.clone()) {
return Err(ResponseError::bad_request(
"Index creation must have an uid",
));
}
let db = &ctx.state().db; let db = &ctx.state().db;
let uid = match body.uid { let uid = match body.uid {
@ -164,44 +152,42 @@ pub async fn create_index(mut ctx: Context<Data>) -> SResult<Response> {
Err(e) => return Err(ResponseError::create_index(e)), Err(e) => return Err(ResponseError::create_index(e)),
}; };
let mut writer = db.main_write_txn().map_err(ResponseError::internal)?; let mut writer = db.main_write_txn()?;
let mut update_writer = db.update_write_txn().map_err(ResponseError::internal)?; let name = body.name.unwrap_or(uid.clone());
created_index.main.put_name(&mut writer, &name)?;
created_index let created_at = created_index
.main .main
.put_name(&mut writer, &body.name) .created_at(&writer)?
.map_err(ResponseError::internal)?; .into_internal_error()?;
let updated_at = created_index
.main
.updated_at(&writer)?
.into_internal_error()?;
let schema: Option<Schema> = body.schema.clone().map(Into::into); if let Some(id) = body.identifier.clone() {
let mut response_update_id = None; created_index
if let Some(schema) = schema { .main
let update_id = created_index .put_schema(&mut writer, &Schema::with_identifier(&id))?;
.schema_update(&mut update_writer, schema)
.map_err(ResponseError::internal)?;
response_update_id = Some(update_id)
} }
writer.commit().map_err(ResponseError::internal)?; writer.commit()?;
update_writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexCreateResponse { let response_body = IndexCreateResponse {
name: body.name, name,
uid, uid,
schema: body.schema, created_at,
update_id: response_update_id, updated_at,
created_at: Utc::now(), identifier: body.identifier,
updated_at: Utc::now(),
}; };
Ok(tide::response::json(response_body) Ok(tide::Response::new(201).body_json(&response_body)?)
.with_status(StatusCode::CREATED)
.into_response())
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)] #[serde(rename_all = "camelCase", deny_unknown_fields)]
struct UpdateIndexRequest { struct UpdateIndexRequest {
name: String, name: Option<String>,
identifier: Option<String>,
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
@ -211,9 +197,10 @@ struct UpdateIndexResponse {
uid: String, uid: String,
created_at: DateTime<Utc>, created_at: DateTime<Utc>,
updated_at: DateTime<Utc>, updated_at: DateTime<Utc>,
identifier: Option<String>,
} }
pub async fn update_index(mut ctx: Context<Data>) -> SResult<Response> { pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?; ctx.is_allowed(IndexesWrite)?;
let body = ctx let body = ctx
@ -225,167 +212,81 @@ pub async fn update_index(mut ctx: Context<Data>) -> SResult<Response> {
let index = ctx.index()?; let index = ctx.index()?;
let db = &ctx.state().db; let db = &ctx.state().db;
let mut writer = db.main_write_txn().map_err(ResponseError::internal)?; let mut writer = db.main_write_txn()?;
index if let Some(name) = body.name {
.main index.main.put_name(&mut writer, &name)?;
.put_name(&mut writer, &body.name) }
.map_err(ResponseError::internal)?;
index if let Some(identifier) = body.identifier {
.main if let Ok(Some(_)) = index.main.schema(&writer) {
.put_updated_at(&mut writer) return Err(ResponseError::bad_request("The index identifier cannot be updated"));
.map_err(ResponseError::internal)?; }
index.main.put_schema(&mut writer, &Schema::with_identifier(&identifier))?;
}
writer.commit().map_err(ResponseError::internal)?; index.main.put_updated_at(&mut writer)?;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; writer.commit()?;
let created_at = index let reader = db.main_read_txn()?;
.main let name = index.main.name(&reader)?.into_internal_error()?;
.created_at(&reader) let created_at = index.main.created_at(&reader)?.into_internal_error()?;
.map_err(ResponseError::internal)? let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
.ok_or(ResponseError::internal("'created_at' date not found"))?;
let updated_at = index let identifier = match index.main.schema(&reader) {
.main Ok(Some(schema)) => Some(schema.identifier().to_owned()),
.updated_at(&reader) _ => None
.map_err(ResponseError::internal)? };
.ok_or(ResponseError::internal("'updated_at' date not found"))?;
let response_body = UpdateIndexResponse { let response_body = UpdateIndexResponse {
name: body.name, name,
uid: index_uid, uid: index_uid,
created_at, created_at,
updated_at, updated_at,
identifier
}; };
Ok(tide::response::json(response_body) Ok(tide::Response::new(200).body_json(&response_body)?)
.with_status(StatusCode::OK)
.into_response())
} }
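With the rules above, PUT /indexes/:index now takes a partial body: name and identifier are both optional, and changing the identifier is rejected once a schema exists. An illustrative pair of request bodies (values are made up), using the field names from UpdateIndexRequest:

// Sketch of request bodies for the updated endpoint; not part of the diff.
use serde_json::json;

fn example_update_bodies() {
    // Renaming only: always accepted.
    let rename = json!({ "name": "movies-v2" });

    // Setting the identifier: accepted only while the index has no schema;
    // afterwards the handler above answers 400
    // "The index identifier cannot be updated".
    let set_identifier = json!({ "identifier": "id" });

    let _ = (rename, set_identifier);
}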
#[derive(Default, Deserialize)] pub async fn get_update_status(ctx: Request<Data>) -> SResult<Response> {
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SchemaParams {
raw: bool,
}
pub async fn get_index_schema(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let index = ctx.index()?;
// Tide doesn't support "no query param"
let params: SchemaParams = ctx.url_query().unwrap_or_default();
let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let schema = index
.main
.schema(&reader)
.map_err(ResponseError::open_index)?;
match schema {
Some(schema) => {
if params.raw {
Ok(tide::response::json(schema))
} else {
Ok(tide::response::json(SchemaBody::from(schema)))
}
}
None => Err(ResponseError::not_found("missing index schema")),
}
}
pub async fn update_schema(mut ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?;
let index_uid = ctx.url_param("index")?;
let params: SchemaParams = ctx.url_query().unwrap_or_default();
let schema = if params.raw {
ctx.body_json::<SchemaBuilder>()
.await
.map_err(ResponseError::bad_request)?
.build()
} else {
ctx.body_json::<SchemaBody>()
.await
.map_err(ResponseError::bad_request)?
.into()
};
let db = &ctx.state().db;
let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
let index = db
.open_index(&index_uid)
.ok_or(ResponseError::index_not_found(index_uid))?;
let update_id = index
.schema_update(&mut writer, schema.clone())
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body)
.with_status(StatusCode::ACCEPTED)
.into_response())
}
pub async fn get_update_status(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?; ctx.is_allowed(IndexesRead)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.update_read_txn().map_err(ResponseError::internal)?; let reader = db.update_read_txn()?;
let update_id = ctx let update_id = ctx
.param::<u64>("update_id") .param::<u64>("update_id")
.map_err(|e| ResponseError::bad_parameter("update_id", e))?; .map_err(|e| ResponseError::bad_parameter("update_id", e))?;
let index = ctx.index()?; let index = ctx.index()?;
let status = index let status = index.update_status(&reader, update_id)?;
.update_status(&reader, update_id)
.map_err(ResponseError::internal)?;
let response = match status { let response = match status {
Some(status) => tide::response::json(status) Some(status) => tide::Response::new(200).body_json(&status).unwrap(),
.with_status(StatusCode::OK) None => tide::Response::new(404)
.into_response(), .body_json(&json!({ "message": "unknown update id" }))
None => tide::response::json(json!({ "message": "unknown update id" })) .unwrap(),
.with_status(StatusCode::NOT_FOUND)
.into_response(),
}; };
Ok(response) Ok(response)
} }
pub async fn get_all_updates_status(ctx: Context<Data>) -> SResult<Response> { pub async fn get_all_updates_status(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?; ctx.is_allowed(IndexesRead)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.update_read_txn().map_err(ResponseError::internal)?; let reader = db.update_read_txn()?;
let index = ctx.index()?; let index = ctx.index()?;
let all_status = index let response = index.all_updates_status(&reader)?;
.all_updates_status(&reader) Ok(tide::Response::new(200).body_json(&response).unwrap())
.map_err(ResponseError::internal)?;
let response = tide::response::json(all_status)
.with_status(StatusCode::OK)
.into_response();
Ok(response)
} }
pub async fn delete_index(ctx: Context<Data>) -> SResult<StatusCode> { pub async fn delete_index(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?; ctx.is_allowed(IndexesWrite)?;
let _ = ctx.index()?; let _ = ctx.index()?;
let index_uid = ctx.url_param("index")?; let index_uid = ctx.url_param("index")?;
ctx.state().db.delete_index(&index_uid).map_err(ResponseError::internal)?; ctx.state().db.delete_index(&index_uid)?;
Ok(StatusCode::NO_CONTENT) Ok(tide::Response::new(204))
} }
pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpdateResult) { pub fn index_update_callback(index_uid: &str, data: &Data, status: ProcessedUpdateResult) {
@ -1,14 +1,12 @@
use chrono::serde::ts_seconds; use chrono::serde::ts_seconds;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use heed::types::{SerdeBincode, Str}; use heed::types::{SerdeBincode, Str};
use http::StatusCode;
use rand::seq::SliceRandom; use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tide::response::IntoResponse; use tide::{Request, Response};
use tide::{Context, Response};
use crate::error::{ResponseError, SResult}; use crate::error::{ResponseError, SResult};
use crate::helpers::tide::ContextExt; use crate::helpers::tide::RequestExt;
use crate::models::token::ACL::*; use crate::models::token::ACL::*;
use crate::models::token::*; use crate::models::token::*;
use crate::Data; use crate::Data;
@ -22,47 +20,45 @@ fn generate_api_key() -> String {
.collect() .collect()
} }
pub async fn list(ctx: Context<Data>) -> SResult<Response> { pub async fn list(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let common_store = db.common_store(); let common_store = db.common_store();
let mut response: Vec<Token> = Vec::new(); let mut response: Vec<Token> = Vec::new();
let iter = common_store let iter =
.prefix_iter::<_, Str, SerdeBincode<Token>>(&reader, TOKEN_PREFIX_KEY) common_store.prefix_iter::<_, Str, SerdeBincode<Token>>(&reader, TOKEN_PREFIX_KEY)?;
.map_err(ResponseError::internal)?;
for result in iter { for result in iter {
let (_, token) = result.map_err(ResponseError::internal)?; let (_, token) = result?;
response.push(token); response.push(token);
} }
Ok(tide::response::json(response)) Ok(tide::Response::new(200).body_json(&response).unwrap())
} }
pub async fn get(ctx: Context<Data>) -> SResult<Response> { pub async fn get(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let request_key = ctx.url_param("key")?; let request_key = ctx.url_param("key")?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key); let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key);
let token_config = db let token_config = db
.common_store() .common_store()
.get::<_, Str, SerdeBincode<Token>>(&reader, &token_key) .get::<_, Str, SerdeBincode<Token>>(&reader, &token_key)?
.map_err(ResponseError::internal)?
.ok_or(ResponseError::not_found(format!( .ok_or(ResponseError::not_found(format!(
"token key: {}", "token key: {}",
token_key token_key
)))?; )))?;
Ok(tide::response::json(token_config)) Ok(tide::Response::new(200).body_json(&token_config).unwrap())
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
@ -75,7 +71,7 @@ pub struct CreatedRequest {
expires_at: DateTime<Utc>, expires_at: DateTime<Utc>,
} }
pub async fn create(mut ctx: Context<Data>) -> SResult<Response> { pub async fn create(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let data: CreatedRequest = ctx.body_json().await.map_err(ResponseError::bad_request)?; let data: CreatedRequest = ctx.body_json().await.map_err(ResponseError::bad_request)?;
@ -95,17 +91,18 @@ pub async fn create(mut ctx: Context<Data>) -> SResult<Response> {
}; };
let db = &ctx.state().db; let db = &ctx.state().db;
let mut writer = db.main_write_txn().map_err(ResponseError::internal)?; let mut writer = db.main_write_txn()?;
db.common_store() db.common_store().put::<_, Str, SerdeBincode<Token>>(
.put::<_, Str, SerdeBincode<Token>>(&mut writer, &token_key, &token_definition) &mut writer,
.map_err(ResponseError::internal)?; &token_key,
&token_definition,
)?;
writer.commit().map_err(ResponseError::internal)?; writer.commit()?;
Ok(tide::Response::new(201)
Ok(tide::response::json(token_definition) .body_json(&token_definition)
.with_status(StatusCode::CREATED) .unwrap())
.into_response())
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
@ -118,22 +115,21 @@ pub struct UpdatedRequest {
revoked: Option<bool>, revoked: Option<bool>,
} }
pub async fn update(mut ctx: Context<Data>) -> SResult<Response> { pub async fn update(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let request_key = ctx.url_param("key")?; let request_key = ctx.url_param("key")?;
let data: UpdatedRequest = ctx.body_json().await.map_err(ResponseError::bad_request)?; let data: UpdatedRequest = ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let mut writer = db.main_write_txn().map_err(ResponseError::internal)?; let mut writer = db.main_write_txn()?;
let common_store = db.common_store(); let common_store = db.common_store();
let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key); let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key);
let mut token_config = common_store let mut token_config = common_store
.get::<_, Str, SerdeBincode<Token>>(&writer, &token_key) .get::<_, Str, SerdeBincode<Token>>(&writer, &token_key)?
.map_err(ResponseError::internal)?
.ok_or(ResponseError::not_found(format!( .ok_or(ResponseError::not_found(format!(
"token key: {}", "token key: {}",
token_key token_key
@ -143,52 +139,34 @@ pub async fn update(mut ctx: Context<Data>) -> SResult<Response> {
if let Some(description) = data.description { if let Some(description) = data.description {
token_config.description = description; token_config.description = description;
} }
if let Some(acl) = data.acl { if let Some(acl) = data.acl {
token_config.acl = acl; token_config.acl = acl;
} }
if let Some(indexes) = data.indexes { if let Some(indexes) = data.indexes {
token_config.indexes = indexes; token_config.indexes = indexes;
} }
if let Some(expires_at) = data.expires_at { if let Some(expires_at) = data.expires_at {
token_config.expires_at = expires_at; token_config.expires_at = expires_at;
} }
if let Some(revoked) = data.revoked { if let Some(revoked) = data.revoked {
token_config.revoked = revoked; token_config.revoked = revoked;
} }
token_config.updated_at = Utc::now(); token_config.updated_at = Utc::now();
common_store.put::<_, Str, SerdeBincode<Token>>(&mut writer, &token_key, &token_config)?;
writer.commit()?;
common_store Ok(tide::Response::new(200).body_json(&token_config).unwrap())
.put::<_, Str, SerdeBincode<Token>>(&mut writer, &token_key, &token_config)
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?;
Ok(tide::response::json(token_config)
.with_status(StatusCode::OK)
.into_response())
} }
pub async fn delete(ctx: Context<Data>) -> SResult<StatusCode> { pub async fn delete(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let request_key = ctx.url_param("key")?; let request_key = ctx.url_param("key")?;
let db = &ctx.state().db; let db = &ctx.state().db;
let mut writer = db.main_write_txn().map_err(ResponseError::internal)?; let mut writer = db.main_write_txn()?;
let common_store = db.common_store(); let common_store = db.common_store();
let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key); let token_key = format!("{}{}", TOKEN_PREFIX_KEY, request_key);
common_store.delete::<_, Str>(&mut writer, &token_key)?;
common_store writer.commit()?;
.delete::<_, Str>(&mut writer, &token_key) Ok(tide::Response::new(204))
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?;
Ok(StatusCode::NO_CONTENT)
} }
@ -1,4 +1,7 @@
use crate::data::Data; use crate::data::Data;
use std::future::Future;
use tide::IntoResponse;
use tide::Response;
pub mod document; pub mod document;
pub mod health; pub mod health;
@ -10,114 +13,132 @@ pub mod stats;
pub mod stop_words; pub mod stop_words;
pub mod synonym; pub mod synonym;
pub fn load_routes(app: &mut tide::App<Data>) { async fn into_response<T: IntoResponse, U: IntoResponse>(
app.at("").nest(|router| { x: impl Future<Output = Result<T, U>>,
// expose the web interface static files ) -> Response {
router.at("/").get(|_| async { match x.await {
let content = include_str!("../../public/interface.html").to_owned(); Ok(resp) => resp.into_response(),
tide::http::Response::builder() Err(resp) => resp.into_response(),
.header(tide::http::header::CONTENT_TYPE, "text/html; charset=utf-8") }
.status(tide::http::StatusCode::OK) }
.body(content).unwrap()
}); pub fn load_routes(app: &mut tide::Server<Data>) {
router.at("/bulma.min.css").get(|_| async { app.at("/").get(|_| {
let content = include_str!("../../public/bulma.min.css"); async move {
tide::http::Response::builder() tide::Response::new(200)
.header(tide::http::header::CONTENT_TYPE, "text/css; charset=utf-8") .body_string(include_str!("../../public/interface.html").to_string())
.status(tide::http::StatusCode::OK) .set_mime(mime::TEXT_HTML_UTF_8)
.body(content).unwrap() }
}); });
app.at("/bulma.min.css").get(|_| {
router.at("/indexes").nest(|router| { async {
router tide::Response::new(200)
.at("/") .body_string(include_str!("../../public/bulma.min.css").to_string())
.get(index::list_indexes) .set_mime(mime::TEXT_CSS_UTF_8)
.post(index::create_index); }
});
router.at("/search").post(search::search_multi_index);
app.at("/indexes")
router.at("/:index").nest(|router| { .get(|ctx| into_response(index::list_indexes(ctx)))
router.at("/search").get(search::search_with_url_query); .post(|ctx| into_response(index::create_index(ctx)));
router.at("/updates").nest(|router| { app.at("/indexes/search")
router.at("/").get(index::get_all_updates_status); .post(|ctx| into_response(search::search_multi_index(ctx)));
router.at("/:update_id").get(index::get_update_status); app.at("/indexes/:index")
}); .get(|ctx| into_response(index::get_index(ctx)))
.put(|ctx| into_response(index::update_index(ctx)))
router .delete(|ctx| into_response(index::delete_index(ctx)));
.at("/")
.get(index::get_index) app.at("/indexes/:index/search")
.put(index::update_index) .get(|ctx| into_response(search::search_with_url_query(ctx)));
.delete(index::delete_index);
app.at("/indexes/:index/updates")
router .get(|ctx| into_response(index::get_all_updates_status(ctx)));
.at("/schema")
.get(index::get_index_schema) app.at("/indexes/:index/updates/:update_id")
.put(index::update_schema); .get(|ctx| into_response(index::get_update_status(ctx)));
router.at("/documents").nest(|router| { app.at("/indexes/:index/documents")
router .get(|ctx| into_response(document::get_all_documents(ctx)))
.at("/") .post(|ctx| into_response(document::add_or_replace_multiple_documents(ctx)))
.get(document::get_all_documents) .put(|ctx| into_response(document::add_or_update_multiple_documents(ctx)))
.post(document::add_or_replace_multiple_documents) .delete(|ctx| into_response(document::clear_all_documents(ctx)));
.put(document::add_or_update_multiple_documents)
.delete(document::clear_all_documents); app.at("/indexes/:index/documents/:identifier")
.get(|ctx| into_response(document::get_document(ctx)))
router.at("/:identifier").nest(|router| { .delete(|ctx| into_response(document::delete_document(ctx)));
router
.at("/") app.at("/indexes/:index/documents/:identifier/delete-batch")
.get(document::get_document) .post(|ctx| into_response(document::delete_multiple_documents(ctx)));
.delete(document::delete_document);
}); app.at("/indexes/:index/settings")
.get(|ctx| into_response(setting::get_all(ctx)))
router .post(|ctx| into_response(setting::update_all(ctx)))
.at("/delete") .delete(|ctx| into_response(setting::delete_all(ctx)));
.post(document::delete_multiple_documents);
}); app.at("/indexes/:index/settings/ranking-rules")
.get(|ctx| into_response(setting::get_rules(ctx)))
router.at("/synonyms") .post(|ctx| into_response(setting::update_rules(ctx)))
.get(synonym::get) .delete(|ctx| into_response(setting::delete_rules(ctx)));
.post(synonym::update);
app.at("/indexes/:index/settings/ranking-distinct")
router.at("/stop-words").nest(|router| { .get(|ctx| into_response(setting::get_distinct(ctx)))
router .post(|ctx| into_response(setting::update_distinct(ctx)))
.at("/") .delete(|ctx| into_response(setting::delete_distinct(ctx)));
.get(stop_words::list)
.patch(stop_words::add) app.at("/indexes/:index/settings/identifier")
.post(stop_words::delete); .get(|ctx| into_response(setting::get_identifier(ctx)));
});
app.at("/indexes/:index/settings/searchable-attributes")
router .get(|ctx| into_response(setting::get_searchable(ctx)))
.at("/settings") .post(|ctx| into_response(setting::update_searchable(ctx)))
.get(setting::get) .delete(|ctx| into_response(setting::delete_searchable(ctx)));
.post(setting::update);
}); app.at("/indexes/:index/settings/displayed-attributes")
}); .get(|ctx| into_response(setting::displayed(ctx)))
.post(|ctx| into_response(setting::update_displayed(ctx)))
router.at("/keys").nest(|router| { .delete(|ctx| into_response(setting::delete_displayed(ctx)));
router.at("/").get(key::list).post(key::create);
app.at("/indexes/:index/settings/index-new-field")
router .get(|ctx| into_response(setting::get_index_new_fields(ctx)))
.at("/:key") .post(|ctx| into_response(setting::update_index_new_fields(ctx)));
.get(key::get)
.put(key::update) app.at("/indexes/:index/settings/synonyms")
.delete(key::delete); .get(|ctx| into_response(synonym::get(ctx)))
}); .post(|ctx| into_response(synonym::update(ctx)))
}); .delete(|ctx| into_response(synonym::delete(ctx)));
app.at("").nest(|router| { app.at("/indexes/:index/settings/stop_words")
router .get(|ctx| into_response(stop_words::get(ctx)))
.at("/health") .post(|ctx| into_response(stop_words::update(ctx)))
.get(health::get_health) .delete(|ctx| into_response(stop_words::delete(ctx)));
.put(health::change_healthyness);
app.at("/indexes/:index/stats")
router.at("/stats").get(stats::get_stats); .get(|ctx| into_response(stats::index_stats(ctx)));
router.at("/stats/:index").get(stats::index_stat);
router.at("/version").get(stats::get_version); app.at("/keys/")
router.at("/sys-info").get(stats::get_sys_info); .get(|ctx| into_response(key::list(ctx)))
router .post(|ctx| into_response(key::create(ctx)));
.at("/sys-info/pretty")
.get(stats::get_sys_info_pretty); app.at("/keys/:key")
}); .get(|ctx| into_response(key::get(ctx)))
.put(|ctx| into_response(key::update(ctx)))
.delete(|ctx| into_response(key::delete(ctx)));
app.at("/health")
.get(|ctx| into_response(health::get_health(ctx)))
.put(|ctx| into_response(health::change_healthyness(ctx)));
app.at("/stats")
.get(|ctx| into_response(stats::get_stats(ctx)));
app.at("/version")
.get(|ctx| into_response(stats::get_version(ctx)));
app.at("/sys-info")
.get(|ctx| into_response(stats::get_sys_info(ctx)));
app.at("/sys-info/pretty")
.get(|ctx| into_response(stats::get_sys_info_pretty(ctx)));
} }
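The into_response adapter above exists because tide 0.6 route methods expect endpoints whose output implements IntoResponse directly, while every handler in this crate returns SResult<Response>; the adapter awaits the future and collapses both the Ok and Err arms into one plain Response. A minimal sketch of wiring an extra route through it (the ping handler is hypothetical):

// Hypothetical handler, shown only to illustrate the adapter pattern above.
async fn ping(_ctx: tide::Request<Data>) -> SResult<tide::Response> {
    Ok(tide::Response::new(200).body_string("pong".to_string()))
}

pub fn load_extra_routes(app: &mut tide::Server<Data>) {
    // Same shape as every registration above: the closure builds the future,
    // and into_response resolves it into a plain Response for tide.
    app.at("/ping").get(|ctx| into_response(ping(ctx)));
}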
@ -5,12 +5,11 @@ use std::time::Duration;
use meilisearch_core::Index; use meilisearch_core::Index;
use rayon::iter::{IntoParallelIterator, ParallelIterator}; use rayon::iter::{IntoParallelIterator, ParallelIterator};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tide::querystring::ContextExt as QSContextExt; use tide::{Request, Response};
use tide::{Context, Response};
use crate::error::{ResponseError, SResult}; use crate::error::{ResponseError, SResult};
use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit}; use crate::helpers::meilisearch::{Error, IndexSearchExt, SearchHit};
use crate::helpers::tide::ContextExt; use crate::helpers::tide::RequestExt;
use crate::Data; use crate::Data;
#[derive(Deserialize)] #[derive(Deserialize)]
@ -20,7 +19,6 @@ struct SearchQuery {
offset: Option<usize>, offset: Option<usize>,
limit: Option<usize>, limit: Option<usize>,
attributes_to_retrieve: Option<String>, attributes_to_retrieve: Option<String>,
attributes_to_search_in: Option<String>,
attributes_to_crop: Option<String>, attributes_to_crop: Option<String>,
crop_length: Option<usize>, crop_length: Option<usize>,
attributes_to_highlight: Option<String>, attributes_to_highlight: Option<String>,
@ -29,21 +27,20 @@ struct SearchQuery {
matches: Option<bool>, matches: Option<bool>,
} }
pub async fn search_with_url_query(ctx: Context<Data>) -> SResult<Response> { pub async fn search_with_url_query(ctx: Request<Data>) -> SResult<Response> {
// ctx.is_allowed(DocumentsRead)?; // ctx.is_allowed(DocumentsRead)?;
let index = ctx.index()?; let index = ctx.index()?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let schema = index let schema = index
.main .main
.schema(&reader) .schema(&reader)?
.map_err(ResponseError::internal)?
.ok_or(ResponseError::open_index("No Schema found"))?; .ok_or(ResponseError::open_index("No Schema found"))?;
let query: SearchQuery = ctx let query: SearchQuery = ctx
.url_query() .query()
.map_err(|_| ResponseError::bad_request("invalid query parameter"))?; .map_err(|_| ResponseError::bad_request("invalid query parameter"))?;
let mut search_builder = index.new_search(query.q.clone()); let mut search_builder = index.new_search(query.q.clone());
@ -60,17 +57,14 @@ pub async fn search_with_url_query(ctx: Context<Data>) -> SResult<Response> {
search_builder.add_retrievable_field(attr.to_string()); search_builder.add_retrievable_field(attr.to_string());
} }
} }
if let Some(attributes_to_search_in) = query.attributes_to_search_in {
for attr in attributes_to_search_in.split(',') {
search_builder.add_attribute_to_search_in(attr.to_string());
}
}
if let Some(attributes_to_crop) = query.attributes_to_crop { if let Some(attributes_to_crop) = query.attributes_to_crop {
let crop_length = query.crop_length.unwrap_or(200); let crop_length = query.crop_length.unwrap_or(200);
if attributes_to_crop == "*" { if attributes_to_crop == "*" {
let attributes_to_crop = schema let attributes_to_crop = schema
.displayed_name()
.iter() .iter()
.map(|(attr, ..)| (attr.to_string(), crop_length)) .map(|attr| (attr.to_string(), crop_length))
.collect(); .collect();
search_builder.attributes_to_crop(attributes_to_crop); search_builder.attributes_to_crop(attributes_to_crop);
} else { } else {
@ -84,11 +78,15 @@ pub async fn search_with_url_query(ctx: Context<Data>) -> SResult<Response> {
if let Some(attributes_to_highlight) = query.attributes_to_highlight { if let Some(attributes_to_highlight) = query.attributes_to_highlight {
let attributes_to_highlight = if attributes_to_highlight == "*" { let attributes_to_highlight = if attributes_to_highlight == "*" {
schema.iter().map(|(attr, ..)| attr.to_string()).collect() schema
.displayed_name()
.iter()
.map(|s| s.to_string())
.collect()
} else { } else {
attributes_to_highlight attributes_to_highlight
.split(',') .split(',')
.map(ToString::to_string) .map(|s| s.to_string())
.collect() .collect()
}; };
@ -115,7 +113,7 @@ pub async fn search_with_url_query(ctx: Context<Data>) -> SResult<Response> {
Err(others) => return Err(ResponseError::bad_request(others)), Err(others) => return Err(ResponseError::bad_request(others)),
}; };
Ok(tide::response::json(response)) Ok(tide::Response::new(200).body_json(&response).unwrap())
} }
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
@ -126,7 +124,7 @@ struct SearchMultiBody {
offset: Option<usize>, offset: Option<usize>,
limit: Option<usize>, limit: Option<usize>,
attributes_to_retrieve: Option<HashSet<String>>, attributes_to_retrieve: Option<HashSet<String>>,
attributes_to_search_in: Option<HashSet<String>>, searchable_attributes: Option<HashSet<String>>,
attributes_to_crop: Option<HashMap<String, usize>>, attributes_to_crop: Option<HashMap<String, usize>>,
attributes_to_highlight: Option<HashSet<String>>, attributes_to_highlight: Option<HashSet<String>>,
filters: Option<String>, filters: Option<String>,
@ -144,7 +142,7 @@ struct SearchMultiBodyResponse {
query: String, query: String,
} }
pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> { pub async fn search_multi_index(mut ctx: Request<Data>) -> SResult<Response> {
// ctx.is_allowed(DocumentsRead)?; // ctx.is_allowed(DocumentsRead)?;
let body = ctx let body = ctx
.body_json::<SearchMultiBody>() .body_json::<SearchMultiBody>()
@ -189,9 +187,6 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
if let Some(attributes_to_retrieve) = par_body.attributes_to_retrieve.clone() { if let Some(attributes_to_retrieve) = par_body.attributes_to_retrieve.clone() {
search_builder.attributes_to_retrieve(attributes_to_retrieve); search_builder.attributes_to_retrieve(attributes_to_retrieve);
} }
if let Some(attributes_to_search_in) = par_body.attributes_to_search_in.clone() {
search_builder.attributes_to_search_in(attributes_to_search_in);
}
if let Some(attributes_to_crop) = par_body.attributes_to_crop.clone() { if let Some(attributes_to_crop) = par_body.attributes_to_crop.clone() {
search_builder.attributes_to_crop(attributes_to_crop); search_builder.attributes_to_crop(attributes_to_crop);
} }
@ -210,10 +205,8 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
} }
} }
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let response = search_builder let response = search_builder.search(&reader)?;
.search(&reader)
.map_err(ResponseError::internal)?;
Ok((index_uid, response)) Ok((index_uid, response))
}) })
.collect(); .collect();
@ -239,5 +232,5 @@ pub async fn search_multi_index(mut ctx: Context<Data>) -> SResult<Response> {
query: body.query, query: body.query,
}; };
Ok(tide::response::json(response)) Ok(tide::Response::new(200).body_json(&response).unwrap())
} }
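The wildcard handling above now expands "*" against the schema rather than iterating raw attributes: crop lengths are attached to every displayed attribute, and highlighting falls back to the displayed set as well. The expansion in isolation, assuming displayed_name() yields the displayed attribute names as used above:

// Sketch: expanding "*" into per-attribute crop lengths, mirroring the hunk above.
use std::collections::HashMap;

fn expand_crop_wildcard(
    schema: &meilisearch_schema::Schema,
    crop_length: usize,
) -> HashMap<String, usize> {
    schema
        .displayed_name()
        .iter()
        .map(|attr| (attr.to_string(), crop_length))
        .collect()
}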
@ -1,107 +1,421 @@
use std::collections::HashMap; use meilisearch_core::settings::{Settings, SettingsUpdate, UpdateState};
use serde::Deserialize;
use http::StatusCode; use std::collections::{BTreeMap, BTreeSet, HashSet};
use serde::{Deserialize, Serialize, Deserializer}; use tide::{Request, Response};
use tide::response::IntoResponse;
use tide::{Context, Response};
use crate::error::{ResponseError, SResult}; use crate::error::{ResponseError, SResult};
use crate::helpers::tide::ContextExt; use crate::helpers::tide::RequestExt;
use crate::models::token::ACL::*; use crate::models::token::ACL::*;
use crate::routes::document::IndexUpdateResponse; use crate::routes::document::IndexUpdateResponse;
use crate::Data; use crate::Data;
#[derive(Default, Serialize, Deserialize)] pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Setting {
pub ranking_order: Option<RankingOrder>,
pub distinct_field: Option<DistinctField>,
pub ranking_rules: Option<RankingRules>,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum RankingOrdering {
Asc,
Dsc,
}
pub type RankingOrder = Vec<String>;
pub type DistinctField = String;
pub type RankingRules = HashMap<String, RankingOrdering>;
pub async fn get(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?; ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?; let index = ctx.index()?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let settings = match index.main.customs(&reader).unwrap() { let stop_words_fst = index.main.stop_words_fst(&reader)?;
Some(bytes) => bincode::deserialize(bytes).unwrap(), let stop_words = stop_words_fst.unwrap_or_default().stream().into_strs()?;
None => Setting::default(), let stop_words: BTreeSet<String> = stop_words.into_iter().collect();
let stop_words = if !stop_words.is_empty() {
Some(stop_words)
} else {
None
}; };
Ok(tide::response::json(settings)) let synonyms_fst = index.main.synonyms_fst(&reader)?.unwrap_or_default();
let synonyms_list = synonyms_fst.stream().into_strs()?;
let mut synonyms = BTreeMap::new();
let index_synonyms = &index.synonyms;
for synonym in synonyms_list {
let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?;
if let Some(list) = alternative_list {
let list = list.stream().into_strs()?;
synonyms.insert(synonym, list);
}
}
let synonyms = if !synonyms.is_empty() {
Some(synonyms)
} else {
None
};
let ranking_rules = match index.main.ranking_rules(&reader)? {
Some(rules) => Some(rules.iter().map(|r| r.to_string()).collect()),
None => None,
};
let ranking_distinct = index.main.ranking_distinct(&reader)?;
let schema = index.main.schema(&reader)?;
let searchable_attributes = schema.clone().map(|s| {
let attrs = s.indexed_name()
.iter()
.map(|s| (*s).to_string())
.collect::<Vec<String>>();
if attrs.is_empty() {
None
} else {
Some(attrs)
}
});
let displayed_attributes = schema.clone().map(|s| {
let attrs = s.displayed_name()
.iter()
.map(|s| (*s).to_string())
.collect::<HashSet<String>>();
if attrs.is_empty() {
None
} else {
Some(attrs)
}
});
let index_new_fields = schema.map(|s| s.index_new_fields());
let settings = Settings {
ranking_rules: Some(ranking_rules),
ranking_distinct: Some(ranking_distinct),
searchable_attributes,
displayed_attributes,
stop_words: Some(stop_words),
synonyms: Some(synonyms),
index_new_fields: Some(index_new_fields),
};
Ok(tide::Response::new(200).body_json(&settings).unwrap())
} }
#[derive(Deserialize)] #[derive(Default, Clone, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)] #[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct SettingBody { pub struct UpdateSettings {
#[serde(default, deserialize_with = "deserialize_some")] pub ranking_rules: Option<Vec<String>>,
pub ranking_order: Option<Option<RankingOrder>>, pub ranking_distinct: Option<String>,
#[serde(default, deserialize_with = "deserialize_some")] pub identifier: Option<String>,
pub distinct_field: Option<Option<DistinctField>>, pub searchable_attributes: Option<Vec<String>>,
#[serde(default, deserialize_with = "deserialize_some")] pub displayed_attributes: Option<HashSet<String>>,
pub ranking_rules: Option<Option<RankingRules>>, pub stop_words: Option<BTreeSet<String>>,
pub synonyms: Option<BTreeMap<String, Vec<String>>>,
pub index_new_fields: Option<bool>,
} }
// Any value that is present is considered Some value, including null. pub async fn update_all(mut ctx: Request<Data>) -> SResult<Response> {
fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where T: Deserialize<'de>,
D: Deserializer<'de>
{
Deserialize::deserialize(deserializer).map(Some)
}
pub async fn update(mut ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?; ctx.is_allowed(SettingsWrite)?;
let settings: SettingBody = ctx.body_json().await.map_err(ResponseError::bad_request)?;
let index = ctx.index()?; let index = ctx.index()?;
let settings_update: UpdateSettings =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
let mut current_settings = match index.main.customs(&reader).unwrap() { let settings = Settings {
Some(bytes) => bincode::deserialize(bytes).unwrap(), ranking_rules: Some(settings_update.ranking_rules),
None => Setting::default(), ranking_distinct: Some(settings_update.ranking_distinct),
searchable_attributes: Some(settings_update.searchable_attributes),
displayed_attributes: Some(settings_update.displayed_attributes),
stop_words: Some(settings_update.stop_words),
synonyms: Some(settings_update.synonyms),
index_new_fields: Some(settings_update.index_new_fields),
}; };
if let Some(ranking_order) = settings.ranking_order { let mut writer = db.update_write_txn()?;
current_settings.ranking_order = ranking_order; let update_id = index.settings_update(&mut writer, settings.into_update()?)?;
} writer.commit()?;
if let Some(distinct_field) = settings.distinct_field {
current_settings.distinct_field = distinct_field;
}
if let Some(ranking_rules) = settings.ranking_rules {
current_settings.ranking_rules = ranking_rules;
}
let bytes = bincode::serialize(&current_settings).unwrap();
let update_id = index
.customs_update(&mut writer, bytes)
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexUpdateResponse { update_id }; let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body) Ok(tide::Response::new(202).body_json(&response_body)?)
.with_status(StatusCode::ACCEPTED) }
.into_response())
pub async fn delete_all(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
let settings = SettingsUpdate {
ranking_rules: UpdateState::Clear,
ranking_distinct: UpdateState::Clear,
identifier: UpdateState::Clear,
searchable_attributes: UpdateState::Clear,
displayed_attributes: UpdateState::Clear,
stop_words: UpdateState::Clear,
synonyms: UpdateState::Clear,
index_new_fields: UpdateState::Clear,
};
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn get_rules(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let ranking_rules: Option<Vec<String>> = match index.main.ranking_rules(&reader)? {
Some(rules) => Some(rules.iter().map(|r| r.to_string()).collect()),
None => None,
};
Ok(tide::Response::new(200).body_json(&ranking_rules).unwrap())
}
pub async fn update_rules(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let ranking_rules: Option<Vec<String>> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let settings = Settings {
ranking_rules: Some(ranking_rules),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings.into_update()?)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn delete_rules(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
let settings = SettingsUpdate {
ranking_rules: UpdateState::Clear,
..SettingsUpdate::default()
};
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn get_distinct(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let ranking_distinct = index.main.ranking_distinct(&reader)?;
Ok(tide::Response::new(200)
.body_json(&ranking_distinct)
.unwrap())
}
pub async fn update_distinct(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let ranking_distinct: Option<String> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let settings = Settings {
ranking_distinct: Some(ranking_distinct),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings.into_update()?)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn delete_distinct(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let mut writer = db.update_write_txn()?;
let settings = SettingsUpdate {
ranking_distinct: UpdateState::Clear,
..SettingsUpdate::default()
};
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn get_identifier(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let schema = index.main.schema(&reader)?;
let identifier = schema.map(|s| s.identifier().to_string());
Ok(tide::Response::new(200).body_json(&identifier).unwrap())
}
pub async fn get_searchable(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let schema = index.main.schema(&reader)?;
let searchable_attributes: Option<HashSet<String>> =
schema.map(|s| s.indexed_name().iter().map(|i| (*i).to_string()).collect());
Ok(tide::Response::new(200)
.body_json(&searchable_attributes)
.unwrap())
}
pub async fn update_searchable(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let searchable_attributes: Option<Vec<String>> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let settings = Settings {
searchable_attributes: Some(searchable_attributes),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings.into_update()?)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn delete_searchable(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let settings = SettingsUpdate {
searchable_attributes: UpdateState::Clear,
..SettingsUpdate::default()
};
let mut writer = db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn displayed(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let schema = index.main.schema(&reader)?;
let displayed_attributes: Option<HashSet<String>> = schema.map(|s| {
s.displayed_name()
.iter()
.map(|i| (*i).to_string())
.collect()
});
Ok(tide::Response::new(200)
.body_json(&displayed_attributes)
.unwrap())
}
pub async fn update_displayed(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let displayed_attributes: Option<HashSet<String>> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let settings = Settings {
displayed_attributes: Some(displayed_attributes),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings.into_update()?)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn delete_displayed(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let settings = SettingsUpdate {
displayed_attributes: UpdateState::Clear,
..SettingsUpdate::default()
};
let mut writer = db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
}
pub async fn get_index_new_fields(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?;
let db = &ctx.state().db;
let reader = db.main_read_txn()?;
let schema = index.main.schema(&reader)?;
let index_new_fields = schema.map(|s| s.index_new_fields());
Ok(tide::Response::new(200)
.body_json(&index_new_fields)
.unwrap())
}
pub async fn update_index_new_fields(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsWrite)?;
let index = ctx.index()?;
let index_new_fields: Option<bool> =
ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db;
let settings = Settings {
index_new_fields: Some(index_new_fields),
..Settings::default()
};
let mut writer = db.update_write_txn()?;
let update_id = index.settings_update(&mut writer, settings.into_update()?)?;
writer.commit()?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::Response::new(202).body_json(&response_body)?)
} }
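Every handler in this file follows one recipe: build a Settings value where only the field being touched is Some(...), convert it with into_update(), and enqueue the result through settings_update. The double Option is what lets a JSON body distinguish an absent field from an explicit null; a sketch of that mapping, with a local stand-in for UpdateState (the Nothing variant name is an assumption, while Update and Clear appear in the diff):

// Sketch only: a local stand-in for meilisearch_core::settings::UpdateState.
// The real into_update() also validates values, which is why it is fallible.
enum UpdateState<T> {
    Update(T), // field present with a value: replace the stored setting
    Clear,     // field present but null: erase the stored setting
    Nothing,   // field absent from the body: leave the setting untouched
}

fn to_update_state<T>(field: Option<Option<T>>) -> UpdateState<T> {
    match field {
        Some(Some(value)) => UpdateState::Update(value),
        Some(None) => UpdateState::Clear,
        None => UpdateState::Nothing,
    }
}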
@ -5,11 +5,11 @@ use log::error;
use pretty_bytes::converter::convert; use pretty_bytes::converter::convert;
use serde::Serialize; use serde::Serialize;
use sysinfo::{NetworkExt, Pid, ProcessExt, ProcessorExt, System, SystemExt}; use sysinfo::{NetworkExt, Pid, ProcessExt, ProcessorExt, System, SystemExt};
use tide::{Context, Response}; use tide::{Request, Response};
use walkdir::WalkDir; use walkdir::WalkDir;
use crate::error::{ResponseError, SResult}; use crate::error::{IntoInternalError, SResult};
use crate::helpers::tide::ContextExt; use crate::helpers::tide::RequestExt;
use crate::models::token::ACL::*; use crate::models::token::ACL::*;
use crate::Data; use crate::Data;
@ -21,38 +21,26 @@ struct IndexStatsResponse {
fields_frequency: HashMap<String, usize>, fields_frequency: HashMap<String, usize>,
} }
pub async fn index_stat(ctx: Context<Data>) -> SResult<Response> { pub async fn index_stats(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let index_uid = ctx.url_param("index")?; let index_uid = ctx.url_param("index")?;
let index = ctx.index()?; let index = ctx.index()?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let update_reader = db.update_read_txn().map_err(ResponseError::internal)?; let update_reader = db.update_read_txn()?;
let number_of_documents = index.main.number_of_documents(&reader)?;
let number_of_documents = index let fields_frequency = index.main.fields_frequency(&reader)?.unwrap_or_default();
.main
.number_of_documents(&reader)
.map_err(ResponseError::internal)?;
let fields_frequency = index
.main
.fields_frequency(&reader)
.map_err(ResponseError::internal)?
.unwrap_or_default();
let is_indexing = ctx let is_indexing = ctx
.state() .state()
.is_indexing(&update_reader, &index_uid) .is_indexing(&update_reader, &index_uid)?
.map_err(ResponseError::internal)? .into_internal_error()?;
.ok_or(ResponseError::internal("'is_indexing' date not found"))?;
let response = IndexStatsResponse { let response = IndexStatsResponse {
number_of_documents, number_of_documents,
is_indexing, is_indexing,
fields_frequency, fields_frequency,
}; };
Ok(tide::response::json(response)) Ok(tide::Response::new(200).body_json(&response).unwrap())
} }
#[derive(Serialize)] #[derive(Serialize)]
@ -63,14 +51,14 @@ struct StatsResult {
indexes: HashMap<String, IndexStatsResponse>, indexes: HashMap<String, IndexStatsResponse>,
} }
pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> { pub async fn get_stats(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let mut index_list = HashMap::new(); let mut index_list = HashMap::new();
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let update_reader = db.update_read_txn().map_err(ResponseError::internal)?; let update_reader = db.update_read_txn()?;
let indexes_set = ctx.state().db.indexes_uids(); let indexes_set = ctx.state().db.indexes_uids();
for index_uid in indexes_set { for index_uid in indexes_set {
@ -78,22 +66,14 @@ pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> {
match index { match index {
Some(index) => { Some(index) => {
let number_of_documents = index let number_of_documents = index.main.number_of_documents(&reader)?;
.main
.number_of_documents(&reader)
.map_err(ResponseError::internal)?;
let fields_frequency = index let fields_frequency = index.main.fields_frequency(&reader)?.unwrap_or_default();
.main
.fields_frequency(&reader)
.map_err(ResponseError::internal)?
.unwrap_or_default();
let is_indexing = ctx let is_indexing = ctx
.state() .state()
.is_indexing(&update_reader, &index_uid) .is_indexing(&update_reader, &index_uid)?
.map_err(ResponseError::internal)? .into_internal_error()?;
.ok_or(ResponseError::internal("'is_indexing' date not found"))?;
let response = IndexStatsResponse { let response = IndexStatsResponse {
number_of_documents, number_of_documents,
@ -116,10 +96,7 @@ pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> {
.filter(|metadata| metadata.is_file()) .filter(|metadata| metadata.is_file())
.fold(0, |acc, m| acc + m.len()); .fold(0, |acc, m| acc + m.len());
let last_update = ctx let last_update = ctx.state().last_update(&reader)?;
.state()
.last_update(&reader)
.map_err(ResponseError::internal)?;
let response = StatsResult { let response = StatsResult {
database_size, database_size,
@ -127,7 +104,7 @@ pub async fn get_stats(ctx: Context<Data>) -> SResult<Response> {
indexes: index_list, indexes: index_list,
}; };
Ok(tide::response::json(response)) Ok(tide::Response::new(200).body_json(&response).unwrap())
} }
#[derive(Serialize)] #[derive(Serialize)]
@ -138,7 +115,7 @@ struct VersionResponse {
pkg_version: String, pkg_version: String,
} }
pub async fn get_version(ctx: Context<Data>) -> SResult<Response> { pub async fn get_version(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
let response = VersionResponse { let response = VersionResponse {
commit_sha: env!("VERGEN_SHA").to_string(), commit_sha: env!("VERGEN_SHA").to_string(),
@ -146,7 +123,7 @@ pub async fn get_version(ctx: Context<Data>) -> SResult<Response> {
pkg_version: env!("CARGO_PKG_VERSION").to_string(), pkg_version: env!("CARGO_PKG_VERSION").to_string(),
}; };
Ok(tide::response::json(response)) Ok(tide::Response::new(200).body_json(&response).unwrap())
} }
#[derive(Serialize)] #[derive(Serialize)]
@ -236,9 +213,10 @@ pub(crate) fn report(pid: Pid) -> SysInfo {
info info
} }
pub async fn get_sys_info(ctx: Context<Data>) -> SResult<Response> { pub async fn get_sys_info(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
Ok(tide::response::json(report(ctx.state().server_pid))) let response = report(ctx.state().server_pid);
Ok(tide::Response::new(200).body_json(&response).unwrap())
} }
#[derive(Serialize)] #[derive(Serialize)]
@ -332,7 +310,8 @@ pub(crate) fn report_pretty(pid: Pid) -> SysInfoPretty {
info info
} }
pub async fn get_sys_info_pretty(ctx: Context<Data>) -> SResult<Response> { pub async fn get_sys_info_pretty(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(Admin)?; ctx.is_allowed(Admin)?;
Ok(tide::response::json(report_pretty(ctx.state().server_pid))) let response = report_pretty(ctx.state().server_pid);
Ok(tide::Response::new(200).body_json(&response).unwrap())
} }
@ -1,82 +1,63 @@
use http::StatusCode; use std::collections::BTreeSet;
use tide::response::IntoResponse;
use tide::{Context, Response}; use meilisearch_core::settings::{SettingsUpdate, UpdateState};
use tide::{Request, Response};
use crate::error::{ResponseError, SResult}; use crate::error::{ResponseError, SResult};
use crate::helpers::tide::ContextExt; use crate::helpers::tide::RequestExt;
use crate::models::token::ACL::*; use crate::models::token::ACL::*;
use crate::routes::document::IndexUpdateResponse; use crate::routes::document::IndexUpdateResponse;
use crate::Data; use crate::Data;
pub async fn list(ctx: Context<Data>) -> SResult<Response> { pub async fn get(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?; ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?; let index = ctx.index()?;
let db = &ctx.state().db; let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?; let reader = db.main_read_txn()?;
let stop_words_fst = index.main.stop_words_fst(&reader)?;
let stop_words = stop_words_fst.unwrap_or_default().stream().into_strs()?;
let stop_words_fst = index Ok(tide::Response::new(200).body_json(&stop_words).unwrap())
.main
.stop_words_fst(&reader)
.map_err(ResponseError::internal)?;
let stop_words = stop_words_fst
.unwrap_or_default()
.stream()
.into_strs()
.map_err(ResponseError::internal)?;
Ok(tide::response::json(stop_words))
} }
pub async fn add(mut ctx: Context<Data>) -> SResult<Response> { pub async fn update(mut ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?; ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?; let index = ctx.index()?;
let data: Vec<String> = ctx.body_json().await.map_err(ResponseError::bad_request)?; let data: BTreeSet<String> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let mut writer = db.update_write_txn().map_err(ResponseError::internal)?; let mut writer = db.update_write_txn()?;
let mut stop_words_addition = index.stop_words_addition(); let settings = SettingsUpdate {
for stop_word in data { stop_words: UpdateState::Update(data),
stop_words_addition.add_stop_word(stop_word); ..SettingsUpdate::default()
} };
let update_id = stop_words_addition let update_id = index.settings_update(&mut writer, settings)?;
.finalize(&mut writer)
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?; writer.commit()?;
let response_body = IndexUpdateResponse { update_id }; let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body) Ok(tide::Response::new(202).body_json(&response_body)?)
.with_status(StatusCode::ACCEPTED)
.into_response())
} }
pub async fn delete(mut ctx: Context<Data>) -> SResult<Response> { pub async fn delete(ctx: Request<Data>) -> SResult<Response> {
ctx.is_allowed(SettingsRead)?; ctx.is_allowed(SettingsRead)?;
let index = ctx.index()?; let index = ctx.index()?;
let data: Vec<String> = ctx.body_json().await.map_err(ResponseError::bad_request)?;
let db = &ctx.state().db; let db = &ctx.state().db;
let mut writer = db.update_write_txn().map_err(ResponseError::internal)?; let mut writer = db.update_write_txn()?;
let mut stop_words_deletion = index.stop_words_deletion(); let settings = SettingsUpdate {
for stop_word in data { stop_words: UpdateState::Clear,
stop_words_deletion.delete_stop_word(stop_word); ..SettingsUpdate::default()
} };
let update_id = stop_words_deletion let update_id = index.settings_update(&mut writer, settings)?;
.finalize(&mut writer)
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?; writer.commit()?;
let response_body = IndexUpdateResponse { update_id }; let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body) Ok(tide::Response::new(202).body_json(&response_body)?)
.with_status(StatusCode::ACCEPTED)
.into_response())
} }
@ -1,73 +1,82 @@
use std::collections::BTreeMap;

use indexmap::IndexMap;
use meilisearch_core::settings::{SettingsUpdate, UpdateState};
use tide::{Request, Response};

use crate::error::{ResponseError, SResult};
use crate::helpers::tide::RequestExt;
use crate::models::token::ACL::*;
use crate::routes::document::IndexUpdateResponse;
use crate::Data;

pub async fn get(ctx: Request<Data>) -> SResult<Response> {
    ctx.is_allowed(SettingsRead)?;
    let index = ctx.index()?;

    let db = &ctx.state().db;
    let reader = db.main_read_txn()?;

    let synonyms_fst = index.main.synonyms_fst(&reader)?.unwrap_or_default();
    let synonyms_list = synonyms_fst.stream().into_strs()?;

    let mut synonyms = IndexMap::new();
    let index_synonyms = &index.synonyms;
    for synonym in synonyms_list {
        let alternative_list = index_synonyms.synonyms(&reader, synonym.as_bytes())?;
        if let Some(list) = alternative_list {
            let list = list.stream().into_strs()?;
            synonyms.insert(synonym, list);
        }
    }

    Ok(tide::Response::new(200).body_json(&synonyms).unwrap())
}

pub async fn update(mut ctx: Request<Data>) -> SResult<Response> {
    ctx.is_allowed(SettingsWrite)?;
    let data: BTreeMap<String, Vec<String>> =
        ctx.body_json().await.map_err(ResponseError::bad_request)?;

    let index = ctx.index()?;

    let db = &ctx.state().db;
    let mut writer = db.update_write_txn()?;

    let settings = SettingsUpdate {
        synonyms: UpdateState::Update(data),
        ..SettingsUpdate::default()
    };

    let update_id = index.settings_update(&mut writer, settings)?;

    writer.commit()?;

    let response_body = IndexUpdateResponse { update_id };
    Ok(tide::Response::new(202).body_json(&response_body)?)
}

pub async fn delete(ctx: Request<Data>) -> SResult<Response> {
    ctx.is_allowed(SettingsWrite)?;
    let index = ctx.index()?;

    let db = &ctx.state().db;
    let mut writer = db.update_write_txn()?;

    let settings = SettingsUpdate {
        synonyms: UpdateState::Clear,
        ..SettingsUpdate::default()
    };

    let update_id = index.settings_update(&mut writer, settings)?;

    writer.commit()?;

    let response_body = IndexUpdateResponse { update_id };
    Ok(tide::Response::new(202).body_json(&response_body)?)
}

// --- removed (previous version) ---

use std::collections::HashMap;

use http::StatusCode;
use indexmap::IndexMap;
use tide::response::IntoResponse;
use tide::{Context, Response};

use crate::error::{ResponseError, SResult};
use crate::helpers::tide::ContextExt;
use crate::models::token::ACL::*;
use crate::routes::document::IndexUpdateResponse;
use crate::Data;

pub async fn get(ctx: Context<Data>) -> SResult<Response> {
    ctx.is_allowed(SettingsRead)?;
    let index = ctx.index()?;

    let db = &ctx.state().db;
    let reader = db.main_read_txn().map_err(ResponseError::internal)?;

    let synonyms_fst = index
        .main
        .synonyms_fst(&reader)
        .map_err(ResponseError::internal)?;
    let synonyms_fst = synonyms_fst.unwrap_or_default();
    let synonyms_list = synonyms_fst.stream().into_strs().map_err(ResponseError::internal)?;

    let mut response = IndexMap::new();
    let index_synonyms = &index.synonyms;
    for synonym in synonyms_list {
        let alternative_list = index_synonyms
            .synonyms(&reader, synonym.as_bytes())
            .map_err(ResponseError::internal)?;
        if let Some(list) = alternative_list {
            let list = list.stream().into_strs().map_err(ResponseError::internal)?;
            response.insert(synonym, list);
        }
    }

    Ok(tide::response::json(response))
}

pub async fn update(mut ctx: Context<Data>) -> SResult<Response> {
    ctx.is_allowed(SettingsWrite)?;
    let data: HashMap<String, Vec<String>> = ctx.body_json().await.map_err(ResponseError::bad_request)?;

    let index = ctx.index()?;

    let db = &ctx.state().db;
    let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;

    let mut synonyms_update = index.synonyms_update();

    for (input, synonyms) in data {
        synonyms_update.add_synonym(input, synonyms.into_iter());
    }

    let update_id = synonyms_update
        .finalize(&mut writer)
        .map_err(ResponseError::internal)?;

    writer.commit().map_err(ResponseError::internal)?;

    let response_body = IndexUpdateResponse { update_id };
    Ok(tide::response::json(response_body)
        .with_status(StatusCode::ACCEPTED)
        .into_response())
}
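For orientation, here is a sketch of how these three handlers could be exercised end to end, written against the test harness that appears below. The mount point /indexes/:uid/settings/synonyms is an assumption; the routes module that binds these handlers is not part of this excerpt.

use http_service::Body;
use serde_json::json;

mod common;

#[test]
fn update_and_clear_synonyms() {
    let mut server = common::setup_server().unwrap();
    common::enrich_server_with_movies_index(&mut server).unwrap();

    // POST a synonyms map; `update` wraps it in UpdateState::Update.
    let body = json!({ "wolverine": ["xmen", "logan"] }).to_string().into_bytes();
    let req = http::Request::post("/indexes/movies/settings/synonyms")
        .body(Body::from(body))
        .unwrap();
    let res = server.simulate(req).unwrap();
    assert_eq!(res.status(), 202); // settings updates are asynchronous

    // DELETE goes through the same settings pipeline with UpdateState::Clear.
    let req = http::Request::delete("/indexes/movies/settings/synonyms")
        .body(Body::empty())
        .unwrap();
    let res = server.simulate(req).unwrap();
    assert_eq!(res.status(), 202);
}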

File diff suppressed because it is too large

View File

@ -0,0 +1,185 @@
#![allow(dead_code)]
use serde_json::Value;
use std::error::Error;
use std::time::Duration;
use assert_json_diff::assert_json_eq;
use async_std::io::prelude::*;
use async_std::task::{block_on, sleep};
use http_service::Body;
use http_service_mock::{make_server, TestBackend};
use meilisearch_http::data::Data;
use meilisearch_http::option::Opt;
use meilisearch_http::routes;
use serde_json::json;
use tempdir::TempDir;
use tide::server::Service;
pub fn setup_server() -> Result<TestBackend<Service<Data>>, Box<dyn Error>> {
let tmp_dir = TempDir::new("meilisearch")?;
let opt = Opt {
db_path: tmp_dir.path().to_str().unwrap().to_string(),
http_addr: "127.0.0.1:7700".to_owned(),
api_key: None,
no_analytics: true,
};
let data = Data::new(opt.clone());
let mut app = tide::with_state(data);
routes::load_routes(&mut app);
let http_server = app.into_http_service();
Ok(make_server(http_server)?)
}
pub fn enrich_server_with_movies_index(
server: &mut TestBackend<Service<Data>>,
) -> Result<(), Box<dyn Error>> {
let body = json!({
"uid": "movies",
"identifier": "id",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let _res = server.simulate(req).unwrap();
Ok(())
}
pub fn enrich_server_with_movies_settings(
server: &mut TestBackend<Service<Data>>,
) -> Result<(), Box<dyn Error>> {
let json = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"dsc(popularity)",
"_exact",
"dsc(vote_average)",
],
"rankingDistinct": null,
"searchableAttributes": [
"title",
"tagline",
"overview",
"cast",
"director",
"producer",
"production_companies",
"genres",
],
"displayedAttributes": [
"title",
"director",
"producer",
"tagline",
"genres",
"id",
"overview",
"vote_count",
"vote_average",
"poster_path",
"popularity",
],
"stopWords": null,
"synonyms": null,
"indexNewFields": false,
});
let body = json.to_string().into_bytes();
let req = http::Request::post("/indexes/movies/settings")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let response: Value = serde_json::from_slice(&buf).unwrap();
assert!(response["updateId"].as_u64().is_some());
wait_update_id(server, response["updateId"].as_u64().unwrap());
Ok(())
}
pub fn enrich_server_with_movies_documents(
server: &mut TestBackend<Service<Data>>,
) -> Result<(), Box<dyn Error>> {
let body = include_bytes!("assets/movies.json").to_vec();
let req = http::Request::post("/indexes/movies/documents")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let response: Value = serde_json::from_slice(&buf).unwrap();
assert!(response["updateId"].as_u64().is_some());
wait_update_id(server, response["updateId"].as_u64().unwrap());
Ok(())
}
pub fn search(server: &mut TestBackend<Service<Data>>, query: &str, expect: Value) {
let req = http::Request::get(format!("/indexes/movies/search?{}", query))
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let response: Value = serde_json::from_slice(&buf).unwrap();
assert_json_eq!(expect, response["hits"].clone(), ordered: false)
}
pub fn update_config(server: &mut TestBackend<Service<Data>>, config: Value) {
let body = config.to_string().into_bytes();
let req = http::Request::post("/indexes/movies/settings")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 202);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let response: Value = serde_json::from_slice(&buf).unwrap();
assert!(response["updateId"].as_u64().is_some());
wait_update_id(server, response["updateId"].as_u64().unwrap());
}
pub fn wait_update_id(server: &mut TestBackend<Service<Data>>, update_id: u64) {
loop {
let req = http::Request::get(format!("/indexes/movies/updates/{}", update_id))
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let response: Value = serde_json::from_slice(&buf).unwrap();
if response["status"] == "processed" {
return
}
block_on(sleep(Duration::from_secs(1)));
}
}
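Taken together, these helpers compose into end-to-end tests such as the following sketch. The query string and the expected hit are hypothetical placeholders; a real test pins them to the movies fixture in assets/movies.json.

use serde_json::json;

mod common;

#[test]
fn search_after_full_setup() {
    let mut server = common::setup_server().unwrap();
    common::enrich_server_with_movies_index(&mut server).unwrap();
    common::enrich_server_with_movies_settings(&mut server).unwrap();
    common::enrich_server_with_movies_documents(&mut server).unwrap();

    // `search` compares `expect` against the `hits` array of the response.
    common::search(&mut server, "q=batman&limit=1", json!([{ "title": "Batman" }]));
}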

View File

@ -0,0 +1,56 @@
use http_service::Body;
use serde_json::json;
use std::convert::Into;
mod common;
#[test]
fn test_healthyness() {
let mut server = common::setup_server().unwrap();
// Check that the server is healthy
let req = http::Request::get("/health").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
// Set the server unhealthy
let body = json!({
"health": false,
})
.to_string()
.into_bytes();
let req = http::Request::put("/health")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
// Check that the server is unhealthy
let req = http::Request::get("/health").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 503);
// Set the server healthy
let body = json!({
"health": true,
})
.to_string()
.into_bytes();
let req = http::Request::put("/health")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
// Check if the server is healthy
let req = http::Request::get("/health").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
}
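Outside the test harness, the same endpoint doubles as a readiness probe. Below is a minimal sketch using only the standard library and assuming the 127.0.0.1:7700 address configured above; a real deployment would use a proper HTTP client instead of a handwritten request.

use std::io::{Read, Write};
use std::net::TcpStream;

fn is_healthy() -> bool {
    let mut stream = match TcpStream::connect("127.0.0.1:7700") {
        Ok(stream) => stream,
        Err(_) => return false,
    };
    // Bare-bones HTTP/1.1 request against GET /health.
    let req = "GET /health HTTP/1.1\r\nHost: 127.0.0.1:7700\r\nConnection: close\r\n\r\n";
    if stream.write_all(req.as_bytes()).is_err() {
        return false;
    }
    let mut response = String::new();
    let _ = stream.read_to_string(&mut response);
    // 200 means healthy; 503 is returned after PUT /health {"health": false}.
    response.starts_with("HTTP/1.1 200")
}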

View File

@ -0,0 +1,649 @@
use async_std::io::prelude::*;
use async_std::task::block_on;
use http_service::Body;
use serde_json::json;
use serde_json::Value;
mod common;
#[test]
fn create_index_with_name() {
let mut server = common::setup_server().unwrap();
// 1 - Create a new index
// Index with only a name "movies"
// POST: /indexes
let body = json!({
"name": "movies",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res1_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res1_value.as_object().unwrap().len(), 5);
let r1_name = res1_value["name"].as_str().unwrap();
let r1_uid = res1_value["uid"].as_str().unwrap();
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
assert_eq!(r1_name, "movies");
assert_eq!(r1_uid.len(), 8);
assert!(r1_created_at.len() > 1);
assert!(r1_updated_at.len() > 1);
// 2 - Check the list of indexes
// Must have 1 index with the exact same content as request 1
// GET: /indexes
let req = http::Request::get("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res2_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res2_value.as_array().unwrap().len(), 1);
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
let r2_name = res2_value[0]["name"].as_str().unwrap();
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
assert_eq!(r2_name, r1_name);
assert_eq!(r2_uid.len(), r1_uid.len());
assert_eq!(r2_created_at.len(), r1_created_at.len());
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
}
#[test]
fn create_index_with_uid() {
let mut server = common::setup_server().unwrap();
// 1 - Create a new index
// Index with only a uid "movies"
// POST: /indexes
let body = json!({
"uid": "movies",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res1_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res1_value.as_object().unwrap().len(), 5);
let r1_name = res1_value["name"].as_str().unwrap();
let r1_uid = res1_value["uid"].as_str().unwrap();
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
assert_eq!(r1_name, "movies");
assert_eq!(r1_uid, "movies");
assert!(r1_created_at.len() > 1);
assert!(r1_updated_at.len() > 1);
// 2 - Check the list of indexes
// Must have 1 index with the exact same content as request 1
// GET: /indexes
let req = http::Request::get("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res2_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res2_value.as_array().unwrap().len(), 1);
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
let r2_name = res2_value[0]["name"].as_str().unwrap();
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
assert_eq!(r2_name, r1_name);
assert_eq!(r2_uid, r1_uid);
assert_eq!(r2_created_at.len(), r1_created_at.len());
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
}
#[test]
fn create_index_with_name_and_uid() {
let mut server = common::setup_server().unwrap();
// 1 - Create a new index
// Index with a name "Films" and a uid "fr_movies"
// POST: /indexes
let body = json!({
"name": "Films",
"uid": "fr_movies",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res1_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res1_value.as_object().unwrap().len(), 5);
let r1_name = res1_value["name"].as_str().unwrap();
let r1_uid = res1_value["uid"].as_str().unwrap();
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
assert_eq!(r1_name, "Films");
assert_eq!(r1_uid, "fr_movies");
assert!(r1_created_at.len() > 1);
assert!(r1_updated_at.len() > 1);
// 2 - Check the list of indexes
// Must have 1 index with the exact same content as request 1
// GET: /indexes
let req = http::Request::get("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res2_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res2_value.as_array().unwrap().len(), 1);
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
let r2_name = res2_value[0]["name"].as_str().unwrap();
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
assert_eq!(r2_name, r1_name);
assert_eq!(r2_uid, r1_uid);
assert_eq!(r2_created_at.len(), r1_created_at.len());
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
}
#[test]
fn rename_index() {
let mut server = common::setup_server().unwrap();
// 1 - Create a new index
// Index with only a name "movies"
// POST: /indexes
let body = json!({
"name": "movies",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res1_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res1_value.as_object().unwrap().len(), 5);
let r1_name = res1_value["name"].as_str().unwrap();
let r1_uid = res1_value["uid"].as_str().unwrap();
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
assert_eq!(r1_name, "movies");
assert_eq!(r1_uid.len(), 8);
assert!(r1_created_at.len() > 1);
assert!(r1_updated_at.len() > 1);
// 2 - Update an index name
// Update "movies" to "TV Shows"
// PUT: /indexes/:uid
let body = json!({
"name": "TV Shows",
})
.to_string()
.into_bytes();
let req = http::Request::put(format!("/indexes/{}", r1_uid))
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res2_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res2_value.as_object().unwrap().len(), 5);
let r2_name = res2_value["name"].as_str().unwrap();
let r2_uid = res2_value["uid"].as_str().unwrap();
let r2_created_at = res2_value["createdAt"].as_str().unwrap();
let r2_updated_at = res2_value["updatedAt"].as_str().unwrap();
assert_eq!(r2_name, "TV Shows");
assert_eq!(r2_uid, r1_uid);
assert_eq!(r2_created_at, r1_created_at);
assert!(r2_updated_at.len() > 1);
// 3 - Check the list of indexes
// Must have 1 index with the exact same content as request 2
// GET: /indexes
let req = http::Request::get("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res3_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res3_value.as_array().unwrap().len(), 1);
assert_eq!(res3_value[0].as_object().unwrap().len(), 5);
let r3_name = res3_value[0]["name"].as_str().unwrap();
let r3_uid = res3_value[0]["uid"].as_str().unwrap();
let r3_created_at = res3_value[0]["createdAt"].as_str().unwrap();
let r3_updated_at = res3_value[0]["updatedAt"].as_str().unwrap();
assert_eq!(r3_name, r2_name);
assert_eq!(r3_uid.len(), r1_uid.len());
assert_eq!(r3_created_at.len(), r1_created_at.len());
assert_eq!(r3_updated_at.len(), r2_updated_at.len());
}
#[test]
fn delete_index_and_recreate_it() {
let mut server = common::setup_server().unwrap();
// 1 - Create a new index
// Index with only a name "movies"
// POST: /indexes
let body = json!({
"name": "movies",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res1_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res1_value.as_object().unwrap().len(), 5);
let r1_name = res1_value["name"].as_str().unwrap();
let r1_uid = res1_value["uid"].as_str().unwrap();
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
assert_eq!(r1_name, "movies");
assert_eq!(r1_uid.len(), 8);
assert!(r1_created_at.len() > 1);
assert!(r1_updated_at.len() > 1);
// 2 - Check the list of indexes
// Must have 1 index with the exact same content as request 1
// GET: /indexes
let req = http::Request::get("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res2_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res2_value.as_array().unwrap().len(), 1);
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
let r2_name = res2_value[0]["name"].as_str().unwrap();
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
assert_eq!(r2_name, r1_name);
assert_eq!(r2_uid.len(), r1_uid.len());
assert_eq!(r2_created_at.len(), r1_created_at.len());
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
// 3 - Delete the index
// Delete the index "movies"
// DELETE: /indexes/:uid
let req = http::Request::delete(format!("/indexes/{}", r1_uid))
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 204);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
assert_eq!(buf.len(), 0);
// 4 - Check the list of indexes
// Must have 0 index
// GET: /indexes
let req = http::Request::get("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res2_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res2_value.as_array().unwrap().len(), 0);
// 5 - Create a new index
// Index with only a name "movies"
// POST: /indexes
let body = json!({
"name": "movies",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res1_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res1_value.as_object().unwrap().len(), 5);
let r1_name = res1_value["name"].as_str().unwrap();
let r1_uid = res1_value["uid"].as_str().unwrap();
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
assert_eq!(r1_name, "movies");
assert_eq!(r1_uid.len(), 8);
assert!(r1_created_at.len() > 1);
assert!(r1_updated_at.len() > 1);
// 6 - Check the list of indexes
// Must have 1 index with the exact same content as request 1
// GET: /indexes
let req = http::Request::get("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res2_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res2_value.as_array().unwrap().len(), 1);
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
let r2_name = res2_value[0]["name"].as_str().unwrap();
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
assert_eq!(r2_name, r1_name);
assert_eq!(r2_uid.len(), r1_uid.len());
assert_eq!(r2_created_at.len(), r1_created_at.len());
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
}
#[test]
fn check_multiples_indexes() {
let mut server = common::setup_server().unwrap();
// 1 - Create a new index
// Index with only a name "movies"
// POST: /indexes
let body = json!({
"name": "movies",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res1_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res1_value.as_object().unwrap().len(), 5);
let r1_name = res1_value["name"].as_str().unwrap();
let r1_uid = res1_value["uid"].as_str().unwrap();
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
assert_eq!(r1_name, "movies");
assert_eq!(r1_uid.len(), 8);
assert!(r1_created_at.len() > 1);
assert!(r1_updated_at.len() > 1);
// 2 - Check the list of indexes
// Must have 1 index with the exact same content as request 1
// GET: /indexes
let req = http::Request::get("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res2_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res2_value.as_array().unwrap().len(), 1);
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
let r2_0_name = res2_value[0]["name"].as_str().unwrap();
let r2_0_uid = res2_value[0]["uid"].as_str().unwrap();
let r2_0_created_at = res2_value[0]["createdAt"].as_str().unwrap();
let r2_0_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
assert_eq!(r2_0_name, r1_name);
assert_eq!(r2_0_uid.len(), r1_uid.len());
assert_eq!(r2_0_created_at.len(), r1_created_at.len());
assert_eq!(r2_0_updated_at.len(), r1_updated_at.len());
// 3 - Create a new index
// Index with only a name "films"
// POST: /indexes
let body = json!({
"name": "films",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res3_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res3_value.as_object().unwrap().len(), 5);
let r3_name = res3_value["name"].as_str().unwrap();
let r3_uid = res3_value["uid"].as_str().unwrap();
let r3_created_at = res3_value["createdAt"].as_str().unwrap();
let r3_updated_at = res3_value["updatedAt"].as_str().unwrap();
assert_eq!(r3_name, "films");
assert_eq!(r3_uid.len(), 8);
assert!(r3_created_at.len() > 1);
assert!(r3_updated_at.len() > 1);
// 4 - Check the list of indexes
// Must have 2 indexes with the exact same content as requests 1 and 3
// GET: /indexes
let req = http::Request::get("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res4_value: Value = serde_json::from_slice(&buf).unwrap();
assert_eq!(res4_value.as_array().unwrap().len(), 2);
assert_eq!(res4_value[0].as_object().unwrap().len(), 5);
let r4_0_name = res4_value[0]["name"].as_str().unwrap();
let r4_0_uid = res4_value[0]["uid"].as_str().unwrap();
let r4_0_created_at = res4_value[0]["createdAt"].as_str().unwrap();
let r4_0_updated_at = res4_value[0]["updatedAt"].as_str().unwrap();
assert_eq!(res4_value[1].as_object().unwrap().len(), 5);
let r4_1_name = res4_value[1]["name"].as_str().unwrap();
let r4_1_uid = res4_value[1]["uid"].as_str().unwrap();
let r4_1_created_at = res4_value[1]["createdAt"].as_str().unwrap();
let r4_1_updated_at = res4_value[1]["updatedAt"].as_str().unwrap();
if r4_0_name == r1_name {
assert_eq!(r4_0_name, r1_name);
assert_eq!(r4_0_uid.len(), r1_uid.len());
assert_eq!(r4_0_created_at.len(), r1_created_at.len());
assert_eq!(r4_0_updated_at.len(), r1_updated_at.len());
} else {
assert_eq!(r4_0_name, r3_name);
assert_eq!(r4_0_uid.len(), r3_uid.len());
assert_eq!(r4_0_created_at.len(), r3_created_at.len());
assert_eq!(r4_0_updated_at.len(), r3_updated_at.len());
}
if r4_1_name == r1_name {
assert_eq!(r4_1_name, r1_name);
assert_eq!(r4_1_uid.len(), r1_uid.len());
assert_eq!(r4_1_created_at.len(), r1_created_at.len());
assert_eq!(r4_1_updated_at.len(), r1_updated_at.len());
} else {
assert_eq!(r4_1_name, r3_name);
assert_eq!(r4_1_uid.len(), r3_uid.len());
assert_eq!(r4_1_created_at.len(), r3_created_at.len());
assert_eq!(r4_1_updated_at.len(), r3_updated_at.len());
}
}
#[test]
fn create_index_failed() {
let mut server = common::setup_server().unwrap();
// 1 - Push index creation with empty body
// POST: /indexes
let req = http::Request::post("/indexes").body(Body::empty()).unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 400);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let message = res_value["message"].as_str().unwrap();
assert_eq!(res_value.as_object().unwrap().len(), 1);
assert_eq!(message, "invalid data");
// 2 - Push index creation with empty json body
// POST: /indexes
let body = json!({}).to_string().into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 400);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let message = res_value["message"].as_str().unwrap();
assert_eq!(res_value.as_object().unwrap().len(), 1);
assert_eq!(message, "Index creation must have an uid");
// 3 - Create an index with extra data
// POST: /indexes
let body = json!({
"name": "movies",
"active": true
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 400);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let message = res_value["message"].as_str().unwrap();
assert_eq!(res_value.as_object().unwrap().len(), 1);
assert_eq!(message, "invalid data");
// 4 - Create an index with wrong data type
// POST: /indexes
let body = json!({
"name": "movies",
"uid": 0
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 400);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let message = res_value["message"].as_str().unwrap();
assert_eq!(res_value.as_object().unwrap().len(), 1);
assert_eq!(message, "invalid data");
}
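The assertions above only constrain generated uids to eight characters. A stricter helper could also pin down the alphabet, under the unverified assumption that generated uids are ASCII alphanumeric; the uid generator itself is not part of this diff.

fn assert_generated_uid(uid: &str) {
    assert_eq!(uid.len(), 8);
    // Hypothetical tightening: adjust if the generator uses another alphabet.
    assert!(uid.chars().all(|c| c.is_ascii_alphanumeric()));
}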

File diff suppressed because it is too large

View File

@ -0,0 +1,321 @@
use std::convert::Into;
use std::time::Duration;
use assert_json_diff::assert_json_eq;
use async_std::io::prelude::*;
use async_std::task::{block_on, sleep};
use http_service::Body;
use serde_json::json;
use serde_json::Value;
mod common;
// Process:
// - Write a full settings update
// - Delete all settings
// Check:
// - Settings are deleted, all fields are null
// - POST success responds with status code 202
// - GET success responds with status code 200
// - DELETE success responds with status code 202
#[test]
fn write_all_and_delete() {
let mut server = common::setup_server().unwrap();
// 1 - Create the index
let body = json!({
"uid": "movies",
"identifier": "id",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
// 2 - Send the settings
let json = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exact",
"dsc(release_date)",
"dsc(rank)",
],
"rankingDistinct": "movie_id",
"searchableAttributes": [
"id",
"movie_id",
"title",
"description",
"poster",
"release_date",
"rank",
],
"displayedAttributes": [
"title",
"description",
"poster",
"release_date",
"rank",
],
"stopWords": [
"the",
"a",
"an",
],
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
},
"indexNewFields": false,
});
let body = json.to_string().into_bytes();
let req = http::Request::post("/indexes/movies/settings")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 202);
block_on(sleep(Duration::from_secs(1)));
// 3 - Get all settings and compare to the previous one
let req = http::Request::get("/indexes/movies/settings")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
assert_json_eq!(json, res_value, ordered: false);
// 4 - Delete all settings
let req = http::Request::delete("/indexes/movies/settings")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 202);
block_on(sleep(Duration::from_secs(2)));
// 5 - Get all settings and check if they are empty
let req = http::Request::get("/indexes/movies/settings")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let json = json!({
"rankingRules": null,
"rankingDistinct": null,
"searchableAttributes": null,
"displayedAttributes": null,
"stopWords": null,
"synonyms": null,
"indexNewFields": true,
});
assert_json_eq!(json, res_value, ordered: false);
}
// Process:
// - Write a full settings update
// - Rewrite another settings update on top of it
// Check:
// - Settings are overwritten
// - Forgotten attributes are deleted
// - Null attributes are deleted
// - Empty attributes are deleted
#[test]
fn write_all_and_update() {
let mut server = common::setup_server().unwrap();
// 1 - Create the index
let body = json!({
"uid": "movies",
"identifier": "id",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
// 2 - Send the settings
let json = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exact",
"dsc(release_date)",
"dsc(rank)",
],
"rankingDistinct": "movie_id",
"searchableAttributes": [
"uid",
"movie_id",
"title",
"description",
"poster",
"release_date",
"rank",
],
"displayedAttributes": [
"title",
"description",
"poster",
"release_date",
"rank",
],
"stopWords": [
"the",
"a",
"an",
],
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine"],
},
"indexNewFields": false,
});
let body = json.to_string().into_bytes();
let req = http::Request::post("/indexes/movies/settings")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 202);
block_on(sleep(Duration::from_secs(1)));
// 3 - Get all settings and compare to the previous one
let req = http::Request::get("/indexes/movies/settings")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
assert_json_eq!(json, res_value, ordered: false);
// 4 - Update all settings
let json_update = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exact",
"dsc(release_date)",
],
"searchableAttributes": [
"title",
"description",
"uid",
],
"displayedAttributes": [
"title",
"description",
"release_date",
"rank",
"poster",
],
"stopWords": [
],
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine", "xmen"],
},
"indexNewFields": false,
});
let body_update = json_update.to_string().into_bytes();
let req = http::Request::post("/indexes/movies/settings")
.body(Body::from(body_update))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 202);
block_on(sleep(Duration::from_secs(1)));
// 5 - Get all settings and check if the content is the same of (4)
let req = http::Request::get("/indexes/movies/settings")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let res_expected = json!({
"rankingRules": [
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exact",
"dsc(release_date)",
],
"rankingDistinct": null,
"searchableAttributes": [
"title",
"description",
"uid",
],
"displayedAttributes": [
"title",
"description",
"release_date",
"rank",
"poster",
],
"stopWords": null,
"synonyms": {
"wolverine": ["xmen", "logan"],
"logan": ["wolverine", "xmen"],
},
"indexNewFields": false
});
assert_json_eq!(res_expected, res_value, ordered: false);
}
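The overwrite semantics this test pins down (an omitted key resets to null, an empty container clears the setting) fall out of Option-based deserialization. The following is a hypothetical miniature of that mechanism; the real Settings struct lives in meilisearch_core::settings and is not shown in this diff.

use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct MiniSettings {
    #[serde(default)]
    ranking_distinct: Option<String>,
    #[serde(default)]
    stop_words: Option<Vec<String>>,
}

fn main() {
    let body = r#"{ "stopWords": [] }"#;
    let settings: MiniSettings = serde_json::from_str(body).unwrap();
    assert_eq!(settings.ranking_distinct, None); // omitted key -> reset to null
    assert_eq!(settings.stop_words, Some(Vec::new())); // empty list -> clears the setting
}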

View File

@ -0,0 +1,212 @@
use std::time::Duration;
use assert_json_diff::assert_json_eq;
use async_std::io::prelude::*;
use async_std::task::{block_on, sleep};
use http_service::Body;
use serde_json::json;
use serde_json::Value;
mod common;
// Process:
// - Write a full settings update
// - Delete all settings
// Check:
// - Settings are deleted, all fields are null
// - POST success responds with status code 202
// - GET success responds with status code 200
// - DELETE success responds with status code 202
#[test]
fn write_all_and_delete() {
let mut server = common::setup_server().unwrap();
// 1 - Create the index
let body = json!({
"uid": "movies",
"identifier": "uid",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
// 2 - Send the settings
let json = json!([
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exact",
"dsc(release_date)",
"dsc(rank)",
]);
let body = json.to_string().into_bytes();
let req = http::Request::post("/indexes/movies/settings/ranking-rules")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 202);
block_on(sleep(Duration::from_secs(2)));
// 3 - Get all settings and compare to the previous one
let req = http::Request::get("/indexes/movies/settings/ranking-rules")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
assert_json_eq!(json, res_value, ordered: false);
// 4 - Delete all settings
let req = http::Request::delete("/indexes/movies/settings/ranking-rules")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 202);
block_on(sleep(Duration::from_secs(2)));
// 5 - Get all settings and check if they are empty
let req = http::Request::get("/indexes/movies/settings/ranking-rules")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let json = json!(null);
assert_json_eq!(json, res_value, ordered: false);
}
// Process:
// - Write a full settings update
// - Rewrite another settings update on top of it
// Check:
// - Settings are overwritten
// - Forgotten attributes are deleted
// - Null attributes are deleted
// - Empty attributes are deleted
#[test]
fn write_all_and_update() {
let mut server = common::setup_server().unwrap();
// 1 - Create the index
let body = json!({
"uid": "movies",
"identifier": "uid",
})
.to_string()
.into_bytes();
let req = http::Request::post("/indexes")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 201);
// 2 - Send the settings
let json = json!([
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exact",
"dsc(release_date)",
"dsc(rank)",
]);
let body = json.to_string().into_bytes();
let req = http::Request::post("/indexes/movies/settings/ranking-rules")
.body(Body::from(body))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 202);
block_on(sleep(Duration::from_secs(1)));
// 3 - Get all settings and compare to the previous one
let req = http::Request::get("/indexes/movies/settings/ranking-rules")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
assert_json_eq!(json, res_value, ordered: false);
// 4 - Update all settings
let json_update = json!([
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exact",
"dsc(release_date)",
]);
let body_update = json_update.to_string().into_bytes();
let req = http::Request::post("/indexes/movies/settings/ranking-rules")
.body(Body::from(body_update))
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 202);
block_on(sleep(Duration::from_secs(1)));
// 5 - Get all settings and check if the content is the same of (4)
let req = http::Request::get("/indexes/movies/settings/ranking-rules")
.body(Body::empty())
.unwrap();
let res = server.simulate(req).unwrap();
assert_eq!(res.status(), 200);
let mut buf = Vec::new();
block_on(res.into_body().read_to_end(&mut buf)).unwrap();
let res_value: Value = serde_json::from_slice(&buf).unwrap();
let res_expected = json!([
"_typo",
"_words",
"_proximity",
"_attribute",
"_words_position",
"_exact",
"dsc(release_date)",
]);
assert_json_eq!(res_expected, res_value, ordered: false);
}
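Custom rules such as dsc(release_date) have to be split into an order and a field name somewhere in the core. The sketch below, using the regex crate, is only one plausible way to do that split, not the actual implementation.

use regex::Regex;

fn parse_custom_rule(rule: &str) -> Option<(&str, &str)> {
    // Compiled on every call for brevity; real code would compile once.
    let re = Regex::new(r"^(asc|dsc)\(([a-zA-Z0-9_-]+)\)$").unwrap();
    let caps = re.captures(rule)?;
    Some((caps.get(1)?.as_str(), caps.get(2)?.as_str()))
}

fn main() {
    assert_eq!(parse_custom_rule("dsc(release_date)"), Some(("dsc", "release_date")));
    assert_eq!(parse_custom_rule("_typo"), None); // built-in rules are named, not parsed
}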

View File

@ -0,0 +1,22 @@
use std::{error, fmt};
pub type SResult<T> = Result<T, Error>;
#[derive(Debug)]
pub enum Error {
FieldNameNotFound(String),
MaxFieldsLimitExceeded,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Error::*;
match self {
FieldNameNotFound(field) => write!(f, "The field {:?} doesn't exist", field),
MaxFieldsLimitExceeded => write!(f, "The maximum number of field ids has been reached"),
}
}
}
impl error::Error for Error {}
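A quick sketch of the message the Display impl above produces for the variant that carries data; the {:?} formatting is what adds the quotes.

fn main() {
    let err = Error::FieldNameNotFound("title".to_string());
    assert_eq!(err.to_string(), r#"The field "title" doesn't exist"#);
}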

View File

@ -0,0 +1,77 @@
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
use crate::{SResult, FieldId};
#[derive(Debug, Default, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct FieldsMap {
name_map: HashMap<String, FieldId>,
id_map: HashMap<FieldId, String>,
next_id: FieldId
}
impl FieldsMap {
pub fn len(&self) -> usize {
self.name_map.len()
}
pub fn is_empty(&self) -> bool {
self.name_map.is_empty()
}
pub fn insert(&mut self, name: &str) -> SResult<FieldId> {
if let Some(id) = self.name_map.get(name) {
return Ok(*id)
}
let id = self.next_id;
self.next_id = self.next_id.next()?;
self.name_map.insert(name.to_string(), id);
self.id_map.insert(id, name.to_string());
Ok(id)
}
pub fn remove(&mut self, name: &str) {
if let Some(id) = self.name_map.get(name) {
self.id_map.remove(&id);
}
self.name_map.remove(name);
}
pub fn id(&self, name: &str) -> Option<FieldId> {
self.name_map.get(name).copied()
}
pub fn name<I: Into<FieldId>>(&self, id: I) -> Option<&str> {
self.id_map.get(&id.into()).map(|s| s.as_str())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn fields_map() {
let mut fields_map = FieldsMap::default();
assert_eq!(fields_map.insert("id").unwrap(), 0.into());
assert_eq!(fields_map.insert("title").unwrap(), 1.into());
assert_eq!(fields_map.insert("descritpion").unwrap(), 2.into());
assert_eq!(fields_map.insert("id").unwrap(), 0.into());
assert_eq!(fields_map.insert("title").unwrap(), 1.into());
assert_eq!(fields_map.insert("descritpion").unwrap(), 2.into());
assert_eq!(fields_map.id("id"), Some(0.into()));
assert_eq!(fields_map.id("title"), Some(1.into()));
assert_eq!(fields_map.id("descritpion"), Some(2.into()));
assert_eq!(fields_map.id("date"), None);
assert_eq!(fields_map.len(), 3);
assert_eq!(fields_map.name(0), Some("id"));
assert_eq!(fields_map.name(1), Some("title"));
assert_eq!(fields_map.name(2), Some("description"));
assert_eq!(fields_map.name(4), None);
fields_map.remove("title");
assert_eq!(fields_map.id("title"), None);
assert_eq!(fields_map.insert("title").unwrap(), 3.into());
assert_eq!(fields_map.len(), 3);
}
}
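The last assertions deserve a call-out, since they pin down the design choice the error type above exists for: ids grow monotonically and a removed name is reinserted under a fresh id, so field ids stored in documents can never be silently reattributed. A minimal sketch:

fn main() {
    let mut map = FieldsMap::default();
    assert_eq!(map.insert("id").unwrap(), 0.into());
    assert_eq!(map.insert("title").unwrap(), 1.into());
    map.remove("title");
    // "title" comes back under a fresh id, never the recycled 1.
    assert_eq!(map.insert("title").unwrap(), 2.into());
}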

View File

@ -1,533 +1,70 @@
mod error;
mod fields_map;
mod schema;

pub use error::{Error, SResult};
pub use fields_map::FieldsMap;
pub use schema::Schema;

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct IndexedPos(pub u16);

impl IndexedPos {
    pub const fn new(value: u16) -> IndexedPos {
        IndexedPos(value)
    }

    pub const fn min() -> IndexedPos {
        IndexedPos(u16::min_value())
    }

    pub const fn max() -> IndexedPos {
        IndexedPos(u16::max_value())
    }
}

impl From<u16> for IndexedPos {
    fn from(value: u16) -> IndexedPos {
        IndexedPos(value)
    }
}

impl Into<u16> for IndexedPos {
    fn into(self) -> u16 {
        self.0
    }
}

#[derive(Serialize, Deserialize, Debug, Copy, Clone, Default, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct FieldId(pub u16);

impl FieldId {
    pub const fn new(value: u16) -> FieldId {
        FieldId(value)
    }

    pub const fn min() -> FieldId {
        FieldId(u16::min_value())
    }

    pub const fn max() -> FieldId {
        FieldId(u16::max_value())
    }

    pub fn next(self) -> SResult<FieldId> {
        self.0.checked_add(1).map(FieldId).ok_or(Error::MaxFieldsLimitExceeded)
    }
}

impl From<u16> for FieldId {
    fn from(value: u16) -> FieldId {
        FieldId(value)
    }
}

impl Into<u16> for FieldId {
    fn into(self) -> u16 {
        self.0
    }
}

// --- removed (previous version) ---

use std::collections::{BTreeMap, HashMap};
use std::ops::BitOr;
use std::sync::Arc;
use std::{fmt, u16};

use indexmap::IndexMap;
use serde::{Deserialize, Serialize};

pub const DISPLAYED: SchemaProps = SchemaProps {
    displayed: true,
    indexed: false,
    ranked: false,
};

pub const INDEXED: SchemaProps = SchemaProps {
    displayed: false,
    indexed: true,
    ranked: false,
};

pub const RANKED: SchemaProps = SchemaProps {
    displayed: false,
    indexed: false,
    ranked: true,
};

#[derive(Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct SchemaProps {
    #[serde(default)]
    pub displayed: bool,
    #[serde(default)]
    pub indexed: bool,
    #[serde(default)]
    pub ranked: bool,
}

impl SchemaProps {
    pub fn is_displayed(self) -> bool {
        self.displayed
    }

    pub fn is_indexed(self) -> bool {
        self.indexed
    }

    pub fn is_ranked(self) -> bool {
        self.ranked
    }
}

impl BitOr for SchemaProps {
    type Output = Self;

    fn bitor(self, other: Self) -> Self::Output {
        SchemaProps {
            displayed: self.displayed | other.displayed,
            indexed: self.indexed | other.indexed,
            ranked: self.ranked | other.ranked,
        }
    }
}

impl fmt::Debug for SchemaProps {
    #[allow(non_camel_case_types)]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        #[derive(Debug)]
        struct DISPLAYED;
        #[derive(Debug)]
        struct INDEXED;
        #[derive(Debug)]
        struct RANKED;

        let mut debug_set = f.debug_set();
        if self.displayed {
            debug_set.entry(&DISPLAYED);
        }
        if self.indexed {
            debug_set.entry(&INDEXED);
        }
        if self.ranked {
            debug_set.entry(&RANKED);
        }
        debug_set.finish()
    }
}

#[derive(Serialize, Deserialize)]
pub struct SchemaBuilder {
    identifier: String,
    attributes: IndexMap<String, SchemaProps>,
}

impl SchemaBuilder {
    pub fn with_identifier<S: Into<String>>(name: S) -> SchemaBuilder {
        SchemaBuilder {
            identifier: name.into(),
            attributes: IndexMap::new(),
        }
    }

    pub fn new_attribute<S: Into<String>>(&mut self, name: S, props: SchemaProps) -> SchemaAttr {
        let len = self.attributes.len();
        if self.attributes.insert(name.into(), props).is_some() {
            panic!("Field already inserted.")
        }
        SchemaAttr(len as u16)
    }

    pub fn build(self) -> Schema {
        let mut attrs = HashMap::new();
        let mut props = Vec::new();

        for (i, (name, prop)) in self.attributes.into_iter().enumerate() {
            attrs.insert(name.clone(), SchemaAttr(i as u16));
            props.push((name, prop));
        }

        let identifier = self.identifier;
        Schema {
            inner: Arc::new(InnerSchema {
                identifier,
                attrs,
                props,
            }),
        }
    }
}

#[derive(Clone, PartialEq, Eq)]
pub struct Schema {
    inner: Arc<InnerSchema>,
}

#[derive(Clone, PartialEq, Eq)]
struct InnerSchema {
    identifier: String,
    attrs: HashMap<String, SchemaAttr>,
    props: Vec<(String, SchemaProps)>,
}

impl Schema {
    fn to_builder(&self) -> SchemaBuilder {
        let identifier = self.inner.identifier.clone();
        let attributes = self.attributes_ordered();
        SchemaBuilder {
            identifier,
            attributes,
        }
    }

    fn attributes_ordered(&self) -> IndexMap<String, SchemaProps> {
        let mut ordered = BTreeMap::new();
        for (name, attr) in &self.inner.attrs {
            let (_, props) = self.inner.props[attr.0 as usize];
            ordered.insert(attr.0, (name, props));
        }

        let mut attributes = IndexMap::with_capacity(ordered.len());
        for (_, (name, props)) in ordered {
            attributes.insert(name.clone(), props);
        }

        attributes
    }

    pub fn number_of_attributes(&self) -> usize {
        self.inner.attrs.len()
    }

    pub fn props(&self, attr: SchemaAttr) -> SchemaProps {
        let (_, props) = self.inner.props[attr.0 as usize];
        props
    }

    pub fn identifier_name(&self) -> &str {
        &self.inner.identifier
    }

    pub fn attribute<S: AsRef<str>>(&self, name: S) -> Option<SchemaAttr> {
        self.inner.attrs.get(name.as_ref()).cloned()
    }

    pub fn attribute_name(&self, attr: SchemaAttr) -> &str {
        let (name, _) = &self.inner.props[attr.0 as usize];
        name
    }

    pub fn iter<'a>(&'a self) -> impl Iterator<Item = (&str, SchemaAttr, SchemaProps)> + 'a {
        self.inner.props.iter().map(move |(name, prop)| {
            let attr = self.inner.attrs.get(name).unwrap();
            (name.as_str(), *attr, *prop)
        })
    }
}

impl Serialize for Schema {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::ser::Serializer,
    {
        self.to_builder().serialize(serializer)
    }
}
impl<'de> Deserialize<'de> for Schema {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let builder = SchemaBuilder::deserialize(deserializer)?;
Ok(builder.build())
}
}
impl fmt::Debug for Schema {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let builder = self.to_builder();
f.debug_struct("Schema")
.field("identifier", &builder.identifier)
.field("attributes", &builder.attributes)
.finish()
}
}
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct SchemaAttr(pub u16);
impl SchemaAttr {
pub const fn new(value: u16) -> SchemaAttr {
SchemaAttr(value)
}
pub const fn min() -> SchemaAttr {
SchemaAttr(u16::min_value())
}
pub const fn max() -> SchemaAttr {
SchemaAttr(u16::max_value())
}
pub fn next(self) -> Option<SchemaAttr> {
self.0.checked_add(1).map(SchemaAttr)
}
pub fn prev(self) -> Option<SchemaAttr> {
self.0.checked_sub(1).map(SchemaAttr)
}
}
impl fmt::Display for SchemaAttr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.0.fmt(f)
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Diff {
IdentChange {
old: String,
new: String,
},
AttrMove {
name: String,
old: usize,
new: usize,
},
AttrPropsChange {
name: String,
old: SchemaProps,
new: SchemaProps,
},
NewAttr {
name: String,
pos: usize,
props: SchemaProps,
},
RemovedAttr {
name: String,
},
}
pub fn diff(old: &Schema, new: &Schema) -> Vec<Diff> {
use Diff::{AttrMove, AttrPropsChange, IdentChange, NewAttr, RemovedAttr};
let mut differences = Vec::new();
let old = old.to_builder();
let new = new.to_builder();
// check if the old identifier differs from the new one
if old.identifier != new.identifier {
let old = old.identifier;
let new = new.identifier;
differences.push(IdentChange { old, new });
}
// compare all old attributes positions
// and properties with the new ones
for (pos, (name, props)) in old.attributes.iter().enumerate() {
match new.attributes.get_full(name) {
Some((npos, _, nprops)) => {
if pos != npos {
let name = name.clone();
differences.push(AttrMove {
name,
old: pos,
new: npos,
});
}
if props != nprops {
let name = name.clone();
differences.push(AttrPropsChange {
name,
old: *props,
new: *nprops,
});
}
}
None => differences.push(RemovedAttr { name: name.clone() }),
}
}
// retrieve all attributes that
// were not present in the old schema
for (pos, (name, props)) in new.attributes.iter().enumerate() {
if !old.attributes.contains_key(name) {
let name = name.clone();
differences.push(NewAttr {
name,
pos,
props: *props,
});
}
}
differences
}
#[cfg(test)]
mod tests {
use super::*;
use std::error::Error;
#[test]
fn difference() {
use Diff::{AttrMove, AttrPropsChange, IdentChange, NewAttr, RemovedAttr};
let mut builder = SchemaBuilder::with_identifier("id");
builder.new_attribute("alpha", DISPLAYED);
builder.new_attribute("beta", DISPLAYED | INDEXED);
builder.new_attribute("gamma", INDEXED);
builder.new_attribute("omega", INDEXED);
let old = builder.build();
let mut builder = SchemaBuilder::with_identifier("kiki");
builder.new_attribute("beta", DISPLAYED | INDEXED);
builder.new_attribute("alpha", DISPLAYED | INDEXED);
builder.new_attribute("delta", RANKED);
builder.new_attribute("gamma", DISPLAYED);
let new = builder.build();
let differences = diff(&old, &new);
let expected = &[
IdentChange {
old: format!("id"),
new: format!("kiki"),
},
AttrMove {
name: format!("alpha"),
old: 0,
new: 1,
},
AttrPropsChange {
name: format!("alpha"),
old: DISPLAYED,
new: DISPLAYED | INDEXED,
},
AttrMove {
name: format!("beta"),
old: 1,
new: 0,
},
AttrMove {
name: format!("gamma"),
old: 2,
new: 3,
},
AttrPropsChange {
name: format!("gamma"),
old: INDEXED,
new: DISPLAYED,
},
RemovedAttr {
name: format!("omega"),
},
NewAttr {
name: format!("delta"),
pos: 2,
props: RANKED,
},
];
assert_eq!(&differences, expected)
}
#[test]
fn serialize_deserialize() -> bincode::Result<()> {
let mut builder = SchemaBuilder::with_identifier("id");
builder.new_attribute("alpha", DISPLAYED);
builder.new_attribute("beta", DISPLAYED | INDEXED);
builder.new_attribute("gamma", INDEXED);
let schema = builder.build();
let mut buffer = Vec::new();
bincode::serialize_into(&mut buffer, &schema)?;
let schema2 = bincode::deserialize_from(buffer.as_slice())?;
assert_eq!(schema, schema2);
Ok(())
}
#[test]
fn serialize_deserialize_toml() -> Result<(), Box<dyn Error>> {
let mut builder = SchemaBuilder::with_identifier("id");
builder.new_attribute("alpha", DISPLAYED);
builder.new_attribute("beta", DISPLAYED | INDEXED);
builder.new_attribute("gamma", INDEXED);
let schema = builder.build();
let buffer = toml::to_vec(&schema)?;
let schema2 = toml::from_slice(buffer.as_slice())?;
assert_eq!(schema, schema2);
let data = r#"
identifier = "id"
[attributes."alpha"]
displayed = true
[attributes."beta"]
displayed = true
indexed = true
[attributes."gamma"]
indexed = true
"#;
let schema2 = toml::from_str(data)?;
assert_eq!(schema, schema2);
Ok(())
}
#[test]
fn serialize_deserialize_json() -> Result<(), Box<dyn Error>> {
let mut builder = SchemaBuilder::with_identifier("id");
builder.new_attribute("alpha", DISPLAYED);
builder.new_attribute("beta", DISPLAYED | INDEXED);
builder.new_attribute("gamma", INDEXED);
let schema = builder.build();
let buffer = serde_json::to_vec(&schema)?;
let schema2 = serde_json::from_slice(buffer.as_slice())?;
assert_eq!(schema, schema2);
let data = r#"
{
"identifier": "id",
"attributes": {
"alpha": {
"displayed": true
},
"beta": {
"displayed": true,
"indexed": true
},
"gamma": {
"indexed": true
}
}
}"#;
let schema2 = serde_json::from_str(data)?;
assert_eq!(schema, schema2);
Ok(())
}
#[test]
fn debug_output() {
use std::fmt::Write as _;
let mut builder = SchemaBuilder::with_identifier("id");
builder.new_attribute("alpha", DISPLAYED);
builder.new_attribute("beta", DISPLAYED | INDEXED);
builder.new_attribute("gamma", INDEXED);
let schema = builder.build();
let mut output = String::new();
let _ = write!(&mut output, "{:#?}", schema);
let expected = r#"Schema {
identifier: "id",
attributes: {
"alpha": {
DISPLAYED,
},
"beta": {
DISPLAYED,
INDEXED,
},
"gamma": {
INDEXED,
},
},
}"#;
assert_eq!(output, expected);
let mut output = String::new();
let _ = write!(&mut output, "{:?}", schema);
let expected = r#"Schema { identifier: "id", attributes: {"alpha": {DISPLAYED}, "beta": {DISPLAYED, INDEXED}, "gamma": {INDEXED}} }"#;
assert_eq!(output, expected);
    }
}
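The point of replacing the single SchemaAttr with two newtypes shows in the signatures above: a FieldId names a field permanently, an IndexedPos is only that field's current slot in the indexed list, and the compiler now refuses to mix the two. A small sketch of the conversions that do exist (the crate path is assumed):

use meilisearch_schema::{FieldId, IndexedPos};

fn main() {
    let id = FieldId::new(7);
    let raw: u16 = id.into();
    assert_eq!(raw, 7);
    assert_eq!(FieldId::from(raw), id);

    // Running out of the u16 space is now a hard error instead of a wrap.
    assert!(FieldId::max().next().is_err());

    // FieldId and IndexedPos are distinct types: comparing them directly
    // does not compile, which is the whole point of the split.
    let _pos = IndexedPos::new(0);
}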

View File

@ -0,0 +1,200 @@
use crate::{FieldsMap, FieldId, SResult, Error, IndexedPos};
use serde::{Serialize, Deserialize};
use std::collections::{HashMap, HashSet};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Schema {
fields_map: FieldsMap,
identifier: FieldId,
ranked: HashSet<FieldId>,
displayed: HashSet<FieldId>,
indexed: Vec<FieldId>,
indexed_map: HashMap<FieldId, IndexedPos>,
index_new_fields: bool,
}
impl Schema {
pub fn with_identifier(name: &str) -> Schema {
let mut fields_map = FieldsMap::default();
let field_id = fields_map.insert(name).unwrap();
Schema {
fields_map,
identifier: field_id,
ranked: HashSet::new(),
displayed: HashSet::new(),
indexed: Vec::new(),
indexed_map: HashMap::new(),
index_new_fields: true,
}
}
pub fn identifier(&self) -> &str {
self.fields_map.name(self.identifier).unwrap()
}
pub fn set_identifier(&mut self, id: &str) -> SResult<()> {
match self.id(id) {
Some(id) => {
self.identifier = id;
Ok(())
},
None => Err(Error::FieldNameNotFound(id.to_string()))
}
}
pub fn id(&self, name: &str) -> Option<FieldId> {
self.fields_map.id(name)
}
pub fn name<I: Into<FieldId>>(&self, id: I) -> Option<&str> {
self.fields_map.name(id)
}
pub fn contains(&self, name: &str) -> bool {
self.fields_map.id(name).is_some()
}
pub fn insert(&mut self, name: &str) -> SResult<FieldId> {
self.fields_map.insert(name)
}
pub fn insert_and_index(&mut self, name: &str) -> SResult<FieldId> {
match self.fields_map.id(name) {
Some(id) => {
Ok(id)
}
None => {
if self.index_new_fields {
self.set_indexed(name)?;
self.set_displayed(name)
} else {
self.fields_map.insert(name)
}
}
}
}
pub fn ranked(&self) -> &HashSet<FieldId> {
&self.ranked
}
pub fn ranked_name(&self) -> HashSet<&str> {
self.ranked.iter().filter_map(|a| self.name(*a)).collect()
}
pub fn displayed(&self) -> &HashSet<FieldId> {
&self.displayed
}
pub fn displayed_name(&self) -> HashSet<&str> {
self.displayed.iter().filter_map(|a| self.name(*a)).collect()
}
pub fn indexed(&self) -> &Vec<FieldId> {
&self.indexed
}
pub fn indexed_name(&self) -> Vec<&str> {
self.indexed.iter().filter_map(|a| self.name(*a)).collect()
}
pub fn set_ranked(&mut self, name: &str) -> SResult<FieldId> {
let id = self.fields_map.insert(name)?;
self.ranked.insert(id);
Ok(id)
}
pub fn set_displayed(&mut self, name: &str) -> SResult<FieldId> {
let id = self.fields_map.insert(name)?;
self.displayed.insert(id);
Ok(id)
}
pub fn set_indexed(&mut self, name: &str) -> SResult<(FieldId, IndexedPos)> {
let id = self.fields_map.insert(name)?;
if let Some(indexed_pos) = self.indexed_map.get(&id) {
return Ok((id, *indexed_pos))
};
let pos = self.indexed.len() as u16;
self.indexed.push(id);
self.indexed_map.insert(id, pos.into());
Ok((id, pos.into()))
}
pub fn remove_ranked(&mut self, name: &str) {
if let Some(id) = self.fields_map.id(name) {
self.ranked.remove(&id);
}
}
pub fn remove_displayed(&mut self, name: &str) {
if let Some(id) = self.fields_map.id(name) {
self.displayed.remove(&id);
}
}
pub fn remove_indexed(&mut self, name: &str) {
if let Some(id) = self.fields_map.id(name) {
self.indexed_map.remove(&id);
self.indexed.retain(|x| *x != id);
}
}
pub fn is_ranked(&self, id: FieldId) -> bool {
self.ranked.get(&id).is_some()
}
pub fn is_displayed(&self, id: FieldId) -> bool {
self.displayed.get(&id).is_some()
}
pub fn is_indexed(&self, id: FieldId) -> Option<&IndexedPos> {
self.indexed_map.get(&id)
}
pub fn indexed_pos_to_field_id<I: Into<IndexedPos>>(&self, pos: I) -> Option<FieldId> {
let indexed_pos = pos.into().0 as usize;
if indexed_pos < self.indexed.len() {
Some(self.indexed[indexed_pos as usize])
} else {
None
}
}
pub fn update_ranked<S: AsRef<str>>(&mut self, data: impl IntoIterator<Item = S>) -> SResult<()> {
self.ranked.clear();
for name in data {
self.set_ranked(name.as_ref())?;
}
Ok(())
}
pub fn update_displayed<S: AsRef<str>>(&mut self, data: impl IntoIterator<Item = S>) -> SResult<()> {
self.displayed.clear();
for name in data {
self.set_displayed(name.as_ref())?;
}
Ok(())
}
pub fn update_indexed<S: AsRef<str>>(&mut self, data: Vec<S>) -> SResult<()> {
self.indexed.clear();
self.indexed_map.clear();
for name in data {
self.set_indexed(name.as_ref())?;
}
Ok(())
}
pub fn index_new_fields(&self) -> bool {
self.index_new_fields
}
pub fn set_index_new_fields(&mut self, value: bool) {
self.index_new_fields = value;
}
}
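A short usage sketch of this API, exercising the index_new_fields switch that the HTTP settings tests above toggle (the crate path is assumed):

use meilisearch_schema::{Schema, SResult};

fn main() -> SResult<()> {
    let mut schema = Schema::with_identifier("id");
    assert_eq!(schema.identifier(), "id");

    // index_new_fields defaults to true, so an unknown field becomes both
    // indexed and displayed the first time it is seen.
    let title = schema.insert_and_index("title")?;
    assert!(schema.is_displayed(title));
    assert!(schema.is_indexed(title).is_some());

    // After turning it off, new fields are only registered in the map.
    schema.set_index_new_fields(false);
    let internal = schema.insert_and_index("internal_note")?;
    assert!(schema.is_indexed(internal).is_none());

    // IndexedPos -> FieldId: position 0 is the first indexed field.
    assert_eq!(schema.indexed_pos_to_field_id(0u16), Some(title));
    Ok(())
}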

View File

@ -28,6 +28,7 @@ pub struct DocIndex {
/// The attribute in the document where the word was found
/// along with the index in it.
/// This is an IndexedPos and not a FieldId. Must be converted each time.
pub attribute: u16,
pub word_index: u16,
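Concretely, the conversion the new comment demands is one schema lookup. A sketch, assuming the caller has the index's current Schema at hand (crate path assumed):

use meilisearch_schema::{FieldId, Schema};

// DocIndex.attribute is an IndexedPos (a u16 slot in the indexed list);
// resolve it to the stable FieldId before comparing it with schema sets.
fn attribute_field_id(attribute: u16, schema: &Schema) -> Option<FieldId> {
    schema.indexed_pos_to_field_id(attribute)
}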