adapt meilisearch-http to the new schemaless option

This commit is contained in:
qdequele 2020-01-14 17:26:27 +01:00
parent 21d122a870
commit 4f0ead625b
No known key found for this signature in database
GPG Key ID: B3F0A000EBF11745
13 changed files with 143 additions and 359 deletions

1
Cargo.lock generated
View File

@@ -970,6 +970,7 @@ dependencies = [
"meilisearch-types 0.8.4", "meilisearch-types 0.8.4",
"once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "ordered-float 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)", "rustyline 5.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"sdset 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "sdset 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.102 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@@ -32,6 +32,7 @@ serde_json = "1.0.41"
siphasher = "0.3.1" siphasher = "0.3.1"
slice-group-by = "0.2.6" slice-group-by = "0.2.6"
zerocopy = "0.2.8" zerocopy = "0.2.8"
regex = "1"
[dev-dependencies] [dev-dependencies]
assert_matches = "1.3" assert_matches = "1.3"

View File

@@ -1,6 +1,14 @@
use std::sync::Mutex;
use std::collections::{BTreeMap, BTreeSet}; use std::collections::{BTreeMap, BTreeSet};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use once_cell::sync::Lazy;
static RANKING_RULE_REGEX: Lazy<Mutex<regex::Regex>> = Lazy::new(|| {
let regex = regex::Regex::new(r"(asc|dsc)\(([a-zA-Z0-9-_]*)\)").unwrap();
Mutex::new(regex)
});
#[derive(Default, Clone, Serialize, Deserialize)] #[derive(Default, Clone, Serialize, Deserialize)]
pub struct Settings { pub struct Settings {
@@ -17,8 +25,36 @@ pub struct Settings {
impl Into<SettingsUpdate> for Settings { impl Into<SettingsUpdate> for Settings {
fn into(self) -> SettingsUpdate { fn into(self) -> SettingsUpdate {
let settings = self.clone(); let settings = self.clone();
let ranking_rules = match settings.ranking_rules {
Some(rules) => {
let mut final_rules = Vec::new();
for rule in rules {
let parsed_rule = match rule.as_str() {
"_typo" => RankingRule::Typo,
"_words" => RankingRule::Words,
"_proximity" => RankingRule::Proximity,
"_attribute" => RankingRule::Attribute,
"_words_position" => RankingRule::WordsPosition,
"_exact" => RankingRule::Exact,
_ => {
let captures = RANKING_RULE_REGEX.lock().unwrap().captures(&rule).unwrap();
match captures[0].as_ref() {
"asc" => RankingRule::Asc(captures[1].to_string()),
"dsc" => RankingRule::Dsc(captures[1].to_string()),
_ => continue
}
}
};
final_rules.push(parsed_rule);
}
Some(final_rules)
}
None => None
};
SettingsUpdate { SettingsUpdate {
ranking_rules: settings.ranking_rules.into(), ranking_rules: ranking_rules.into(),
ranking_distinct: settings.ranking_distinct.into(), ranking_distinct: settings.ranking_distinct.into(),
attribute_identifier: settings.attribute_identifier.into(), attribute_identifier: settings.attribute_identifier.into(),
attributes_searchable: settings.attributes_searchable.into(), attributes_searchable: settings.attributes_searchable.into(),
@@ -57,9 +93,21 @@ impl<T> UpdateState<T> {
} }
} }
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum RankingRule {
Typo,
Words,
Proximity,
Attribute,
WordsPosition,
Exact,
Asc(String),
Dsc(String),
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SettingsUpdate { pub struct SettingsUpdate {
pub ranking_rules: UpdateState<Vec<String>>, pub ranking_rules: UpdateState<Vec<RankingRule>>,
pub ranking_distinct: UpdateState<String>, pub ranking_distinct: UpdateState<String>,
pub attribute_identifier: UpdateState<String>, pub attribute_identifier: UpdateState<String>,
pub attributes_searchable: UpdateState<Vec<String>>, pub attributes_searchable: UpdateState<Vec<String>>,

View File

@@ -8,6 +8,7 @@ use meilisearch_schema::Schema;
use crate::database::MainT; use crate::database::MainT;
use crate::RankedMap; use crate::RankedMap;
use crate::settings::RankingRule;
const CREATED_AT_KEY: &str = "created-at"; const CREATED_AT_KEY: &str = "created-at";
const RANKING_RULES_KEY: &str = "ranking-rules-key"; const RANKING_RULES_KEY: &str = "ranking-rules-key";
@@ -188,12 +189,12 @@ impl Main {
} }
} }
pub fn ranking_rules<'txn>(&self, reader: &'txn heed::RoTxn<MainT>) -> ZResult<Option<Vec<String>>> { pub fn ranking_rules<'txn>(&self, reader: &'txn heed::RoTxn<MainT>) -> ZResult<Option<Vec<RankingRule>>> {
self.main.get::<_, Str, SerdeBincode<Vec<String>>>(reader, RANKING_RULES_KEY) self.main.get::<_, Str, SerdeBincode<Vec<RankingRule>>>(reader, RANKING_RULES_KEY)
} }
pub fn put_ranking_rules(self, writer: &mut heed::RwTxn<MainT>, value: Vec<String>) -> ZResult<()> { pub fn put_ranking_rules(self, writer: &mut heed::RwTxn<MainT>, value: Vec<RankingRule>) -> ZResult<()> {
self.main.put::<_, Str, SerdeBincode<Vec<String>>>(writer, RANKING_RULES_KEY, &value) self.main.put::<_, Str, SerdeBincode<Vec<RankingRule>>>(writer, RANKING_RULES_KEY, &value)
} }
pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> { pub fn delete_ranking_rules(self, writer: &mut heed::RwTxn<MainT>) -> ZResult<bool> {

View File

@@ -43,22 +43,6 @@ use crate::{query_builder::QueryBuilder, update, DocIndex, DocumentId, Error, MR
type BEU64 = zerocopy::U64<byteorder::BigEndian>; type BEU64 = zerocopy::U64<byteorder::BigEndian>;
type BEU16 = zerocopy::U16<byteorder::BigEndian>; type BEU16 = zerocopy::U16<byteorder::BigEndian>;
// #[derive(Debug, Copy, Clone, AsBytes, FromBytes)]
// #[repr(C)]
// pub struct DocumentAttrKey {
// docid: BEU64,
// indexed_pos: BEU16,
// }
// impl DocumentAttrKey {
// fn new(docid: DocumentId, indexed_pos: IndexedPos) -> DocumentAttrKey {
// DocumentAttrKey {
// docid: BEU64::new(docid.0),
// indexed_pos: BEU16::new(indexed_pos.0),
// }
// }
// }
#[derive(Debug, Copy, Clone, AsBytes, FromBytes)] #[derive(Debug, Copy, Clone, AsBytes, FromBytes)]
#[repr(C)] #[repr(C)]
pub struct DocumentFieldIndexedKey { pub struct DocumentFieldIndexedKey {
@@ -271,7 +255,6 @@ impl Index {
} }
} }
pub fn customs_update(&self, writer: &mut heed::RwTxn<UpdateT>, customs: Vec<u8>) -> ZResult<u64> { pub fn customs_update(&self, writer: &mut heed::RwTxn<UpdateT>, customs: Vec<u8>) -> ZResult<u64> {
let _ = self.updates_notifier.send(UpdateEvent::NewUpdate); let _ = self.updates_notifier.send(UpdateEvent::NewUpdate);
update::push_customs_update(writer, self.updates, self.updates_results, customs) update::push_customs_update(writer, self.updates, self.updates_results, customs)

View File

@@ -84,13 +84,13 @@ impl DataInner {
let mut fields_frequency = HashMap::<_, usize>::new(); let mut fields_frequency = HashMap::<_, usize>::new();
for result in all_documents_fields { for result in all_documents_fields {
let (_, attr, _) = result?; let (_, attr, _) = result?;
*fields_frequency.entry(attr).or_default() += 1; *fields_frequency.entry(schema.indexed_pos_to_field_id(attr).unwrap()).or_default() += 1;
} }
// convert attributes to their names // convert attributes to their names
let frequency: HashMap<_, _> = fields_frequency let frequency: HashMap<_, _> = fields_frequency
.into_iter() .into_iter()
.map(|(a, c)| (schema.attribute_name(a).to_owned(), c)) .map(|(a, c)| (schema.get_name(a).unwrap(), c))
.collect(); .collect();
index index

View File

@@ -1,11 +1,11 @@
use crate::routes::setting::{RankingOrdering, Setting};
use indexmap::IndexMap; use indexmap::IndexMap;
use log::{error, warn}; use log::error;
use meilisearch_core::criterion::*; use meilisearch_core::criterion::*;
use meilisearch_core::Highlight; use meilisearch_core::Highlight;
use meilisearch_core::{Index, RankedMap}; use meilisearch_core::{Index, RankedMap};
use meilisearch_core::MainT; use meilisearch_core::MainT;
use meilisearch_schema::{Schema, SchemaAttr}; use meilisearch_core::settings::RankingRule;
use meilisearch_schema::{Schema, FieldId};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use std::cmp::Ordering; use std::cmp::Ordering;
@@ -172,7 +172,7 @@ impl<'a> SearchBuilder<'a> {
let ref_index = &self.index; let ref_index = &self.index;
let value = value.trim().to_lowercase(); let value = value.trim().to_lowercase();
let attr = match schema.attribute(attr) { let attr = match schema.get_id(attr) {
Some(attr) => attr, Some(attr) => attr,
None => return Err(Error::UnknownFilteredAttribute), None => return Err(Error::UnknownFilteredAttribute),
}; };
@@ -274,75 +274,24 @@ impl<'a> SearchBuilder<'a> {
ranked_map: &'a RankedMap, ranked_map: &'a RankedMap,
schema: &Schema, schema: &Schema,
) -> Result<Option<Criteria<'a>>, Error> { ) -> Result<Option<Criteria<'a>>, Error> {
let current_settings = match self.index.main.customs(reader).unwrap() { let ranking_rules = self.index.main.ranking_rules(reader).unwrap();
Some(bytes) => bincode::deserialize(bytes).unwrap(),
None => Setting::default(),
};
let ranking_rules = &current_settings.ranking_rules;
let ranking_order = &current_settings.ranking_order;
if let Some(ranking_rules) = ranking_rules { if let Some(ranking_rules) = ranking_rules {
let mut builder = CriteriaBuilder::with_capacity(7 + ranking_rules.len()); let mut builder = CriteriaBuilder::with_capacity(7 + ranking_rules.len());
if let Some(ranking_rules_order) = ranking_order { for rule in ranking_rules {
for rule in ranking_rules_order { match rule {
match rule.as_str() { RankingRule::Typo => builder.push(Typo),
"_typo" => builder.push(Typo), RankingRule::Words => builder.push(Words),
"_words" => builder.push(Words), RankingRule::Proximity => builder.push(Proximity),
"_proximity" => builder.push(Proximity), RankingRule::Attribute => builder.push(Attribute),
"_attribute" => builder.push(Attribute), RankingRule::WordsPosition => builder.push(WordsPosition),
"_words_position" => builder.push(WordsPosition), RankingRule::Exact => builder.push(Exact),
"_exact" => builder.push(Exact), RankingRule::Asc(field) => builder.push(SortByAttr::lower_is_better(&ranked_map, &schema, &field).unwrap()),
_ => { RankingRule::Dsc(field) => builder.push(SortByAttr::higher_is_better(&ranked_map, &schema, &field).unwrap()),
let order = match ranking_rules.get(rule.as_str()) {
Some(o) => o,
None => continue,
}; };
let custom_ranking = match order {
RankingOrdering::Asc => {
SortByAttr::lower_is_better(&ranked_map, &schema, &rule)
.unwrap()
}
RankingOrdering::Dsc => {
SortByAttr::higher_is_better(&ranked_map, &schema, &rule)
.unwrap()
}
};
builder.push(custom_ranking);
}
}
} }
builder.push(DocumentId); builder.push(DocumentId);
return Ok(Some(builder.build())); return Ok(Some(builder.build()));
} else {
builder.push(Typo);
builder.push(Words);
builder.push(Proximity);
builder.push(Attribute);
builder.push(WordsPosition);
builder.push(Exact);
for (rule, order) in ranking_rules.iter() {
let custom_ranking = match order {
RankingOrdering::Asc => {
SortByAttr::lower_is_better(&ranked_map, &schema, &rule)
}
RankingOrdering::Dsc => {
SortByAttr::higher_is_better(&ranked_map, &schema, &rule)
}
};
if let Ok(custom_ranking) = custom_ranking {
builder.push(custom_ranking);
} else {
// TODO push this warning to a log tree
warn!("Custom ranking cannot be added; Attribute {} not registered for ranking", rule)
}
}
builder.push(DocumentId);
return Ok(Some(builder.build()));
}
} }
Ok(None) Ok(None)
@@ -421,14 +370,14 @@ fn crop_document(
matches.sort_unstable_by_key(|m| (m.char_index, m.char_length)); matches.sort_unstable_by_key(|m| (m.char_index, m.char_length));
for (field, length) in fields { for (field, length) in fields {
let attribute = match schema.attribute(field) { let attribute = match schema.get_id(field) {
Some(attribute) => attribute, Some(attribute) => attribute,
None => continue, None => continue,
}; };
let selected_matches = matches let selected_matches = matches
.iter() .iter()
.filter(|m| SchemaAttr::new(m.attribute) == attribute) .filter(|m| FieldId::new(m.attribute) == attribute)
.cloned(); .cloned();
if let Some(Value::String(ref mut original_text)) = document.get_mut(field) { if let Some(Value::String(ref mut original_text)) = document.get_mut(field) {
@@ -437,7 +386,7 @@ fn crop_document(
*original_text = cropped_text; *original_text = cropped_text;
matches.retain(|m| SchemaAttr::new(m.attribute) != attribute); matches.retain(|m| FieldId::new(m.attribute) != attribute);
matches.extend_from_slice(&cropped_matches); matches.extend_from_slice(&cropped_matches);
} }
} }
@@ -450,9 +399,7 @@ fn calculate_matches(
) -> MatchesInfos { ) -> MatchesInfos {
let mut matches_result: HashMap<String, Vec<MatchPosition>> = HashMap::new(); let mut matches_result: HashMap<String, Vec<MatchPosition>> = HashMap::new();
for m in matches.iter() { for m in matches.iter() {
let attribute = schema if let Some(attribute) = schema.get_name(FieldId::new(m.attribute)) {
.attribute_name(SchemaAttr::new(m.attribute))
.to_string();
if let Some(attributes_to_retrieve) = attributes_to_retrieve.clone() { if let Some(attributes_to_retrieve) = attributes_to_retrieve.clone() {
if !attributes_to_retrieve.contains(attribute.as_str()) { if !attributes_to_retrieve.contains(attribute.as_str()) {
continue; continue;
@@ -472,6 +419,7 @@ fn calculate_matches(
matches_result.insert(attribute, positions); matches_result.insert(attribute, positions);
} }
} }
}
for (_, val) in matches_result.iter_mut() { for (_, val) in matches_result.iter_mut() {
val.sort_unstable(); val.sort_unstable();
val.dedup(); val.dedup();

View File

@@ -1,3 +1,2 @@
pub mod schema;
pub mod token; pub mod token;
pub mod update_operation; pub mod update_operation;

View File

@@ -1,118 +0,0 @@
use std::collections::HashSet;
use indexmap::IndexMap;
use meilisearch_schema::{Schema, SchemaBuilder, SchemaProps};
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum FieldProperties {
Identifier,
Indexed,
Displayed,
Ranked,
}
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct SchemaBody(IndexMap<String, HashSet<FieldProperties>>);
impl From<Schema> for SchemaBody {
fn from(value: Schema) -> SchemaBody {
let mut map = IndexMap::new();
for (name, _attr, props) in value.iter() {
let old_properties = map.entry(name.to_owned()).or_insert(HashSet::new());
if props.is_indexed() {
old_properties.insert(FieldProperties::Indexed);
}
if props.is_displayed() {
old_properties.insert(FieldProperties::Displayed);
}
if props.is_ranked() {
old_properties.insert(FieldProperties::Ranked);
}
}
let old_properties = map
.entry(value.identifier_name().to_string())
.or_insert(HashSet::new());
old_properties.insert(FieldProperties::Identifier);
old_properties.insert(FieldProperties::Displayed);
SchemaBody(map)
}
}
impl Into<Schema> for SchemaBody {
fn into(self) -> Schema {
let mut identifier = "documentId".to_string();
let mut attributes = IndexMap::new();
for (field, properties) in self.0 {
let mut indexed = false;
let mut displayed = false;
let mut ranked = false;
for property in properties {
match property {
FieldProperties::Indexed => indexed = true,
FieldProperties::Displayed => displayed = true,
FieldProperties::Ranked => ranked = true,
FieldProperties::Identifier => identifier = field.clone(),
}
}
attributes.insert(
field,
SchemaProps {
indexed,
displayed,
ranked,
},
);
}
let mut builder = SchemaBuilder::with_identifier(identifier);
for (field, props) in attributes {
builder.new_attribute(field, props);
}
builder.build()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_schema_body_conversion() {
let schema_body = r#"
{
"id": ["identifier", "indexed", "displayed"],
"title": ["indexed", "displayed"],
"date": ["displayed"]
}
"#;
let schema_builder = r#"
{
"identifier": "id",
"attributes": {
"id": {
"indexed": true,
"displayed": true
},
"title": {
"indexed": true,
"displayed": true
},
"date": {
"displayed": true
}
}
}
"#;
let schema_body: SchemaBody = serde_json::from_str(schema_body).unwrap();
let schema_builder: SchemaBuilder = serde_json::from_str(schema_builder).unwrap();
let schema_from_body: Schema = schema_body.into();
let schema_from_builder: Schema = schema_builder.build();
assert_eq!(schema_from_body, schema_from_builder);
}
}

View File

@@ -1,3 +1,4 @@
use std::collections::{BTreeSet, HashSet}; use std::collections::{BTreeSet, HashSet};
use http::StatusCode; use http::StatusCode;
@@ -7,6 +8,7 @@ use serde_json::Value;
use tide::querystring::ContextExt as QSContextExt; use tide::querystring::ContextExt as QSContextExt;
use tide::response::IntoResponse; use tide::response::IntoResponse;
use tide::{Context, Response}; use tide::{Context, Response};
use meilisearch_core::settings::Settings;
use crate::error::{ResponseError, SResult}; use crate::error::{ResponseError, SResult};
use crate::helpers::tide::ContextExt; use crate::helpers::tide::ContextExt;
@@ -117,27 +119,13 @@ pub async fn get_all_documents(ctx: Context<Data>) -> SResult<Response> {
Ok(tide::response::json(response_body)) Ok(tide::response::json(response_body))
} }
fn infered_schema(document: &IndexMap<String, Value>, identifier: Option<String>) -> Option<meilisearch_schema::Schema> { fn find_identifier(document: &IndexMap<String, Value>) -> Option<String> {
use meilisearch_schema::{SchemaBuilder, DISPLAYED, INDEXED};
let mut identifier = identifier;
for key in document.keys() { for key in document.keys() {
if identifier.is_none() && key.to_lowercase().contains("id") { if key.to_lowercase().contains("id") {
identifier = Some(key.to_string()); return Some(key.to_string())
break;
} }
} }
return None
match identifier {
Some(identifier) => {
let mut builder = SchemaBuilder::with_identifier(identifier);
for key in document.keys() {
builder.new_attribute(key, DISPLAYED | INDEXED);
}
Some(builder.build())
}
None => None,
}
} }
#[derive(Default, Deserialize)] #[derive(Default, Deserialize)]
@@ -165,15 +153,23 @@ async fn update_multiple_documents(mut ctx: Context<Data>, is_partial: bool) ->
.schema(&reader) .schema(&reader)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
if current_schema.is_none() { if current_schema.is_none() {
match data.first().and_then(|docs| infered_schema(docs, query.identifier)) { let id = match query.identifier {
Some(schema) => { Some(id) => id,
index None => {
.schema_update(&mut update_writer, schema) match data.first().and_then(|docs| find_identifier(docs)) {
.map_err(ResponseError::internal)?; Some(id) => id,
}
None => return Err(ResponseError::bad_request("Could not infer a schema")), None => return Err(ResponseError::bad_request("Could not infer a schema")),
} }
} }
};
let settings = Settings {
attribute_identifier: Some(id),
..Settings::default()
};
index
.settings_update(&mut update_writer, settings.into())
.map_err(ResponseError::internal)?;
}
let mut document_addition = if is_partial { let mut document_addition = if is_partial {
index.documents_partial_addition() index.documents_partial_addition()

View File

@@ -2,19 +2,16 @@ use chrono::{DateTime, Utc};
use http::StatusCode; use http::StatusCode;
use log::error; use log::error;
use meilisearch_core::ProcessedUpdateResult; use meilisearch_core::ProcessedUpdateResult;
use meilisearch_schema::{Schema, SchemaBuilder}; // use meilisearch_schema::Schema;
use rand::seq::SliceRandom; use rand::seq::SliceRandom;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use tide::querystring::ContextExt as QSContextExt;
use tide::response::IntoResponse; use tide::response::IntoResponse;
use tide::{Context, Response}; use tide::{Context, Response};
use crate::error::{ResponseError, SResult}; use crate::error::{ResponseError, SResult};
use crate::helpers::tide::ContextExt; use crate::helpers::tide::ContextExt;
use crate::models::schema::SchemaBody;
use crate::models::token::ACL::*; use crate::models::token::ACL::*;
use crate::routes::document::IndexUpdateResponse;
use crate::Data; use crate::Data;
fn generate_uid() -> String { fn generate_uid() -> String {
@@ -124,7 +121,7 @@ pub async fn get_index(ctx: Context<Data>) -> SResult<Response> {
struct IndexCreateRequest { struct IndexCreateRequest {
name: String, name: String,
uid: Option<String>, uid: Option<String>,
schema: Option<SchemaBody>, // schema: Option<SchemaBody>,
} }
#[derive(Debug, Serialize)] #[derive(Debug, Serialize)]
@@ -132,9 +129,9 @@ struct IndexCreateRequest {
struct IndexCreateResponse { struct IndexCreateResponse {
name: String, name: String,
uid: String, uid: String,
schema: Option<SchemaBody>, // schema: Option<SchemaBody>,
#[serde(skip_serializing_if = "Option::is_none")] // #[serde(skip_serializing_if = "Option::is_none")]
update_id: Option<u64>, // update_id: Option<u64>,
created_at: DateTime<Utc>, created_at: DateTime<Utc>,
updated_at: DateTime<Utc>, updated_at: DateTime<Utc>,
} }
@@ -165,30 +162,29 @@ pub async fn create_index(mut ctx: Context<Data>) -> SResult<Response> {
}; };
let mut writer = db.main_write_txn().map_err(ResponseError::internal)?; let mut writer = db.main_write_txn().map_err(ResponseError::internal)?;
let mut update_writer = db.update_write_txn().map_err(ResponseError::internal)?;
created_index created_index
.main .main
.put_name(&mut writer, &body.name) .put_name(&mut writer, &body.name)
.map_err(ResponseError::internal)?; .map_err(ResponseError::internal)?;
let schema: Option<Schema> = body.schema.clone().map(Into::into); // let schema: Option<Schema> = body.schema.clone().map(Into::into);
let mut response_update_id = None; // let mut response_update_id = None;
if let Some(schema) = schema { // if let Some(schema) = schema {
let update_id = created_index // let update_id = created_index
.schema_update(&mut update_writer, schema) // .schema_update(&mut update_writer, schema)
.map_err(ResponseError::internal)?; // .map_err(ResponseError::internal)?;
response_update_id = Some(update_id) // response_update_id = Some(update_id)
} // }
writer.commit().map_err(ResponseError::internal)?; // writer.commit().map_err(ResponseError::internal)?;
update_writer.commit().map_err(ResponseError::internal)?; // update_writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexCreateResponse { let response_body = IndexCreateResponse {
name: body.name, name: body.name,
uid, uid,
schema: body.schema, // schema: body.schema,
update_id: response_update_id, // update_id: update_id,
created_at: Utc::now(), created_at: Utc::now(),
updated_at: Utc::now(), updated_at: Utc::now(),
}; };
@@ -263,78 +259,6 @@ pub async fn update_index(mut ctx: Context<Data>) -> SResult<Response> {
.into_response()) .into_response())
} }
#[derive(Default, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
struct SchemaParams {
raw: bool,
}
pub async fn get_index_schema(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?;
let index = ctx.index()?;
// Tide doesn't support "no query param"
let params: SchemaParams = ctx.url_query().unwrap_or_default();
let db = &ctx.state().db;
let reader = db.main_read_txn().map_err(ResponseError::internal)?;
let schema = index
.main
.schema(&reader)
.map_err(ResponseError::open_index)?;
match schema {
Some(schema) => {
if params.raw {
Ok(tide::response::json(schema))
} else {
Ok(tide::response::json(SchemaBody::from(schema)))
}
}
None => Err(ResponseError::not_found("missing index schema")),
}
}
pub async fn update_schema(mut ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesWrite)?;
let index_uid = ctx.url_param("index")?;
let params: SchemaParams = ctx.url_query().unwrap_or_default();
let schema = if params.raw {
ctx.body_json::<SchemaBuilder>()
.await
.map_err(ResponseError::bad_request)?
.build()
} else {
ctx.body_json::<SchemaBody>()
.await
.map_err(ResponseError::bad_request)?
.into()
};
let db = &ctx.state().db;
let mut writer = db.update_write_txn().map_err(ResponseError::internal)?;
let index = db
.open_index(&index_uid)
.ok_or(ResponseError::index_not_found(index_uid))?;
let update_id = index
.schema_update(&mut writer, schema.clone())
.map_err(ResponseError::internal)?;
writer.commit().map_err(ResponseError::internal)?;
let response_body = IndexUpdateResponse { update_id };
Ok(tide::response::json(response_body)
.with_status(StatusCode::ACCEPTED)
.into_response())
}
pub async fn get_update_status(ctx: Context<Data>) -> SResult<Response> { pub async fn get_update_status(ctx: Context<Data>) -> SResult<Response> {
ctx.is_allowed(IndexesRead)?; ctx.is_allowed(IndexesRead)?;

View File

@@ -51,10 +51,10 @@ pub fn load_routes(app: &mut tide::App<Data>) {
.put(index::update_index) .put(index::update_index)
.delete(index::delete_index); .delete(index::delete_index);
router // router
.at("/schema") // .at("/schema")
.get(index::get_index_schema) // .get(index::get_index_schema)
.put(index::update_schema); // .put(index::update_schema);
router.at("/documents").nest(|router| { router.at("/documents").nest(|router| {
router router

View File

@@ -64,8 +64,9 @@ pub async fn search_with_url_query(ctx: Context<Data>) -> SResult<Response> {
let crop_length = query.crop_length.unwrap_or(200); let crop_length = query.crop_length.unwrap_or(200);
if attributes_to_crop == "*" { if attributes_to_crop == "*" {
let attributes_to_crop = schema let attributes_to_crop = schema
.get_displayed_name()
.iter() .iter()
.map(|(attr, ..)| (attr.to_string(), crop_length)) .map(|attr| (attr.to_string(), crop_length))
.collect(); .collect();
search_builder.attributes_to_crop(attributes_to_crop); search_builder.attributes_to_crop(attributes_to_crop);
} else { } else {
@@ -79,7 +80,7 @@ pub async fn search_with_url_query(ctx: Context<Data>) -> SResult<Response> {
if let Some(attributes_to_highlight) = query.attributes_to_highlight { if let Some(attributes_to_highlight) = query.attributes_to_highlight {
let attributes_to_highlight = if attributes_to_highlight == "*" { let attributes_to_highlight = if attributes_to_highlight == "*" {
schema.iter().map(|(attr, ..)| attr.to_string()).collect() schema.get_displayed_name()
} else { } else {
attributes_to_highlight attributes_to_highlight
.split(',') .split(',')