Mirror of https://github.com/meilisearch/MeiliSearch (synced 2024-12-24 13:40:31 +01:00)

Commit 14c1aba6c7: Merge pull request #509 from meilisearch/fix-internal-schema ("Fix internal schema")
@@ -1,5 +1,5 @@
 {
-  "identifier": "id",
+  "primaryKey": "id",
   "searchableAttributes": ["title", "overview"],
   "displayedAttributes": [
     "id",
@@ -8,6 +8,7 @@ use crossbeam_channel::{Receiver, Sender};
 use heed::types::{Str, Unit};
 use heed::{CompactionOption, Result as ZResult};
 use log::debug;
+use meilisearch_schema::Schema;
 
 use crate::{store, update, Index, MResult};
 
@@ -242,6 +243,7 @@ impl Database {
         index.main.put_name(&mut writer, name)?;
         index.main.put_created_at(&mut writer)?;
         index.main.put_updated_at(&mut writer)?;
+        index.main.put_schema(&mut writer, &Schema::new())?;
 
         let env_clone = self.env.clone();
         let update_env_clone = self.update_env.clone();
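Taken together, the two hunks above move index creation to a "start empty, attach the primary key later" schema lifecycle. Below is a minimal, self-contained sketch of that flow; the Schema type here is a simplified stand-in written for illustration, not the real meilisearch_schema API.

// Simplified stand-in for meilisearch_schema::Schema, only to illustrate
// the create-empty-then-set-primary-key flow shown in the diff.
#[derive(Debug, Default)]
struct Schema {
    primary_key: Option<String>,
}

impl Schema {
    fn new() -> Schema {
        Schema::default()
    }

    fn set_primary_key(&mut self, name: &str) -> Result<(), String> {
        match self.primary_key {
            // Mirrors the CannotUpdateSchemaPrimaryKey rule introduced below.
            Some(_) => Err("the primary key cannot be updated".to_string()),
            None => {
                self.primary_key = Some(name.to_string());
                Ok(())
            }
        }
    }
}

fn main() {
    // An index is now created with an empty schema...
    let mut schema = Schema::new();
    // ...and the primary key is set afterwards, e.g. from the create/update payload.
    schema.set_primary_key("id").unwrap();
    assert_eq!(schema.primary_key.as_deref(), Some("id"));
    // Setting it a second time is rejected.
    assert!(schema.set_primary_key("other").is_err());
}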
@@ -357,7 +359,7 @@ mod tests {
 
     use crate::criterion::{self, CriteriaBuilder};
     use crate::update::{ProcessedUpdateResult, UpdateStatus};
-    use crate::settings::{Settings, SettingsUpdate, UpdateState};
+    use crate::settings::Settings;
     use crate::{Document, DocumentId};
     use serde::de::IgnoredAny;
     use std::sync::mpsc;
@@ -377,17 +379,11 @@ mod tests {
 
         database.set_update_callback(Box::new(update_fn));
 
-        let settings_update = SettingsUpdate{
-            identifier: UpdateState::Update("id".to_string()),
-            ..SettingsUpdate::default()
-        };
 
-        let mut writer = db.update_write_txn().unwrap();
-        let update_id = index.settings_update(&mut writer, settings_update).unwrap();
+        let mut writer = db.main_write_txn().unwrap();
+        index.main.put_schema(&mut writer, &Schema::with_primary_key("id")).unwrap();
         writer.commit().unwrap();
 
-        // block until the transaction is processed
-        let _ = receiver.iter().find(|id| *id == update_id);
 
         let settings = {
             let data = r#"

The seven following test hunks (@ -448,18 +444,10; @ -518,18 +506,10; @ -581,18 +561,10; @ -733,18 +705,10; @ -821,18 +785,10; @ -968,18 +924,10; @ -1043,18 +991,10) apply the same replacement: the SettingsUpdate-based identifier setup and the wait on the update receiver are dropped in favour of writing Schema::with_primary_key("id") directly with put_schema in a main write transaction.
@@ -12,7 +12,7 @@ pub type MResult<T> = Result<T, Error>;
 pub enum Error {
     Io(io::Error),
     IndexAlreadyExists,
-    MissingIdentifier,
+    MissingPrimaryKey,
     SchemaMissing,
     WordIndexMissing,
     MissingDocumentId,
@@ -87,7 +87,7 @@ impl fmt::Display for Error {
         match self {
             Io(e) => write!(f, "{}", e),
             IndexAlreadyExists => write!(f, "index already exists"),
-            MissingIdentifier => write!(f, "schema cannot be built without identifier"),
+            MissingPrimaryKey => write!(f, "schema cannot be built without a primary key"),
             SchemaMissing => write!(f, "this index does not have a schema"),
             WordIndexMissing => write!(f, "this index does not have a word index"),
             MissingDocumentId => write!(f, "document id is missing"),
@@ -109,7 +109,7 @@ impl error::Error for Error {}
 #[derive(Debug)]
 pub enum UnsupportedOperation {
     SchemaAlreadyExists,
-    CannotUpdateSchemaIdentifier,
+    CannotUpdateSchemaPrimaryKey,
     CannotReorderSchemaAttribute,
     CanOnlyIntroduceNewSchemaAttributesAtEnd,
     CannotRemoveSchemaAttribute,
@@ -120,7 +120,7 @@ impl fmt::Display for UnsupportedOperation {
         use self::UnsupportedOperation::*;
         match self {
             SchemaAlreadyExists => write!(f, "Cannot update index which already have a schema"),
-            CannotUpdateSchemaIdentifier => write!(f, "Cannot update the identifier of a schema"),
+            CannotUpdateSchemaPrimaryKey => write!(f, "Cannot update the primary key of a schema"),
             CannotReorderSchemaAttribute => write!(f, "Cannot reorder the attributes of a schema"),
             CanOnlyIntroduceNewSchemaAttributesAtEnd => {
                 write!(f, "Can only introduce new attributes at end of a schema")
@@ -196,16 +196,6 @@ mod tests {
         }
     }
 
-    const fn doc_attr_index(document_id: u64, attribute: u16, word_index: u16) -> DocIndex {
-        DocIndex {
-            document_id: DocumentId(document_id),
-            attribute,
-            word_index,
-            char_index: 0,
-            char_length: 0,
-        }
-    }
-
     pub struct TempDatabase {
         database: Database,
         index: Index,
@@ -269,7 +259,7 @@ mod tests {
         let mut postings_lists = HashMap::new();
         let mut fields_counts = HashMap::<_, u16>::new();
 
-        let mut schema = Schema::with_identifier("id");
+        let mut schema = Schema::with_primary_key("id");
 
         for (word, indexes) in iter {
             let mut final_indexes = Vec::new();
@@ -8,13 +8,13 @@ use siphasher::sip::SipHasher;
 use super::{ConvertToString, SerializerError};
 
 pub fn extract_document_id<D>(
-    identifier: &str,
+    primary_key: &str,
     document: &D,
 ) -> Result<Option<DocumentId>, SerializerError>
 where
     D: serde::Serialize,
 {
-    let serializer = ExtractDocumentId { identifier };
+    let serializer = ExtractDocumentId { primary_key };
     document.serialize(serializer)
 }
 
@@ -52,7 +52,7 @@ pub fn compute_document_id<H: Hash>(t: H) -> DocumentId {
 }
 
 struct ExtractDocumentId<'a> {
-    identifier: &'a str,
+    primary_key: &'a str,
 }
 
 impl<'a> ser::Serializer for ExtractDocumentId<'a> {
@@ -188,7 +188,7 @@ impl<'a> ser::Serializer for ExtractDocumentId<'a> {
 
     fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
         let serializer = ExtractDocumentIdMapSerializer {
-            identifier: self.identifier,
+            primary_key: self.primary_key,
             document_id: None,
             current_key_name: None,
         };
@@ -202,7 +202,7 @@ impl<'a> ser::Serializer for ExtractDocumentId<'a> {
         _len: usize,
     ) -> Result<Self::SerializeStruct, Self::Error> {
         let serializer = ExtractDocumentIdStructSerializer {
-            identifier: self.identifier,
+            primary_key: self.primary_key,
             document_id: None,
         };
 
@@ -223,7 +223,7 @@ impl<'a> ser::Serializer for ExtractDocumentId<'a> {
 }
 
 pub struct ExtractDocumentIdMapSerializer<'a> {
-    identifier: &'a str,
+    primary_key: &'a str,
     document_id: Option<DocumentId>,
     current_key_name: Option<String>,
 }
@@ -260,7 +260,7 @@ impl<'a> ser::SerializeMap for ExtractDocumentIdMapSerializer<'a> {
     {
         let key = key.serialize(ConvertToString)?;
 
-        if self.identifier == key {
+        if self.primary_key == key {
             let value = serde_json::to_string(value).and_then(|s| serde_json::from_str(&s))?;
             match value_to_string(&value).map(|s| compute_document_id(&s)) {
                 Some(document_id) => self.document_id = Some(document_id),
@@ -277,7 +277,7 @@ impl<'a> ser::SerializeMap for ExtractDocumentIdMapSerializer<'a> {
 }
 
 pub struct ExtractDocumentIdStructSerializer<'a> {
-    identifier: &'a str,
+    primary_key: &'a str,
     document_id: Option<DocumentId>,
 }
 
@@ -293,7 +293,7 @@ impl<'a> ser::SerializeStruct for ExtractDocumentIdStructSerializer<'a> {
     where
         T: Serialize,
     {
-        if self.identifier == key {
+        if self.primary_key == key {
             let value = serde_json::to_string(value).and_then(|s| serde_json::from_str(&s))?;
             match value_to_string(&value).map(compute_document_id) {
                 Some(document_id) => self.document_id = Some(document_id),
@@ -57,7 +57,7 @@ impl fmt::Display for SerializerError {
             f.write_str("serialized document does not have an id according to the schema")
         }
         SerializerError::InvalidDocumentIdType => {
-            f.write_str("documents identifiers can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).")
+            f.write_str("a document primary key can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).")
         }
         SerializerError::Zlmdb(e) => write!(f, "heed related error: {}", e),
         SerializerError::SerdeJson(e) => write!(f, "serde json error: {}", e),
@@ -54,7 +54,7 @@ impl Settings {
         Ok(SettingsUpdate {
             ranking_rules,
             distinct_attribute: settings.distinct_attribute.into(),
-            identifier: UpdateState::Nothing,
+            primary_key: UpdateState::Nothing,
             searchable_attributes: settings.searchable_attributes.into(),
             displayed_attributes: settings.displayed_attributes.into(),
             stop_words: settings.stop_words.into(),
@@ -160,7 +160,7 @@ impl RankingRule {
 pub struct SettingsUpdate {
     pub ranking_rules: UpdateState<Vec<RankingRule>>,
     pub distinct_attribute: UpdateState<String>,
-    pub identifier: UpdateState<String>,
+    pub primary_key: UpdateState<String>,
     pub searchable_attributes: UpdateState<Vec<String>>,
     pub displayed_attributes: UpdateState<HashSet<String>>,
     pub stop_words: UpdateState<BTreeSet<String>>,
@@ -173,7 +173,7 @@ impl Default for SettingsUpdate {
         Self {
             ranking_rules: UpdateState::Nothing,
             distinct_attribute: UpdateState::Nothing,
-            identifier: UpdateState::Nothing,
+            primary_key: UpdateState::Nothing,
            searchable_attributes: UpdateState::Nothing,
            displayed_attributes: UpdateState::Nothing,
            stop_words: UpdateState::Nothing,
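The settings update now carries the primary key under the field name primary_key rather than identifier. A minimal self-contained sketch of how such an update is built, using simplified stand-ins for UpdateState and SettingsUpdate rather than the real meilisearch_core types:

// Simplified stand-ins, only to show the `..Default::default()` construction
// pattern used throughout the tests and routes in this diff.
#[derive(Debug)]
enum UpdateState<T> {
    Update(T),
    Nothing,
}

#[derive(Debug)]
struct SettingsUpdate {
    primary_key: UpdateState<String>,
    ranking_rules: UpdateState<Vec<String>>,
}

impl Default for SettingsUpdate {
    fn default() -> Self {
        SettingsUpdate {
            primary_key: UpdateState::Nothing,
            ranking_rules: UpdateState::Nothing,
        }
    }
}

fn main() {
    // Equivalent of the old `identifier: UpdateState::Update("id")` pattern.
    let update = SettingsUpdate {
        primary_key: UpdateState::Update("id".to_string()),
        ..SettingsUpdate::default()
    };
    println!("{:?}", update);
}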
@@ -115,11 +115,11 @@ pub fn apply_documents_addition<'a, 'b>(
         None => return Err(Error::SchemaMissing),
     };
 
-    let identifier = schema.identifier();
+    let primary_key = schema.primary_key().ok_or(Error::MissingPrimaryKey)?;
 
     // 1. store documents ids for future deletion
     for document in addition {
-        let document_id = match extract_document_id(&identifier, &document)? {
+        let document_id = match extract_document_id(&primary_key, &document)? {
             Some(id) => id,
             None => return Err(Error::MissingDocumentId),
         };
@@ -184,11 +184,11 @@ pub fn apply_documents_partial_addition<'a, 'b>(
         None => return Err(Error::SchemaMissing),
     };
 
-    let identifier = schema.identifier();
+    let primary_key = schema.primary_key().ok_or(Error::MissingPrimaryKey)?;
 
     // 1. store documents ids for future deletion
     for mut document in addition {
-        let document_id = match extract_document_id(&identifier, &document)? {
+        let document_id = match extract_document_id(&primary_key, &document)? {
             Some(id) => id,
             None => return Err(Error::MissingDocumentId),
         };
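Document addition now requires the schema to expose a primary key before any id can be computed. The following is a simplified, self-contained sketch of that extraction step over a serde_json value; the hash used here is only a stand-in for the SipHash-based DocumentId computation in the real compute_document_id.

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

use serde_json::{json, Value};

// Stand-in for meilisearch_core::serde::compute_document_id.
fn compute_document_id(raw: &str) -> u64 {
    let mut hasher = DefaultHasher::new();
    raw.hash(&mut hasher);
    hasher.finish()
}

// Simplified counterpart of extract_document_id: look the primary key up in
// the document and hash its value into an id.
fn extract_document_id(primary_key: &str, document: &Value) -> Option<u64> {
    match document.get(primary_key)? {
        Value::String(s) => Some(compute_document_id(s)),
        Value::Number(n) => Some(compute_document_id(&n.to_string())),
        _ => None, // other types map to SerializerError::InvalidDocumentIdType
    }
}

fn main() {
    let primary_key = "id"; // would come from schema.primary_key()
    let document = json!({ "id": 123, "text": "The mask" });
    let doc_id = extract_document_id(primary_key, &document)
        .expect("document id is missing"); // Error::MissingDocumentId in the real code
    println!("document id: {}", doc_id);
}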
@@ -40,8 +40,8 @@ impl DocumentsDeletion {
     where
         D: serde::Serialize,
     {
-        let identifier = schema.identifier();
-        let document_id = match extract_document_id(&identifier, &document)? {
+        let primary_key = schema.primary_key().ok_or(Error::MissingPrimaryKey)?;
+        let document_id = match extract_document_id(&primary_key, &document)? {
             Some(id) => id,
             None => return Err(Error::MissingDocumentId),
         };
@@ -35,9 +35,9 @@ pub fn apply_settings_update(
     let mut schema = match index.main.schema(writer)? {
         Some(schema) => schema,
        None => {
-            match settings.identifier.clone() {
-                UpdateState::Update(id) => Schema::with_identifier(&id),
-                _ => return Err(Error::MissingIdentifier)
+            match settings.primary_key.clone() {
+                UpdateState::Update(id) => Schema::with_primary_key(&id),
+                _ => return Err(Error::MissingPrimaryKey)
             }
         }
     };
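The bootstrap rule shown in this hunk can be summarised in a compact, self-contained sketch (types here are illustrative stand-ins, not the crate's own): when the index has no schema yet, one can only be created from a primary key carried by the settings update, otherwise the update fails.

#[derive(Debug)]
struct Schema {
    primary_key: String,
}

#[derive(Debug)]
enum Error {
    MissingPrimaryKey,
}

fn bootstrap_schema(existing: Option<Schema>, requested: Option<String>) -> Result<Schema, Error> {
    match existing {
        Some(schema) => Ok(schema),
        None => match requested {
            Some(id) => Ok(Schema { primary_key: id }),
            None => Err(Error::MissingPrimaryKey),
        },
    }
}

fn main() {
    let schema = bootstrap_schema(None, Some("id".to_string())).unwrap();
    println!("{:?}", schema);
    assert!(bootstrap_schema(None, None).is_err());
}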
@@ -13,13 +13,17 @@ pub trait RequestExt {
     fn is_allowed(&self, acl: ACL) -> SResult<()>;
     fn url_param(&self, name: &str) -> SResult<String>;
     fn index(&self) -> SResult<Index>;
-    fn identifier(&self) -> SResult<String>;
+    fn document_id(&self) -> SResult<String>;
 }
 
 impl RequestExt for Request<Data> {
     fn is_allowed(&self, acl: ACL) -> SResult<()> {
         let user_api_key = self.header("X-Meili-API-Key");
 
+        if self.state().api_keys.master.is_none() {
+            return Ok(())
+        }
+
         match acl {
             ACL::Admin => {
                 if user_api_key == self.state().api_keys.master.as_deref() {
@@ -55,7 +59,7 @@ impl RequestExt for Request<Data> {
     fn url_param(&self, name: &str) -> SResult<String> {
         let param = self
             .param::<String>(name)
-            .map_err(|_| ResponseError::bad_parameter("identifier", name))?;
+            .map_err(|e| ResponseError::bad_parameter(name, e))?;
         Ok(param)
     }
 
@@ -69,10 +73,10 @@ impl RequestExt for Request<Data> {
         Ok(index)
     }
 
-    fn identifier(&self) -> SResult<String> {
+    fn document_id(&self) -> SResult<String> {
         let name = self
-            .param::<String>("identifier")
-            .map_err(|_| ResponseError::bad_parameter("identifier", "identifier"))?;
+            .param::<String>("document_id")
+            .map_err(|_| ResponseError::bad_parameter("documentId", "primaryKey"))?;
 
         Ok(name)
     }
@@ -1,7 +1,6 @@
 use std::collections::{BTreeSet, HashSet};
 
 use indexmap::IndexMap;
-use meilisearch_core::settings::{SettingsUpdate, UpdateState};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use tide::{Request, Response};
@@ -16,18 +15,18 @@ pub async fn get_document(ctx: Request<Data>) -> SResult<Response> {
 
     let index = ctx.index()?;
 
-    let identifier = ctx.identifier()?;
-    let document_id = meilisearch_core::serde::compute_document_id(identifier.clone());
+    let original_document_id = ctx.document_id()?;
+    let document_id = meilisearch_core::serde::compute_document_id(original_document_id.clone());
 
     let db = &ctx.state().db;
     let reader = db.main_read_txn()?;
 
     let response = index
         .document::<IndexMap<String, Value>>(&reader, None, document_id)?
-        .ok_or(ResponseError::document_not_found(&identifier))?;
+        .ok_or(ResponseError::document_not_found(&original_document_id))?;
 
     if response.is_empty() {
-        return Err(ResponseError::document_not_found(identifier));
+        return Err(ResponseError::document_not_found(&original_document_id));
     }
 
     Ok(tide::Response::new(200).body_json(&response)?)
@@ -43,8 +42,8 @@ pub async fn delete_document(ctx: Request<Data>) -> SResult<Response> {
     ctx.is_allowed(Private)?;
 
     let index = ctx.index()?;
-    let identifier = ctx.identifier()?;
-    let document_id = meilisearch_core::serde::compute_document_id(identifier);
+    let document_id = ctx.document_id()?;
+    let document_id = meilisearch_core::serde::compute_document_id(document_id);
     let db = &ctx.state().db;
     let mut update_writer = db.update_write_txn()?;
     let mut documents_deletion = index.documents_deletion();
@@ -109,7 +108,7 @@ pub async fn get_all_documents(ctx: Request<Data>) -> SResult<Response> {
     Ok(tide::Response::new(200).body_json(&response_body)?)
 }
 
-fn find_identifier(document: &IndexMap<String, Value>) -> Option<String> {
+fn find_primary_key(document: &IndexMap<String, Value>) -> Option<String> {
     for key in document.keys() {
         if key.to_lowercase().contains("id") {
             return Some(key.to_string());
@@ -121,7 +120,7 @@ fn find_identifier(document: &IndexMap<String, Value>) -> Option<String> {
 #[derive(Default, Deserialize)]
 #[serde(deny_unknown_fields)]
 struct UpdateDocumentsQuery {
-    identifier: Option<String>,
+    document_id: Option<String>,
 }
 
 async fn update_multiple_documents(mut ctx: Request<Data>, is_partial: bool) -> SResult<Response> {
@@ -134,22 +133,26 @@ async fn update_multiple_documents(mut ctx: Request<Data>, is_partial: bool) ->
     let query: UpdateDocumentsQuery = ctx.query().unwrap_or_default();
 
     let db = &ctx.state().db;
 
     let reader = db.main_read_txn()?;
-    let mut update_writer = db.update_write_txn()?;
-    let current_schema = index.main.schema(&reader)?;
-    if current_schema.is_none() {
-        let id = match query.identifier {
+    let mut schema = index
+        .main
+        .schema(&reader)?
+        .ok_or(ResponseError::internal("schema not found"))?;
+
+    if schema.primary_key().is_none() {
+        let id = match query.document_id {
             Some(id) => id,
-            None => match data.first().and_then(|docs| find_identifier(docs)) {
+            None => match data.first().and_then(|docs| find_primary_key(docs)) {
                 Some(id) => id,
                 None => return Err(ResponseError::bad_request("Could not infer a schema")),
             },
         };
-        let settings_update = SettingsUpdate {
-            identifier: UpdateState::Update(id),
-            ..SettingsUpdate::default()
-        };
-        index.settings_update(&mut update_writer, settings_update)?;
+
+        let mut writer = db.main_write_txn()?;
+        schema.set_primary_key(&id).map_err(ResponseError::bad_request)?;
+        index.main.put_schema(&mut writer, &schema)?;
+        writer.commit()?;
     }
 
     let mut document_addition = if is_partial {
@@ -162,6 +165,7 @@ async fn update_multiple_documents(mut ctx: Request<Data>, is_partial: bool) ->
         document_addition.update_document(document);
     }
 
+    let mut update_writer = db.update_write_txn()?;
     let update_id = document_addition.finalize(&mut update_writer)?;
     update_writer.commit()?;
 
@@ -188,10 +192,10 @@ pub async fn delete_multiple_documents(mut ctx: Request<Data>) -> SResult<Respon
 
     let mut documents_deletion = index.documents_deletion();
 
-    for identifier in data {
-        if let Some(identifier) = meilisearch_core::serde::value_to_string(&identifier) {
+    for document_id in data {
+        if let Some(document_id) = meilisearch_core::serde::value_to_string(&document_id) {
             documents_deletion
-                .delete_document_by_id(meilisearch_core::serde::compute_document_id(identifier));
+                .delete_document_by_id(meilisearch_core::serde::compute_document_id(document_id));
         }
     }
 
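When documents are pushed to an index whose schema has no primary key yet, the route above infers one: the first attribute whose lowercased name contains "id" is picked (find_primary_key). A self-contained sketch of that heuristic follows; note the real code iterates an IndexMap, which preserves the document's own field order, while plain serde_json maps may be key-sorted.

use serde_json::{json, Map, Value};

// Simplified counterpart of find_primary_key from the hunk above.
fn find_primary_key(document: &Map<String, Value>) -> Option<String> {
    document
        .keys()
        .find(|key| key.to_lowercase().contains("id"))
        .cloned()
}

fn main() {
    let doc = json!({ "title": "The mask", "movieId": 123 });
    let key = doc.as_object().and_then(find_primary_key);
    assert_eq!(key.as_deref(), Some("movieId"));
    println!("inferred primary key: {:?}", key);
}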
@ -1,7 +1,6 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use log::error;
|
||||
use meilisearch_core::ProcessedUpdateResult;
|
||||
use meilisearch_schema::Schema;
|
||||
use rand::seq::SliceRandom;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
@ -40,8 +39,11 @@ pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
|
||||
let created_at = index.main.created_at(&reader)?.into_internal_error()?;
|
||||
let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
|
||||
|
||||
let identifier = match index.main.schema(&reader) {
|
||||
Ok(Some(schema)) => Some(schema.identifier().to_owned()),
|
||||
let primary_key = match index.main.schema(&reader) {
|
||||
Ok(Some(schema)) => match schema.primary_key() {
|
||||
Some(primary_key) => Some(primary_key.to_owned()),
|
||||
None => None,
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
|
||||
@ -50,7 +52,7 @@ pub async fn list_indexes(ctx: Request<Data>) -> SResult<Response> {
|
||||
uid: index_uid,
|
||||
created_at,
|
||||
updated_at,
|
||||
identifier,
|
||||
primary_key,
|
||||
};
|
||||
response_body.push(index_response);
|
||||
}
|
||||
@ -71,7 +73,7 @@ struct IndexResponse {
|
||||
uid: String,
|
||||
created_at: DateTime<Utc>,
|
||||
updated_at: DateTime<Utc>,
|
||||
identifier: Option<String>,
|
||||
primary_key: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
|
||||
@ -87,8 +89,11 @@ pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
|
||||
let created_at = index.main.created_at(&reader)?.into_internal_error()?;
|
||||
let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
|
||||
|
||||
let identifier = match index.main.schema(&reader) {
|
||||
Ok(Some(schema)) => Some(schema.identifier().to_owned()),
|
||||
let primary_key = match index.main.schema(&reader) {
|
||||
Ok(Some(schema)) => match schema.primary_key() {
|
||||
Some(primary_key) => Some(primary_key.to_owned()),
|
||||
None => None,
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
|
||||
@ -97,7 +102,7 @@ pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
|
||||
uid,
|
||||
created_at,
|
||||
updated_at,
|
||||
identifier,
|
||||
primary_key,
|
||||
};
|
||||
|
||||
Ok(tide::Response::new(200).body_json(&response_body)?)
|
||||
@ -108,7 +113,7 @@ pub async fn get_index(ctx: Request<Data>) -> SResult<Response> {
|
||||
struct IndexCreateRequest {
|
||||
name: Option<String>,
|
||||
uid: Option<String>,
|
||||
identifier: Option<String>,
|
||||
primary_key: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@ -118,7 +123,7 @@ struct IndexCreateResponse {
|
||||
uid: String,
|
||||
created_at: DateTime<Utc>,
|
||||
updated_at: DateTime<Utc>,
|
||||
identifier: Option<String>,
|
||||
primary_key: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
|
||||
@ -139,12 +144,15 @@ pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
|
||||
|
||||
let uid = match body.uid {
|
||||
Some(uid) => {
|
||||
if uid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_') {
|
||||
if uid
|
||||
.chars()
|
||||
.all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
|
||||
{
|
||||
uid
|
||||
} else {
|
||||
return Err(ResponseError::InvalidIndexUid)
|
||||
return Err(ResponseError::InvalidIndexUid);
|
||||
}
|
||||
},
|
||||
}
|
||||
None => loop {
|
||||
let uid = generate_uid();
|
||||
if db.open_index(&uid).is_none() {
|
||||
@ -170,10 +178,11 @@ pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
|
||||
.updated_at(&writer)?
|
||||
.into_internal_error()?;
|
||||
|
||||
if let Some(id) = body.identifier.clone() {
|
||||
created_index
|
||||
.main
|
||||
.put_schema(&mut writer, &Schema::with_identifier(&id))?;
|
||||
if let Some(id) = body.primary_key.clone() {
|
||||
if let Some(mut schema) = created_index.main.schema(&mut writer)? {
|
||||
schema.set_primary_key(&id).map_err(ResponseError::bad_request)?;
|
||||
created_index.main.put_schema(&mut writer, &schema)?;
|
||||
}
|
||||
}
|
||||
|
||||
writer.commit()?;
|
||||
@ -183,7 +192,7 @@ pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
|
||||
uid,
|
||||
created_at,
|
||||
updated_at,
|
||||
identifier: body.identifier,
|
||||
primary_key: body.primary_key,
|
||||
};
|
||||
|
||||
Ok(tide::Response::new(201).body_json(&response_body)?)
|
||||
@ -193,7 +202,7 @@ pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
|
||||
#[serde(rename_all = "camelCase", deny_unknown_fields)]
|
||||
struct UpdateIndexRequest {
|
||||
name: Option<String>,
|
||||
identifier: Option<String>,
|
||||
primary_key: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@ -203,7 +212,7 @@ struct UpdateIndexResponse {
|
||||
uid: String,
|
||||
created_at: DateTime<Utc>,
|
||||
updated_at: DateTime<Utc>,
|
||||
identifier: Option<String>,
|
||||
primary_key: Option<String>,
|
||||
}
|
||||
|
||||
pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
|
||||
@@ -224,15 +233,22 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
         index.main.put_name(&mut writer, &name)?;
     }
 
-    if let Some(identifier) = body.identifier {
-        if let Ok(Some(_)) = index.main.schema(&writer) {
-            return Err(ResponseError::bad_request(
-                "The index identifier cannot be updated",
-            ));
+    if let Some(id) = body.primary_key.clone() {
+        if let Some(mut schema) = index.main.schema(&mut writer)? {
+            match schema.primary_key() {
+                Some(_) => {
+                    return Err(ResponseError::bad_request(
+                        "The primary key cannot be updated",
+                    ));
+                }
+                None => {
+                    schema
+                        .set_primary_key(&id)
+                        .map_err(ResponseError::bad_request)?;
+                    index.main.put_schema(&mut writer, &schema)?;
+                }
+            }
         }
-        index
-            .main
-            .put_schema(&mut writer, &Schema::with_identifier(&identifier))?;
     }
 
     index.main.put_updated_at(&mut writer)?;
@ -243,8 +259,11 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
|
||||
let created_at = index.main.created_at(&reader)?.into_internal_error()?;
|
||||
let updated_at = index.main.updated_at(&reader)?.into_internal_error()?;
|
||||
|
||||
let identifier = match index.main.schema(&reader) {
|
||||
Ok(Some(schema)) => Some(schema.identifier().to_owned()),
|
||||
let primary_key = match index.main.schema(&reader) {
|
||||
Ok(Some(schema)) => match schema.primary_key() {
|
||||
Some(primary_key) => Some(primary_key.to_owned()),
|
||||
None => None,
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
|
||||
@ -253,7 +272,7 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
|
||||
uid: index_uid,
|
||||
created_at,
|
||||
updated_at,
|
||||
identifier,
|
||||
primary_key,
|
||||
};
|
||||
|
||||
Ok(tide::Response::new(200).body_json(&response_body)?)
|
||||
|
@@ -61,7 +61,7 @@ pub fn load_routes(app: &mut tide::Server<Data>) {
         .put(|ctx| into_response(document::add_or_update_multiple_documents(ctx)))
         .delete(|ctx| into_response(document::clear_all_documents(ctx)));
 
-    app.at("/indexes/:index/documents/:identifier")
+    app.at("/indexes/:index/documents/:document_id")
         .get(|ctx| into_response(document::get_document(ctx)))
         .delete(|ctx| into_response(document::delete_document(ctx)));
 
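The documents route now addresses a document by :document_id instead of :identifier, so a client builds the URL the same way the test helper further down does. A minimal sketch:

// Build the per-document URL used by GET/DELETE on the documents route.
fn document_url(index_uid: &str, document_id: impl ToString) -> String {
    format!("/indexes/{}/documents/{}", index_uid, document_id.to_string())
}

fn main() {
    assert_eq!(document_url("movies", 123), "/indexes/movies/documents/123");
}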
@ -45,37 +45,26 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
|
||||
let schema = index.main.schema(&reader)?;
|
||||
|
||||
let searchable_attributes = schema.clone().map(|s| {
|
||||
let attrs = s
|
||||
.indexed_name()
|
||||
s.indexed_name()
|
||||
.iter()
|
||||
.map(|s| (*s).to_string())
|
||||
.collect::<Vec<String>>();
|
||||
if attrs.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(attrs)
|
||||
}
|
||||
.collect::<Vec<String>>()
|
||||
});
|
||||
|
||||
let displayed_attributes = schema.clone().map(|s| {
|
||||
let attrs = s
|
||||
.displayed_name()
|
||||
s.displayed_name()
|
||||
.iter()
|
||||
.map(|s| (*s).to_string())
|
||||
.collect::<HashSet<String>>();
|
||||
if attrs.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(attrs)
|
||||
}
|
||||
.collect::<HashSet<String>>()
|
||||
});
|
||||
|
||||
let accept_new_fields = schema.map(|s| s.accept_new_fields());
|
||||
|
||||
let settings = Settings {
|
||||
ranking_rules: Some(Some(ranking_rules)),
|
||||
distinct_attribute: Some(distinct_attribute),
|
||||
searchable_attributes,
|
||||
displayed_attributes,
|
||||
searchable_attributes: Some(searchable_attributes),
|
||||
displayed_attributes: Some(displayed_attributes),
|
||||
stop_words: Some(Some(stop_words)),
|
||||
synonyms: Some(Some(synonyms)),
|
||||
accept_new_fields: Some(accept_new_fields),
|
||||
@ -89,7 +78,7 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
|
||||
pub struct UpdateSettings {
|
||||
pub ranking_rules: Option<Vec<String>>,
|
||||
pub distinct_attribute: Option<String>,
|
||||
pub identifier: Option<String>,
|
||||
pub primary_key: Option<String>,
|
||||
pub searchable_attributes: Option<Vec<String>>,
|
||||
pub displayed_attributes: Option<HashSet<String>>,
|
||||
pub stop_words: Option<BTreeSet<String>>,
|
||||
@ -132,7 +121,7 @@ pub async fn delete_all(ctx: Request<Data>) -> SResult<Response> {
|
||||
let settings = SettingsUpdate {
|
||||
ranking_rules: UpdateState::Clear,
|
||||
distinct_attribute: UpdateState::Clear,
|
||||
identifier: UpdateState::Clear,
|
||||
primary_key: UpdateState::Clear,
|
||||
searchable_attributes: UpdateState::Clear,
|
||||
displayed_attributes: UpdateState::Clear,
|
||||
stop_words: UpdateState::Clear,
|
||||
|
@@ -57,7 +57,7 @@ impl Server {
         block_on(res.into_body().read_to_end(&mut buf)).unwrap();
         let response: Value = serde_json::from_slice(&buf).unwrap();
 
-        if response["status"] == "processed" {
+        if response["status"] == "processed" || response["status"] == "error" {
             eprintln!("{:#?}", response);
             return;
         }
@ -69,9 +69,7 @@ impl Server {
|
||||
|
||||
fn get_request(&mut self, url: &str) -> (Value, StatusCode) {
|
||||
eprintln!("get_request: {}", url);
|
||||
let req = http::Request::get(url)
|
||||
.body(Body::empty())
|
||||
.unwrap();
|
||||
let req = http::Request::get(url).body(Body::empty()).unwrap();
|
||||
let res = self.mock.simulate(req).unwrap();
|
||||
let status_code = res.status().clone();
|
||||
|
||||
@ -97,7 +95,7 @@ impl Server {
|
||||
(response, status_code)
|
||||
}
|
||||
|
||||
fn post_request_async(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
|
||||
fn post_request_async(&mut self, url: &str, body: Value) -> (Value, StatusCode) {
|
||||
eprintln!("post_request_async: {}", url);
|
||||
let (response, status_code) = self.post_request(url, body);
|
||||
assert_eq!(status_code, 202);
|
||||
@ -133,9 +131,7 @@ impl Server {
|
||||
|
||||
fn delete_request(&mut self, url: &str) -> (Value, StatusCode) {
|
||||
eprintln!("delete_request: {}", url);
|
||||
let req = http::Request::delete(url)
|
||||
.body(Body::empty())
|
||||
.unwrap();
|
||||
let req = http::Request::delete(url).body(Body::empty()).unwrap();
|
||||
let res = self.mock.simulate(req).unwrap();
|
||||
let status_code = res.status().clone();
|
||||
|
||||
@ -154,7 +150,6 @@ impl Server {
|
||||
(response, status_code)
|
||||
}
|
||||
|
||||
|
||||
// // All Routes
|
||||
|
||||
pub fn list_indexes(&mut self) -> (Value, StatusCode) {
|
||||
@ -221,12 +216,20 @@ impl Server {
|
||||
}
|
||||
|
||||
pub fn get_document(&mut self, document_id: impl ToString) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/documents/{}", self.uid, document_id.to_string());
|
||||
let url = format!(
|
||||
"/indexes/{}/documents/{}",
|
||||
self.uid,
|
||||
document_id.to_string()
|
||||
);
|
||||
self.get_request(&url)
|
||||
}
|
||||
|
||||
pub fn delete_document(&mut self, document_id: impl ToString) -> (Value, StatusCode) {
|
||||
let url = format!("/indexes/{}/documents/{}", self.uid, document_id.to_string());
|
||||
let url = format!(
|
||||
"/indexes/{}/documents/{}",
|
||||
self.uid,
|
||||
document_id.to_string()
|
||||
);
|
||||
self.delete_request_async(&url)
|
||||
}
|
||||
|
||||
@@ -285,8 +288,8 @@ impl Server {
         self.delete_request_async(&url)
     }
 
-    pub fn get_identifier(&mut self) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/settings/identifier", self.uid);
+    pub fn get_primary_key(&mut self) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/settings/primary_key", self.uid);
         self.get_request(&url)
     }
 
@@ -394,7 +397,7 @@ impl Server {
     pub fn populate_movies(&mut self) {
         let body = json!({
             "uid": "movies",
-            "identifier": "id",
+            "primaryKey": "id",
        });
        self.create_index(body);
 
@ -443,5 +446,4 @@ impl Server {
|
||||
|
||||
self.add_or_replace_multiple_documents(body);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -14,7 +14,6 @@ fn delete() {
|
||||
assert_eq!(status_code, 404);
|
||||
}
|
||||
|
||||
|
||||
// Resolve teh issue https://github.com/meilisearch/MeiliSearch/issues/493
|
||||
#[test]
|
||||
fn delete_batch() {
|
||||
@ -24,7 +23,7 @@ fn delete_batch() {
|
||||
let (_response, status_code) = server.get_document(419704);
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
let body = serde_json::json!([419704,512200,181812]);
|
||||
let body = serde_json::json!([419704, 512200, 181812]);
|
||||
server.delete_multiple_documents(body);
|
||||
|
||||
let (_response, status_code) = server.get_document(419704);
|
||||
|
@ -1,5 +1,5 @@
|
||||
use serde_json::json;
|
||||
use assert_json_diff::assert_json_eq;
|
||||
use serde_json::json;
|
||||
|
||||
mod common;
|
||||
|
||||
@ -8,31 +8,28 @@ fn create_index_with_name() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 1 - Create a new index
|
||||
// Index with only a name "movies"
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"name": "movies",
|
||||
});
|
||||
|
||||
let (res1_value, status_code) = server.create_index(body);
|
||||
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(res1_value.as_object().unwrap().len(), 5);
|
||||
let r1_name = res1_value["name"].as_str().unwrap();
|
||||
let r1_uid = res1_value["uid"].as_str().unwrap();
|
||||
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
|
||||
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r1_name, "movies");
|
||||
assert_eq!(r1_uid.len(), 8);
|
||||
assert!(r1_created_at.len() > 1);
|
||||
assert!(r1_updated_at.len() > 1);
|
||||
|
||||
// 2 - Check the list of indexes
|
||||
// Must have 1 index with the exact same content that the request 1
|
||||
// GET: /indexes
|
||||
|
||||
let (res2_value, status_code) = server.list_indexes();
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(res2_value.as_array().unwrap().len(), 1);
|
||||
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
|
||||
@ -40,7 +37,6 @@ fn create_index_with_name() {
|
||||
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
|
||||
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
|
||||
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r2_name, r1_name);
|
||||
assert_eq!(r2_uid.len(), r1_uid.len());
|
||||
assert_eq!(r2_created_at.len(), r1_created_at.len());
|
||||
@ -52,31 +48,28 @@ fn create_index_with_uid() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 1 - Create a new index
|
||||
// Index with only an uid "movies"
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"uid": "movies",
|
||||
});
|
||||
|
||||
let (res1_value, status_code) = server.create_index(body);
|
||||
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(res1_value.as_object().unwrap().len(), 5);
|
||||
let r1_name = res1_value["name"].as_str().unwrap();
|
||||
let r1_uid = res1_value["uid"].as_str().unwrap();
|
||||
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
|
||||
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r1_name, "movies");
|
||||
assert_eq!(r1_uid, "movies");
|
||||
assert!(r1_created_at.len() > 1);
|
||||
assert!(r1_updated_at.len() > 1);
|
||||
|
||||
// 2 - Check the list of indexes
|
||||
// Must have 1 index with the exact same content that the request 1
|
||||
// GET: /indexes
|
||||
|
||||
let (res2_value, status_code) = server.list_indexes();
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(res2_value.as_array().unwrap().len(), 1);
|
||||
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
|
||||
@ -84,7 +77,6 @@ fn create_index_with_uid() {
|
||||
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
|
||||
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
|
||||
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r2_name, r1_name);
|
||||
assert_eq!(r2_uid, r1_uid);
|
||||
assert_eq!(r2_created_at.len(), r1_created_at.len());
|
||||
@ -96,41 +88,35 @@ fn create_index_with_name_and_uid() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 1 - Create a new index
|
||||
// Index with a name "Films" and an uid "fn_movies"
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"name": "Films",
|
||||
"uid": "fr_movies",
|
||||
});
|
||||
let (res1_value, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 201);
|
||||
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(res1_value.as_object().unwrap().len(), 5);
|
||||
let r1_name = res1_value["name"].as_str().unwrap();
|
||||
let r1_uid = res1_value["uid"].as_str().unwrap();
|
||||
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
|
||||
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r1_name, "Films");
|
||||
assert_eq!(r1_uid, "fr_movies");
|
||||
assert!(r1_created_at.len() > 1);
|
||||
assert!(r1_updated_at.len() > 1);
|
||||
|
||||
// 2 - Check the list of indexes
|
||||
// Must have 1 index with the exact same content that the request 1
|
||||
// GET: /indexes
|
||||
|
||||
let (res2_value, status_code) = server.list_indexes();
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(res2_value.as_array().unwrap().len(), 1);
|
||||
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
|
||||
let r2_name = res2_value[0]["name"].as_str().unwrap();
|
||||
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
|
||||
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
|
||||
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r2_name, r1_name);
|
||||
assert_eq!(r2_uid, r1_uid);
|
||||
assert_eq!(r2_created_at.len(), r1_created_at.len());
|
||||
@ -140,9 +126,8 @@ fn create_index_with_name_and_uid() {
|
||||
#[test]
|
||||
fn rename_index() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 1 - Create a new index
|
||||
// Index with only a name "movies"
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"name": "movies",
|
||||
@ -150,55 +135,48 @@ fn rename_index() {
|
||||
});
|
||||
|
||||
let (res1_value, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 201);
|
||||
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(res1_value.as_object().unwrap().len(), 5);
|
||||
let r1_name = res1_value["name"].as_str().unwrap();
|
||||
let r1_uid = res1_value["uid"].as_str().unwrap();
|
||||
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
|
||||
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r1_name, "movies");
|
||||
assert_eq!(r1_uid.len(), 6);
|
||||
assert!(r1_created_at.len() > 1);
|
||||
assert!(r1_updated_at.len() > 1);
|
||||
|
||||
// 2 - Update an index name
|
||||
// Update "movies" to "TV Shows"
|
||||
// PUT: /indexes/:uid
|
||||
|
||||
let body = json!({
|
||||
"name": "TV Shows",
|
||||
});
|
||||
|
||||
let (res2_value, status_code) = server.update_index(body);
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(res2_value.as_object().unwrap().len(), 5);
|
||||
let r2_name = res2_value["name"].as_str().unwrap();
|
||||
let r2_uid = res2_value["uid"].as_str().unwrap();
|
||||
let r2_created_at = res2_value["createdAt"].as_str().unwrap();
|
||||
let r2_updated_at = res2_value["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r2_name, "TV Shows");
|
||||
assert_eq!(r2_uid, r1_uid);
|
||||
assert_eq!(r2_created_at, r1_created_at);
|
||||
assert!(r2_updated_at.len() > 1);
|
||||
|
||||
// 3 - Check the list of indexes
|
||||
// Must have 1 index with the exact same content that the request 2
|
||||
// GET: /indexes
|
||||
|
||||
let (res3_value, status_code) = server.list_indexes();
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(res3_value.as_array().unwrap().len(), 1);
|
||||
assert_eq!(res3_value[0].as_object().unwrap().len(), 5);
|
||||
let r3_name = res3_value[0]["name"].as_str().unwrap();
|
||||
let r3_uid = res3_value[0]["uid"].as_str().unwrap();
|
||||
let r3_created_at = res3_value[0]["createdAt"].as_str().unwrap();
|
||||
let r3_updated_at = res3_value[0]["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r3_name, r2_name);
|
||||
assert_eq!(r3_uid.len(), r1_uid.len());
|
||||
assert_eq!(r3_created_at.len(), r1_created_at.len());
|
||||
@ -210,8 +188,6 @@ fn delete_index_and_recreate_it() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 1 - Create a new index
|
||||
// Index with only a name "movies"
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"name": "movies",
|
||||
@ -219,90 +195,76 @@ fn delete_index_and_recreate_it() {
|
||||
});
|
||||
|
||||
let (res1_value, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 201);
|
||||
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(res1_value.as_object().unwrap().len(), 5);
|
||||
let r1_name = res1_value["name"].as_str().unwrap();
|
||||
let r1_uid = res1_value["uid"].as_str().unwrap();
|
||||
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
|
||||
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r1_name, "movies");
|
||||
assert_eq!(r1_uid.len(), 6);
|
||||
assert!(r1_created_at.len() > 1);
|
||||
assert!(r1_updated_at.len() > 1);
|
||||
|
||||
// 2 - Check the list of indexes
|
||||
// Must have 1 index with the exact same content that the request 1
|
||||
// GET: /indexes
|
||||
|
||||
let (res2_value, status_code) = server.list_indexes();
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(res2_value.as_array().unwrap().len(), 1);
|
||||
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
|
||||
let r2_name = res2_value[0]["name"].as_str().unwrap();
|
||||
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
|
||||
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
|
||||
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r2_name, r1_name);
|
||||
assert_eq!(r2_uid.len(), r1_uid.len());
|
||||
assert_eq!(r2_created_at.len(), r1_created_at.len());
|
||||
assert_eq!(r2_updated_at.len(), r1_updated_at.len());
|
||||
|
||||
// 3- Delete an index
|
||||
// Update "movies" to "TV Shows"
|
||||
// DELETE: /indexes/:uid
|
||||
|
||||
let (_res2_value, status_code) = server.delete_index();
|
||||
|
||||
assert_eq!(status_code, 204);
|
||||
|
||||
// 4 - Check the list of indexes
|
||||
// Must have 0 index
|
||||
// GET: /indexes
|
||||
|
||||
let (res2_value, status_code) = server.list_indexes();
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(res2_value.as_array().unwrap().len(), 0);
|
||||
|
||||
// 5 - Create a new index
|
||||
// Index with only a name "movies"
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"name": "movies",
|
||||
});
|
||||
|
||||
let (res1_value, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 201);
|
||||
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(res1_value.as_object().unwrap().len(), 5);
|
||||
let r1_name = res1_value["name"].as_str().unwrap();
|
||||
let r1_uid = res1_value["uid"].as_str().unwrap();
|
||||
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
|
||||
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r1_name, "movies");
|
||||
assert_eq!(r1_uid.len(), 8);
|
||||
assert!(r1_created_at.len() > 1);
|
||||
assert!(r1_updated_at.len() > 1);
|
||||
|
||||
// 6 - Check the list of indexes
|
||||
// Must have 1 index with the exact same content that the request 1
|
||||
// GET: /indexes
|
||||
|
||||
let (res2_value, status_code) = server.list_indexes();
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_eq!(res2_value.as_array().unwrap().len(), 1);
|
||||
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
|
||||
let r2_name = res2_value[0]["name"].as_str().unwrap();
|
||||
let r2_uid = res2_value[0]["uid"].as_str().unwrap();
|
||||
let r2_created_at = res2_value[0]["createdAt"].as_str().unwrap();
|
||||
let r2_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r2_name, r1_name);
|
||||
assert_eq!(r2_uid.len(), r1_uid.len());
|
||||
assert_eq!(r2_created_at.len(), r1_created_at.len());
|
||||
@ -314,89 +276,75 @@ fn check_multiples_indexes() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 1 - Create a new index
|
||||
// Index with only a name "movies"
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"name": "movies",
|
||||
});
|
||||
|
||||
let (res1_value, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 201);
|
||||
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(res1_value.as_object().unwrap().len(), 5);
|
||||
let r1_name = res1_value["name"].as_str().unwrap();
|
||||
let r1_uid = res1_value["uid"].as_str().unwrap();
|
||||
let r1_created_at = res1_value["createdAt"].as_str().unwrap();
|
||||
let r1_updated_at = res1_value["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r1_name, "movies");
|
||||
assert_eq!(r1_uid.len(), 8);
|
||||
assert!(r1_created_at.len() > 1);
|
||||
assert!(r1_updated_at.len() > 1);
|
||||
|
||||
// 2 - Check the list of indexes
|
||||
// Must have 1 index with the exact same content that the request 1
|
||||
// GET: /indexes
|
||||
|
||||
let (res2_value, status_code) = server.list_indexes();
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
assert_eq!(res2_value.as_array().unwrap().len(), 1);
|
||||
assert_eq!(res2_value[0].as_object().unwrap().len(), 5);
|
||||
let r2_0_name = res2_value[0]["name"].as_str().unwrap();
|
||||
let r2_0_uid = res2_value[0]["uid"].as_str().unwrap();
|
||||
let r2_0_created_at = res2_value[0]["createdAt"].as_str().unwrap();
|
||||
let r2_0_updated_at = res2_value[0]["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r2_0_name, r1_name);
|
||||
assert_eq!(r2_0_uid.len(), r1_uid.len());
|
||||
assert_eq!(r2_0_created_at.len(), r1_created_at.len());
|
||||
assert_eq!(r2_0_updated_at.len(), r1_updated_at.len());
|
||||
|
||||
// 3 - Create a new index
|
||||
// Index with only a name "films"
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"name": "films",
|
||||
});
|
||||
|
||||
let (res3_value, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 201);
|
||||
|
||||
assert_eq!(status_code, 201);
|
||||
assert_eq!(res3_value.as_object().unwrap().len(), 5);
|
||||
let r3_name = res3_value["name"].as_str().unwrap();
|
||||
let r3_uid = res3_value["uid"].as_str().unwrap();
|
||||
let r3_created_at = res3_value["createdAt"].as_str().unwrap();
|
||||
let r3_updated_at = res3_value["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(r3_name, "films");
|
||||
assert_eq!(r3_uid.len(), 8);
|
||||
assert!(r3_created_at.len() > 1);
|
||||
assert!(r3_updated_at.len() > 1);
|
||||
|
||||
// 4 - Check the list of indexes
|
||||
// Must have 2 index with the exact same content that the request 1 and 3
|
||||
// GET: /indexes
|
||||
|
||||
let (res4_value, status_code) = server.list_indexes();
|
||||
|
||||
assert_eq!(status_code, 200);
|
||||
|
||||
assert_eq!(res4_value.as_array().unwrap().len(), 2);
|
||||
|
||||
assert_eq!(res4_value[0].as_object().unwrap().len(), 5);
|
||||
let r4_0_name = res4_value[0]["name"].as_str().unwrap();
|
||||
let r4_0_uid = res4_value[0]["uid"].as_str().unwrap();
|
||||
let r4_0_created_at = res4_value[0]["createdAt"].as_str().unwrap();
|
||||
let r4_0_updated_at = res4_value[0]["updatedAt"].as_str().unwrap();
|
||||
|
||||
assert_eq!(res4_value[1].as_object().unwrap().len(), 5);
|
||||
let r4_1_name = res4_value[1]["name"].as_str().unwrap();
|
||||
let r4_1_uid = res4_value[1]["uid"].as_str().unwrap();
|
||||
let r4_1_created_at = res4_value[1]["createdAt"].as_str().unwrap();
|
||||
let r4_1_updated_at = res4_value[1]["updatedAt"].as_str().unwrap();
|
||||
|
||||
if r4_0_name == r1_name {
|
||||
assert_eq!(r4_0_name, r1_name);
|
||||
assert_eq!(r4_0_uid.len(), r1_uid.len());
|
||||
@ -408,7 +356,6 @@ fn check_multiples_indexes() {
|
||||
assert_eq!(r4_0_created_at.len(), r3_created_at.len());
|
||||
assert_eq!(r4_0_updated_at.len(), r3_updated_at.len());
|
||||
}
|
||||
|
||||
if r4_1_name == r1_name {
|
||||
assert_eq!(r4_1_name, r1_name);
|
||||
assert_eq!(r4_1_uid.len(), r1_uid.len());
|
||||
@ -427,19 +374,17 @@ fn create_index_failed() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 2 - Push index creation with empty json body
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({});
|
||||
|
||||
let (res_value, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
assert_eq!(status_code, 400);
|
||||
let message = res_value["message"].as_str().unwrap();
|
||||
assert_eq!(res_value.as_object().unwrap().len(), 1);
|
||||
assert_eq!(message, "Index creation must have an uid");
|
||||
|
||||
// 3 - Create a index with extra data
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"name": "movies",
|
||||
@ -447,14 +392,13 @@ fn create_index_failed() {
|
||||
});
|
||||
|
||||
let (res_value, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
assert_eq!(status_code, 400);
|
||||
let message = res_value["message"].as_str().unwrap();
|
||||
assert_eq!(res_value.as_object().unwrap().len(), 1);
|
||||
assert_eq!(message, "invalid data");
|
||||
|
||||
// 3 - Create a index with wrong data type
|
||||
// POST: /indexes
|
||||
|
||||
let body = json!({
|
||||
"name": "movies",
|
||||
@ -462,28 +406,30 @@ fn create_index_failed() {
|
||||
});
|
||||
|
||||
let (res_value, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 400);
|
||||
|
||||
assert_eq!(status_code, 400);
|
||||
let message = res_value["message"].as_str().unwrap();
|
||||
assert_eq!(res_value.as_object().unwrap().len(), 1);
|
||||
assert_eq!(message, "invalid data");
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Resolve issue https://github.com/meilisearch/MeiliSearch/issues/492
|
||||
#[test]
|
||||
fn create_index_with_identifier_and_index() {
|
||||
fn create_index_with_primary_key_and_index() {
|
||||
let mut server = common::Server::with_uid("movies");
|
||||
|
||||
// 1 - Create the index
|
||||
|
||||
let body = json!({
|
||||
"uid": "movies",
|
||||
"identifier": "id",
|
||||
"primaryKey": "id",
|
||||
});
|
||||
|
||||
let (_response, status_code) = server.create_index(body);
|
||||
assert_eq!(status_code, 201);
|
||||
|
||||
// 2 - Add content
|
||||
|
||||
let body = json!([{
|
||||
"id": 123,
|
||||
"text": "The mask"
|
||||
@ -491,6 +437,8 @@ fn create_index_with_identifier_and_index() {
|
||||
|
||||
server.add_or_replace_multiple_documents(body.clone());
|
||||
|
||||
// 3 - Retreive document
|
||||
|
||||
let (response, _status_code) = server.get_document(123);
|
||||
|
||||
let expect = json!({
|
||||
@ -502,51 +450,178 @@ fn create_index_with_identifier_and_index() {
|
||||
}
|
||||
|
||||
// Resolve issue https://github.com/meilisearch/MeiliSearch/issues/497
// Test when the given index uid is not valid
// Should have a 400 status code
// Should have the right error message
#[test]
fn create_index_with_invalid_uid() {
    let mut server = common::Server::with_uid("");

    // 1 - Create the index with invalid uid

    let body = json!({
        "uid": "the movies"
    });

    let (response, status_code) = server.create_index(body);
    assert_eq!(status_code, 400);

    assert_eq!(status_code, 400);
    let message = response["message"].as_str().unwrap();
    assert_eq!(response.as_object().unwrap().len(), 1);
    assert_eq!(message, "Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).");

    // 2 - Create the index with invalid uid

    let body = json!({
        "uid": "%$#"
    });

    let (response, status_code) = server.create_index(body);
    assert_eq!(status_code, 400);

    assert_eq!(status_code, 400);
    let message = response["message"].as_str().unwrap();
    assert_eq!(response.as_object().unwrap().len(), 1);
    assert_eq!(message, "Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).");

    // 3 - Create the index with invalid uid

    let body = json!({
        "uid": "the~movies"
    });

    let (response, status_code) = server.create_index(body);
    assert_eq!(status_code, 400);

    assert_eq!(status_code, 400);
    let message = response["message"].as_str().unwrap();
    assert_eq!(response.as_object().unwrap().len(), 1);
    assert_eq!(message, "Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).");

    // 4 - Create the index with invalid uid

    let body = json!({
        "uid": "🎉"
    });

    let (response, status_code) = server.create_index(body);
    assert_eq!(status_code, 400);

    assert_eq!(status_code, 400);
    let message = response["message"].as_str().unwrap();
    assert_eq!(response.as_object().unwrap().len(), 1);
    assert_eq!(message, "Index must have a valid uid; Index uid can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).");
}
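
The error message asserted above spells out the accepted uid format. A minimal sketch of that rule, using a hypothetical `is_valid_uid` helper that is not part of the MeiliSearch codebase:

// Sketch only: accept non-empty uids made of alphanumeric characters,
// hyphens (-) and underscores (_), as described by the error message above.
fn is_valid_uid(uid: &str) -> bool {
    !uid.is_empty()
        && uid
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
}

#[test]
fn uid_rule_sketch() {
    assert!(is_valid_uid("movies"));
    assert!(is_valid_uid("movies_2020"));
    assert!(!is_valid_uid("the movies")); // whitespace is rejected
    assert!(!is_valid_uid("the~movies")); // punctuation other than - and _ is rejected
    assert!(!is_valid_uid("🎉"));
}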

// Test that it's possible to add a primary_key if it's not already set on index creation
#[test]
fn create_index_and_add_indentifier_after() {
    let mut server = common::Server::with_uid("movies");

    // 1 - Create the index with no primary_key

    let body = json!({
        "uid": "movies",
    });
    let (response, status_code) = server.create_index(body);
    assert_eq!(status_code, 201);
    assert_eq!(response["primaryKey"], json!(null));

    // 2 - Update the index and add a primary_key.

    let body = json!({
        "primaryKey": "id",
    });

    let (response, status_code) = server.update_index(body);
    assert_eq!(status_code, 200);
    eprintln!("response: {:#?}", response);
    assert_eq!(response["primaryKey"].as_str().unwrap(), "id");

    // 3 - Get the index to verify that the primary_key is set

    let (response, status_code) = server.get_index();
    assert_eq!(status_code, 200);
    assert_eq!(response["primaryKey"].as_str().unwrap(), "id");
}

// Test that it's impossible to change the primary_key
#[test]
fn create_index_and_update_indentifier_after() {
    let mut server = common::Server::with_uid("movies");

    // 1 - Create the index with a primary_key

    let body = json!({
        "uid": "movies",
        "primaryKey": "id",
    });
    let (response, status_code) = server.create_index(body);
    assert_eq!(status_code, 201);
    assert_eq!(response["primaryKey"].as_str().unwrap(), "id");

    // 2 - Update the index and try to change the primary_key.

    let body = json!({
        "primaryKey": "skuid",
    });

    let (_response, status_code) = server.update_index(body);
    assert_eq!(status_code, 400);

    // 3 - Get the index to verify that the primary_key is still the first one

    let (response, status_code) = server.get_index();
    assert_eq!(status_code, 200);
    assert_eq!(response["primaryKey"].as_str().unwrap(), "id");
}

// Test that schema inference works well
#[test]
fn create_index_without_primary_key_and_add_document() {
    let mut server = common::Server::with_uid("movies");

    // 1 - Create the index with no primary_key

    let body = json!({
        "uid": "movies",
    });
    let (response, status_code) = server.create_index(body);
    assert_eq!(status_code, 201);
    assert_eq!(response["primaryKey"], json!(null));

    // 2 - Add a document

    let body = json!([{
        "id": 123,
        "title": "I'm a legend",
    }]);

    server.add_or_update_multiple_documents(body);

    // 3 - Get the index to verify that the primary_key was inferred

    let (response, status_code) = server.get_index();
    assert_eq!(status_code, 200);
    assert_eq!(response["primaryKey"].as_str().unwrap(), "id");
}
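
The test above relies on the engine inferring a primary key from the first document when none was set. A rough sketch of that kind of inference; the `infer_primary_key` helper and the exact matching rule are assumptions for illustration, not MeiliSearch's actual implementation:

use serde_json::{json, Value};

// Assumed rule: pick the first attribute whose name contains "id" (case-insensitive).
fn infer_primary_key(document: &Value) -> Option<String> {
    document
        .as_object()?
        .keys()
        .find(|key| key.to_lowercase().contains("id"))
        .cloned()
}

#[test]
fn inference_sketch() {
    let doc = json!({ "id": 123, "title": "I'm a legend" });
    // With the document used by the test above, the inferred key would be "id".
    assert_eq!(infer_primary_key(&doc), Some("id".to_string()));
}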

// Test search with no primary_key
#[test]
fn create_index_without_primary_key_and_search() {
    let mut server = common::Server::with_uid("movies");

    // 1 - Create the index with no primary_key

    let body = json!({
        "uid": "movies",
    });
    let (response, status_code) = server.create_index(body);
    assert_eq!(status_code, 201);
    assert_eq!(response["primaryKey"], json!(null));

    // 2 - Search

    let query = "q=captain&limit=3";

    let (response, status_code) = server.search(&query);
    assert_eq!(status_code, 200);
    assert_eq!(response["hits"].as_array().unwrap().len(), 0);
}

@ -1,6 +1,6 @@
use std::convert::Into;
use serde_json::json;
use assert_json_diff::assert_json_eq;
use serde_json::json;
use std::convert::Into;

mod common;

@ -644,7 +644,7 @@ fn search_with_settings_basic() {
        "desc(vote_average)"
    ],
    "distinctAttribute": null,
    "identifier": "id",
    "primaryKey": "id",
    "searchableAttributes": [
        "title",
        "tagline",
@ -751,7 +751,7 @@ fn search_with_settings_stop_words() {
        "desc(vote_average)"
    ],
    "distinctAttribute": null,
    "identifier": "id",
    "primaryKey": "id",
    "searchableAttributes": [
        "title",
        "tagline",
@ -858,7 +858,7 @@ fn search_with_settings_synonyms() {
        "desc(vote_average)"
    ],
    "distinctAttribute": null,
    "identifier": "id",
    "primaryKey": "id",
    "searchableAttributes": [
        "title",
        "tagline",
@ -970,7 +970,7 @@ fn search_with_settings_ranking_rules() {
        "desc(popularity)"
    ],
    "distinctAttribute": null,
    "identifier": "id",
    "primaryKey": "id",
    "searchableAttributes": [
        "title",
        "tagline",
@ -1077,7 +1077,7 @@ fn search_with_settings_searchable_attributes() {
        "desc(vote_average)"
    ],
    "distinctAttribute": null,
    "identifier": "id",
    "primaryKey": "id",
    "searchableAttributes": [
        "tagline",
        "overview",
@ -1183,7 +1183,7 @@ fn search_with_settings_displayed_attributes() {
        "desc(vote_average)"
    ],
    "distinctAttribute": null,
    "identifier": "id",
    "primaryKey": "id",
    "searchableAttributes": [
        "title",
        "tagline",
@ -1254,7 +1254,7 @@ fn search_with_settings_searchable_attributes_2() {
        "desc(vote_average)"
    ],
    "distinctAttribute": null,
    "identifier": "id",
    "primaryKey": "id",
    "searchableAttributes": [
        "tagline",
        "overview",

@ -1,6 +1,6 @@
use std::convert::Into;
use assert_json_diff::assert_json_eq;
use serde_json::json;
use std::convert::Into;

mod common;

@ -252,3 +252,72 @@ fn write_all_and_update() {

    assert_json_eq!(expected, response, ordered: false);
}

#[test]
fn test_default_settings() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
    });
    server.create_index(body);

    // 1 - Get all settings and compare them to the expected defaults

    let body = json!({
        "rankingRules": [
            "typo",
            "words",
            "proximity",
            "attribute",
            "wordsPosition",
            "exactness"
        ],
        "distinctAttribute": null,
        "searchableAttributes": [],
        "displayedAttributes": [],
        "stopWords": [],
        "synonyms": {},
        "acceptNewFields": true,
    });

    let (response, _status_code) = server.get_all_settings();

    assert_json_eq!(body, response, ordered: false);
}

#[test]
fn test_default_settings_2() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
        "primaryKey": "id",
    });
    server.create_index(body);

    // 1 - Get all settings and compare them to the expected defaults

    let body = json!({
        "rankingRules": [
            "typo",
            "words",
            "proximity",
            "attribute",
            "wordsPosition",
            "exactness"
        ],
        "distinctAttribute": null,
        "searchableAttributes": [
            "id"
        ],
        "displayedAttributes": [
            "id"
        ],
        "stopWords": [],
        "synonyms": {},
        "acceptNewFields": true,
    });

    let (response, _status_code) = server.get_all_settings();

    assert_json_eq!(body, response, ordered: false);
}

@ -8,7 +8,7 @@ fn index_new_fields_default() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
        "identifier": "id",
        "primaryKey": "id",
    });
    server.create_index(body);

@ -60,7 +60,7 @@ fn index_new_fields_true() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
        "identifier": "id",
        "primaryKey": "id",
    });
    server.create_index(body);

@ -116,7 +116,7 @@ fn index_new_fields_false() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
        "identifier": "id",
        "primaryKey": "id",
    });
    server.create_index(body);

@ -169,7 +169,7 @@ fn index_new_fields_true_then_false() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
        "identifier": "id",
        "primaryKey": "id",
    });
    server.create_index(body);

@ -228,7 +228,7 @@ fn index_new_fields_false_then_true() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
        "identifier": "id",
        "primaryKey": "id",
    });
    server.create_index(body);

@ -111,13 +111,11 @@ fn send_undefined_rule() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
        "identifier": "id",
        "primaryKey": "id",
    });
    server.create_index(body);

    let body = json!([
        "typos",
    ]);
    let body = json!(["typos",]);

    let (_response, status_code) = server.update_ranking_rules_sync(body);
    assert_eq!(status_code, 400);
@ -128,13 +126,11 @@ fn send_malformed_custom_rule() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
        "identifier": "id",
        "primaryKey": "id",
    });
    server.create_index(body);

    let body = json!([
        "dsc(truc)",
    ]);
    let body = json!(["dsc(truc)",]);

    let (_response, status_code) = server.update_ranking_rules_sync(body);
    assert_eq!(status_code, 400);
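
These two tests expect a 400 both for an unknown built-in rule ("typos") and for a malformed custom rule ("dsc(truc)"). A rough sketch of the kind of check they imply; `is_valid_ranking_rule` is an assumed helper for illustration, not the project's actual parser:

// Sketch: accept the six built-in rules from the default settings above,
// plus custom rules of the form asc(field) or desc(field).
fn is_valid_ranking_rule(rule: &str) -> bool {
    let built_ins = ["typo", "words", "proximity", "attribute", "wordsPosition", "exactness"];
    if built_ins.contains(&rule) {
        return true;
    }
    for prefix in ["asc(", "desc("] {
        if let Some(rest) = rule.strip_prefix(prefix) {
            return rest.ends_with(')') && rest.len() > 1;
        }
    }
    false
}

// "desc(vote_average)" passes this check; "typos" and "dsc(truc)" do not,
// which is why the updates above are rejected with 400.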

@ -8,7 +8,7 @@ fn update_stop_words() {
    let mut server = common::Server::with_uid("movies");
    let body = json!({
        "uid": "movies",
        "identifier": "id",
        "primaryKey": "id",
    });
    server.create_index(body);

@ -19,10 +19,7 @@ fn update_stop_words() {

    // 2 - Update stop words

    let body = json!([
        "the",
        "a"
    ]);
    let body = json!(["the", "a"]);
    server.update_stop_words(body.clone());

    // 3 - Get all stop words and compare to the previous one

@ -6,6 +6,7 @@ pub type SResult<T> = Result<T, Error>;
#[derive(Debug)]
pub enum Error {
    FieldNameNotFound(String),
    PrimaryKeyAlreadyPresent,
    MaxFieldsLimitExceeded,
}

@ -14,6 +15,7 @@ impl fmt::Display for Error {
        use self::Error::*;
        match self {
            FieldNameNotFound(field) => write!(f, "The field {:?} doesn't exist", field),
            PrimaryKeyAlreadyPresent => write!(f, "The schema already has a primary key. It's impossible to update it"),
            MaxFieldsLimitExceeded => write!(f, "The maximum number of possible reattributed field ids has been reached"),
        }
    }

@ -6,7 +6,7 @@ use std::collections::{HashMap, HashSet};
pub struct Schema {
    fields_map: FieldsMap,

    identifier: FieldId,
    primary_key: Option<FieldId>,
    ranked: HashSet<FieldId>,
    displayed: HashSet<FieldId>,

@ -17,7 +17,19 @@ pub struct Schema {
}

impl Schema {
    pub fn with_identifier(name: &str) -> Schema {
    pub fn new() -> Schema {
        Schema {
            fields_map: FieldsMap::default(),
            primary_key: None,
            ranked: HashSet::new(),
            displayed: HashSet::new(),
            indexed: Vec::new(),
            indexed_map: HashMap::new(),
            accept_new_fields: true,
        }
    }

    pub fn with_primary_key(name: &str) -> Schema {
        let mut fields_map = FieldsMap::default();
        let field_id = fields_map.insert(name).unwrap();

@ -31,7 +43,7 @@ impl Schema {

        Schema {
            fields_map,
            identifier: field_id,
            primary_key: Some(field_id),
            ranked: HashSet::new(),
            displayed,
            indexed,
@ -40,18 +52,21 @@ impl Schema {
        }
    }

    pub fn identifier(&self) -> &str {
        self.fields_map.name(self.identifier).unwrap()
    pub fn primary_key(&self) -> Option<&str> {
        self.primary_key.map(|id| self.fields_map.name(id).unwrap())
    }

    pub fn set_identifier(&mut self, id: &str) -> SResult<()> {
        match self.id(id) {
            Some(id) => {
                self.identifier = id;
                Ok(())
            },
            None => Err(Error::FieldNameNotFound(id.to_string()))
    pub fn set_primary_key(&mut self, name: &str) -> SResult<FieldId> {
        if self.primary_key.is_some() {
            return Err(Error::PrimaryKeyAlreadyPresent)
        }

        let id = self.insert(name)?;
        self.primary_key = Some(id);
        self.set_indexed(name)?;
        self.set_displayed(name)?;

        Ok(id)
    }

    pub fn id(&self, name: &str) -> Option<FieldId> {
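
Taken together, the new Schema API above can be exercised roughly like this. This is a minimal sketch based only on the signatures shown in this diff; the surrounding error handling and the FieldsMap internals are elided:

use meilisearch_schema::Schema;

fn primary_key_sketch() {
    // A schema created empty has no primary key yet.
    let mut schema = Schema::new();
    assert_eq!(schema.primary_key(), None);

    // Setting it once registers the field and also marks it as indexed and displayed.
    schema.set_primary_key("id").unwrap();
    assert_eq!(schema.primary_key(), Some("id"));

    // Setting it a second time is rejected with Error::PrimaryKeyAlreadyPresent,
    // which is what the HTTP tests above observe as a 400 response.
    assert!(schema.set_primary_key("skuid").is_err());

    // Alternatively, the schema can be created with the primary key up front.
    let schema = Schema::with_primary_key("id");
    assert_eq!(schema.primary_key(), Some("id"));
}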