Mirror of https://github.com/meilisearch/MeiliSearch, synced 2024-11-26 06:44:27 +01:00
fix comments from review

commit ef3bcd65ab
parent b06e33f3d3
@@ -87,7 +87,7 @@ impl fmt::Display for Error {
         match self {
             Io(e) => write!(f, "{}", e),
             IndexAlreadyExists => write!(f, "index already exists"),
-            MissingPrimaryKey => write!(f, "schema cannot be built without primary key"),
+            MissingPrimaryKey => write!(f, "schema cannot be built without a primary key"),
             SchemaMissing => write!(f, "this index does not have a schema"),
             WordIndexMissing => write!(f, "this index does not have a word index"),
             MissingDocumentId => write!(f, "document id is missing"),
@@ -57,7 +57,7 @@ impl fmt::Display for SerializerError {
                 f.write_str("serialized document does not have an id according to the schema")
             }
             SerializerError::InvalidDocumentIdType => {
-                f.write_str("documents primary keys can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).")
+                f.write_str("a document primary key can be of type integer or string only composed of alphanumeric characters, hyphens (-) and underscores (_).")
             }
             SerializerError::Zlmdb(e) => write!(f, "heed related error: {}", e),
             SerializerError::SerdeJson(e) => write!(f, "serde json error: {}", e),
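For context, the reworded message states the primary-key constraint: an integer, or a string composed only of alphanumeric characters, hyphens (-) and underscores (_). A minimal sketch of that string check, with a hypothetical helper name rather than MeiliSearch's actual validation code:

// Hypothetical validator mirroring the constraint described in the message above;
// the real check in MeiliSearch may differ (e.g. in how non-ASCII characters are treated).
fn is_valid_string_primary_key(id: &str) -> bool {
    !id.is_empty()
        && id
            .chars()
            .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
}

fn main() {
    assert!(is_valid_string_primary_key("movie_287947"));
    assert!(!is_valid_string_primary_key("movie 287947")); // whitespace is rejected
}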
@@ -149,11 +149,10 @@ async fn update_multiple_documents(mut ctx: Request<Data>, is_partial: bool) ->
             },
         };
 
-        if schema.set_primary_key(&id).is_ok() {
-            let mut writer = db.main_write_txn()?;
-            index.main.put_schema(&mut writer, &schema)?;
-            writer.commit()?;
-        }
+        let mut writer = db.main_write_txn()?;
+        schema.set_primary_key(&id).map_err(ResponseError::bad_request)?;
+        index.main.put_schema(&mut writer, &schema)?;
+        writer.commit()?;
     }
 
     let mut document_addition = if is_partial {
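The hunk above replaces a silent `is_ok()` guard with explicit propagation: a failing `set_primary_key` is now converted into a bad-request error and returned with `?`, instead of quietly skipping the schema write while the request still succeeds. A minimal sketch of that pattern, using hypothetical types in place of MeiliSearch's `Schema` and `ResponseError`:

#[derive(Debug)]
struct BadRequest(String);

// Hypothetical stand-in for Schema::set_primary_key.
fn set_primary_key(id: &str) -> Result<(), String> {
    if id.is_empty() {
        return Err("primary key cannot be empty".to_string());
    }
    Ok(())
}

// Before: a failure was swallowed and the schema was simply never written.
fn apply_before(id: &str) {
    if set_primary_key(id).is_ok() {
        // ... put_schema, commit ...
    }
}

// After: the failure becomes an explicit error the handler returns to the client.
fn apply_after(id: &str) -> Result<(), BadRequest> {
    set_primary_key(id).map_err(BadRequest)?;
    // ... put_schema, commit ...
    Ok(())
}

The same explicit propagation is applied in `create_index` and `update_index` in the hunks below.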
@@ -180,9 +180,8 @@ pub async fn create_index(mut ctx: Request<Data>) -> SResult<Response> {
 
     if let Some(id) = body.primary_key.clone() {
         if let Some(mut schema) = created_index.main.schema(&mut writer)? {
-            if let Ok(_) = schema.set_primary_key(&id) {
-                created_index.main.put_schema(&mut writer, &schema)?;
-            }
+            schema.set_primary_key(&id).map_err(ResponseError::bad_request)?;
+            created_index.main.put_schema(&mut writer, &schema)?;
         }
     }
 
@@ -239,13 +238,14 @@ pub async fn update_index(mut ctx: Request<Data>) -> SResult<Response> {
             match schema.primary_key() {
                 Some(_) => {
                     return Err(ResponseError::bad_request(
-                        "The index primary key cannot be updated",
+                        "The primary key cannot be updated",
                     ));
                 }
                 None => {
-                    if let Ok(_) = schema.set_primary_key(&id) {
-                        index.main.put_schema(&mut writer, &schema)?;
-                    }
+                    schema
+                        .set_primary_key(&id)
+                        .map_err(ResponseError::bad_request)?;
+                    index.main.put_schema(&mut writer, &schema)?;
                 }
             }
         }
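The `update_index` handler keeps its branching: an index that already has a primary key rejects the update outright, and setting a new key now propagates any schema error instead of ignoring it. A compact sketch of that control flow, again with hypothetical stand-ins for `Schema` and `ResponseError`:

#[derive(Debug)]
struct BadRequest(String);

// Hypothetical stand-in for MeiliSearch's Schema.
struct Schema {
    primary_key: Option<String>,
}

impl Schema {
    fn primary_key(&self) -> Option<&str> {
        self.primary_key.as_deref()
    }

    fn set_primary_key(&mut self, id: &str) -> Result<(), String> {
        if self.primary_key.is_some() {
            return Err("primary key already present".to_string());
        }
        self.primary_key = Some(id.to_string());
        Ok(())
    }
}

fn update_primary_key(schema: &mut Schema, id: &str) -> Result<(), BadRequest> {
    match schema.primary_key() {
        // An existing primary key can never be changed.
        Some(_) => Err(BadRequest("The primary key cannot be updated".to_string())),
        // Otherwise set it, turning a schema error into a bad-request error.
        None => schema.set_primary_key(id).map_err(BadRequest),
    }
}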
@@ -45,29 +45,26 @@ pub async fn get_all(ctx: Request<Data>) -> SResult<Response> {
     let schema = index.main.schema(&reader)?;
 
     let searchable_attributes = schema.clone().map(|s| {
-        let attrs = s
-            .indexed_name()
+        s.indexed_name()
             .iter()
             .map(|s| (*s).to_string())
-            .collect::<Vec<String>>();
-        Some(attrs)
+            .collect::<Vec<String>>()
     });
 
     let displayed_attributes = schema.clone().map(|s| {
-        let attrs = s
-            .displayed_name()
+        s.displayed_name()
             .iter()
             .map(|s| (*s).to_string())
-            .collect::<HashSet<String>>();
-        Some(attrs)
+            .collect::<HashSet<String>>()
     });
 
     let accept_new_fields = schema.map(|s| s.accept_new_fields());
 
     let settings = Settings {
         ranking_rules: Some(Some(ranking_rules)),
         distinct_attribute: Some(distinct_attribute),
-        searchable_attributes,
-        displayed_attributes,
+        searchable_attributes: Some(searchable_attributes),
+        displayed_attributes: Some(displayed_attributes),
         stop_words: Some(Some(stop_words)),
         synonyms: Some(Some(synonyms)),
         accept_new_fields: Some(accept_new_fields),
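The closure change in `get_all` is about the resulting types: returning the collected value directly from the closure makes `schema.clone().map(...)` yield an `Option<Vec<String>>` (or `Option<HashSet<String>>`), and the extra `Some(...)` is applied once at the `Settings` field, which expects the doubly-optional shape used by the other fields such as `stop_words`. A small self-contained sketch of that shape, with a hypothetical `Schema` stand-in:

use std::collections::HashSet;

// Hypothetical stand-in for the index schema.
struct Schema {
    indexed: Vec<&'static str>,
    displayed: Vec<&'static str>,
}

impl Schema {
    fn indexed_name(&self) -> &[&'static str] {
        &self.indexed
    }
    fn displayed_name(&self) -> &[&'static str] {
        &self.displayed
    }
}

fn main() {
    // None when the index has no schema yet.
    let schema: Option<Schema> = Some(Schema {
        indexed: vec!["title", "overview"],
        displayed: vec!["title"],
    });

    // Option<Vec<String>>: the closure returns the collection directly.
    let searchable_attributes = schema.as_ref().map(|s| {
        s.indexed_name()
            .iter()
            .map(|s| (*s).to_string())
            .collect::<Vec<String>>()
    });

    // Option<HashSet<String>> for the displayed attributes.
    let displayed_attributes = schema.as_ref().map(|s| {
        s.displayed_name()
            .iter()
            .map(|s| (*s).to_string())
            .collect::<HashSet<String>>()
    });

    // Wrapping once at the field site yields the doubly-optional value,
    // e.g. Option<Option<Vec<String>>> for a Settings-like struct.
    let searchable_field = Some(searchable_attributes);
    let displayed_field = Some(displayed_attributes);
    println!("{:?} {:?}", searchable_field, displayed_field);
}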