763: Fixes error message when lat and lng are unparseable r=loiclec a=ahlner

# Pull Request

## Related issue
Partially fixes [#3007](https://github.com/meilisearch/meilisearch/issues/3007)

## What does this PR do?
- Changes `validate_geo_from_json` to return a `BadLatitudeAndLongitude` error if `lat` or `lng` is a string that cannot be parsed as an `f64` (sketched below)
- Implements some unit tests
- Derives `PartialEq` for `GeoError` so `assert_eq!` can be used in tests
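
A rough, self-contained sketch of the behaviour described above (hypothetical, simplified types rather than milli's actual `GeoError`/`DocumentId` plumbing; the sketch collapses every failure into one variant, while the new test only pins down the case where both coordinates are unparseable strings):

```rust
use serde_json::Value;

// Hypothetical, simplified error type for this sketch only; the real code uses
// the `GeoError::BadLatitudeAndLongitude { .. }` variant asserted in the new test.
#[derive(Debug, PartialEq)]
enum CoordError {
    BadLatitudeAndLongitude { lat: String, lng: String },
}

// A coordinate is accepted if it is a JSON number, or a string that parses to a finite f64.
fn parse_coord(value: &Value) -> Option<f64> {
    match value {
        Value::Number(n) => n.as_f64(),
        Value::String(s) => s.parse::<f64>().ok().filter(|f| f.is_finite()),
        _ => None,
    }
}

// If either coordinate cannot be parsed, return an error up front.
fn validate_geo(lat: &Value, lng: &Value) -> Result<(f64, f64), CoordError> {
    match (parse_coord(lat), parse_coord(lng)) {
        (Some(lat), Some(lng)) => Ok((lat, lng)),
        _ => Err(CoordError::BadLatitudeAndLongitude {
            lat: lat.to_string(),
            lng: lng.to_string(),
        }),
    }
}

fn main() {
    // Mirrors the two documents used in the `bug_3007` test further down.
    assert_eq!(validate_geo(&Value::from(12.0), &Value::from(11.0)), Ok((12.0, 11.0)));
    assert!(validate_geo(&Value::from("unparseable"), &Value::from("unparseable")).is_err());
}
```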

## PR checklist
Please check if your PR fulfills the following requirements:
- [x] Does this PR fix an existing issue, or have you listed the changes applied in the PR description (and why they are needed)?
- [x] Have you read the contributing guidelines?
- [x] Have you made sure that the title is accurate and descriptive of the changes?

Thank you so much for contributing to Meilisearch!


Co-authored-by: Philipp Ahlner <philipp@ahlner.com>
Merged by bors[bot] on 2023-01-19 15:15:46 +00:00 (commit 3521a3a0b2).
4 changed files with 47 additions and 30 deletions

@@ -2292,4 +2292,38 @@ pub(crate) mod tests {
             assert!(all_ids.insert(id));
         }
     }
+
+    #[test]
+    fn bug_3007() {
+        // https://github.com/meilisearch/meilisearch/issues/3007
+        use crate::error::{GeoError, UserError};
+
+        let index = TempIndex::new();
+
+        // Given is an index with a geo field NOT contained in the sortable_fields of the settings
+        index
+            .update_settings(|settings| {
+                settings.set_primary_key("id".to_string());
+                settings.set_filterable_fields(HashSet::from(["_geo".to_string()]));
+            })
+            .unwrap();
+
+        // happy path
+        index.add_documents(documents!({ "id" : 5, "_geo": {"lat": 12.0, "lng": 11.0}})).unwrap();
+
+        db_snap!(index, geo_faceted_documents_ids);
+
+        // both are unparseable, we expect GeoError::BadLatitudeAndLongitude
+        let err1 = index
+            .add_documents(
+                documents!({ "id" : 6, "_geo": {"lat": "unparseable", "lng": "unparseable"}}),
+            )
+            .unwrap_err();
+        assert!(matches!(
+            err1,
+            Error::UserError(UserError::InvalidGeoField(GeoError::BadLatitudeAndLongitude { .. }))
+        ));
+
+        db_snap!(index, geo_faceted_documents_ids); // ensure that no more document was inserted
+    }
 }

@@ -0,0 +1,4 @@
+---
+source: milli/src/index.rs
+---
+[0, ]

@@ -98,7 +98,12 @@ pub fn enrich_documents_batch<R: Read + Seek>(
     // If the settings specifies that a _geo field must be used therefore we must check the
     // validity of it in all the documents of this batch and this is when we return `Some`.
     let geo_field_id = match documents_batch_index.id("_geo") {
-        Some(geo_field_id) if index.sortable_fields(rtxn)?.contains("_geo") => Some(geo_field_id),
+        Some(geo_field_id)
+            if index.sortable_fields(rtxn)?.contains("_geo")
+                || index.filterable_fields(rtxn)?.contains("_geo") =>
+        {
+            Some(geo_field_id)
+        }
         _otherwise => None,
     };
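
The guard above now also accepts a filterable `_geo` field, which matches the setup in the new `bug_3007` test (an index where `_geo` is filterable but not sortable). A minimal standalone sketch of that condition, using plain `HashSet`s instead of milli's `Index` settings lookups:

```rust
use std::collections::HashSet;

// Stand-in for the two settings lookups done through `Index` in the hunk above.
fn should_validate_geo(sortable: &HashSet<String>, filterable: &HashSet<String>) -> bool {
    // Before this change only the sortable set was consulted; filterable now counts too.
    sortable.contains("_geo") || filterable.contains("_geo")
}

fn main() {
    let sortable: HashSet<String> = HashSet::new();
    let filterable = HashSet::from(["_geo".to_string()]);
    // An index where `_geo` is only filterable now has its coordinates validated as well.
    assert!(should_validate_geo(&sortable, &filterable));
}
```
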
@@ -367,7 +372,9 @@ pub fn extract_finite_float_from_value(value: Value) -> StdResult<f64, Value> {
 pub fn validate_geo_from_json(id: &DocumentId, bytes: &[u8]) -> Result<StdResult<(), GeoError>> {
     use GeoError::*;
-    let debug_id = || Value::from(id.debug());
+    let debug_id = || {
+        serde_json::from_slice(id.value().as_bytes()).unwrap_or_else(|_| Value::from(id.debug()))
+    };
     match serde_json::from_slice(bytes).map_err(InternalError::SerdeJson)? {
         Value::Object(mut object) => match (object.remove("lat"), object.remove("lng")) {
             (Some(lat), Some(lng)) => {
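
The second hunk changes how the offending document id is rendered in the error message: the raw primary-key value is parsed as JSON first, and only if that fails does it fall back to the debug representation. A rough illustration of that parse-or-fall-back pattern, with a made-up `display_id` helper standing in for the `debug_id` closure (`raw` plays the role of `id.value()`, `fallback` the role of `id.debug()`):

```rust
use serde_json::Value;

// Made-up helper mirroring the `debug_id` closure in the hunk above.
fn display_id(raw: &str, fallback: &str) -> Value {
    serde_json::from_str(raw).unwrap_or_else(|_| Value::from(fallback))
}

fn main() {
    // A primary key that is valid JSON is shown as-is in the error message...
    assert_eq!(display_id("5", "DocumentId { .. }"), Value::from(5));
    // ...while anything else falls back to the debug rendering.
    assert_eq!(display_id("unparseable", "DocumentId { .. }"), Value::from("DocumentId { .. }"));
}
```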

@@ -965,34 +965,6 @@ mod tests {
         .unwrap();
     }

-    #[test]
-    fn index_all_flavour_of_geo() {
-        let mut index = TempIndex::new();
-        index.index_documents_config.update_method = IndexDocumentsMethod::ReplaceDocuments;
-
-        index
-            .update_settings(|settings| {
-                settings.set_filterable_fields(hashset!(S("_geo")));
-            })
-            .unwrap();
-
-        index
-            .add_documents(documents!([
-              { "id": 0, "_geo": { "lat": 31, "lng": [42] } },
-              { "id": 1, "_geo": { "lat": "31" }, "_geo.lng": 42 },
-              { "id": 2, "_geo": { "lng": "42" }, "_geo.lat": "31" },
-              { "id": 3, "_geo.lat": 31, "_geo.lng": "42" },
-            ]))
-            .unwrap();
-
-        let rtxn = index.read_txn().unwrap();
-        let mut search = crate::Search::new(&rtxn, &index);
-        search.filter(crate::Filter::from_str("_geoRadius(31, 42, 0.000001)").unwrap().unwrap());
-        let crate::SearchResult { documents_ids, .. } = search.execute().unwrap();
-        assert_eq!(documents_ids, vec![0, 1, 2, 3]);
-    }
-
     #[test]
     fn geo_error() {
         let mut index = TempIndex::new();