Merge #552
552: Fix escaped quotes in filter r=Kerollmops a=irevoire

Will fix https://github.com/meilisearch/meilisearch/issues/2380

The issue was that, in the evaluation of the filter, I was using the deref implementation instead of calling the `value` method of my token. To avoid the problem happening again, I removed the deref implementation; now you need to call either the `lexeme` or the `value` method and can no longer rely on a "default" implementation to get a string out of a token.

Co-authored-by: Tamo <tamo@meilisearch.com>
This commit is contained in: f1d848bb9a
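Before the diff itself, a minimal, self-contained sketch of the distinction the description refers to. The `Token` below is a simplified stand-in (the real `filter-parser` token wraps a nom-locate span), but it illustrates why dereferencing to the raw span compared the escaped lexeme instead of the unescaped value:

// Hypothetical, simplified Token; not the actual filter-parser type.
struct Token<'a> {
    /// The exact slice of the filter string, escape sequences included.
    lexeme: &'a str,
    /// The unescaped value, only allocated when the lexeme contained escapes.
    value: Option<String>,
}

impl<'a> Token<'a> {
    /// Raw text as written in the filter, e.g. `27\" to 30\"`.
    fn lexeme(&self) -> &str {
        self.lexeme
    }

    /// Unescaped text to compare against stored document values, e.g. `27" to 30"`.
    fn value(&self) -> &str {
        self.value.as_deref().unwrap_or(self.lexeme)
    }
}

fn main() {
    let token = Token { lexeme: r#"27\" to 30\""#, value: Some("27\" to 30\"".to_string()) };
    // Before the fix, evaluation effectively used the raw lexeme,
    // so a stored value like `27" to 30"` never matched.
    assert_ne!(token.lexeme(), "27\" to 30\"");
    assert_eq!(token.value(), "27\" to 30\"");
}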
@@ -1,6 +1,6 @@
 [package]
 name = "benchmarks"
-version = "0.29.2"
+version = "0.29.3"
 edition = "2018"
 publish = false
@@ -1,6 +1,6 @@
 [package]
 name = "cli"
-version = "0.29.2"
+version = "0.29.3"
 edition = "2018"
 description = "A CLI to interact with a milli index"
 publish = false
@@ -1,6 +1,6 @@
 [package]
 name = "filter-parser"
-version = "0.29.2"
+version = "0.29.3"
 edition = "2021"
 description = "The parser for the Meilisearch filter syntax"
 publish = false
@@ -40,7 +40,6 @@ mod error;
 mod value;

 use std::fmt::Debug;
-use std::ops::Deref;
 use std::str::FromStr;

 pub use condition::{parse_condition, parse_to, Condition};
@@ -70,14 +69,6 @@ pub struct Token<'a> {
     value: Option<String>,
 }

-impl<'a> Deref for Token<'a> {
-    type Target = &'a str;
-
-    fn deref(&self) -> &Self::Target {
-        &self.span
-    }
-}
-
 impl<'a> PartialEq for Token<'a> {
     fn eq(&self, other: &Self) -> bool {
         self.span.fragment() == other.span.fragment()
@@ -89,6 +80,10 @@ impl<'a> Token<'a> {
         Self { span, value }
     }

+    pub fn lexeme(&self) -> &str {
+        &self.span
+    }
+
     pub fn value(&self) -> &str {
         self.value.as_ref().map_or(&self.span, |value| value)
     }
@@ -1,6 +1,6 @@
 [package]
 name = "flatten-serde-json"
-version = "0.29.2"
+version = "0.29.3"
 edition = "2021"
 description = "Flatten serde-json objects like elastic search"
 readme = "README.md"
@@ -1,6 +1,6 @@
 [package]
 name = "helpers"
-version = "0.29.2"
+version = "0.29.3"
 authors = ["Clément Renault <clement@meilisearch.com>"]
 edition = "2018"
 description = "A small tool to do operations on the database"
@@ -1,7 +1,7 @@
 [package]
 name = "http-ui"
 description = "The HTTP user interface of the milli search engine"
-version = "0.29.2"
+version = "0.29.3"
 authors = ["Clément Renault <clement@meilisearch.com>"]
 edition = "2018"
 publish = false
@@ -1,6 +1,6 @@
 [package]
 name = "infos"
-version = "0.29.2"
+version = "0.29.3"
 authors = ["Clément Renault <clement@meilisearch.com>"]
 edition = "2018"
 publish = false
@@ -1,6 +1,6 @@
 [package]
 name = "json-depth-checker"
-version = "0.29.2"
+version = "0.29.3"
 edition = "2021"
 description = "A library that indicates if a JSON must be flattened"
 publish = false
@@ -1,6 +1,6 @@
 [package]
 name = "milli"
-version = "0.29.2"
+version = "0.29.3"
 authors = ["Kerollmops <clement@meilisearch.com>"]
 edition = "2018"
@@ -1,7 +1,6 @@
 use std::collections::HashSet;
 use std::fmt::{Debug, Display};
 use std::ops::Bound::{self, Excluded, Included};
-use std::ops::Deref;

 use either::Either;
 pub use filter_parser::{Condition, Error as FPError, FilterCondition, Span, Token};
@@ -283,8 +282,9 @@ impl<'a> Filter<'a> {
             Condition::LowerThanOrEqual(val) => (Included(f64::MIN), Included(val.parse()?)),
             Condition::Between { from, to } => (Included(from.parse()?), Included(to.parse()?)),
             Condition::Equal(val) => {
-                let (_original_value, string_docids) =
-                    strings_db.get(rtxn, &(field_id, &val.to_lowercase()))?.unwrap_or_default();
+                let (_original_value, string_docids) = strings_db
+                    .get(rtxn, &(field_id, &val.value().to_lowercase()))?
+                    .unwrap_or_default();
                 let number = val.parse::<f64>().ok();
                 let number_docids = match number {
                     Some(n) => {
@@ -362,7 +362,7 @@ impl<'a> Filter<'a> {
                         return Ok(RoaringBitmap::new());
                     }
                 } else {
-                    match *fid.deref() {
+                    match fid.lexeme() {
                         attribute @ "_geo" => {
                             return Err(fid.as_external_error(FilterError::BadGeo(attribute)))?;
                         }
@@ -461,7 +461,7 @@ mod tests {
    use maplit::hashset;

    use super::*;
-   use crate::update::{IndexerConfig, Settings};
+   use crate::update::{self, IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
    use crate::Index;

    #[test]
@@ -598,6 +598,75 @@ mod tests {
        ));
    }

+   #[test]
+   fn escaped_quote_in_filter_value_2380() {
+       let path = tempfile::tempdir().unwrap();
+       let mut options = EnvOpenOptions::new();
+       options.map_size(10 * 1024 * 1024); // 10 MB
+       let index = Index::new(options, &path).unwrap();
+
+       let mut wtxn = index.write_txn().unwrap();
+       let content = documents!([
+           {
+               "id": "test_1",
+               "monitor_diagonal": "27' to 30'"
+           },
+           {
+               "id": "test_2",
+               "monitor_diagonal": "27\" to 30\""
+           },
+           {
+               "id": "test_3",
+               "monitor_diagonal": "27\" to 30'"
+           },
+       ]);
+
+       let config = IndexerConfig::default();
+       let indexing_config = IndexDocumentsConfig::default();
+       let mut builder =
+           IndexDocuments::new(&mut wtxn, &index, &config, indexing_config.clone(), |_| ())
+               .unwrap();
+       builder.add_documents(content).unwrap();
+       builder.execute().unwrap();
+
+       wtxn.commit().unwrap();
+
+       let mut wtxn = index.write_txn().unwrap();
+       let mut builder = update::Settings::new(&mut wtxn, &index, &config);
+
+       builder.set_filterable_fields(hashset!(S("monitor_diagonal")));
+       builder.execute(|_| ()).unwrap();
+       wtxn.commit().unwrap();
+
+       let rtxn = index.read_txn().unwrap();
+
+       let mut search = crate::Search::new(&rtxn, &index);
+       // this filter is copy-pasted from #2380 with the exact same escape sequence
+       search.filter(
+           crate::Filter::from_str("monitor_diagonal = '27\" to 30\\''").unwrap().unwrap(),
+       );
+       let crate::SearchResult { documents_ids, .. } = search.execute().unwrap();
+       assert_eq!(documents_ids, vec![2]);
+
+       search.filter(
+           crate::Filter::from_str(r#"monitor_diagonal = "27' to 30'" "#).unwrap().unwrap(),
+       );
+       let crate::SearchResult { documents_ids, .. } = search.execute().unwrap();
+       assert_eq!(documents_ids, vec![0]);
+
+       search.filter(
+           crate::Filter::from_str(r#"monitor_diagonal = "27\" to 30\"" "#).unwrap().unwrap(),
+       );
+       let crate::SearchResult { documents_ids, .. } = search.execute().unwrap();
+       assert_eq!(documents_ids, vec![1]);
+
+       search.filter(
+           crate::Filter::from_str(r#"monitor_diagonal = "27\" to 30'" "#).unwrap().unwrap(),
+       );
+       let crate::SearchResult { documents_ids, .. } = search.execute().unwrap();
+       assert_eq!(documents_ids, vec![2]);
+   }
+
    #[test]
    fn geo_radius_error() {
        let path = tempfile::tempdir().unwrap();