mirror of
https://github.com/meilisearch/MeiliSearch
synced 2024-11-26 06:44:27 +01:00
Merge pull request #592 from MarinPostma/query-filters
Implements query filters
Commit: 210bc68ced
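The pull request adds a small filter expression language to the search route (see grammar.pest, the parser tests, and the SearchBuilder change below). Conditions use =, !=, <, >, <= and >=, and can be combined with AND, OR, NOT and parentheses; keys and values may be single- or double-quoted. A few illustrative expressions — the field names are invented for the example, only the syntax comes from this diff:

    director = "Anthony Russo"
    genre = horror AND NOT rating < 3
    "release date" >= 2012 OR (genre = action AND seen = false)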
@@ -1,5 +1,6 @@
 ## 0.9.1 (unreleased)
 
+- Refined filtering (#592)
 - Add the number of hits in search result (#541)
 - Add support for aligned crop in search result (#543)
 - Sanitize the content displayed in the web interface (#539)
Cargo.lock (generated, 1904 changed lines): diff suppressed because it is too large.
@@ -27,12 +27,15 @@ meilisearch-tokenizer = { path = "../meilisearch-tokenizer", version = "0.9.0" }
 meilisearch-types = { path = "../meilisearch-types", version = "0.9.0" }
 once_cell = "1.3.1"
 ordered-float = { version = "1.0.2", features = ["serde"] }
+pest = { git = "https://github.com/MarinPostma/pest.git", tag = "meilisearch-patch1" }
+pest_derive = "2.0"
 regex = "1.3.6"
 sdset = "0.4.0"
 serde = { version = "1.0.105", features = ["derive"] }
 serde_json = "1.0.50"
 siphasher = "0.3.2"
 slice-group-by = "0.2.6"
+unicase = "2.6.0"
 zerocopy = "0.3.0"
 
 [dev-dependencies]
@@ -1,10 +1,13 @@
 use crate::serde::{DeserializerError, SerializerError};
 use serde_json::Error as SerdeJsonError;
+use pest::error::Error as PestError;
+use crate::filters::Rule;
 use std::{error, fmt, io};
 
-pub use heed::Error as HeedError;
-pub use fst::Error as FstError;
 pub use bincode::Error as BincodeError;
+pub use fst::Error as FstError;
+pub use heed::Error as HeedError;
+pub use pest::error as pest_error;
 
 pub type MResult<T> = Result<T, Error>;
 
@@ -25,6 +28,7 @@ pub enum Error {
     Serializer(SerializerError),
     Deserializer(DeserializerError),
     UnsupportedOperation(UnsupportedOperation),
+    FilterParseError(PestError<Rule>)
 }
 
 impl From<io::Error> for Error {
@@ -33,6 +37,28 @@ impl From<io::Error> for Error {
     }
 }
 
+impl From<PestError<Rule>> for Error {
+    fn from(error: PestError<Rule>) -> Error {
+        Error::FilterParseError(error.renamed_rules(|r| {
+            let s = match r {
+                Rule::or => "OR",
+                Rule::and => "AND",
+                Rule::not => "NOT",
+                Rule::string => "string",
+                Rule::word => "word",
+                Rule::greater => "field > value",
+                Rule::less => "field < value",
+                Rule::eq => "field = value",
+                Rule::leq => "field <= value",
+                Rule::geq => "field >= value",
+                Rule::key => "key",
+                _ => "other",
+            };
+            s.to_string()
+        }))
+    }
+}
+
 impl From<meilisearch_schema::Error> for Error {
     fn from(error: meilisearch_schema::Error) -> Error {
         Error::Schema(error)
@@ -100,6 +126,7 @@ impl fmt::Display for Error {
             Serializer(e) => write!(f, "serializer error; {}", e),
             Deserializer(e) => write!(f, "deserializer error; {}", e),
             UnsupportedOperation(op) => write!(f, "unsupported operation; {}", op),
+            FilterParseError(e) => write!(f, "error parsing filter; {}", e),
         }
     }
 }
meilisearch-core/src/filters/condition.rs (new file, 267 lines)
@@ -0,0 +1,267 @@
use std::str::FromStr;
use std::cmp::Ordering;

use crate::error::Error;
use crate::{store::Index, DocumentId, MainT};
use heed::RoTxn;
use meilisearch_schema::{FieldId, Schema};
use pest::error::{Error as PestError, ErrorVariant};
use pest::iterators::Pair;
use serde_json::{Value, Number};
use super::parser::Rule;

#[derive(Debug)]
enum ConditionType {
    Greater,
    Less,
    Equal,
    LessEqual,
    GreaterEqual,
    NotEqual,
}

/// We need to infer type when the filter is constructed
/// and match every possible types it can be parsed into.
#[derive(Debug)]
struct ConditionValue<'a> {
    string: &'a str,
    boolean: Option<bool>,
    number: Option<Number>
}

impl<'a> ConditionValue<'a> {
    pub fn new(value: &Pair<'a, Rule>) -> Self {
        let value = match value.as_rule() {
            Rule::string | Rule::word => {
                let string = value.as_str();
                let boolean = match value.as_str() {
                    "true" => Some(true),
                    "false" => Some(false),
                    _ => None,
                };
                let number = Number::from_str(value.as_str()).ok();
                ConditionValue { string, boolean, number }
            },
            _ => unreachable!(),
        };
        value
    }

    pub fn as_str(&self) -> &str {
        self.string.as_ref()
    }

    pub fn as_number(&self) -> Option<&Number> {
        self.number.as_ref()
    }

    pub fn as_bool(&self) -> Option<bool> {
        self.boolean
    }
}

#[derive(Debug)]
pub struct Condition<'a> {
    field: FieldId,
    condition: ConditionType,
    value: ConditionValue<'a>
}

fn get_field_value<'a>(schema: &Schema, pair: Pair<'a, Rule>) -> Result<(FieldId, ConditionValue<'a>), Error> {
    let mut items = pair.into_inner();
    // lexing ensures that we at least have a key
    let key = items.next().unwrap();
    let field = schema
        .id(key.as_str())
        .ok_or::<PestError<Rule>>(PestError::new_from_span(
            ErrorVariant::CustomError {
                message: format!(
                    "attribute `{}` not found, available attributes are: {}",
                    key.as_str(),
                    schema.names().collect::<Vec<_>>().join(", ")
                ),
            },
            key.as_span()))?;
    let value = ConditionValue::new(&items.next().unwrap());
    Ok((field, value))
}

// undefined behavior with big numbers
fn compare_numbers(lhs: &Number, rhs: &Number) -> Option<Ordering> {
    match (lhs.as_i64(), lhs.as_u64(), lhs.as_f64(),
           rhs.as_i64(), rhs.as_u64(), rhs.as_f64()) {
        // i64 u64 f64 i64 u64 f64
        (Some(lhs), _, _, Some(rhs), _, _) => lhs.partial_cmp(&rhs),
        (_, Some(lhs), _, _, Some(rhs), _) => lhs.partial_cmp(&rhs),
        (_, _, Some(lhs), _, _, Some(rhs)) => lhs.partial_cmp(&rhs),
        (_, _, _, _, _, _) => None,
    }
}

impl<'a> Condition<'a> {
    pub fn less(
        item: Pair<'a, Rule>,
        schema: &'a Schema,
    ) -> Result<Self, Error> {
        let (field, value) = get_field_value(schema, item)?;
        let condition = ConditionType::Less;
        Ok(Self { field, condition, value })
    }

    pub fn greater(
        item: Pair<'a, Rule>,
        schema: &'a Schema,
    ) -> Result<Self, Error> {
        let (field, value) = get_field_value(schema, item)?;
        let condition = ConditionType::Greater;
        Ok(Self { field, condition, value })
    }

    pub fn neq(
        item: Pair<'a, Rule>,
        schema: &'a Schema,
    ) -> Result<Self, Error> {
        let (field, value) = get_field_value(schema, item)?;
        let condition = ConditionType::NotEqual;
        Ok(Self { field, condition, value })
    }

    pub fn geq(
        item: Pair<'a, Rule>,
        schema: &'a Schema,
    ) -> Result<Self, Error> {
        let (field, value) = get_field_value(schema, item)?;
        let condition = ConditionType::GreaterEqual;
        Ok(Self { field, condition, value })
    }

    pub fn leq(
        item: Pair<'a, Rule>,
        schema: &'a Schema,
    ) -> Result<Self, Error> {
        let (field, value) = get_field_value(schema, item)?;
        let condition = ConditionType::LessEqual;
        Ok(Self { field, condition, value })
    }

    pub fn eq(
        item: Pair<'a, Rule>,
        schema: &'a Schema,
    ) -> Result<Self, Error> {
        let (field, value) = get_field_value(schema, item)?;
        let condition = ConditionType::Equal;
        Ok(Self { field, condition, value })
    }

    pub fn test(
        &self,
        reader: &RoTxn<MainT>,
        index: &Index,
        document_id: DocumentId,
    ) -> Result<bool, Error> {
        match index.document_attribute::<Value>(reader, document_id, self.field)? {
            Some(Value::String(s)) => {
                let value = self.value.as_str();
                match self.condition {
                    ConditionType::Equal => Ok(unicase::eq(value, &s)),
                    ConditionType::NotEqual => Ok(!unicase::eq(value, &s)),
                    _ => Ok(false)
                }
            },
            Some(Value::Number(n)) => {
                if let Some(value) = self.value.as_number() {
                    if let Some(ord) = compare_numbers(&n, value) {
                        let res = match self.condition {
                            ConditionType::Equal => ord == Ordering::Equal,
                            ConditionType::NotEqual => ord != Ordering::Equal,
                            ConditionType::GreaterEqual => ord != Ordering::Less,
                            ConditionType::LessEqual => ord != Ordering::Greater,
                            ConditionType::Greater => ord == Ordering::Greater,
                            ConditionType::Less => ord == Ordering::Less,
                        };
                        return Ok(res)
                    }
                }
                Ok(false)
            },
            Some(Value::Bool(b)) => {
                if let Some(value) = self.value.as_bool() {
                    return match self.condition {
                        ConditionType::Equal => Ok(b == value),
                        ConditionType::NotEqual => Ok(b != value),
                        _ => Ok(false)
                    }
                }
                Ok(false)
            },
            _ => Ok(false),
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use serde_json::Number;
    use std::cmp::Ordering;

    #[test]
    fn test_number_comp() {
        // test both u64
        let n1 = Number::from(1u64);
        let n2 = Number::from(2u64);
        assert_eq!(Some(Ordering::Less), compare_numbers(&n1, &n2));
        assert_eq!(Some(Ordering::Greater), compare_numbers(&n2, &n1));
        let n1 = Number::from(1u64);
        let n2 = Number::from(1u64);
        assert_eq!(Some(Ordering::Equal), compare_numbers(&n1, &n2));

        // test both i64
        let n1 = Number::from(1i64);
        let n2 = Number::from(2i64);
        assert_eq!(Some(Ordering::Less), compare_numbers(&n1, &n2));
        assert_eq!(Some(Ordering::Greater), compare_numbers(&n2, &n1));
        let n1 = Number::from(1i64);
        let n2 = Number::from(1i64);
        assert_eq!(Some(Ordering::Equal), compare_numbers(&n1, &n2));

        // test both f64
        let n1 = Number::from_f64(1f64).unwrap();
        let n2 = Number::from_f64(2f64).unwrap();
        assert_eq!(Some(Ordering::Less), compare_numbers(&n1, &n2));
        assert_eq!(Some(Ordering::Greater), compare_numbers(&n2, &n1));
        let n1 = Number::from_f64(1f64).unwrap();
        let n2 = Number::from_f64(1f64).unwrap();
        assert_eq!(Some(Ordering::Equal), compare_numbers(&n1, &n2));

        // test one u64 and one f64
        let n1 = Number::from_f64(1f64).unwrap();
        let n2 = Number::from(2u64);
        assert_eq!(Some(Ordering::Less), compare_numbers(&n1, &n2));
        assert_eq!(Some(Ordering::Greater), compare_numbers(&n2, &n1));

        // equality
        let n1 = Number::from_f64(1f64).unwrap();
        let n2 = Number::from(1u64);
        assert_eq!(Some(Ordering::Equal), compare_numbers(&n1, &n2));
        assert_eq!(Some(Ordering::Equal), compare_numbers(&n2, &n1));

        // float is neg
        let n1 = Number::from_f64(-1f64).unwrap();
        let n2 = Number::from(1u64);
        assert_eq!(Some(Ordering::Less), compare_numbers(&n1, &n2));
        assert_eq!(Some(Ordering::Greater), compare_numbers(&n2, &n1));

        // float is too big
        let n1 = Number::from_f64(std::f64::MAX).unwrap();
        let n2 = Number::from(1u64);
        assert_eq!(Some(Ordering::Greater), compare_numbers(&n1, &n2));
        assert_eq!(Some(Ordering::Less), compare_numbers(&n2, &n1));

        // misc
        let n1 = Number::from_f64(std::f64::MAX).unwrap();
        let n2 = Number::from(std::u64::MAX);
        assert_eq!(Some(Ordering::Greater), compare_numbers(&n1, &n2));
        assert_eq!(Some(Ordering::Less), compare_numbers(&n2, &n1));
    }
}
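The compare_numbers helper above compares two serde_json Numbers by trying their i64, u64 and f64 views in that order; the "undefined behavior with big numbers" comment presumably refers to the precision loss of the f64 fallback. A minimal standalone sketch of the same strategy, depending only on serde_json (the function is re-declared here so the snippet compiles on its own):

    use std::cmp::Ordering;
    use serde_json::Number;

    // Same strategy as condition.rs: compare as i64, else as u64, else fall back to f64.
    fn compare_numbers(lhs: &Number, rhs: &Number) -> Option<Ordering> {
        match (lhs.as_i64(), lhs.as_u64(), lhs.as_f64(),
               rhs.as_i64(), rhs.as_u64(), rhs.as_f64()) {
            (Some(l), _, _, Some(r), _, _) => l.partial_cmp(&r),
            (_, Some(l), _, _, Some(r), _) => l.partial_cmp(&r),
            (_, _, Some(l), _, _, Some(r)) => l.partial_cmp(&r),
            _ => None,
        }
    }

    fn main() {
        // Exact comparison: both sides fit in u64, so an integer arm is used.
        assert_eq!(Some(Ordering::Less), compare_numbers(&Number::from(1u64), &Number::from(2u64)));

        // Lossy comparison: a float against an integer above 2^53 goes through the
        // f64 arm, so two values that differ by one can compare as Equal.
        let big = Number::from((1u64 << 53) + 1);               // 9007199254740993
        let f = Number::from_f64(9007199254740992.0).unwrap();  // 2^53
        assert_eq!(Some(Ordering::Equal), compare_numbers(&f, &big));
    }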
meilisearch-core/src/filters/mod.rs (new file, 125 lines)
@@ -0,0 +1,125 @@
mod parser;
mod condition;

pub(crate) use parser::Rule;

use std::ops::Not;

use condition::Condition;
use crate::error::Error;
use crate::{DocumentId, MainT, store::Index};
use heed::RoTxn;
use meilisearch_schema::Schema;
use parser::{PREC_CLIMBER, FilterParser};
use pest::iterators::{Pair, Pairs};
use pest::Parser;

type FilterResult<'a> = Result<Filter<'a>, Error>;

#[derive(Debug)]
pub enum Filter<'a> {
    Condition(Condition<'a>),
    Or(Box<Self>, Box<Self>),
    And(Box<Self>, Box<Self>),
    Not(Box<Self>),
}

impl<'a> Filter<'a> {
    pub fn parse(expr: &'a str, schema: &'a Schema) -> FilterResult<'a> {
        let mut lexed = FilterParser::parse(Rule::prgm, expr.as_ref())?;
        Self::build(lexed.next().unwrap().into_inner(), schema)
    }

    pub fn test(
        &self,
        reader: &RoTxn<MainT>,
        index: &Index,
        document_id: DocumentId,
    ) -> Result<bool, Error> {
        use Filter::*;
        match self {
            Condition(c) => c.test(reader, index, document_id),
            Or(lhs, rhs) => Ok(
                lhs.test(reader, index, document_id)? || rhs.test(reader, index, document_id)?
            ),
            And(lhs, rhs) => Ok(
                lhs.test(reader, index, document_id)? && rhs.test(reader, index, document_id)?
            ),
            Not(op) => op.test(reader, index, document_id).map(bool::not),
        }
    }

    fn build(expression: Pairs<'a, Rule>, schema: &'a Schema) -> FilterResult<'a> {
        PREC_CLIMBER.climb(
            expression,
            |pair: Pair<Rule>| match pair.as_rule() {
                Rule::eq => Ok(Filter::Condition(Condition::eq(pair, schema)?)),
                Rule::greater => Ok(Filter::Condition(Condition::greater(pair, schema)?)),
                Rule::less => Ok(Filter::Condition(Condition::less(pair, schema)?)),
                Rule::neq => Ok(Filter::Condition(Condition::neq(pair, schema)?)),
                Rule::geq => Ok(Filter::Condition(Condition::geq(pair, schema)?)),
                Rule::leq => Ok(Filter::Condition(Condition::leq(pair, schema)?)),
                Rule::prgm => Self::build(pair.into_inner(), schema),
                Rule::not => Ok(Filter::Not(Box::new(Self::build(
                    pair.into_inner(),
                    schema,
                )?))),
                _ => unreachable!(),
            },
            |lhs: FilterResult, op: Pair<Rule>, rhs: FilterResult| match op.as_rule() {
                Rule::or => Ok(Filter::Or(Box::new(lhs?), Box::new(rhs?))),
                Rule::and => Ok(Filter::And(Box::new(lhs?), Box::new(rhs?))),
                _ => unreachable!(),
            },
        )
    }
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn invalid_syntax() {
        assert!(FilterParser::parse(Rule::prgm, "field : id").is_err());
        assert!(FilterParser::parse(Rule::prgm, "field=hello hello").is_err());
        assert!(FilterParser::parse(Rule::prgm, "field=hello OR OR").is_err());
        assert!(FilterParser::parse(Rule::prgm, "OR field:hello").is_err());
        assert!(FilterParser::parse(Rule::prgm, r#"field="hello world"#).is_err());
        assert!(FilterParser::parse(Rule::prgm, r#"field='hello world"#).is_err());
        assert!(FilterParser::parse(Rule::prgm, "NOT field=").is_err());
        assert!(FilterParser::parse(Rule::prgm, "N").is_err());
        assert!(FilterParser::parse(Rule::prgm, "(field=1").is_err());
        assert!(FilterParser::parse(Rule::prgm, "(field=1))").is_err());
        assert!(FilterParser::parse(Rule::prgm, "field=1ORfield=2").is_err());
        assert!(FilterParser::parse(Rule::prgm, "field=1 ( OR field=2)").is_err());
        assert!(FilterParser::parse(Rule::prgm, "hello world=1").is_err());
        assert!(FilterParser::parse(Rule::prgm, "").is_err());
        assert!(FilterParser::parse(Rule::prgm, r#"((((((hello=world)))))"#).is_err());
    }

    #[test]
    fn valid_syntax() {
        assert!(FilterParser::parse(Rule::prgm, "field = id").is_ok());
        assert!(FilterParser::parse(Rule::prgm, "field=id").is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field >= 10"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field <= 10"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field="hello world""#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field='hello world'"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field > 10"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field < 10"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field < 10 AND NOT field=5"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field=true OR NOT field=5"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"NOT field=true OR NOT field=5"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field='hello world' OR ( NOT field=true OR NOT field=5 )"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field='hello \'worl\'d' OR ( NOT field=true OR NOT field=5 )"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"field="hello \"worl\"d" OR ( NOT field=true OR NOT field=5 )"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"((((((hello=world))))))"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#""foo bar" > 10"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#""foo bar" = 10"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"'foo bar' = 10"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"'foo bar' <= 10"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"'foo bar' != 10"#).is_ok());
        assert!(FilterParser::parse(Rule::prgm, r#"bar != 10"#).is_ok());
    }
}
meilisearch-core/src/filters/parser/grammar.pest (new file, 28 lines)
@@ -0,0 +1,28 @@
key = _{quoted | word}
value = _{quoted | word}
quoted = _{ (PUSH("'") | PUSH("\"")) ~ string ~ POP }
string = {char*}
word = ${(LETTER | NUMBER | "_" | "-")+}

char = _{ !(PEEK | "\\") ~ ANY
    | "\\" ~ (PEEK | "\\" | "/" | "b" | "f" | "n" | "r" | "t")
    | "\\" ~ ("u" ~ ASCII_HEX_DIGIT{4})}

condition = _{eq | greater | less | geq | leq | neq}
geq = {key ~ ">=" ~ value}
leq = {key ~ "<=" ~ value}
neq = {key ~ "!=" ~ value}
eq = {key ~ "=" ~ value}
greater = {key ~ ">" ~ value}
less = {key ~ "<" ~ value}

prgm = {SOI ~ expr ~ EOI}
expr = _{ ( term ~ (operation ~ term)* ) }
term = _{ ("(" ~ expr ~ ")") | condition | not }
operation = _{ and | or }
and = {"AND"}
or = {"OR"}

not = {"NOT" ~ term}

WHITESPACE = _{ " " }
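Quoted keys and values reuse the opening quote via PUSH/POP, and the char rule accepts backslash escapes for the active quote, for backslashes, and for the usual \n, \t, \uXXXX forms, which is what the escaped-quote cases in valid_syntax above exercise. Two illustrative expressions under that grammar (field names invented for the example):

    title = "a \"quoted\" word"
    comment = 'it\'s fine'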
meilisearch-core/src/filters/parser/mod.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
use once_cell::sync::Lazy;
use pest::prec_climber::{Operator, Assoc, PrecClimber};

pub static PREC_CLIMBER: Lazy<PrecClimber<Rule>> = Lazy::new(|| {
    use Assoc::*;
    use Rule::*;
    pest::prec_climber::PrecClimber::new(vec![Operator::new(or, Left), Operator::new(and, Left)])
});

#[derive(Parser)]
#[grammar = "filters/parser/grammar.pest"]
pub struct FilterParser;
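A note on evaluation order, inferred from pest's PrecClimber semantics rather than stated anywhere in the diff: operators listed later in the vector bind tighter, so with or before and, AND should take precedence over OR. Under that assumption an unparenthesized expression such as (field names invented):

    genre = horror OR genre = thriller AND rating >= 3

should group as

    genre = horror OR (genre = thriller AND rating >= 3)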
@@ -1,12 +1,15 @@
 #[cfg(test)]
 #[macro_use]
 extern crate assert_matches;
+#[macro_use]
+extern crate pest_derive;
 
 mod automaton;
 mod bucket_sort;
 mod database;
 mod distinct_map;
 mod error;
+mod filters;
 mod levenshtein;
 mod number;
 mod query_builder;
@@ -23,7 +26,8 @@ pub mod serde;
 pub mod store;
 
 pub use self::database::{BoxUpdateFn, Database, MainT, UpdateT};
-pub use self::error::{Error, HeedError, FstError, MResult};
+pub use self::error::{Error, HeedError, FstError, MResult, pest_error};
+pub use self::filters::Filter;
 pub use self::number::{Number, ParseNumberError};
 pub use self::ranked_map::RankedMap;
 pub use self::raw_document::RawDocument;
@@ -19,6 +19,7 @@ pub enum ResponseError {
     IndexNotFound(String),
     DocumentNotFound(String),
     MissingHeader(String),
+    FilterParsing(String),
     BadParameter(String, String),
     OpenIndex(String),
     CreateIndex(String),
@@ -73,11 +74,15 @@ impl IntoResponse for ResponseError {
         match self {
             ResponseError::Internal(err) => {
                 error!("internal server error: {}", err);
-                error(
-                    String::from("Internal server error"),
+                error("Internal server error".to_string(),
                     StatusCode::INTERNAL_SERVER_ERROR,
                 )
             }
+            ResponseError::FilterParsing(err) => {
+                warn!("error paring filter: {}", err);
+                error(format!("parsing error: {}", err),
+                    StatusCode::BAD_REQUEST)
+            }
             ResponseError::BadRequest(err) => {
                 warn!("bad request: {}", err);
                 error(err, StatusCode::BAD_REQUEST)
@@ -159,7 +164,10 @@ impl From<FstError> for ResponseError {
 
 impl From<SearchError> for ResponseError {
     fn from(err: SearchError) -> ResponseError {
-        ResponseError::internal(err)
+        match err {
+            SearchError::FilterParsing(s) => ResponseError::FilterParsing(s),
+            _ => ResponseError::internal(err),
+        }
     }
 }
 
@@ -8,6 +8,7 @@ use std::time::{Duration, Instant};
 
 use indexmap::IndexMap;
 use log::error;
+use meilisearch_core::Filter;
 use meilisearch_core::criterion::*;
 use meilisearch_core::settings::RankingRule;
 use meilisearch_core::{Highlight, Index, MainT, RankedMap};
@@ -23,6 +24,7 @@ pub enum Error {
     RetrieveDocument(u64, String),
     DocumentNotFound(u64),
     CropFieldWrongType(String),
+    FilterParsing(String),
     AttributeNotFoundOnDocument(String),
     AttributeNotFoundOnSchema(String),
     MissingFilterValue,
@@ -56,13 +58,26 @@ impl fmt::Display for Error {
                 f.write_str("a filter is specifying an unknown schema attribute")
             }
             Internal(err) => write!(f, "internal error; {}", err),
+            FilterParsing(err) => write!(f, "filter parsing error: {}", err),
         }
     }
 }
 
 impl From<meilisearch_core::Error> for Error {
     fn from(error: meilisearch_core::Error) -> Self {
-        Error::Internal(error.to_string())
+        use meilisearch_core::pest_error::LineColLocation::*;
+        match error {
+            meilisearch_core::Error::FilterParseError(e) => {
+                let (line, column) = match e.line_col {
+                    Span((line, _), (column, _)) => (line, column),
+                    Pos((line, column)) => (line, column),
+                };
+                let message = format!("parsing error on line {} at column {}: {}", line, column, e.variant.message());
+
+                Error::FilterParsing(message)
+            },
+            _ => Error::Internal(error.to_string()),
+        }
     }
 }
 
|
|||||||
None => self.index.query_builder(),
|
None => self.index.query_builder(),
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(filters) = &self.filters {
|
if let Some(filter_expression) = &self.filters {
|
||||||
let mut split = filters.split(':');
|
let filter = Filter::parse(filter_expression, &schema)?;
|
||||||
match (split.next(), split.next()) {
|
query_builder.with_filter(move |id| {
|
||||||
(Some(_), None) | (Some(_), Some("")) => return Err(Error::MissingFilterValue),
|
let index = &self.index;
|
||||||
(Some(attr), Some(value)) => {
|
let reader = &reader;
|
||||||
let ref_reader = reader;
|
let filter = &filter;
|
||||||
let ref_index = &self.index;
|
match filter.test(reader, index, id) {
|
||||||
let value = value.trim().to_lowercase();
|
Ok(res) => res,
|
||||||
|
Err(e) => {
|
||||||
let attr = match schema.id(attr) {
|
log::warn!("unexpected error during filtering: {}", e);
|
||||||
Some(attr) => attr,
|
false
|
||||||
None => return Err(Error::UnknownFilteredAttribute),
|
}
|
||||||
};
|
|
||||||
|
|
||||||
query_builder.with_filter(move |id| {
|
|
||||||
let attr = attr;
|
|
||||||
let index = ref_index;
|
|
||||||
let reader = ref_reader;
|
|
||||||
|
|
||||||
match index.document_attribute::<Value>(reader, id, attr) {
|
|
||||||
Ok(Some(Value::String(s))) => s.to_lowercase() == value,
|
|
||||||
Ok(Some(Value::Bool(b))) => {
|
|
||||||
(value == "true" && b) || (value == "false" && !b)
|
|
||||||
}
|
|
||||||
Ok(Some(Value::Array(a))) => {
|
|
||||||
a.into_iter().any(|s| s.as_str() == Some(&value))
|
|
||||||
}
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
(_, _) => (),
|
});
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
query_builder.with_fetch_timeout(self.timeout);
|
query_builder.with_fetch_timeout(self.timeout);
|
||||||
|
@@ -438,10 +438,10 @@ fn basic_search() {
 
     // 1 - Simple search with filter
     // q: Captain
-    // limit: 1
+    // limit: 3
     // filters: director:Anthony%20Russo
 
-    let query = "q=captain&limit=3&filters=director:Anthony%20Russo";
+    let query = "q=captain&filters=director%20%3D%20%22Anthony%20Russo%22&limit=3";
 
     let expect = json!([
         {
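For reference, the new filters parameter in that query is just the URL-encoded form of the expression syntax introduced by this PR (the comment line above the query still shows the old colon-separated form); decoded it reads:

    filters: director = "Anthony Russo"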
@@ -79,6 +79,10 @@ impl Schema {
         self.fields_map.name(id)
     }
 
+    pub fn names(&self) -> impl Iterator<Item = &str> {
+        self.fields_map.iter().map(|(k, _)| k.as_ref())
+    }
+
     pub fn contains(&self, name: &str) -> bool {
         self.fields_map.id(name).is_some()
     }