add error codes and tests

Tamo, 2023-05-02 20:03:45 +02:00
commit ed3dfbe729 (parent 441641397b)
No known key found for this signature in database (GPG Key ID: 20CD8020AFA88D69)
5 changed files with 195 additions and 5 deletions
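
Not part of the diff, just orientation: the tests in this commit drive POST /indexes/{index_uid}/documents/fetch (the route used by the new get_document_by_filter test helper below), and each body field maps to one of the new error codes when it fails validation. A minimal sketch of a payload that passes all four checks, built with the same serde_json::json! macro the tests use; the helper name example_fetch_payload is made up for this sketch, and the field set is taken from the BrowseQuery struct changed below.

    // Sketch only: wrong types or a bad filter expression trigger
    // invalid_document_get_offset / _limit / _fields / _filter respectively.
    use serde_json::{json, Value};

    fn example_fetch_payload() -> Value {
        json!({
            "offset": 0,              // positive integer
            "limit": 20,              // positive integer
            "fields": "color",        // a string, as accepted by the passing test below
            "filter": "color = blue"  // filter over a filterable attribute
        })
    }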


@@ -219,6 +219,10 @@ InvalidDocumentId , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentLimit , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentOffset , InvalidRequest , BAD_REQUEST ;
 InvalidDocumentDeleteFilter , InvalidRequest , BAD_REQUEST ;
+InvalidDocumentGetOffset , InvalidRequest , BAD_REQUEST ;
+InvalidDocumentGetLimit , InvalidRequest , BAD_REQUEST ;
+InvalidDocumentGetFields , InvalidRequest , BAD_REQUEST ;
+InvalidDocumentGetFilter , InvalidRequest , BAD_REQUEST ;
 InvalidIndexLimit , InvalidRequest , BAD_REQUEST ;
 InvalidIndexOffset , InvalidRequest , BAD_REQUEST ;
 InvalidIndexPrimaryKey , InvalidRequest , BAD_REQUEST ;
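
For context (not in the diff): each variant above reaches API clients as a snake_case code plus a matching docs link, and the snapshot tests later in this commit pin the exact envelopes. A sketch of that shape for one of the new codes, expressed as a serde_json::json! value; the function name example_error_envelope is made up for the sketch and the message string is copied from the filter test below.

    use serde_json::{json, Value};

    // Shape of the 400 response body asserted by the invalid_document_get_filter test.
    fn example_error_envelope() -> Value {
        json!({
            "message": "Invalid syntax for the filter parameter: `expected String, Array, found: true`.",
            "code": "invalid_document_get_filter",
            "type": "invalid_request",
            "link": "https://docs.meilisearch.com/errors#invalid_document_get_filter"
        })
    }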


@@ -147,13 +147,13 @@ pub struct BrowseQueryGet {
 #[derive(Debug, Deserr)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct BrowseQuery {
-    #[deserr(default, error = DeserrJsonError<InvalidSearchOffset>)]
+    #[deserr(default, error = DeserrJsonError<InvalidDocumentGetOffset>)]
     offset: usize,
-    #[deserr(default=PAGINATION_DEFAULT_LIMIT, error = DeserrJsonError<InvalidSearchLimit>)]
+    #[deserr(default = PAGINATION_DEFAULT_LIMIT, error = DeserrJsonError<InvalidDocumentGetLimit>)]
     limit: usize,
-    #[deserr(default, error = DeserrJsonError<InvalidDocumentFields>)]
+    #[deserr(default, error = DeserrJsonError<InvalidDocumentGetFields>)]
     fields: OptionStarOrList<String>,
-    #[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
+    #[deserr(default, error = DeserrJsonError<InvalidDocumentGetFilter>)]
     filter: Option<Value>,
 }
@@ -529,7 +529,13 @@ fn retrieve_documents<S: AsRef<str>>(
 ) -> Result<(u64, Vec<Document>), ResponseError> {
     let rtxn = index.read_txn()?;
     let filter = &filter;
-    let filter = if let Some(filter) = filter { parse_filter(filter)? } else { None };
+    let filter = if let Some(filter) = filter {
+        parse_filter(filter).map_err(|err| {
+            ResponseError::from_msg(err.to_string(), Code::InvalidDocumentGetFilter)
+        })?
+    } else {
+        None
+    };
     let candidates = if let Some(filter) = filter {
         filter.evaluate(&rtxn, index)?


@@ -140,6 +140,11 @@ impl Index<'_> {
         }
     }
 
+    pub async fn get_document_by_filter(&self, payload: Value) -> (Value, StatusCode) {
+        let url = format!("/indexes/{}/documents/fetch", urlencode(self.uid.as_ref()));
+        self.service.post(url, payload).await
+    }
+
     pub async fn wait_task(&self, update_id: u64) -> Value {
         // try several times to get status, or panic to not wait forever
         let url = format!("/tasks/{}", update_id);


@@ -570,3 +570,77 @@ async fn delete_document_by_filter() {
     }
     "###);
 }
+
+#[actix_rt::test]
+async fn fetch_document_by_filter() {
+    let server = Server::new().await;
+    let index = server.index("doggo");
+    index.update_settings_filterable_attributes(json!(["color"])).await;
+    index
+        .add_documents(
+            json!([
+                { "id": 0, "color": "red" },
+                { "id": 1, "color": "blue" },
+                { "id": 2, "color": "blue" },
+                { "id": 3 },
+            ]),
+            Some("id"),
+        )
+        .await;
+    index.wait_task(1).await;
+
+    let (response, code) = index.get_document_by_filter(json!(null)).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid value type: expected an object, but found null",
+      "code": "bad_request",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#bad_request"
+    }
+    "###);
+
+    let (response, code) = index.get_document_by_filter(json!({ "offset": "doggo" })).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid value type at `.offset`: expected a positive integer, but found a string: `\"doggo\"`",
+      "code": "invalid_document_get_offset",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_document_get_offset"
+    }
+    "###);
+
+    let (response, code) = index.get_document_by_filter(json!({ "limit": "doggo" })).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid value type at `.limit`: expected a positive integer, but found a string: `\"doggo\"`",
+      "code": "invalid_document_get_limit",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_document_get_limit"
+    }
+    "###);
+
+    let (response, code) = index.get_document_by_filter(json!({ "fields": ["doggo"] })).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid value type at `.fields`: expected a string, but found an array: `[\"doggo\"]`",
+      "code": "invalid_document_get_fields",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_document_get_fields"
+    }
+    "###);
+
+    let (response, code) = index.get_document_by_filter(json!({ "filter": true })).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid syntax for the filter parameter: `expected String, Array, found: true`.",
+      "code": "invalid_document_get_filter",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_document_get_filter"
+    }
+    "###);
+}


@@ -1,5 +1,6 @@
 use actix_web::test;
 use http::header::ACCEPT_ENCODING;
+use meili_snap::*;
 use serde_json::{json, Value};
 use urlencoding::encode as urlencode;
@@ -378,3 +379,103 @@ async fn get_documents_displayed_attributes_is_ignored() {
     assert_eq!(response.as_object().unwrap().keys().count(), 16);
     assert!(response.as_object().unwrap().get("gender").is_some());
 }
+
+#[actix_rt::test]
+async fn fetch_document_by_filter() {
+    let server = Server::new().await;
+    let index = server.index("doggo");
+    index.update_settings_filterable_attributes(json!(["color"])).await;
+    index
+        .add_documents(
+            json!([
+                { "id": 0, "color": "red" },
+                { "id": 1, "color": "blue" },
+                { "id": 2, "color": "blue" },
+                { "id": 3 },
+            ]),
+            Some("id"),
+        )
+        .await;
+    index.wait_task(1).await;
+
+    let (response, code) = index.get_document_by_filter(json!({})).await;
+    snapshot!(code, @"200 OK");
+    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
+    {
+      "results": [
+        {
+          "id": 0,
+          "color": "red"
+        },
+        {
+          "id": 1,
+          "color": "blue"
+        },
+        {
+          "id": 2,
+          "color": "blue"
+        },
+        {
+          "id": 3
+        }
+      ],
+      "offset": 0,
+      "limit": 20,
+      "total": 4
+    }
+    "###);
+
+    let (response, code) = index.get_document_by_filter(json!({ "filter": "color = blue" })).await;
+    snapshot!(code, @"200 OK");
+    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
+    {
+      "results": [
+        {
+          "id": 1,
+          "color": "blue"
+        },
+        {
+          "id": 2,
+          "color": "blue"
+        }
+      ],
+      "offset": 0,
+      "limit": 20,
+      "total": 2
+    }
+    "###);
+
+    let (response, code) = index
+        .get_document_by_filter(json!({ "offset": 1, "limit": 1, "filter": "color != blue" }))
+        .await;
+    snapshot!(code, @"200 OK");
+    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
+    {
+      "results": [
+        {
+          "id": 3
+        }
+      ],
+      "offset": 1,
+      "limit": 1,
+      "total": 2
+    }
+    "###);
+
+    let (response, code) = index
+        .get_document_by_filter(json!({ "limit": 1, "filter": "color != blue", "fields": "color" }))
+        .await;
+    snapshot!(code, @"200 OK");
+    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
+    {
+      "results": [
+        {
+          "color": "red"
+        }
+      ],
+      "offset": 0,
+      "limit": 1,
+      "total": 2
+    }
+    "###);
+}