add test for update errors
This commit is contained in:
parent d7b49fa671
commit e92f544fd1
@@ -112,7 +112,7 @@ impl Server {
     pub async fn wait_update_id(&mut self, update_id: u64) {
         // try 10 times to get status, or panic to not wait forever
-        for _ in 1..10 {
+        for _ in 0..10 {
             let (response, status_code) = self.get_update_status(update_id).await;
             assert_eq!(status_code, 200);
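The only functional change in this hunk is the loop bound: 1..10 runs nine iterations, so the helper contradicted its own "try 10 times" comment, while 0..10 gives the full ten attempts. A minimal standalone sketch of the same poll-then-panic pattern, with a hypothetical poll_status callback standing in for the real status request:

use std::{thread, time::Duration};

// Poll a status callback up to ten times, sleeping between attempts,
// and panic rather than wait forever.
fn wait_until_processed(mut poll_status: impl FnMut() -> String) {
    for attempt in 0..10 {
        // 0..10 yields attempts 0 through 9: exactly ten tries.
        if poll_status() == "processed" {
            return;
        }
        println!("attempt {}: update still pending", attempt);
        thread::sleep(Duration::from_secs(1));
    }
    panic!("update was not processed after 10 attempts");
}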
@@ -1,7 +1,10 @@
 mod common;

-use serde_json::json;
+use std::thread;
+use std::time::Duration;

 use actix_http::http::StatusCode;
+use serde_json::{json, Map, Value};

 macro_rules! assert_error {
     ($code:literal, $type:literal, $status:path, $req:expr) => {
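Map and Value are pulled in because the new max-field test further down builds a document whose field names are generated in a loop, something the json! literal macro cannot express on its own; thread and Duration back the one-second sleep between polls in assert_error_async!. A small self-contained sketch of that document-building pattern (the function name and field-count parameter are illustrative):

use serde_json::{json, Map, Value};

// Build a JSON document with runtime-generated keys, then wrap it in a
// one-element array, the same shape the test payloads use.
fn generated_document(field_count: usize) -> Value {
    let mut doc = Map::with_capacity(field_count + 1);
    doc.insert("id".into(), Value::String("foo".into()));
    for i in 0..field_count {
        doc.insert(format!("field{}", i), Value::String("foo".into()));
    }
    json!([doc])
}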
@@ -12,6 +15,25 @@ macro_rules! assert_error {
     };
 }

+macro_rules! assert_error_async {
+    ($code:literal, $type:literal, $server:expr, $req:expr) => {
+        let (response, _) = $req;
+        let update_id = response["updateId"].as_u64().unwrap();
+        for _ in 1..10 {
+            let (response, status_code) = $server.get_update_status(update_id).await;
+            assert_eq!(status_code, StatusCode::OK);
+            if response["status"] == "processed" || response["status"] == "failed" {
+                println!("response: {}", response);
+                assert_eq!(response["status"], "failed");
+                assert_eq!(response["errorCode"], $code);
+                assert_eq!(response["errorType"], $type);
+                return
+            }
+            thread::sleep(Duration::from_secs(1));
+        }
+    };
+}
+
 #[actix_rt::test]
 async fn index_already_exists_error() {
     let mut server = common::Server::with_uid("test");
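The macro expands inline inside the calling test: it reads updateId from the enqueue response, polls the update status up to nine times (it keeps the 1..10 bound, unlike the fixed wait_update_id above) with a blocking one-second sleep between attempts, and once the status reaches "processed" or "failed" it asserts that it is in fact "failed" with the expected errorCode and errorType before returning from the enclosing test function. For reference, a hedged sketch of the kind of status payload those assertions expect; the field names match the ones asserted on, the concrete values are only examples:

use serde_json::{json, Value};

// Illustrative failed update-status document; not taken from the diff.
fn example_failed_update_status() -> Value {
    json!({
        "status": "failed",
        "updateId": 0,
        "errorCode": "missing_document_id",
        "errorType": "invalid_request_error"
    })
}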
@@ -57,9 +79,43 @@ async fn primary_key_already_present_error() {
 }

 #[actix_rt::test]
-#[ignore]
 async fn max_field_limit_exceeded_error() {
-    todo!("error reported in update")
+    let mut server = common::Server::test_server().await;
+    let body = json!({
+        "uid": "test",
+    });
+    server.create_index(body).await;
+    let mut doc = Map::with_capacity(70_000);
+    doc.insert("id".into(), Value::String("foo".into()));
+    for i in 0..69_999 {
+        doc.insert(format!("field{}", i), Value::String("foo".into()));
+    }
+    let docs = json!([doc]);
+    assert_error_async!(
+        "max_field_limit_exceeded",
+        "invalid_request_error",
+        server,
+        server.add_or_replace_multiple_documents_sync(docs).await);
+}
+
+#[actix_rt::test]
+async fn missing_document_id() {
+    let mut server = common::Server::test_server().await;
+    let body = json!({
+        "uid": "test",
+        "primaryKey": "test"
+    });
+    server.create_index(body).await;
+    let docs = json!([
+        {
+            "foo": "bar",
+        }
+    ]);
+    assert_error_async!(
+        "missing_document_id",
+        "invalid_request_error",
+        server,
+        server.add_or_replace_multiple_documents_sync(docs).await);
 }

 #[actix_rt::test]
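Both tests create an index, submit documents that can only be rejected while the update is processed asynchronously, and let assert_error_async! wait for the failure. The magic numbers in the first test add up to 70,000 fields in one document (one explicit "id" plus 69,999 generated fields), which is presumably meant to exceed a u16-sized field-id space; that interpretation is an assumption, not something stated in the diff:

// Checks the arithmetic behind the 70_000-field document, under the
// assumption that the limit behind "max_field_limit_exceeded" is u16::MAX.
fn main() {
    let generated = 69_999u32;
    let total = generated + 1; // plus the explicit "id" field
    assert_eq!(total, 70_000);
    assert!(total > u16::MAX as u32); // 70_000 > 65_535
    println!("{} fields exceed the assumed u16 limit of {}", total, u16::MAX);
}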