mirror of https://github.com/meilisearch/MeiliSearch
synced 2024-12-23 13:10:06 +01:00

create a test with the dry-run parameter enabled

This commit is contained in:
parent bbf3fb88ca
commit a478392b7a
@@ -100,16 +100,11 @@ impl Index<'_> {
     pub async fn raw_add_documents(
         &self,
         payload: &str,
-        content_type: Option<&str>,
+        headers: Vec<(&str, &str)>,
         query_parameter: &str,
     ) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/documents{}", urlencode(self.uid.as_ref()), query_parameter);
-
-        if let Some(content_type) = content_type {
-            self.service.post_str(url, payload, vec![("Content-Type", content_type)]).await
-        } else {
-            self.service.post_str(url, payload, Vec::new()).await
-        }
+        self.service.post_str(url, payload, headers).await
     }
 
     pub async fn update_documents(
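The helper's callers now control the full header set instead of only an optional Content-Type. A minimal sketch of how the three call shapes in this commit map onto the new signature; `migration_examples` is a hypothetical name and `index` stands for the tests' `Index` helper from the hunk above, so this is illustration, not code from the commit:

// Sketch only: `index` is the tests' Index helper shown above.
async fn migration_examples(index: &Index<'_>) {
    // Before: index.raw_add_documents("", Some("application/json"), "").await
    // After: the Content-Type travels as an explicit header pair.
    let (_response, _code) =
        index.raw_add_documents("", vec![("Content-Type", "application/json")], "").await;

    // Before: index.raw_add_documents("", None, "").await
    // After: an empty Vec sends no headers at all.
    let (_response, _code) = index.raw_add_documents("", Vec::new(), "").await;

    // New capability: extra headers ride along, e.g. the DryRun header
    // exercised by the dry_register_file test added below.
    let (_response, _code) = index
        .raw_add_documents(
            "{}",
            vec![("Content-Type", "application/json"), ("DryRun", "true")],
            "",
        )
        .await;
}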
@@ -1,10 +1,11 @@
 use actix_web::test;
 use meili_snap::{json_string, snapshot};
+use meilisearch::Opt;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
 use crate::common::encoder::Encoder;
-use crate::common::{GetAllDocumentsOptions, Server, Value};
+use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value};
 use crate::json;
 
 /// This is the basic usage of our API and every other tests uses the content-type application/json
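The two new imports serve the test added below: `Opt` builds the server options that switch on `experimental_replication_parameters`, and `default_settings` derives the remaining options from a temporary directory.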
@@ -2157,3 +2158,49 @@ async fn batch_several_documents_addition() {
     assert_eq!(code, 200, "failed with `{}`", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 120);
 }
+
+#[actix_rt::test]
+async fn dry_register_file() {
+    let temp = tempfile::tempdir().unwrap();
+
+    let options =
+        Opt { experimental_replication_parameters: true, ..default_settings(temp.path()) };
+    let server = Server::new_with_options(options).await.unwrap();
+    let index = server.index("tamo");
+
+    let documents = r#"
+        {
+            "id": "12",
+            "doggo": "kefir"
+        }
+    "#;
+
+    let (response, code) = index
+        .raw_add_documents(
+            documents,
+            vec![("Content-Type", "application/json"), ("DryRun", "true")],
+            "",
+        )
+        .await;
+    snapshot!(response, @r###"
+    {
+      "taskUid": 0,
+      "indexUid": "tamo",
+      "status": "enqueued",
+      "type": "documentAdditionOrUpdate",
+      "enqueuedAt": "[date]"
+    }
+    "###);
+    snapshot!(code, @"202 Accepted");
+
+    let (response, code) = index.get_task(response.uid()).await;
+    snapshot!(response, @r###"
+    {
+      "message": "Task `0` not found.",
+      "code": "task_not_found",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#task_not_found"
+    }
+    "###);
+    snapshot!(code, @"404 Not Found");
+}
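The test pins down the dry-run contract: with the replication parameters enabled, a `DryRun: true` header yields a 202 Accepted and a task payload, but the task is never actually registered, so the follow-up `get_task` lookup answers 404. Below is a sketch of the same request over plain HTTP; the localhost URL and the `--experimental-replication-parameters` flag are assumptions about how such an instance would be launched, not part of this commit:

// Sketch, not part of the commit: replay the dry-run request with reqwest.
// Assumes a local Meilisearch started with --experimental-replication-parameters
// and listening on http://localhost:7700 (both are assumptions).
use reqwest::Client;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let response = Client::new()
        .post("http://localhost:7700/indexes/tamo/documents")
        .header("Content-Type", "application/json")
        .header("DryRun", "true")
        .body(r#"{ "id": "12", "doggo": "kefir" }"#)
        .send()
        .await?;
    // Expect 202 Accepted; the returned taskUid never resolves to a real task.
    println!("{} {}", response.status(), response.text().await?);
    Ok(())
}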
@@ -209,7 +209,8 @@ async fn replace_documents_missing_payload() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let (response, code) = index.raw_add_documents("", Some("application/json"), "").await;
+    let (response, code) =
+        index.raw_add_documents("", vec![("Content-Type", "application/json")], "").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -220,7 +221,8 @@ async fn replace_documents_missing_payload() {
     }
     "###);
 
-    let (response, code) = index.raw_add_documents("", Some("application/x-ndjson"), "").await;
+    let (response, code) =
+        index.raw_add_documents("", vec![("Content-Type", "application/x-ndjson")], "").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -231,7 +233,8 @@ async fn replace_documents_missing_payload() {
     }
     "###);
 
-    let (response, code) = index.raw_add_documents("", Some("text/csv"), "").await;
+    let (response, code) =
+        index.raw_add_documents("", vec![("Content-Type", "text/csv")], "").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -287,7 +290,7 @@ async fn replace_documents_missing_content_type() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let (response, code) = index.raw_add_documents("", None, "").await;
+    let (response, code) = index.raw_add_documents("", Vec::new(), "").await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {
@@ -299,7 +302,7 @@ async fn replace_documents_missing_content_type() {
     "###);
 
     // even with a csv delimiter specified this error is triggered first
-    let (response, code) = index.raw_add_documents("", None, "?csvDelimiter=;").await;
+    let (response, code) = index.raw_add_documents("", Vec::new(), "?csvDelimiter=;").await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {
@@ -345,7 +348,7 @@ async fn replace_documents_bad_content_type() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let (response, code) = index.raw_add_documents("", Some("doggo"), "").await;
+    let (response, code) = index.raw_add_documents("", vec![("Content-Type", "doggo")], "").await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {
@@ -379,8 +382,9 @@ async fn replace_documents_bad_csv_delimiter() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let (response, code) =
-        index.raw_add_documents("", Some("application/json"), "?csvDelimiter").await;
+    let (response, code) = index
+        .raw_add_documents("", vec![("Content-Type", "application/json")], "?csvDelimiter")
+        .await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -391,8 +395,9 @@ async fn replace_documents_bad_csv_delimiter() {
     }
     "###);
 
-    let (response, code) =
-        index.raw_add_documents("", Some("application/json"), "?csvDelimiter=doggo").await;
+    let (response, code) = index
+        .raw_add_documents("", vec![("Content-Type", "application/json")], "?csvDelimiter=doggo")
+        .await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -404,7 +409,11 @@ async fn replace_documents_bad_csv_delimiter() {
     "###);
 
     let (response, code) = index
-        .raw_add_documents("", Some("application/json"), &format!("?csvDelimiter={}", encode("🍰")))
+        .raw_add_documents(
+            "",
+            vec![("Content-Type", "application/json")],
+            &format!("?csvDelimiter={}", encode("🍰")),
+        )
         .await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
@@ -469,8 +478,9 @@ async fn replace_documents_csv_delimiter_with_bad_content_type() {
     let server = Server::new().await;
     let index = server.index("test");
 
-    let (response, code) =
-        index.raw_add_documents("", Some("application/json"), "?csvDelimiter=a").await;
+    let (response, code) = index
+        .raw_add_documents("", vec![("Content-Type", "application/json")], "?csvDelimiter=a")
+        .await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {
@@ -481,8 +491,9 @@ async fn replace_documents_csv_delimiter_with_bad_content_type() {
     }
     "###);
 
-    let (response, code) =
-        index.raw_add_documents("", Some("application/x-ndjson"), "?csvDelimiter=a").await;
+    let (response, code) = index
+        .raw_add_documents("", vec![("Content-Type", "application/x-ndjson")], "?csvDelimiter=a")
+        .await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {