Merge remote-tracking branch 'origin/release-v0.28.0' into stable

Clémentine Urquizar 2022-07-11 14:37:21 +02:00
commit 32d6af6527
No known key found for this signature in database
GPG Key ID: D8E7CC7422E77E1A
138 changed files with 7002 additions and 5195 deletions

.github/scripts/check-release.sh vendored Normal file
View File

@ -0,0 +1,28 @@
#!/bin/bash
# check_tag $current_tag $file_tag $file_name
function check_tag {
if [[ "$1" != "$2" ]]; then
echo "Error: the current tag does not match the version in $3: found $2 - expected $1"
ret=1
fi
}
ret=0
current_tag=${GITHUB_REF#'refs/tags/v'}
toml_files='*/Cargo.toml'
for toml_file in $toml_files;
do
file_tag="$(grep '^version = ' $toml_file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
check_tag $current_tag $file_tag $toml_file
done
lock_file='Cargo.lock'
lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')
check_tag $current_tag $lock_tag $lock_file
if [[ "$ret" -eq 0 ]] ; then
echo 'OK'
fi
exit $ret
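For illustration, the script can be exercised locally by simulating the CI environment (hypothetical tag; in CI, `GITHUB_REF` is set by GitHub Actions):

```bash
# Hypothetical local run of the release check:
export GITHUB_REF='refs/tags/v0.28.0'
bash .github/scripts/check-release.sh
# Prints "OK" and exits 0 when every */Cargo.toml and the Cargo.lock entry
# for meilisearch-auth declare version 0.28.0; otherwise it prints one
# error line per mismatching file and exits 1.
```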

View File

@ -1,14 +1,14 @@
#!/bin/sh
# Checks if the current tag should be the latest (in terms of semver and not of release date).
# Ex: previous tag -> v0.10.1
# new tag -> v0.8.12
# The new tag should not be the latest
# So it returns "false", the CI should not run for the release v0.8.12
# Used in GHA in publish-docker-latest.yml
# Was used in our CIs to publish the latest Docker image. Not used anymore; it will be used again once v1 and v2 are out and we want to maintain multiple stable versions.
# Returns "true" or "false" (as a string) to be used in the `if` in GHA
# Checks if the current tag should be the latest (in terms of semver and not of release date).
# Ex: previous tag -> v2.1.1
# new tag -> v1.20.3
# The new tag (v1.20.3) should NOT be the latest
# So it returns "false": the `latest` tag should not be updated for the release v1.20.3 and still needs to correspond to v2.1.1
# GLOBAL
GREP_SEMVER_REGEXP='v\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)$' # i.e. v[number].[number].[number]
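As a hedged illustration of what this BRE accepts (it is anchored at the end of the line, so pre-release suffixes fall through):

```bash
# Assumes GREP_SEMVER_REGEXP is defined as above:
echo "v0.28.0"    | grep "$GREP_SEMVER_REGEXP"   # matches: plain vX.Y.Z
echo "v0.28.0rc0" | grep "$GREP_SEMVER_REGEXP"   # no match: pre-release suffix
```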

View File

@ -5,9 +5,33 @@ on:
name: Publish binaries to release
jobs:
check-version:
name: Check the version validity
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# Check if the tag has the v<number>.<number>.<number> format.
# If yes, it means we are publishing an official release.
# If no, we are releasing a RC, so no need to check the version.
- name: Check tag format
if: github.event_name != 'schedule'
id: check-tag-format
run: |
escaped_tag=$(printf "%q" ${{ github.ref_name }})
if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo ::set-output name=stable::true
else
echo ::set-output name=stable::false
fi
- name: Check release validity
if: steps.check-tag-format.outputs.stable == 'true'
run: bash .github/scripts/check-release.sh
publish:
name: Publish binary for ${{ matrix.os }}
runs-on: ${{ matrix.os }}
needs: check-version
strategy:
fail-fast: false
matrix:
@ -41,6 +65,7 @@ jobs:
publish-aarch64:
name: Publish binary for aarch64
runs-on: ${{ matrix.os }}
needs: check-version
continue-on-error: false
strategy:
fail-fast: false
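For illustration, the `check-tag-format` step above classifies refs roughly like this sketch (hypothetical inputs):

```bash
# Minimal local sketch of the stable/not-stable decision:
classify() {
  if [[ "$1" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then echo stable; else echo not-stable; fi
}
classify v0.28.0       # stable: the release-validity check then runs
classify v0.28.0rc0    # not-stable: release candidate, check skipped
```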

View File

@ -5,9 +5,18 @@ on:
types: [released]
jobs:
check-version:
name: Check the version validity
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Check release validity
run: bash .github/scripts/check-release.sh
debian:
name: Publish debian package
runs-on: ubuntu-18.04
needs: check-version
steps:
- uses: hecrj/setup-rust-action@master
with:
@ -30,6 +39,7 @@ jobs:
homebrew:
name: Bump Homebrew formula
runs-on: ubuntu-18.04
needs: check-version
steps:
- name: Create PR to Homebrew
uses: mislav/bump-homebrew-formula-action@v1

View File

@ -0,0 +1,71 @@
---
on:
schedule:
- cron: '0 4 * * *' # Every day at 4:00am
push:
tags:
- '*'
name: Publish tagged images to Docker Hub
jobs:
docker:
runs-on: docker
steps:
- uses: actions/checkout@v2
# Check if the tag has the v<number>.<number>.<number> format. If yes, it means we are publishing an official release.
# In this situation, we need to set `output.stable` to create/update the following tags (in addition to the `vX.Y.Z` Docker tag):
# - a `vX.Y` (without patch version) Docker tag
# - a `latest` Docker tag
- name: Check tag format
if: github.event_name != 'schedule'
id: check-tag-format
run: |
escaped_tag=$(printf "%q" ${{ github.ref_name }})
if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
echo ::set-output name=stable::true
else
echo ::set-output name=stable::false
fi
# Check only the validity of the tag for official releases (not for pre-releases or other tags)
- name: Check release validity
if: github.event_name != 'schedule' && steps.check-tag-format.outputs.stable == 'true'
run: bash .github/scripts/check-release.sh
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
if: github.event_name != 'schedule'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Docker meta
id: meta
uses: docker/metadata-action@v3
with:
images: getmeili/meilisearch
# The `latest` and `vX.Y` tags are only pushed for the official Meilisearch releases
# See https://github.com/docker/metadata-action#latest-tag
flavor: latest=false
tags: |
type=ref,event=tag
type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
# We do not push tags for the cron jobs; those runs are only for test purposes
push: ${{ github.event_name != 'schedule' }}
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
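For illustration, with the `flavor`/`tags` configuration above, a stable tag yields the full tag set while a release candidate only gets its own tag (hypothetical versions):

```bash
# git tag v0.28.0 (stable=true) publishes:
#   getmeili/meilisearch:v0.28.0   # type=ref,event=tag
#   getmeili/meilisearch:v0.28     # type=semver, stable releases only
#   getmeili/meilisearch:latest    # type=raw, stable releases only
# git tag v0.28.0rc0 (stable=false) publishes only:
#   getmeili/meilisearch:v0.28.0rc0
```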

View File

@ -1,30 +0,0 @@
---
on:
release:
types: [released]
name: Publish latest image to Docker Hub
jobs:
docker-latest:
runs-on: docker
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
push: true
platforms: linux/amd64,linux/arm64
tags: getmeili/meilisearch:latest

View File

@ -1,39 +0,0 @@
---
on:
push:
tags:
- '*'
name: Publish tagged image to Docker Hub
jobs:
docker-tag:
runs-on: docker
steps:
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Docker meta
id: meta
uses: docker/metadata-action@v3
with:
images: getmeili/meilisearch
flavor: latest=false
tags: type=ref,event=tag
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
push: true
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}

View File

@ -12,6 +12,7 @@ on:
env:
CARGO_TERM_COLOR: always
RUST_BACKTRACE: 1
RUSTFLAGS: "-D warnings"
jobs:
tests:
@ -82,7 +83,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
toolchain: stable
override: true
components: rustfmt
- name: Cache dependencies
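The new `RUSTFLAGS: "-D warnings"` entry turns every compiler warning into a hard error; a hedged way to reproduce the CI behavior locally:

```bash
RUSTFLAGS="-D warnings" cargo check --workspace
# Any warning (unused import, dead code, ...) now fails the build.
```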

View File

@ -9,6 +9,7 @@ Remember that there are many ways to contribute other than writing code: writing
- [How to Contribute](#how-to-contribute)
- [Development Workflow](#development-workflow)
- [Git Guidelines](#git-guidelines)
- [Release Process (for internal team only)](#release-process-for-internal-team-only)
## Assumptions
@ -78,6 +79,19 @@ Some notes on GitHub PRs:
The draft PRs are recommended when you want to show that you are working on something and make your work visible.
- The branch related to the PR must be **up-to-date with `main`** before merging. Fortunately, this project uses [Bors](https://github.com/bors-ng/bors-ng) to automatically enforce this requirement without the PR author having to rebase manually.
## Release Process (for internal team only)
Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org/).
### Automation to Rebase and Merge the PRs
This project integrates a bot that helps us manage pull request merging.<br>
_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md)._
### How to Publish a new Release
The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/core-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.
<hr>
Thank you again for reading this through, we cannot wait to begin working with you if you made your way through this contributing guide ❤️

Cargo.lock generated

File diff suppressed because it is too large

View File

@ -2,8 +2,14 @@
resolver = "2"
members = [
"meilisearch-http",
"meilisearch-error",
"meilisearch-types",
"meilisearch-lib",
"meilisearch-auth",
"permissive-json-pointer",
]
[profile.dev.package.flate2]
opt-level = 3
[profile.dev.package.milli]
opt-level = 3

View File

@ -58,7 +58,7 @@ meilisearch
#### Docker
```bash
docker run -p 7700:7700 -v "$(pwd)/data.ms:/data.ms" getmeili/meilisearch
docker run -p 7700:7700 -v "$(pwd)/meili_data:/meili_data" getmeili/meilisearch
```
#### Announcing a cloud-hosted Meilisearch
@ -109,7 +109,7 @@ cargo run --release
Let's create an index! If you need a sample dataset, use [this movie database](https://www.notion.so/meilisearch/A-movies-dataset-to-test-Meili-1cbf7c9cfa4247249c40edfa22d7ca87#b5ae399b81834705ba5420ac70358a65). You can also find it in the `datasets/` directory.
```bash
curl -L 'https://bit.ly/2PAcw9l' -o movies.json
curl -L https://docs.meilisearch.com/movies.json -o movies.json
```
Now, you're ready to index some data.
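As a minimal sketch of that next step (standard Meilisearch documents route; the index name is illustrative):

```bash
curl -X POST 'http://localhost:7700/indexes/movies/documents?primaryKey=id' \
  -H 'Content-Type: application/json' \
  --data-binary @movies.json
```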

View File

@ -4,7 +4,7 @@ Meilisearch takes the security of our software products and services seriously.
If you believe you have found a security vulnerability in any Meilisearch-owned repository, please report it to us as described below.
## Suported versions
## Supported versions
As long as we are pre-v1.0, only the latest version of Meilisearch will be supported with security updates.

View File

@ -2,7 +2,7 @@ status = [
'Tests on ubuntu-18.04',
'Tests on macos-latest',
'Tests on windows-latest',
# 'Run Clippy',
'Run Clippy',
'Run Rustfmt',
'Run tests in debug',
]

View File

@ -67,8 +67,8 @@ semverLT() {
return 1
}
# Get a token from https://github.com/settings/tokens to increasae rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo'
# Create GITHUB_PAT enviroment variable once you aquired the token to start using it
# Get a token from https://github.com/settings/tokens to increase rate limit (from 60 to 5000), make sure the token scope is set to 'public_repo'
# Create GITHUB_PAT environment variable once you acquired the token to start using it
# Returns the tag of the latest stable release (in terms of semver and not of release date)
get_latest() {
temp_file='temp_file' # temp_file needed because the grep would start before the download is over
@ -89,7 +89,7 @@ get_latest() {
latest=''
current_tag=''
for release_info in $releases; do
if [ $i -eq 0 ]; then # Cheking tag_name
if [ $i -eq 0 ]; then # Checking tag_name
if echo "$release_info" | grep -q "$GREP_SEMVER_REGEXP"; then # If it's not an alpha or beta release
current_tag=$release_info
else

View File

@ -1,15 +1,17 @@
[package]
name = "meilisearch-auth"
version = "0.27.2"
version = "0.28.0"
edition = "2021"
[dependencies]
enum-iterator = "0.7.0"
meilisearch-error = { path = "../meilisearch-error" }
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.26.6" }
hmac = "0.12.1"
meilisearch-types = { path = "../meilisearch-types" }
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.31.1" }
rand = "0.8.4"
serde = { version = "1.0.136", features = ["derive"] }
serde_json = { version = "1.0.79", features = ["preserve_order"] }
sha2 = "0.10.2"
thiserror = "1.0.30"
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
uuid = { version = "1.1.2", features = ["serde", "v4"] }

View File

@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize};
#[repr(u8)]
pub enum Action {
#[serde(rename = "*")]
All = 0,
All = actions::ALL,
#[serde(rename = "search")]
Search = actions::SEARCH,
#[serde(rename = "documents.add")]
@ -32,17 +32,23 @@ pub enum Action {
StatsGet = actions::STATS_GET,
#[serde(rename = "dumps.create")]
DumpsCreate = actions::DUMPS_CREATE,
#[serde(rename = "dumps.get")]
DumpsGet = actions::DUMPS_GET,
#[serde(rename = "version")]
Version = actions::VERSION,
#[serde(rename = "keys.create")]
KeysAdd = actions::KEYS_CREATE,
#[serde(rename = "keys.get")]
KeysGet = actions::KEYS_GET,
#[serde(rename = "keys.update")]
KeysUpdate = actions::KEYS_UPDATE,
#[serde(rename = "keys.delete")]
KeysDelete = actions::KEYS_DELETE,
}
impl Action {
pub fn from_repr(repr: u8) -> Option<Self> {
use actions::*;
match repr {
0 => Some(Self::All),
ALL => Some(Self::All),
SEARCH => Some(Self::Search),
DOCUMENTS_ADD => Some(Self::DocumentsAdd),
DOCUMENTS_GET => Some(Self::DocumentsGet),
@ -56,8 +62,11 @@ impl Action {
SETTINGS_UPDATE => Some(Self::SettingsUpdate),
STATS_GET => Some(Self::StatsGet),
DUMPS_CREATE => Some(Self::DumpsCreate),
DUMPS_GET => Some(Self::DumpsGet),
VERSION => Some(Self::Version),
KEYS_CREATE => Some(Self::KeysAdd),
KEYS_GET => Some(Self::KeysGet),
KEYS_UPDATE => Some(Self::KeysUpdate),
KEYS_DELETE => Some(Self::KeysDelete),
_otherwise => None,
}
}
@ -65,7 +74,7 @@ impl Action {
pub fn repr(&self) -> u8 {
use actions::*;
match self {
Self::All => 0,
Self::All => ALL,
Self::Search => SEARCH,
Self::DocumentsAdd => DOCUMENTS_ADD,
Self::DocumentsGet => DOCUMENTS_GET,
@ -79,13 +88,17 @@ impl Action {
Self::SettingsUpdate => SETTINGS_UPDATE,
Self::StatsGet => STATS_GET,
Self::DumpsCreate => DUMPS_CREATE,
Self::DumpsGet => DUMPS_GET,
Self::Version => VERSION,
Self::KeysAdd => KEYS_CREATE,
Self::KeysGet => KEYS_GET,
Self::KeysUpdate => KEYS_UPDATE,
Self::KeysDelete => KEYS_DELETE,
}
}
}
pub mod actions {
pub(crate) const ALL: u8 = 0;
pub const SEARCH: u8 = 1;
pub const DOCUMENTS_ADD: u8 = 2;
pub const DOCUMENTS_GET: u8 = 3;
@ -99,6 +112,9 @@ pub mod actions {
pub const SETTINGS_UPDATE: u8 = 11;
pub const STATS_GET: u8 = 12;
pub const DUMPS_CREATE: u8 = 13;
pub const DUMPS_GET: u8 = 14;
pub const VERSION: u8 = 15;
pub const KEYS_CREATE: u8 = 16;
pub const KEYS_GET: u8 = 17;
pub const KEYS_UPDATE: u8 = 18;
pub const KEYS_DELETE: u8 = 19;
}

View File

@ -1,5 +1,6 @@
use serde_json::Deserializer;
use std::fs::File;
use std::io::BufRead;
use std::io::BufReader;
use std::io::Write;
use std::path::Path;
@ -36,10 +37,9 @@ impl AuthController {
return Ok(());
}
let mut reader = BufReader::new(File::open(&keys_file_path)?).lines();
while let Some(key) = reader.next().transpose()? {
let key = serde_json::from_str(&key)?;
store.put_api_key(key)?;
let reader = BufReader::new(File::open(&keys_file_path)?);
for key in Deserializer::from_reader(reader).into_iter() {
store.put_api_key(key?)?;
}
Ok(())

View File

@ -1,7 +1,7 @@
use std::error::Error;
use meilisearch_error::ErrorCode;
use meilisearch_error::{internal_error, Code};
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::internal_error;
use serde_json::Value;
pub type Result<T> = std::result::Result<T, AuthControllerError>;
@ -18,8 +18,18 @@ pub enum AuthControllerError {
InvalidApiKeyExpiresAt(Value),
#[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
InvalidApiKeyDescription(Value),
#[error(
"`name` field value `{0}` is invalid. It should be a string or specified as a null value."
)]
InvalidApiKeyName(Value),
#[error("`uid` field value `{0}` is invalid. It should be a valid UUID v4 string or omitted.")]
InvalidApiKeyUid(Value),
#[error("API key `{0}` not found.")]
ApiKeyNotFound(String),
#[error("`uid` field value `{0}` is already an existing API key.")]
ApiKeyAlreadyExists(String),
#[error("The `{0}` field cannot be modified for the given resource.")]
ImmutableField(String),
#[error("Internal error: {0}")]
Internal(Box<dyn Error + Send + Sync + 'static>),
}
@ -39,7 +49,11 @@ impl ErrorCode for AuthControllerError {
Self::InvalidApiKeyIndexes(_) => Code::InvalidApiKeyIndexes,
Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName,
Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid,
Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists,
Self::ImmutableField(_) => Code::ImmutableField,
Self::Internal(_) => Code::Internal,
}
}

View File

@ -1,20 +1,25 @@
use crate::action::Action;
use crate::error::{AuthControllerError, Result};
use crate::store::{KeyId, KEY_ID_LENGTH};
use rand::Rng;
use crate::store::KeyId;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::star_or::StarOr;
use serde::{Deserialize, Serialize};
use serde_json::{from_value, Value};
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};
use uuid::Uuid;
#[derive(Debug, Deserialize, Serialize)]
pub struct Key {
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
pub id: KeyId,
#[serde(skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
pub uid: KeyId,
pub actions: Vec<Action>,
pub indexes: Vec<String>,
pub indexes: Vec<StarOr<IndexUid>>,
#[serde(with = "time::serde::rfc3339::option")]
pub expires_at: Option<OffsetDateTime>,
#[serde(with = "time::serde::rfc3339")]
@ -25,16 +30,27 @@ pub struct Key {
impl Key {
pub fn create_from_value(value: Value) -> Result<Self> {
let description = match value.get("description") {
Some(Value::Null) => None,
Some(des) => Some(
from_value(des.clone())
.map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?,
),
None => None,
let name = match value.get("name") {
None | Some(Value::Null) => None,
Some(des) => from_value(des.clone())
.map(Some)
.map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()))?,
};
let id = generate_id();
let description = match value.get("description") {
None | Some(Value::Null) => None,
Some(des) => from_value(des.clone())
.map(Some)
.map_err(|_| AuthControllerError::InvalidApiKeyDescription(des.clone()))?,
};
let uid = value.get("uid").map_or_else(
|| Ok(Uuid::new_v4()),
|uid| {
from_value(uid.clone())
.map_err(|_| AuthControllerError::InvalidApiKeyUid(uid.clone()))
},
)?;
let actions = value
.get("actions")
@ -61,8 +77,9 @@ impl Key {
let updated_at = created_at;
Ok(Self {
name,
description,
id,
uid,
actions,
indexes,
expires_at,
@ -78,20 +95,34 @@ impl Key {
self.description = des?;
}
if let Some(act) = value.get("actions") {
let act = from_value(act.clone())
.map_err(|_| AuthControllerError::InvalidApiKeyActions(act.clone()));
self.actions = act?;
if let Some(des) = value.get("name") {
let des = from_value(des.clone())
.map_err(|_| AuthControllerError::InvalidApiKeyName(des.clone()));
self.name = des?;
}
if let Some(ind) = value.get("indexes") {
let ind = from_value(ind.clone())
.map_err(|_| AuthControllerError::InvalidApiKeyIndexes(ind.clone()));
self.indexes = ind?;
if value.get("uid").is_some() {
return Err(AuthControllerError::ImmutableField("uid".to_string()));
}
if let Some(exp) = value.get("expiresAt") {
self.expires_at = parse_expiration_date(exp)?;
if value.get("actions").is_some() {
return Err(AuthControllerError::ImmutableField("actions".to_string()));
}
if value.get("indexes").is_some() {
return Err(AuthControllerError::ImmutableField("indexes".to_string()));
}
if value.get("expiresAt").is_some() {
return Err(AuthControllerError::ImmutableField("expiresAt".to_string()));
}
if value.get("createdAt").is_some() {
return Err(AuthControllerError::ImmutableField("createdAt".to_string()));
}
if value.get("updatedAt").is_some() {
return Err(AuthControllerError::ImmutableField("updatedAt".to_string()));
}
self.updated_at = OffsetDateTime::now_utc();
@ -101,11 +132,13 @@ impl Key {
pub(crate) fn default_admin() -> Self {
let now = OffsetDateTime::now_utc();
let uid = Uuid::new_v4();
Self {
description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()),
id: generate_id(),
name: Some("Default Admin API Key".to_string()),
description: Some("Use it for anything that is not a search operation. Caution! Do not expose it on a public frontend".to_string()),
uid,
actions: vec![Action::All],
indexes: vec!["*".to_string()],
indexes: vec![StarOr::Star],
expires_at: None,
created_at: now,
updated_at: now,
@ -114,13 +147,13 @@ impl Key {
pub(crate) fn default_search() -> Self {
let now = OffsetDateTime::now_utc();
let uid = Uuid::new_v4();
Self {
description: Some(
"Default Search API Key (Use it to search from the frontend)".to_string(),
),
id: generate_id(),
name: Some("Default Search API Key".to_string()),
description: Some("Use it to search from the frontend".to_string()),
uid,
actions: vec![Action::Search],
indexes: vec!["*".to_string()],
indexes: vec![StarOr::Star],
expires_at: None,
created_at: now,
updated_at: now,
@ -128,19 +161,6 @@ impl Key {
}
}
/// Generate a printable key of 64 characters using thread_rng.
fn generate_id() -> [u8; KEY_ID_LENGTH] {
const CHARSET: &[u8] = b"abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ";
let mut rng = rand::thread_rng();
let mut bytes = [0; KEY_ID_LENGTH];
for byte in bytes.iter_mut() {
*byte = CHARSET[rng.gen_range(0..CHARSET.len())];
}
bytes
}
fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
match value {
Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)

View File

@ -5,18 +5,20 @@ mod key;
mod store;
use std::collections::{HashMap, HashSet};
use std::ops::Deref;
use std::path::Path;
use std::str::from_utf8;
use std::sync::Arc;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use sha2::{Digest, Sha256};
use time::OffsetDateTime;
use uuid::Uuid;
pub use action::{actions, Action};
use error::{AuthControllerError, Result};
pub use key::Key;
use meilisearch_types::star_or::StarOr;
use store::generate_key_as_hexa;
pub use store::open_auth_store_env;
use store::HeedAuthStore;
@ -42,62 +44,77 @@ impl AuthController {
pub fn create_key(&self, value: Value) -> Result<Key> {
let key = Key::create_from_value(value)?;
self.store.put_api_key(key)
match self.store.get_api_key(key.uid)? {
Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(
key.uid.to_string(),
)),
None => self.store.put_api_key(key),
}
}
pub fn update_key(&self, key: impl AsRef<str>, value: Value) -> Result<Key> {
let mut key = self.get_key(key)?;
pub fn update_key(&self, uid: Uuid, value: Value) -> Result<Key> {
let mut key = self.get_key(uid)?;
key.update_from_value(value)?;
self.store.put_api_key(key)
}
pub fn get_key(&self, key: impl AsRef<str>) -> Result<Key> {
pub fn get_key(&self, uid: Uuid) -> Result<Key> {
self.store
.get_api_key(&key)?
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))
.get_api_key(uid)?
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))
}
pub fn get_optional_uid_from_encoded_key(&self, encoded_key: &[u8]) -> Result<Option<Uuid>> {
match &self.master_key {
Some(master_key) => self
.store
.get_uid_from_encoded_key(encoded_key, master_key.as_bytes()),
None => Ok(None),
}
}
pub fn get_uid_from_encoded_key(&self, encoded_key: &str) -> Result<Uuid> {
self.get_optional_uid_from_encoded_key(encoded_key.as_bytes())?
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(encoded_key.to_string()))
}
pub fn get_key_filters(
&self,
key: impl AsRef<str>,
uid: Uuid,
search_rules: Option<SearchRules>,
) -> Result<AuthFilter> {
let mut filters = AuthFilter::default();
if self
.master_key
.as_ref()
.map_or(false, |master_key| master_key != key.as_ref())
{
let key = self
.store
.get_api_key(&key)?
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(key.as_ref().to_string()))?;
let key = self
.store
.get_api_key(uid)?
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))?;
if !key.indexes.iter().any(|i| i.as_str() == "*") {
filters.search_rules = match search_rules {
// Intersect search_rules with parent key authorized indexes.
Some(search_rules) => SearchRules::Map(
key.indexes
.into_iter()
.filter_map(|index| {
search_rules
.get_index_search_rules(&index)
.map(|index_search_rules| (index, Some(index_search_rules)))
})
.collect(),
),
None => SearchRules::Set(key.indexes.into_iter().collect()),
};
} else if let Some(search_rules) = search_rules {
filters.search_rules = search_rules;
}
filters.allow_index_creation = key
.actions
.iter()
.any(|&action| action == Action::IndexesAdd || action == Action::All);
if !key.indexes.iter().any(|i| i == &StarOr::Star) {
filters.search_rules = match search_rules {
// Intersect search_rules with parent key authorized indexes.
Some(search_rules) => SearchRules::Map(
key.indexes
.into_iter()
.filter_map(|index| {
search_rules.get_index_search_rules(index.deref()).map(
|index_search_rules| {
(String::from(index), Some(index_search_rules))
},
)
})
.collect(),
),
None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()),
};
} else if let Some(search_rules) = search_rules {
filters.search_rules = search_rules;
}
filters.allow_index_creation = key
.actions
.iter()
.any(|&action| action == Action::IndexesAdd || action == Action::All);
Ok(filters)
}
@ -105,13 +122,11 @@ impl AuthController {
self.store.list_api_keys()
}
pub fn delete_key(&self, key: impl AsRef<str>) -> Result<()> {
if self.store.delete_api_key(&key)? {
pub fn delete_key(&self, uid: Uuid) -> Result<()> {
if self.store.delete_api_key(uid)? {
Ok(())
} else {
Err(AuthControllerError::ApiKeyNotFound(
key.as_ref().to_string(),
))
Err(AuthControllerError::ApiKeyNotFound(uid.to_string()))
}
}
@ -121,32 +136,32 @@ impl AuthController {
/// Generate a valid key from a key id using the current master key.
/// Returns None if no master key has been set.
pub fn generate_key(&self, id: &str) -> Option<String> {
pub fn generate_key(&self, uid: Uuid) -> Option<String> {
self.master_key
.as_ref()
.map(|master_key| generate_key(master_key.as_bytes(), id))
.map(|master_key| generate_key_as_hexa(uid, master_key.as_bytes()))
}
/// Check if the provided key is authorized to make a specific action
/// without checking if the key is valid.
pub fn is_key_authorized(
&self,
key: &[u8],
uid: Uuid,
action: Action,
index: Option<&str>,
) -> Result<bool> {
match self
.store
// check if the key has access to all indexes.
.get_expiration_date(key, action, None)?
.get_expiration_date(uid, action, None)?
.or(match index {
// else check if the key has access to the requested index.
Some(index) => {
self.store
.get_expiration_date(key, action, Some(index.as_bytes()))?
.get_expiration_date(uid, action, Some(index.as_bytes()))?
}
// or to any index if no index has been requested.
None => self.store.prefix_first_expiration_date(key, action)?,
None => self.store.prefix_first_expiration_date(uid, action)?,
}) {
// check expiration date.
Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp),
@ -156,29 +171,6 @@ impl AuthController {
None => Ok(false),
}
}
/// Check if the provided key is valid
/// without checking if the key is authorized to make a specific action.
pub fn is_key_valid(&self, key: &[u8]) -> Result<bool> {
if let Some(id) = self.store.get_key_id(key) {
let id = from_utf8(&id)?;
if let Some(generated) = self.generate_key(id) {
return Ok(generated.as_bytes() == key);
}
}
Ok(false)
}
/// Check if the provided key is valid
/// and is authorized to make a specific action.
pub fn authenticate(&self, key: &[u8], action: Action, index: Option<&str>) -> Result<bool> {
if self.is_key_authorized(key, action, index)? {
self.is_key_valid(key)
} else {
Ok(false)
}
}
}
pub struct AuthFilter {
@ -258,12 +250,6 @@ pub struct IndexSearchRules {
pub filter: Option<serde_json::Value>,
}
fn generate_key(master_key: &[u8], keyid: &str) -> String {
let key = [keyid.as_bytes(), master_key].concat();
let sha = Sha256::digest(&key);
format!("{}{:x}", keyid, sha)
}
fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
store.put_api_key(Key::default_admin())?;
store.put_api_key(Key::default_search())?;

View File

@ -1,27 +1,32 @@
use enum_iterator::IntoEnumIterator;
use std::borrow::Cow;
use std::cmp::Reverse;
use std::convert::TryFrom;
use std::convert::TryInto;
use std::fs::create_dir_all;
use std::ops::Deref;
use std::path::Path;
use std::str;
use std::sync::Arc;
use enum_iterator::IntoEnumIterator;
use hmac::{Hmac, Mac};
use meilisearch_types::star_or::StarOr;
use milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
use milli::heed::{Database, Env, EnvOpenOptions, RwTxn};
use sha2::Sha256;
use time::OffsetDateTime;
use uuid::fmt::Hyphenated;
use uuid::Uuid;
use super::error::Result;
use super::{Action, Key};
const AUTH_STORE_SIZE: usize = 1_073_741_824; //1GiB
pub const KEY_ID_LENGTH: usize = 8;
const AUTH_DB_PATH: &str = "auth";
const KEY_DB_NAME: &str = "api-keys";
const KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME: &str = "keyid-action-index-expiration";
pub type KeyId = [u8; KEY_ID_LENGTH];
pub type KeyId = Uuid;
#[derive(Clone)]
pub struct HeedAuthStore {
@ -73,12 +78,13 @@ impl HeedAuthStore {
}
pub fn put_api_key(&self, key: Key) -> Result<Key> {
let uid = key.uid;
let mut wtxn = self.env.write_txn()?;
self.keys.put(&mut wtxn, &key.id, &key)?;
let id = key.id;
self.keys.put(&mut wtxn, uid.as_bytes(), &key)?;
// delete key from inverted database before refilling it.
self.delete_key_from_inverted_db(&mut wtxn, &id)?;
self.delete_key_from_inverted_db(&mut wtxn, &uid)?;
// create inverted database.
let db = self.action_keyid_index_expiration;
@ -89,17 +95,17 @@ impl HeedAuthStore {
key.actions.clone()
};
let no_index_restriction = key.indexes.contains(&"*".to_owned());
let no_index_restriction = key.indexes.contains(&StarOr::Star);
for action in actions {
if no_index_restriction {
// If there is no index restriction we put None.
db.put(&mut wtxn, &(&id, &action, None), &key.expires_at)?;
db.put(&mut wtxn, &(&uid, &action, None), &key.expires_at)?;
} else {
// else we create a key for each index.
for index in key.indexes.iter() {
db.put(
&mut wtxn,
&(&id, &action, Some(index.as_bytes())),
&(&uid, &action, Some(index.deref().as_bytes())),
&key.expires_at,
)?;
}
@ -111,24 +117,42 @@ impl HeedAuthStore {
Ok(key)
}
pub fn get_api_key(&self, key: impl AsRef<str>) -> Result<Option<Key>> {
pub fn get_api_key(&self, uid: Uuid) -> Result<Option<Key>> {
let rtxn = self.env.read_txn()?;
match self.get_key_id(key.as_ref().as_bytes()) {
Some(id) => self.keys.get(&rtxn, &id).map_err(|e| e.into()),
None => Ok(None),
}
self.keys.get(&rtxn, uid.as_bytes()).map_err(|e| e.into())
}
pub fn delete_api_key(&self, key: impl AsRef<str>) -> Result<bool> {
pub fn get_uid_from_encoded_key(
&self,
encoded_key: &[u8],
master_key: &[u8],
) -> Result<Option<Uuid>> {
let rtxn = self.env.read_txn()?;
let uid = self
.keys
.remap_data_type::<DecodeIgnore>()
.iter(&rtxn)?
.filter_map(|res| match res {
Ok((uid, _)) => {
let (uid, _) = try_split_array_at(uid)?;
let uid = Uuid::from_bytes(*uid);
if generate_key_as_hexa(uid, master_key).as_bytes() == encoded_key {
Some(uid)
} else {
None
}
}
Err(_) => None,
})
.next();
Ok(uid)
}
pub fn delete_api_key(&self, uid: Uuid) -> Result<bool> {
let mut wtxn = self.env.write_txn()?;
let existing = match self.get_key_id(key.as_ref().as_bytes()) {
Some(id) => {
let existing = self.keys.delete(&mut wtxn, &id)?;
self.delete_key_from_inverted_db(&mut wtxn, &id)?;
existing
}
None => false,
};
let existing = self.keys.delete(&mut wtxn, uid.as_bytes())?;
self.delete_key_from_inverted_db(&mut wtxn, &uid)?;
wtxn.commit()?;
Ok(existing)
@ -147,49 +171,37 @@ impl HeedAuthStore {
pub fn get_expiration_date(
&self,
key: &[u8],
uid: Uuid,
action: Action,
index: Option<&[u8]>,
) -> Result<Option<Option<OffsetDateTime>>> {
let rtxn = self.env.read_txn()?;
match self.get_key_id(key) {
Some(id) => {
let tuple = (&id, &action, index);
Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
}
None => Ok(None),
}
let tuple = (&uid, &action, index);
Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
}
pub fn prefix_first_expiration_date(
&self,
key: &[u8],
uid: Uuid,
action: Action,
) -> Result<Option<Option<OffsetDateTime>>> {
let rtxn = self.env.read_txn()?;
match self.get_key_id(key) {
Some(id) => {
let tuple = (&id, &action, None);
Ok(self
.action_keyid_index_expiration
.prefix_iter(&rtxn, &tuple)?
.next()
.transpose()?
.map(|(_, expiration)| expiration))
}
None => Ok(None),
}
}
let tuple = (&uid, &action, None);
let exp = self
.action_keyid_index_expiration
.prefix_iter(&rtxn, &tuple)?
.next()
.transpose()?
.map(|(_, expiration)| expiration);
pub fn get_key_id(&self, key: &[u8]) -> Option<KeyId> {
try_split_array_at::<_, KEY_ID_LENGTH>(key).map(|(id, _)| *id)
Ok(exp)
}
fn delete_key_from_inverted_db(&self, wtxn: &mut RwTxn, key: &KeyId) -> Result<()> {
let mut iter = self
.action_keyid_index_expiration
.remap_types::<ByteSlice, DecodeIgnore>()
.prefix_iter_mut(wtxn, key)?;
.prefix_iter_mut(wtxn, key.as_bytes())?;
while iter.next().transpose()?.is_some() {
// safety: we don't keep references from inside the LMDB database.
unsafe { iter.del_current()? };
@ -200,21 +212,22 @@ impl HeedAuthStore {
}
/// Codec allowing to retrieve the expiration date of an action,
/// optionnally on a spcific index, for a given key.
/// optionally on a specific index, for a given key.
pub struct KeyIdActionCodec;
impl<'a> milli::heed::BytesDecode<'a> for KeyIdActionCodec {
type DItem = (KeyId, Action, Option<&'a [u8]>);
fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
let (key_id, action_bytes) = try_split_array_at(bytes)?;
let (key_id_bytes, action_bytes) = try_split_array_at(bytes)?;
let (action_bytes, index) = match try_split_array_at(action_bytes)? {
(action, []) => (action, None),
(action, index) => (action, Some(index)),
};
let key_id = Uuid::from_bytes(*key_id_bytes);
let action = Action::from_repr(u8::from_be_bytes(*action_bytes))?;
Some((*key_id, action, index))
Some((key_id, action, index))
}
}
@ -224,7 +237,7 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
fn bytes_encode((key_id, action, index): &Self::EItem) -> Option<Cow<[u8]>> {
let mut bytes = Vec::new();
bytes.extend_from_slice(*key_id);
bytes.extend_from_slice(key_id.as_bytes());
let action_bytes = u8::to_be_bytes(action.repr());
bytes.extend_from_slice(&action_bytes);
if let Some(index) = index {
@ -235,6 +248,19 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
}
}
pub fn generate_key_as_hexa(uid: Uuid, master_key: &[u8]) -> String {
// format the uid as hyphenated, allowing users to generate their own keys.
let mut uid_buffer = [0; Hyphenated::LENGTH];
let uid = uid.hyphenated().encode_lower(&mut uid_buffer);
// the new_from_slice function never fails.
let mut mac = Hmac::<Sha256>::new_from_slice(master_key).unwrap();
mac.update(uid.as_bytes());
let result = mac.finalize();
format!("{:x}", result.into_bytes())
}
/// Divides one slice into two at an index, returns `None` if mid is out of bounds.
pub fn try_split_at<T>(slice: &[T], mid: usize) -> Option<(&[T], &[T])> {
if mid <= slice.len() {
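A hedged shell equivalent of `generate_key_as_hexa` (hypothetical uid and master key), handy for checking that a key value corresponds to a given uid:

```bash
uid='aeb94973-2e39-43c8-9a10-8ba3cd7f28c3'   # hyphenated, lowercase
master_key='MASTER_KEY'
printf '%s' "$uid" | openssl dgst -sha256 -hmac "$master_key" | awk '{print $NF}'
# Prints the 64-character lowercase hex key derived from this uid.
```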

View File

@ -4,7 +4,7 @@ description = "Meilisearch HTTP server"
edition = "2021"
license = "MIT"
name = "meilisearch-http"
version = "0.27.2"
version = "0.28.0"
[[bin]]
name = "meilisearch"
@ -45,7 +45,7 @@ itertools = "0.10.3"
jsonwebtoken = "8.0.1"
log = "0.4.14"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-error = { path = "../meilisearch-error" }
meilisearch-types = { path = "../meilisearch-types" }
meilisearch-lib = { path = "../meilisearch-lib" }
mime = "0.3.16"
num_cpus = "1.13.1"
@ -57,10 +57,12 @@ platform-dirs = "0.3.0"
rand = "0.8.5"
rayon = "1.5.1"
regex = "1.5.5"
reqwest = { version = "0.11.4", features = ["rustls-tls", "json"], default-features = false }
rustls = "0.20.4"
rustls-pemfile = "0.3.0"
segment = { version = "0.2.0", optional = true }
serde = { version = "1.0.136", features = ["derive"] }
serde-cs = "0.2.3"
serde_json = { version = "1.0.79", features = ["preserve_order"] }
sha2 = "0.10.2"
siphasher = "0.3.10"
@ -73,16 +75,16 @@ thiserror = "1.0.30"
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
tokio = { version = "1.17.0", features = ["full"] }
tokio-stream = "0.1.8"
uuid = { version = "0.8.2", features = ["serde"] }
uuid = { version = "1.1.2", features = ["serde", "v4"] }
walkdir = "2.3.2"
[dev-dependencies]
actix-rt = "2.7.0"
assert-json-diff = "2.0.1"
manifest-dir-macros = "0.1.14"
maplit = "1.0.2"
paste = "1.0.6"
serde_url_params = "0.2.1"
urlencoding = "2.1.0"
yaup = "0.2.0"
[features]
default = ["analytics", "mini-dashboard"]
@ -103,5 +105,5 @@ mini-dashboard = [
tikv-jemallocator = "0.4.3"
[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.1.10/build.zip"
sha1 = "1adf96592c267425c110bfefc36b7fc6bfb0f93d"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.0/build.zip"
sha1 = "25d1615c608541375a08bd722c3fd3315f926be6"

View File

@ -61,7 +61,7 @@ pub trait Analytics: Sync + Send {
/// The method used to publish most analytics that do not need to be batched every hour
fn publish(&self, event_name: String, send: Value, request: Option<&HttpRequest>);
/// This method should be called to aggergate a get search
/// This method should be called to aggregate a get search
fn get_search(&self, aggregate: SearchAggregator);
/// This method should be called to aggregate a post search

View File

@ -31,6 +31,8 @@ use crate::Opt;
use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH};
const ANALYTICS_HEADER: &str = "X-Meilisearch-Client";
/// Write the instance-uid in the `data.ms` and in `~/.config/MeiliSearch/path-to-db-instance-uid`. Ignore the errors.
fn write_user_id(db_path: &Path, user_id: &str) {
let _ = fs::write(db_path.join("instance-uid"), user_id.as_bytes());
@ -48,7 +50,8 @@ const SEGMENT_API_KEY: &str = "P3FWhhEsJiEDCuEHpmcN9DHcK4hVfBvb";
pub fn extract_user_agents(request: &HttpRequest) -> Vec<String> {
request
.headers()
.get(USER_AGENT)
.get(ANALYTICS_HEADER)
.or_else(|| request.headers().get(USER_AGENT))
.map(|header| header.to_str().ok())
.flatten()
.unwrap_or("unknown")
@ -78,7 +81,19 @@ impl SegmentAnalytics {
let user_id = user_id.unwrap_or_else(|| Uuid::new_v4().to_string());
write_user_id(&opt.db_path, &user_id);
let client = HttpClient::default();
let client = reqwest::Client::builder()
.connect_timeout(Duration::from_secs(10))
.build();
// if reqwest fails to build the client, we won't be able to send analytics
if client.is_err() {
return super::MockAnalytics::new(opt);
}
let client = HttpClient::new(
client.unwrap(),
"https://telemetry.meilisearch.com".to_string(),
);
let user = User::UserId { user_id };
let mut batcher = AutoBatcher::new(client, Batcher::new(None), SEGMENT_API_KEY.to_string());
@ -130,11 +145,7 @@ impl SegmentAnalytics {
impl super::Analytics for SegmentAnalytics {
fn publish(&self, event_name: String, mut send: Value, request: Option<&HttpRequest>) {
let user_agent = request
.map(|req| req.headers().get(USER_AGENT))
.flatten()
.map(|header| header.to_str().unwrap_or("unknown"))
.map(|s| s.split(';').map(str::trim).collect::<Vec<&str>>());
let user_agent = request.map(|req| extract_user_agents(req));
send["user-agent"] = json!(user_agent);
let event = Track {
@ -363,7 +374,7 @@ pub struct SearchAggregator {
highlight_pre_tag: bool,
highlight_post_tag: bool,
crop_marker: bool,
matches: bool,
show_matches_position: bool,
crop_length: bool,
}
@ -415,11 +426,11 @@ impl SearchAggregator {
ret.max_limit = query.limit;
ret.max_offset = query.offset.unwrap_or_default();
ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG;
ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG;
ret.crop_marker = query.crop_marker != DEFAULT_CROP_MARKER;
ret.crop_length = query.crop_length != DEFAULT_CROP_LENGTH;
ret.matches = query.matches;
ret.highlight_pre_tag = query.highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG();
ret.highlight_post_tag = query.highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG();
ret.crop_marker = query.crop_marker != DEFAULT_CROP_MARKER();
ret.crop_length = query.crop_length != DEFAULT_CROP_LENGTH();
ret.show_matches_position = query.show_matches_position;
ret
}
@ -472,7 +483,7 @@ impl SearchAggregator {
self.highlight_pre_tag |= other.highlight_pre_tag;
self.highlight_post_tag |= other.highlight_post_tag;
self.crop_marker |= other.crop_marker;
self.matches |= other.matches;
self.show_matches_position |= other.show_matches_position;
self.crop_length |= other.crop_length;
}
@ -484,7 +495,7 @@ impl SearchAggregator {
let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.;
// we get all the values in a sorted manner
let time_spent = self.time_spent.into_sorted_vec();
// We are only intersted by the slowest value of the 99th fastest results
// We are only interested in the slowest value of the 99th fastest results
let time_spent = time_spent.get(percentile_99th as usize);
let properties = json!({
@ -515,7 +526,7 @@ impl SearchAggregator {
"highlight_pre_tag": self.highlight_pre_tag,
"highlight_post_tag": self.highlight_post_tag,
"crop_marker": self.crop_marker,
"matches": self.matches,
"show_matches_position": self.show_matches_position,
"crop_length": self.crop_length,
},
});
@ -563,8 +574,8 @@ impl DocumentsAggregator {
let content_type = request
.headers()
.get(CONTENT_TYPE)
.map(|s| s.to_str().unwrap_or("unkown"))
.unwrap_or("unkown")
.and_then(|s| s.to_str().ok())
.unwrap_or("unknown")
.to_string();
ret.content_types.insert(content_type);
ret.index_creation = index_creation;
@ -580,13 +591,13 @@ impl DocumentsAggregator {
self.updated |= other.updated;
// we can't create a union because there is no `into_union` method
for user_agent in other.user_agents.into_iter() {
for user_agent in other.user_agents {
self.user_agents.insert(user_agent);
}
for primary_key in other.primary_keys.into_iter() {
for primary_key in other.primary_keys {
self.primary_keys.insert(primary_key);
}
for content_type in other.content_types.into_iter() {
for content_type in other.content_types {
self.content_types.insert(content_type);
}
self.index_creation |= other.index_creation;

View File

@ -1,6 +1,6 @@
use actix_web as aweb;
use aweb::error::{JsonPayloadError, QueryPayloadError};
use meilisearch_error::{Code, ErrorCode, ResponseError};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
#[derive(Debug, thiserror::Error)]
pub enum MeilisearchHttpError {

View File

@ -1,4 +1,4 @@
use meilisearch_error::{Code, ErrorCode};
use meilisearch_types::error::{Code, ErrorCode};
#[derive(Debug, thiserror::Error)]
pub enum AuthenticationError {

View File

@ -5,12 +5,11 @@ use std::ops::Deref;
use std::pin::Pin;
use actix_web::FromRequest;
use error::AuthenticationError;
use futures::future::err;
use futures::Future;
use meilisearch_error::{Code, ResponseError};
use error::AuthenticationError;
use meilisearch_auth::{AuthController, AuthFilter};
use meilisearch_types::error::{Code, ResponseError};
pub struct GuardedData<P, D> {
data: D,
@ -132,6 +131,7 @@ pub mod policies {
use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use crate::extractors::authentication::Policy;
use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules};
@ -146,34 +146,21 @@ pub mod policies {
validation
}
/// Extracts the key prefix used to sign the payload from the payload, without performing any validation.
fn extract_key_prefix(token: &str) -> Option<String> {
/// Extracts the key id used to sign the payload, without performing any validation.
fn extract_key_id(token: &str) -> Option<Uuid> {
let mut validation = tenant_token_validation();
validation.insecure_disable_signature_validation();
let dummy_key = DecodingKey::from_secret(b"secret");
let token_data = decode::<Claims>(token, &dummy_key, &validation).ok()?;
// get token fields without validating it.
let Claims { api_key_prefix, .. } = token_data.claims;
Some(api_key_prefix)
let Claims { api_key_uid, .. } = token_data.claims;
Some(api_key_uid)
}
pub struct MasterPolicy;
impl Policy for MasterPolicy {
fn authenticate(
auth: AuthController,
token: &str,
_index: Option<&str>,
) -> Option<AuthFilter> {
if let Some(master_key) = auth.get_master_key() {
if master_key == token {
return Some(AuthFilter::default());
}
}
None
}
fn is_keys_action(action: u8) -> bool {
use actions::*;
matches!(action, KEYS_GET | KEYS_CREATE | KEYS_UPDATE | KEYS_DELETE)
}
pub struct ActionPolicy<const A: u8>;
@ -185,7 +172,12 @@ pub mod policies {
index: Option<&str>,
) -> Option<AuthFilter> {
// authenticate if token is the master key.
if auth.get_master_key().map_or(true, |mk| mk == token) {
// the keys routes can only be accessed with the master key.
// if no master key is set, only the keys routes are inaccessible.
if auth
.get_master_key()
.map_or_else(|| !is_keys_action(A), |mk| mk == token)
{
return Some(AuthFilter::default());
}
@ -195,8 +187,10 @@ pub mod policies {
return Some(filters);
} else if let Some(action) = Action::from_repr(A) {
// API key
if let Ok(true) = auth.authenticate(token.as_bytes(), action, index) {
return auth.get_key_filters(token, None).ok();
if let Ok(Some(uid)) = auth.get_optional_uid_from_encoded_key(token.as_bytes()) {
if let Ok(true) = auth.is_key_authorized(uid, action, index) {
return auth.get_key_filters(uid, None).ok();
}
}
}
@ -215,14 +209,11 @@ pub mod policies {
return None;
}
let api_key_prefix = extract_key_prefix(token)?;
let uid = extract_key_id(token)?;
// check if parent key is authorized to do the action.
if auth
.is_key_authorized(api_key_prefix.as_bytes(), Action::Search, index)
.ok()?
{
if auth.is_key_authorized(uid, Action::Search, index).ok()? {
// Check if tenant token is valid.
let key = auth.generate_key(&api_key_prefix)?;
let key = auth.generate_key(uid)?;
let data = decode::<Claims>(
token,
&DecodingKey::from_secret(key.as_bytes()),
@ -245,7 +236,7 @@ pub mod policies {
}
return auth
.get_key_filters(api_key_prefix, Some(data.claims.search_rules))
.get_key_filters(uid, Some(data.claims.search_rules))
.ok();
}
@ -258,6 +249,6 @@ pub mod policies {
struct Claims {
search_rules: SearchRules,
exp: Option<i64>,
api_key_prefix: String,
api_key_uid: Uuid,
}
}
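For illustration, a tenant-token payload under the new scheme embeds the key's uid instead of a key prefix, and the token must be signed with the key generated from that uid (hypothetical values; camelCase field names assumed from the public tenant-token format):

```bash
# Decoded JWT payload (sketch):
# {
#   "searchRules": { "*": { "filter": "user_id = 1" } },
#   "apiKeyUid": "aeb94973-2e39-43c8-9a10-8ba3cd7f28c3",
#   "exp": 1656849108
# }
```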

View File

@ -2,7 +2,7 @@
#[macro_use]
pub mod error;
pub mod analytics;
mod task;
pub mod task;
#[macro_use]
pub mod extractors;
pub mod helpers;
@ -31,7 +31,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<MeiliSearch> {
let mut meilisearch = MeiliSearch::builder();
// enable autobatching?
let _ = AUTOBATCHING_ENABLED.store(
AUTOBATCHING_ENABLED.store(
opt.scheduler_options.enable_auto_batching,
std::sync::atomic::Ordering::Relaxed,
);
@ -148,10 +148,10 @@ macro_rules! create_app {
use actix_web::middleware::TrailingSlash;
use actix_web::App;
use actix_web::{middleware, web};
use meilisearch_error::ResponseError;
use meilisearch_http::error::MeilisearchHttpError;
use meilisearch_http::routes;
use meilisearch_http::{configure_data, dashboard};
use meilisearch_types::error::ResponseError;
App::new()
.configure(|s| configure_data(s, $data.clone(), $auth.clone(), &$opt, $analytics))

View File

@ -1,6 +1,7 @@
use std::env;
use std::sync::Arc;
use actix_web::http::KeepAlive;
use actix_web::HttpServer;
use clap::Parser;
use meilisearch_auth::AuthController;
@ -83,7 +84,8 @@ async fn run_http(
)
})
// Disable signals allows the server to terminate immediately when a user enter CTRL-C
.disable_signals();
.disable_signals()
.keep_alive(KeepAlive::Os);
if let Some(config) = opt.get_ssl_config()? {
http_server

View File

@ -1,17 +1,19 @@
use std::str;
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
use uuid::Uuid;
use meilisearch_auth::{error::AuthControllerError, Action, AuthController, Key};
use meilisearch_types::error::{Code, ResponseError};
use crate::extractors::{
authentication::{policies::*, GuardedData},
sequential_extractor::SeqHandler,
};
use meilisearch_error::{Code, ResponseError};
use crate::routes::Pagination;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
@ -20,7 +22,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.route(web::get().to(SeqHandler(list_api_keys))),
)
.service(
web::resource("/{api_key}")
web::resource("/{key}")
.route(web::get().to(SeqHandler(get_api_key)))
.route(web::patch().to(SeqHandler(patch_api_key)))
.route(web::delete().to(SeqHandler(delete_api_key))),
@ -28,7 +30,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
}
pub async fn create_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
body: web::Json<Value>,
_req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
@ -44,30 +46,35 @@ pub async fn create_api_key(
}
pub async fn list_api_keys(
auth_controller: GuardedData<MasterPolicy, AuthController>,
_req: HttpRequest,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
paginate: web::Query<Pagination>,
) -> Result<HttpResponse, ResponseError> {
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let keys = auth_controller.list_keys()?;
let res: Vec<_> = keys
.into_iter()
.map(|k| KeyView::from_key(k, &auth_controller))
.collect();
Ok(res)
let page_view = paginate.auto_paginate_sized(
keys.into_iter()
.map(|k| KeyView::from_key(k, &auth_controller)),
);
Ok(page_view)
})
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
Ok(HttpResponse::Ok().json(KeyListView::from(res)))
Ok(HttpResponse::Ok().json(page_view))
}
pub async fn get_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let api_key = path.into_inner().api_key;
let key = path.into_inner().key;
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let key = auth_controller.get_key(&api_key)?;
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
let key = auth_controller.get_key(uid)?;
Ok(KeyView::from_key(key, &auth_controller))
})
.await
@ -77,14 +84,17 @@ pub async fn get_api_key(
}
pub async fn patch_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
body: web::Json<Value>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let api_key = path.into_inner().api_key;
let key = path.into_inner().key;
let body = body.into_inner();
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let key = auth_controller.update_key(&api_key, body)?;
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
let key = auth_controller.update_key(uid, body)?;
Ok(KeyView::from_key(key, &auth_controller))
})
.await
@ -94,27 +104,33 @@ pub async fn patch_api_key(
}
pub async fn delete_api_key(
auth_controller: GuardedData<MasterPolicy, AuthController>,
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_DELETE }>, AuthController>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let api_key = path.into_inner().api_key;
tokio::task::spawn_blocking(move || auth_controller.delete_key(&api_key))
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
let key = path.into_inner().key;
tokio::task::spawn_blocking(move || {
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
auth_controller.delete_key(uid)
})
.await
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::Internal))??;
Ok(HttpResponse::NoContent().finish())
}
#[derive(Deserialize)]
pub struct AuthParam {
api_key: String,
key: String,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct KeyView {
name: Option<String>,
description: Option<String>,
key: String,
uid: Uuid,
actions: Vec<Action>,
indexes: Vec<String>,
#[serde(serialize_with = "time::serde::rfc3339::option::serialize")]
@ -127,28 +143,18 @@ struct KeyView {
impl KeyView {
fn from_key(key: Key, auth: &AuthController) -> Self {
let key_id = str::from_utf8(&key.id).unwrap();
let generated_key = auth.generate_key(key_id).unwrap_or_default();
let generated_key = auth.generate_key(key.uid).unwrap_or_default();
KeyView {
name: key.name,
description: key.description,
key: generated_key,
uid: key.uid,
actions: key.actions,
indexes: key.indexes,
indexes: key.indexes.into_iter().map(String::from).collect(),
expires_at: key.expires_at,
created_at: key.created_at,
updated_at: key.updated_at,
}
}
}
#[derive(Debug, Serialize)]
struct KeyListView {
results: Vec<KeyView>,
}
impl From<Vec<KeyView>> for KeyListView {
fn from(results: Vec<KeyView>) -> Self {
Self { results }
}
}
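For illustration, with the routes above a key can now be addressed either by its `uid` or by the full key value (hypothetical identifiers; requires the master key or a key with `keys.get`):

```bash
curl -s -H "Authorization: Bearer $MEILI_MASTER_KEY" \
  'http://localhost:7700/keys/aeb94973-2e39-43c8-9a10-8ba3cd7f28c3'   # by uid
curl -s -H "Authorization: Bearer $MEILI_MASTER_KEY" \
  "http://localhost:7700/keys/$FULL_KEY"                              # by full key value
# Both resolve to the same key: a non-UUID path segment is mapped back
# to its uid via get_uid_from_encoded_key.
```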

View File

@ -1,19 +1,16 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};
use meilisearch_types::error::ResponseError;
use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::SummarizedTaskView;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))))
.service(
web::resource("/{dump_uid}/status").route(web::get().to(SeqHandler(get_dump_status))),
);
cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));
}
pub async fn create_dump(
@ -23,29 +20,8 @@ pub async fn create_dump(
) -> Result<HttpResponse, ResponseError> {
analytics.publish("Dump Created".to_string(), json!({}), Some(&req));
let res = meilisearch.create_dump().await?;
let res: SummarizedTaskView = meilisearch.register_dump_task().await?.into();
debug!("returns: {:?}", res);
Ok(HttpResponse::Accepted().json(res))
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct DumpStatusResponse {
status: String,
}
#[derive(Deserialize)]
struct DumpParam {
dump_uid: String,
}
async fn get_dump_status(
meilisearch: GuardedData<ActionPolicy<{ actions::DUMPS_GET }>, MeiliSearch>,
path: web::Path<DumpParam>,
) -> Result<HttpResponse, ResponseError> {
let res = meilisearch.dump_info(path.dump_uid.clone()).await?;
debug!("returns: {:?}", res);
Ok(HttpResponse::Ok().json(res))
}
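For illustration, dump creation is now an asynchronous task: the old `GET /dumps/:dump_uid/status` route is removed, and `POST /dumps` answers with a summarized task to poll on the tasks route (sketch; task uid hypothetical):

```bash
curl -s -X POST -H "Authorization: Bearer $MEILI_MASTER_KEY" \
  'http://localhost:7700/dumps'
# The returned summarized task carries a uid; poll it with:
curl -s -H "Authorization: Bearer $MEILI_MASTER_KEY" \
  'http://localhost:7700/tasks/0'
```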

View File

@ -6,13 +6,15 @@ use actix_web::{web, HttpRequest, HttpResponse};
use bstr::ByteSlice;
use futures::{Stream, StreamExt};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::index_controller::{DocumentAdditionFormat, Update};
use meilisearch_lib::milli::update::IndexDocumentsMethod;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use meilisearch_types::star_or::StarOr;
use mime::Mime;
use once_cell::sync::Lazy;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::Value;
use tokio::sync::mpsc;
@ -21,11 +23,9 @@ use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::payload::Payload;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{fold_star_or, PaginationView};
use crate::task::SummarizedTaskView;
const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0;
const DEFAULT_RETRIEVE_DOCUMENTS_LIMIT: usize = 20;
static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
vec![
"application/json".to_string(),
@ -86,14 +86,24 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct GetDocument {
fields: Option<CS<StarOr<String>>>,
}
pub async fn get_document(
meilisearch: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, MeiliSearch>,
path: web::Path<DocumentParam>,
params: web::Query<GetDocument>,
) -> Result<HttpResponse, ResponseError> {
let index = path.index_uid.clone();
let id = path.document_id.clone();
let GetDocument { fields } = params.into_inner();
let attributes_to_retrieve = fields.and_then(fold_star_or);
let document = meilisearch
.document(index, id, None as Option<Vec<String>>)
.document(index, id, attributes_to_retrieve)
.await?;
debug!("returns: {:?}", document);
Ok(HttpResponse::Ok().json(document))
@ -116,9 +126,11 @@ pub async fn delete_document(
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct BrowseQuery {
offset: Option<usize>,
limit: Option<usize>,
attributes_to_retrieve: Option<String>,
#[serde(default)]
offset: usize,
#[serde(default = "crate::routes::PAGINATION_DEFAULT_LIMIT")]
limit: usize,
fields: Option<CS<StarOr<String>>>,
}
pub async fn get_all_documents(
@ -127,27 +139,21 @@ pub async fn get_all_documents(
params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
let attributes_to_retrieve = params.attributes_to_retrieve.as_ref().and_then(|attrs| {
let mut names = Vec::new();
for name in attrs.split(',').map(String::from) {
if name == "*" {
return None;
}
names.push(name);
}
Some(names)
});
let BrowseQuery {
limit,
offset,
fields,
} = params.into_inner();
let attributes_to_retrieve = fields.and_then(fold_star_or);
let documents = meilisearch
.documents(
path.into_inner(),
params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
attributes_to_retrieve,
)
let (total, documents) = meilisearch
.documents(path.into_inner(), offset, limit, attributes_to_retrieve)
.await?;
debug!("returns: {:?}", documents);
Ok(HttpResponse::Ok().json(documents))
let ret = PaginationView::new(offset, limit, total as usize, documents);
debug!("returns: {:?}", ret);
Ok(HttpResponse::Ok().json(ret))
}
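A sketch of the response shape after this change, with invented values; the envelope fields come from the `PaginationView` struct added later in this diff:

use serde_json::{json, Value};

// Hypothetical body for `GET /indexes/movies/documents?limit=2`: hits are now
// wrapped in the shared pagination envelope instead of a bare array.
fn expected_documents_page() -> Value {
    json!({
        "results": [{ "id": 1 }, { "id": 2 }],
        "offset": 0,
        "limit": 2,
        "total": 200
    })
}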
#[derive(Deserialize, Debug)]

View File

@ -1,8 +1,8 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::index_controller::Update;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;
@ -12,10 +12,11 @@ use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::SummarizedTaskView;
use super::Pagination;
pub mod documents;
pub mod search;
pub mod settings;
pub mod tasks;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
@ -28,30 +29,32 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(
web::resource("")
.route(web::get().to(SeqHandler(get_index)))
.route(web::put().to(SeqHandler(update_index)))
.route(web::patch().to(SeqHandler(update_index)))
.route(web::delete().to(SeqHandler(delete_index))),
)
.service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
.service(web::scope("/documents").configure(documents::configure))
.service(web::scope("/search").configure(search::configure))
.service(web::scope("/tasks").configure(tasks::configure))
.service(web::scope("/settings").configure(settings::configure)),
);
}
pub async fn list_indexes(
data: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, MeiliSearch>,
paginate: web::Query<Pagination>,
) -> Result<HttpResponse, ResponseError> {
let search_rules = &data.filters().search_rules;
let indexes: Vec<_> = data
.list_indexes()
.await?
let indexes: Vec<_> = data.list_indexes().await?;
let nb_indexes = indexes.len();
let iter = indexes
.into_iter()
.filter(|i| search_rules.is_index_authorized(&i.uid))
.collect();
.filter(|i| search_rules.is_index_authorized(&i.uid));
let ret = paginate
.into_inner()
.auto_paginate_unsized(nb_indexes, iter);
debug!("returns: {:?}", indexes);
Ok(HttpResponse::Ok().json(indexes))
debug!("returns: {:?}", ret);
Ok(HttpResponse::Ok().json(ret))
}
#[derive(Debug, Deserialize)]

View File

@ -1,13 +1,14 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_auth::IndexSearchRules;
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{
default_crop_length, default_crop_marker, default_highlight_post_tag,
default_highlight_pre_tag, SearchQuery, DEFAULT_SEARCH_LIMIT,
SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG,
DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
};
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::Value;
use crate::analytics::{Analytics, SearchAggregator};
@ -28,42 +29,26 @@ pub struct SearchQueryGet {
q: Option<String>,
offset: Option<usize>,
limit: Option<usize>,
attributes_to_retrieve: Option<String>,
attributes_to_crop: Option<String>,
#[serde(default = "default_crop_length")]
attributes_to_retrieve: Option<CS<String>>,
attributes_to_crop: Option<CS<String>>,
#[serde(default = "DEFAULT_CROP_LENGTH")]
crop_length: usize,
attributes_to_highlight: Option<String>,
attributes_to_highlight: Option<CS<String>>,
filter: Option<String>,
sort: Option<String>,
#[serde(default = "Default::default")]
matches: bool,
facets_distribution: Option<String>,
#[serde(default = "default_highlight_pre_tag")]
show_matches_position: bool,
facets: Option<CS<String>>,
#[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
highlight_pre_tag: String,
#[serde(default = "default_highlight_post_tag")]
#[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
highlight_post_tag: String,
#[serde(default = "default_crop_marker")]
#[serde(default = "DEFAULT_CROP_MARKER")]
crop_marker: String,
}
impl From<SearchQueryGet> for SearchQuery {
fn from(other: SearchQueryGet) -> Self {
let attributes_to_retrieve = other
.attributes_to_retrieve
.map(|attrs| attrs.split(',').map(String::from).collect());
let attributes_to_crop = other
.attributes_to_crop
.map(|attrs| attrs.split(',').map(String::from).collect());
let attributes_to_highlight = other
.attributes_to_highlight
.map(|attrs| attrs.split(',').map(String::from).collect());
let facets_distribution = other
.facets_distribution
.map(|attrs| attrs.split(',').map(String::from).collect());
let filter = match other.filter {
Some(f) => match serde_json::from_str(&f) {
Ok(v) => Some(v),
@ -72,20 +57,22 @@ impl From<SearchQueryGet> for SearchQuery {
None => None,
};
let sort = other.sort.map(|attr| fix_sort_query_parameters(&attr));
Self {
q: other.q,
offset: other.offset,
limit: other.limit.unwrap_or(DEFAULT_SEARCH_LIMIT),
attributes_to_retrieve,
attributes_to_crop,
limit: other.limit.unwrap_or_else(DEFAULT_SEARCH_LIMIT),
attributes_to_retrieve: other
.attributes_to_retrieve
.map(|o| o.into_iter().collect()),
attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
crop_length: other.crop_length,
attributes_to_highlight,
attributes_to_highlight: other
.attributes_to_highlight
.map(|o| o.into_iter().collect()),
filter,
sort,
matches: other.matches,
facets_distribution,
sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
show_matches_position: other.show_matches_position,
facets: other.facets.map(|o| o.into_iter().collect()),
highlight_pre_tag: other.highlight_pre_tag,
highlight_post_tag: other.highlight_post_tag,
crop_marker: other.crop_marker,
@ -124,10 +111,9 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
sort_parameters.push(current_sort.to_string());
merge = true;
} else if merge && !sort_parameters.is_empty() {
sort_parameters
.last_mut()
.unwrap()
.push_str(&format!(",{}", current_sort));
let s = sort_parameters.last_mut().unwrap();
s.push(',');
s.push_str(current_sort);
if current_sort.ends_with("):desc") || current_sort.ends_with("):asc") {
merge = false;
}
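Only the merge loop of `fix_sort_query_parameters` is visible in this hunk; assuming the upstream split-on-comma step, a sketch of the intended behavior with invented values:

#[test]
fn fix_sort_query_parameters_sketch() {
    // The raw `sort` value is split on every comma, and the loop above glues
    // `_geoPoint(lat,lng)` expressions back together.
    assert_eq!(
        fix_sort_query_parameters("_geoPoint(48.86,2.34):asc,price:desc"),
        vec![
            "_geoPoint(48.86,2.34):asc".to_string(),
            "price:desc".to_string(),
        ]
    );
}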
@ -169,10 +155,6 @@ pub async fn search_with_url_query(
let search_result = search_result?;
// Tests that the nb_hits is always set to false
#[cfg(test)]
assert!(!search_result.exhaustive_nb_hits);
debug!("returns: {:?}", search_result);
Ok(HttpResponse::Ok().json(search_result))
}
@ -207,10 +189,6 @@ pub async fn search_with_post(
let search_result = search_result?;
// Tests that the nb_hits is always set to false
#[cfg(test)]
assert!(!search_result.exhaustive_nb_hits);
debug!("returns: {:?}", search_result);
Ok(HttpResponse::Ok().json(search_result))
}

View File

@ -1,10 +1,10 @@
use log::debug;
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::index_controller::Update;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use serde_json::json;
use crate::analytics::Analytics;
@ -13,7 +13,7 @@ use crate::task::SummarizedTaskView;
#[macro_export]
macro_rules! make_setting_route {
($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
pub mod $attr {
use actix_web::{web, HttpRequest, HttpResponse, Resource};
use log::debug;
@ -21,7 +21,7 @@ macro_rules! make_setting_route {
use meilisearch_lib::milli::update::Setting;
use meilisearch_lib::{index::Settings, index_controller::Update, MeiliSearch};
use meilisearch_error::ResponseError;
use meilisearch_types::error::ResponseError;
use $crate::analytics::Analytics;
use $crate::extractors::authentication::{policies::*, GuardedData};
use $crate::extractors::sequential_extractor::SeqHandler;
@ -100,18 +100,27 @@ macro_rules! make_setting_route {
pub fn resources() -> Resource {
Resource::new($route)
.route(web::get().to(SeqHandler(get)))
.route(web::post().to(SeqHandler(update)))
.route(web::$update_verb().to(SeqHandler(update)))
.route(web::delete().to(SeqHandler(delete)))
}
}
};
($route:literal, $type:ty, $attr:ident, $camelcase_attr:literal) => {
make_setting_route!($route, $type, $attr, $camelcase_attr, _analytics, |_, _| {});
($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal) => {
make_setting_route!(
$route,
$update_verb,
$type,
$attr,
$camelcase_attr,
_analytics,
|_, _| {}
);
};
}
make_setting_route!(
"/filterable-attributes",
put,
std::collections::BTreeSet<String>,
filterable_attributes,
"filterableAttributes",
@ -134,6 +143,7 @@ make_setting_route!(
make_setting_route!(
"/sortable-attributes",
put,
std::collections::BTreeSet<String>,
sortable_attributes,
"sortableAttributes",
@ -156,6 +166,7 @@ make_setting_route!(
make_setting_route!(
"/displayed-attributes",
put,
Vec<String>,
displayed_attributes,
"displayedAttributes"
@ -163,6 +174,7 @@ make_setting_route!(
make_setting_route!(
"/typo-tolerance",
patch,
meilisearch_lib::index::updates::TypoSettings,
typo_tolerance,
"typoTolerance",
@ -204,6 +216,7 @@ make_setting_route!(
make_setting_route!(
"/searchable-attributes",
put,
Vec<String>,
searchable_attributes,
"searchableAttributes",
@ -225,6 +238,7 @@ make_setting_route!(
make_setting_route!(
"/stop-words",
put,
std::collections::BTreeSet<String>,
stop_words,
"stopWords"
@ -232,6 +246,7 @@ make_setting_route!(
make_setting_route!(
"/synonyms",
put,
std::collections::BTreeMap<String, Vec<String>>,
synonyms,
"synonyms"
@ -239,6 +254,7 @@ make_setting_route!(
make_setting_route!(
"/distinct-attribute",
put,
String,
distinct_attribute,
"distinctAttribute"
@ -246,6 +262,7 @@ make_setting_route!(
make_setting_route!(
"/ranking-rules",
put,
Vec<String>,
ranking_rules,
"rankingRules",
@ -265,13 +282,57 @@ make_setting_route!(
}
);
make_setting_route!(
"/faceting",
patch,
meilisearch_lib::index::updates::FacetingSettings,
faceting,
"faceting",
analytics,
|setting: &Option<meilisearch_lib::index::updates::FacetingSettings>, req: &HttpRequest| {
use serde_json::json;
analytics.publish(
"Faceting Updated".to_string(),
json!({
"faceting": {
"max_values_per_facet": setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
},
}),
Some(req),
);
}
);
make_setting_route!(
"/pagination",
patch,
meilisearch_lib::index::updates::PaginationSettings,
pagination,
"pagination",
analytics,
|setting: &Option<meilisearch_lib::index::updates::PaginationSettings>, req: &HttpRequest| {
use serde_json::json;
analytics.publish(
"Pagination Updated".to_string(),
json!({
"pagination": {
"max_total_hits": setting.as_ref().and_then(|s| s.max_total_hits.set()),
},
}),
Some(req),
);
}
);
macro_rules! generate_configure {
($($mod:ident),*) => {
pub fn configure(cfg: &mut web::ServiceConfig) {
use crate::extractors::sequential_extractor::SeqHandler;
cfg.service(
web::resource("")
.route(web::post().to(SeqHandler(update_all)))
.route(web::patch().to(SeqHandler(update_all)))
.route(web::get().to(SeqHandler(get_all)))
.route(web::delete().to(SeqHandler(delete_all))))
$(.service($mod::resources()))*;
@ -288,7 +349,9 @@ generate_configure!(
stop_words,
synonyms,
ranking_rules,
typo_tolerance
typo_tolerance,
pagination,
faceting
);
pub async fn update_all(
@ -348,6 +411,18 @@ pub async fn update_all(
.map(|s| s.two_typos.set()))
.flatten(),
},
"faceting": {
"max_values_per_facet": settings.faceting
.as_ref()
.set()
.and_then(|s| s.max_values_per_facet.as_ref().set()),
},
"pagination": {
"max_total_hits": settings.pagination
.as_ref()
.set()
.and_then(|s| s.max_total_hits.as_ref().set()),
},
}),
Some(&req),
);

View File

@ -1,80 +0,0 @@
use actix_web::{web, HttpRequest, HttpResponse};
use log::debug;
use meilisearch_error::ResponseError;
use meilisearch_lib::MeiliSearch;
use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::{TaskListView, TaskView};
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::get().to(SeqHandler(get_all_tasks_status))))
.service(web::resource("{task_id}").route(web::get().to(SeqHandler(get_task_status))));
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateIndexResponse {
name: String,
uid: String,
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
created_at: OffsetDateTime,
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
updated_at: OffsetDateTime,
#[serde(serialize_with = "time::serde::rfc3339::serialize")]
primary_key: OffsetDateTime,
}
#[derive(Deserialize)]
pub struct UpdateParam {
index_uid: String,
task_id: u64,
}
pub async fn get_task_status(
meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
index_uid: web::Path<UpdateParam>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Index Tasks Seen".to_string(),
json!({ "per_task_uid": true }),
Some(&req),
);
let UpdateParam { index_uid, task_id } = index_uid.into_inner();
let task: TaskView = meilisearch.get_index_task(index_uid, task_id).await?.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Ok().json(task))
}
pub async fn get_all_tasks_status(
meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
index_uid: web::Path<String>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish(
"Index Tasks Seen".to_string(),
json!({ "per_task_uid": false }),
Some(&req),
);
let tasks: TaskListView = meilisearch
.list_index_task(index_uid.into_inner(), None, None)
.await?
.into_iter()
.map(TaskView::from)
.collect::<Vec<_>>()
.into();
debug!("returns: {:?}", tasks);
Ok(HttpResponse::Ok().json(tasks))
}

View File

@ -1,11 +1,13 @@
use actix_web::{web, HttpResponse};
use log::debug;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use meilisearch_types::star_or::StarOr;
use crate::extractors::authentication::{policies::*, GuardedData};
@ -24,6 +26,101 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/indexes").configure(indexes::configure));
}
/// Extracts the raw values from the `StarOr` types and
/// returns `None` if a `StarOr::Star` is encountered.
pub fn fold_star_or<T, O>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<O>
where
O: FromIterator<T>,
{
content
.into_iter()
.map(|value| match value {
StarOr::Star => None,
StarOr::Other(val) => Some(val),
})
.collect()
}
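A minimal sketch of the contract, reusing the `StarOr` import above:

#[test]
fn fold_star_or_sketch() {
    // A single `*` anywhere collapses the whole selection to `None`
    // ("no restriction"); otherwise the raw values are collected.
    let starred: Option<Vec<String>> =
        fold_star_or(vec![StarOr::Other("title".to_string()), StarOr::Star]);
    assert_eq!(starred, None);

    let plain: Option<Vec<String>> = fold_star_or(vec![StarOr::Other("title".to_string())]);
    assert_eq!(plain, Some(vec!["title".to_string()]));
}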
const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;
#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Pagination {
#[serde(default)]
pub offset: usize,
#[serde(default = "PAGINATION_DEFAULT_LIMIT")]
pub limit: usize,
}
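The `fn() -> usize` const works because serde's `default = "..."` attribute takes the path of something callable; the same pattern in isolation, with hypothetical names and assuming serde and serde_json as used throughout this codebase:

#[test]
fn serde_default_fn_pointer_sketch() {
    use serde::Deserialize;

    const DEFAULT_LIMIT: fn() -> usize = || 20;

    #[derive(Debug, Deserialize)]
    struct Query {
        // serde expands this attribute into a `DEFAULT_LIMIT()` call.
        #[serde(default = "DEFAULT_LIMIT")]
        limit: usize,
    }

    let q: Query = serde_json::from_str("{}").unwrap();
    assert_eq!(q.limit, 20);
}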
#[derive(Debug, Clone, Serialize)]
pub struct PaginationView<T> {
pub results: Vec<T>,
pub offset: usize,
pub limit: usize,
pub total: usize,
}
impl Pagination {
/// Given the full data to paginate, returns the selected section.
pub fn auto_paginate_sized<T>(
self,
content: impl IntoIterator<Item = T> + ExactSizeIterator,
) -> PaginationView<T>
where
T: Serialize,
{
let total = content.len();
let content: Vec<_> = content
.into_iter()
.skip(self.offset)
.take(self.limit)
.collect();
self.format_with(total, content)
}
/// Given an iterator and the total number of elements, returns the selected section.
pub fn auto_paginate_unsized<T>(
self,
total: usize,
content: impl IntoIterator<Item = T>,
) -> PaginationView<T>
where
T: Serialize,
{
let content: Vec<_> = content
.into_iter()
.skip(self.offset)
.take(self.limit)
.collect();
self.format_with(total, content)
}
/// Given the data already paginated + the total number of elements, it stores
/// everything in a [PaginationView].
pub fn format_with<T>(self, total: usize, results: Vec<T>) -> PaginationView<T>
where
T: Serialize,
{
PaginationView {
results,
offset: self.offset,
limit: self.limit,
total,
}
}
}
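A short sketch of the sized variant, with `offset` and `limit` picked arbitrarily:

#[test]
fn auto_paginate_sized_sketch() {
    // Select the second "page" of a five-element collection.
    let view = Pagination { offset: 2, limit: 2 }
        .auto_paginate_sized(vec!["a", "b", "c", "d", "e"].into_iter());
    assert_eq!(view.total, 5);
    assert_eq!(view.results, vec!["c", "d"]);
}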
impl<T> PaginationView<T> {
pub fn new(offset: usize, limit: usize, total: usize, results: Vec<T>) -> Self {
Self {
offset,
limit,
results,
total,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[allow(clippy::large_enum_variant)]
#[serde(tag = "name")]

View File

@ -1,49 +1,172 @@
use actix_web::{web, HttpRequest, HttpResponse};
use meilisearch_error::ResponseError;
use meilisearch_lib::tasks::task::TaskId;
use meilisearch_lib::tasks::task::{TaskContent, TaskEvent, TaskId};
use meilisearch_lib::tasks::TaskFilter;
use meilisearch_lib::MeiliSearch;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::star_or::StarOr;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::{policies::*, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
use crate::task::{TaskListView, TaskView};
use crate::task::{TaskListView, TaskStatus, TaskType, TaskView};
use super::fold_star_or;
const DEFAULT_LIMIT: fn() -> usize = || 20;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::get().to(SeqHandler(get_tasks))))
.service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct TasksFilterQuery {
#[serde(rename = "type")]
type_: Option<CS<StarOr<TaskType>>>,
status: Option<CS<StarOr<TaskStatus>>>,
index_uid: Option<CS<StarOr<IndexUid>>>,
#[serde(default = "DEFAULT_LIMIT")]
limit: usize,
from: Option<TaskId>,
}
#[rustfmt::skip]
fn task_type_matches_content(type_: &TaskType, content: &TaskContent) -> bool {
matches!((type_, content),
(TaskType::IndexCreation, TaskContent::IndexCreation { .. })
| (TaskType::IndexUpdate, TaskContent::IndexUpdate { .. })
| (TaskType::IndexDeletion, TaskContent::IndexDeletion { .. })
| (TaskType::DocumentAdditionOrUpdate, TaskContent::DocumentAddition { .. })
| (TaskType::DocumentDeletion, TaskContent::DocumentDeletion{ .. })
| (TaskType::SettingsUpdate, TaskContent::SettingsUpdate { .. })
)
}
#[rustfmt::skip]
fn task_status_matches_events(status: &TaskStatus, events: &[TaskEvent]) -> bool {
events.last().map_or(false, |event| {
matches!((status, event),
(TaskStatus::Enqueued, TaskEvent::Created(_))
| (TaskStatus::Processing, TaskEvent::Processing(_) | TaskEvent::Batched { .. })
| (TaskStatus::Succeeded, TaskEvent::Succeeded { .. })
| (TaskStatus::Failed, TaskEvent::Failed { .. }),
)
})
}
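A minimal sketch of the status matcher, assuming `TaskEvent::Created` carries a timestamp as the `Created(_)` pattern above suggests:

#[test]
fn status_matches_last_event_sketch() {
    use time::OffsetDateTime;

    // A task with no events matches nothing; the last event decides the rest.
    assert!(!task_status_matches_events(&TaskStatus::Enqueued, &[]));
    assert!(task_status_matches_events(
        &TaskStatus::Enqueued,
        &[TaskEvent::Created(OffsetDateTime::now_utc())],
    ));
}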
async fn get_tasks(
meilisearch: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, MeiliSearch>,
params: web::Query<TasksFilterQuery>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let TasksFilterQuery {
type_,
status,
index_uid,
limit,
from,
} = params.into_inner();
let search_rules = &meilisearch.filters().search_rules;
// We first transform a potential indexUid=* into a "not specified indexUid filter"
// for every one of the filters: type, status, and indexUid.
let type_: Option<Vec<_>> = type_.and_then(fold_star_or);
let status: Option<Vec<_>> = status.and_then(fold_star_or);
let index_uid: Option<Vec<_>> = index_uid.and_then(fold_star_or);
analytics.publish(
"Tasks Seen".to_string(),
json!({ "per_task_uid": false }),
json!({
"filtered_by_index_uid": index_uid.as_ref().map_or(false, |v| !v.is_empty()),
"filtered_by_type": type_.as_ref().map_or(false, |v| !v.is_empty()),
"filtered_by_status": status.as_ref().map_or(false, |v| !v.is_empty()),
}),
Some(&req),
);
let search_rules = &meilisearch.filters().search_rules;
let filters = if search_rules.is_index_authorized("*") {
None
} else {
let mut filters = TaskFilter::default();
for (index, _policy) in search_rules.clone() {
filters.filter_index(index);
// Then we filter on potential indexes and make sure that the search filter
// restrictions are also applied.
let indexes_filters = match index_uid {
Some(indexes) => {
let mut filters = TaskFilter::default();
for name in indexes {
if search_rules.is_index_authorized(&name) {
filters.filter_index(name.to_string());
}
}
Some(filters)
}
None => {
if search_rules.is_index_authorized("*") {
None
} else {
let mut filters = TaskFilter::default();
for (index, _policy) in search_rules.clone() {
filters.filter_index(index);
}
Some(filters)
}
}
Some(filters)
};
let tasks: TaskListView = meilisearch
.list_tasks(filters, None, None)
// Then we complete the task filter with the other potential status and type filters.
let filters = if type_.is_some() || status.is_some() {
let mut filters = indexes_filters.unwrap_or_default();
filters.filter_fn(move |task| {
let matches_type = match &type_ {
Some(types) => types
.iter()
.any(|t| task_type_matches_content(t, &task.content)),
None => true,
};
let matches_status = match &status {
Some(statuses) => statuses
.iter()
.any(|t| task_status_matches_events(t, &task.events)),
None => true,
};
matches_type && matches_status
});
Some(filters)
} else {
indexes_filters
};
// We fetch `limit + 1` tasks just to know if there is more after this "page" or not.
let limit = limit.saturating_add(1);
let mut tasks_results: Vec<_> = meilisearch
.list_tasks(filters, Some(limit), from)
.await?
.into_iter()
.map(TaskView::from)
.collect::<Vec<_>>()
.into();
.collect();
// If we were able to fetch one more task than the requested limit,
// it means that there is more to come.
let next = if tasks_results.len() == limit {
tasks_results.pop().map(|t| t.uid)
} else {
None
};
let from = tasks_results.first().map(|t| t.uid);
let tasks = TaskListView {
results: tasks_results,
limit: limit.saturating_sub(1),
from,
next,
};
Ok(HttpResponse::Ok().json(tasks))
}
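The `limit + 1` trick in isolation, with plain `u64` uids standing in for `TaskView` and a hypothetical helper name:

fn next_cursor(mut uids: Vec<u64>, limit: usize) -> (Vec<u64>, Option<u64>) {
    // Fetch `limit + 1`; if all of them came back, pop the extra one and
    // expose its uid as the `next` cursor for the following request.
    let next = if uids.len() == limit + 1 { uids.pop() } else { None };
    (uids, next)
}

#[test]
fn next_cursor_sketch() {
    assert_eq!(next_cursor(vec![5, 4, 3], 2), (vec![5, 4], Some(3)));
    assert_eq!(next_cursor(vec![5, 4], 2), (vec![5, 4], None));
}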

View File

@ -1,62 +1,137 @@
use std::fmt::Write;
use std::error::Error;
use std::fmt::{self, Write};
use std::str::FromStr;
use std::write;
use meilisearch_error::ResponseError;
use meilisearch_lib::index::{Settings, Unchecked};
use meilisearch_lib::milli::update::IndexDocumentsMethod;
use meilisearch_lib::tasks::batch::BatchId;
use meilisearch_lib::tasks::task::{
DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult,
};
use serde::{Serialize, Serializer};
use meilisearch_types::error::ResponseError;
use serde::{Deserialize, Serialize, Serializer};
use time::{Duration, OffsetDateTime};
use crate::AUTOBATCHING_ENABLED;
#[derive(Debug, Serialize)]
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
enum TaskType {
pub enum TaskType {
IndexCreation,
IndexUpdate,
IndexDeletion,
DocumentAddition,
DocumentPartial,
DocumentAdditionOrUpdate,
DocumentDeletion,
SettingsUpdate,
ClearAll,
DumpCreation,
}
impl From<TaskContent> for TaskType {
fn from(other: TaskContent) -> Self {
match other {
TaskContent::DocumentAddition {
merge_strategy: IndexDocumentsMethod::ReplaceDocuments,
..
} => TaskType::DocumentAddition,
TaskContent::DocumentAddition {
merge_strategy: IndexDocumentsMethod::UpdateDocuments,
..
} => TaskType::DocumentPartial,
TaskContent::DocumentDeletion(DocumentDeletion::Clear) => TaskType::ClearAll,
TaskContent::DocumentDeletion(DocumentDeletion::Ids(_)) => TaskType::DocumentDeletion,
TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
TaskContent::IndexDeletion => TaskType::IndexDeletion,
TaskContent::IndexCreation { .. } => TaskType::IndexCreation,
TaskContent::IndexUpdate { .. } => TaskType::IndexUpdate,
_ => unreachable!("unexpected task type"),
TaskContent::IndexDeletion { .. } => TaskType::IndexDeletion,
TaskContent::DocumentAddition { .. } => TaskType::DocumentAdditionOrUpdate,
TaskContent::DocumentDeletion { .. } => TaskType::DocumentDeletion,
TaskContent::SettingsUpdate { .. } => TaskType::SettingsUpdate,
TaskContent::Dump { .. } => TaskType::DumpCreation,
}
}
}
#[derive(Debug, Serialize)]
#[derive(Debug)]
pub struct TaskTypeError {
invalid_type: String,
}
impl fmt::Display for TaskTypeError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"invalid task type `{}`, expecting one of: \
indexCreation, indexUpdate, indexDeletion, documentAdditionOrUpdate, \
documentDeletion, settingsUpdate, dumpCreation",
self.invalid_type
)
}
}
impl Error for TaskTypeError {}
impl FromStr for TaskType {
type Err = TaskTypeError;
fn from_str(type_: &str) -> Result<Self, TaskTypeError> {
if type_.eq_ignore_ascii_case("indexCreation") {
Ok(TaskType::IndexCreation)
} else if type_.eq_ignore_ascii_case("indexUpdate") {
Ok(TaskType::IndexUpdate)
} else if type_.eq_ignore_ascii_case("indexDeletion") {
Ok(TaskType::IndexDeletion)
} else if type_.eq_ignore_ascii_case("documentAdditionOrUpdate") {
Ok(TaskType::DocumentAdditionOrUpdate)
} else if type_.eq_ignore_ascii_case("documentDeletion") {
Ok(TaskType::DocumentDeletion)
} else if type_.eq_ignore_ascii_case("settingsUpdate") {
Ok(TaskType::SettingsUpdate)
} else if type_.eq_ignore_ascii_case("dumpCreation") {
Ok(TaskType::DumpCreation)
} else {
Err(TaskTypeError {
invalid_type: type_.to_string(),
})
}
}
}
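A quick sketch of the parser's behavior:

#[test]
fn task_type_parsing_sketch() {
    // Matching is ASCII-case-insensitive, and the pre-v0.28 names
    // (`documentAddition`, `documentPartial`, `clearAll`) are rejected.
    assert!(matches!(
        "documentadditionorupdate".parse::<TaskType>(),
        Ok(TaskType::DocumentAdditionOrUpdate)
    ));
    assert!("documentPartial".parse::<TaskType>().is_err());
}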
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
enum TaskStatus {
pub enum TaskStatus {
Enqueued,
Processing,
Succeeded,
Failed,
}
#[derive(Debug)]
pub struct TaskStatusError {
invalid_status: String,
}
impl fmt::Display for TaskStatusError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"invalid task status `{}`, expecting one of: \
enqueued, processing, succeeded, or failed",
self.invalid_status,
)
}
}
impl Error for TaskStatusError {}
impl FromStr for TaskStatus {
type Err = TaskStatusError;
fn from_str(status: &str) -> Result<Self, TaskStatusError> {
if status.eq_ignore_ascii_case("enqueued") {
Ok(TaskStatus::Enqueued)
} else if status.eq_ignore_ascii_case("processing") {
Ok(TaskStatus::Processing)
} else if status.eq_ignore_ascii_case("succeeded") {
Ok(TaskStatus::Succeeded)
} else if status.eq_ignore_ascii_case("failed") {
Ok(TaskStatus::Failed)
} else {
Err(TaskStatusError {
invalid_status: status.to_string(),
})
}
}
}
#[derive(Debug, Serialize)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
@ -80,6 +155,8 @@ enum TaskDetails {
},
#[serde(rename_all = "camelCase")]
ClearAll { deleted_documents: Option<u64> },
#[serde(rename_all = "camelCase")]
Dump { dump_uid: String },
}
/// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for
@ -136,8 +213,8 @@ fn serialize_duration<S: Serializer>(
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TaskView {
uid: TaskId,
index_uid: String,
pub uid: TaskId,
index_uid: Option<String>,
status: TaskStatus,
#[serde(rename = "type")]
task_type: TaskType,
@ -159,46 +236,44 @@ pub struct TaskView {
impl From<Task> for TaskView {
fn from(task: Task) -> Self {
let index_uid = task.index_uid().map(String::from);
let Task {
id,
index_uid,
content,
events,
} = task;
let (task_type, mut details) = match content {
TaskContent::DocumentAddition {
merge_strategy,
documents_count,
..
documents_count, ..
} => {
let details = TaskDetails::DocumentAddition {
received_documents: documents_count,
indexed_documents: None,
};
let task_type = match merge_strategy {
IndexDocumentsMethod::UpdateDocuments => TaskType::DocumentPartial,
IndexDocumentsMethod::ReplaceDocuments => TaskType::DocumentAddition,
_ => unreachable!("Unexpected document merge strategy."),
};
(task_type, Some(details))
(TaskType::DocumentAdditionOrUpdate, Some(details))
}
TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids)) => (
TaskContent::DocumentDeletion {
deletion: DocumentDeletion::Ids(ids),
..
} => (
TaskType::DocumentDeletion,
Some(TaskDetails::DocumentDeletion {
received_document_ids: ids.len(),
deleted_documents: None,
}),
),
TaskContent::DocumentDeletion(DocumentDeletion::Clear) => (
TaskType::ClearAll,
TaskContent::DocumentDeletion {
deletion: DocumentDeletion::Clear,
..
} => (
TaskType::DocumentDeletion,
Some(TaskDetails::ClearAll {
deleted_documents: None,
}),
),
TaskContent::IndexDeletion => (
TaskContent::IndexDeletion { .. } => (
TaskType::IndexDeletion,
Some(TaskDetails::ClearAll {
deleted_documents: None,
@ -208,14 +283,18 @@ impl From<Task> for TaskView {
TaskType::SettingsUpdate,
Some(TaskDetails::Settings { settings }),
),
TaskContent::IndexCreation { primary_key } => (
TaskContent::IndexCreation { primary_key, .. } => (
TaskType::IndexCreation,
Some(TaskDetails::IndexInfo { primary_key }),
),
TaskContent::IndexUpdate { primary_key } => (
TaskContent::IndexUpdate { primary_key, .. } => (
TaskType::IndexUpdate,
Some(TaskDetails::IndexInfo { primary_key }),
),
TaskContent::Dump { uid } => (
TaskType::DumpCreation,
Some(TaskDetails::Dump { dump_uid: uid }),
),
};
// A task always has at least one event: "Created"
@ -223,7 +302,7 @@ impl From<Task> for TaskView {
TaskEvent::Created(_) => (TaskStatus::Enqueued, None, None),
TaskEvent::Batched { .. } => (TaskStatus::Enqueued, None, None),
TaskEvent::Processing(_) => (TaskStatus::Processing, None, None),
TaskEvent::Succeded { timestamp, result } => {
TaskEvent::Succeeded { timestamp, result } => {
match (result, &mut details) {
(
TaskResult::DocumentAddition {
@ -313,7 +392,7 @@ impl From<Task> for TaskView {
Self {
uid: id,
index_uid: index_uid.into_inner(),
index_uid,
status,
task_type,
details,
@ -329,20 +408,17 @@ impl From<Task> for TaskView {
#[derive(Debug, Serialize)]
pub struct TaskListView {
results: Vec<TaskView>,
}
impl From<Vec<TaskView>> for TaskListView {
fn from(results: Vec<TaskView>) -> Self {
Self { results }
}
pub results: Vec<TaskView>,
pub limit: usize,
pub from: Option<TaskId>,
pub next: Option<TaskId>,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
uid: TaskId,
index_uid: String,
task_uid: TaskId,
index_uid: Option<String>,
status: TaskStatus,
#[serde(rename = "type")]
task_type: TaskType,
@ -364,8 +440,8 @@ impl From<Task> for SummarizedTaskView {
};
Self {
uid: other.id,
index_uid: other.index_uid.to_string(),
task_uid: other.id,
index_uid: other.index_uid().map(String::from),
status: TaskStatus::Enqueued,
task_type: other.content.into(),
enqueued_at,

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

File diff suppressed because it is too large

View File

@ -16,9 +16,9 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "*"},
("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "*"},
("GET", "/tasks") => hashset!{"tasks.get", "*"},
("GET", "/indexes/products/tasks") => hashset!{"tasks.get", "*"},
("GET", "/indexes/products/tasks/0") => hashset!{"tasks.get", "*"},
("PUT", "/indexes/products/") => hashset!{"indexes.update", "*"},
("GET", "/tasks?indexUid=products") => hashset!{"tasks.get", "*"},
("GET", "/tasks/0") => hashset!{"tasks.get", "*"},
("PATCH", "/indexes/products/") => hashset!{"indexes.update", "*"},
("GET", "/indexes/products/") => hashset!{"indexes.get", "*"},
("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "*"},
("POST", "/indexes") => hashset!{"indexes.create", "*"},
@ -33,20 +33,25 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
("GET", "/indexes/products/settings/stop-words") => hashset!{"settings.get", "*"},
("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "*"},
("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "*"},
("POST", "/indexes/products/settings") => hashset!{"settings.update", "*"},
("POST", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"},
("POST", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"},
("POST", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"},
("POST", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"},
("POST", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"},
("POST", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"},
("POST", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"},
("POST", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"},
("PATCH", "/indexes/products/settings") => hashset!{"settings.update", "*"},
("PATCH", "/indexes/products/settings/typo-tolerance") => hashset!{"settings.update", "*"},
("PUT", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"},
("PUT", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"},
("PUT", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"},
("PUT", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"},
("PUT", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"},
("PUT", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"},
("PUT", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"},
("PUT", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"},
("GET", "/indexes/products/stats") => hashset!{"stats.get", "*"},
("GET", "/stats") => hashset!{"stats.get", "*"},
("POST", "/dumps") => hashset!{"dumps.create", "*"},
("GET", "/dumps/0/status") => hashset!{"dumps.get", "*"},
("GET", "/version") => hashset!{"version", "*"},
("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"},
("GET", "/keys/mykey/") => hashset!{"keys.get", "*"},
("DELETE", "/keys/mykey/") => hashset!{"keys.delete", "*"},
("POST", "/keys") => hashset!{"keys.create", "*"},
("GET", "/keys") => hashset!{"keys.get", "*"},
}
});
@ -81,7 +86,7 @@ async fn error_access_expired_key() {
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
@ -93,8 +98,14 @@ async fn error_access_expired_key() {
for (method, route) in AUTHORIZATIONS.keys() {
let (response, code) = server.dummy_request(method, route).await;
assert_eq!(response, INVALID_RESPONSE.clone());
assert_eq!(code, 403);
assert_eq!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_eq!(403, code, "{:?}", &response);
}
}
@ -111,7 +122,7 @@ async fn error_access_unauthorized_index() {
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
@ -124,8 +135,14 @@ async fn error_access_unauthorized_index() {
{
let (response, code) = server.dummy_request(method, route).await;
assert_eq!(response, INVALID_RESPONSE.clone());
assert_eq!(code, 403);
assert_eq!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_eq!(403, code, "{:?}", &response);
}
}
@ -133,36 +150,54 @@ async fn error_access_unauthorized_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn error_access_unauthorized_action() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
let content = json!({
"indexes": ["products"],
"actions": [],
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
server.use_api_key(&key);
for ((method, route), action) in AUTHORIZATIONS.iter() {
// create a new API key letting only the needed action.
server.use_api_key("MASTER_KEY");
// Patch API key letting all rights but the needed one.
let content = json!({
"indexes": ["products"],
"actions": ALL_ACTIONS.difference(action).collect::<Vec<_>>(),
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (_, code) = server.patch_api_key(&key, content).await;
assert_eq!(code, 200);
let (response, code) = server.add_api_key(content).await;
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
server.use_api_key(&key);
let (response, code) = server.dummy_request(method, route).await;
assert_eq!(response, INVALID_RESPONSE.clone());
assert_eq!(code, 403);
assert_eq!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_eq!(403, code, "{:?}", &response);
}
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_master_key() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
// master key must have access to all routes.
for ((method, route), _) in AUTHORIZATIONS.iter() {
let (response, code) = server.dummy_request(method, route).await;
assert_ne!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?}",
method,
route
);
assert_ne!(code, 403);
}
}
@ -170,36 +205,34 @@ async fn error_access_unauthorized_action() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_restricted_index() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
let content = json!({
"indexes": ["products"],
"actions": [],
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
server.use_api_key(&key);
for ((method, route), actions) in AUTHORIZATIONS.iter() {
for action in actions {
// Patch API key letting only the needed action.
// create a new API key letting only the needed action.
server.use_api_key("MASTER_KEY");
let content = json!({
"indexes": ["products"],
"actions": [action],
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
server.use_api_key("MASTER_KEY");
let (_, code) = server.patch_api_key(&key, content).await;
assert_eq!(code, 200);
let (response, code) = server.add_api_key(content).await;
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
server.use_api_key(&key);
let (response, code) = server.dummy_request(method, route).await;
assert_ne!(response, INVALID_RESPONSE.clone());
assert_ne!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?} with action: {:?}",
method,
route,
action
);
assert_ne!(code, 403);
}
}
@ -209,36 +242,35 @@ async fn access_authorized_restricted_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_no_index_restriction() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
let content = json!({
"indexes": ["*"],
"actions": [],
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
server.use_api_key(&key);
for ((method, route), actions) in AUTHORIZATIONS.iter() {
for action in actions {
// create a new API key letting only the needed action.
server.use_api_key("MASTER_KEY");
// Patch API key letting only the needed action.
let content = json!({
"indexes": ["*"],
"actions": [action],
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (_, code) = server.patch_api_key(&key, content).await;
assert_eq!(code, 200);
let (response, code) = server.add_api_key(content).await;
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
server.use_api_key(&key);
let (response, code) = server.dummy_request(method, route).await;
assert_ne!(response, INVALID_RESPONSE.clone());
assert_ne!(
response,
INVALID_RESPONSE.clone(),
"on route: {:?} - {:?} with action: {:?}",
method,
route,
action
);
assert_ne!(code, 403);
}
}
@ -248,16 +280,16 @@ async fn access_authorized_no_index_restriction() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_restricted_index() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
let (_, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create index `products`
let index = server.index("products");
let (_, code) = index.create(Some("product_id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("product_id")).await;
assert_eq!(202, code, "{:?}", &response);
index.wait_task(0).await;
// create key with access on `products` index only.
@ -267,7 +299,7 @@ async fn access_authorized_stats_restricted_index() {
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
@ -275,7 +307,7 @@ async fn access_authorized_stats_restricted_index() {
server.use_api_key(&key);
let (response, code) = server.stats().await;
assert_eq!(code, 200);
assert_eq!(200, code, "{:?}", &response);
// key should have access on `products` index.
assert!(response["indexes"].get("products").is_some());
@ -288,16 +320,16 @@ async fn access_authorized_stats_restricted_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn access_authorized_stats_no_index_restriction() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
let (_, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create index `products`
let index = server.index("products");
let (_, code) = index.create(Some("product_id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("product_id")).await;
assert_eq!(202, code, "{:?}", &response);
index.wait_task(0).await;
// create key with access on all indexes.
@ -307,7 +339,7 @@ async fn access_authorized_stats_no_index_restriction() {
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
@ -315,7 +347,7 @@ async fn access_authorized_stats_no_index_restriction() {
server.use_api_key(&key);
let (response, code) = server.stats().await;
assert_eq!(code, 200);
assert_eq!(200, code, "{:?}", &response);
// key should have access on `products` index.
assert!(response["indexes"].get("products").is_some());
@ -328,16 +360,16 @@ async fn access_authorized_stats_no_index_restriction() {
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_restricted_index() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
let (_, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create index `products`
let index = server.index("products");
let (_, code) = index.create(Some("product_id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("product_id")).await;
assert_eq!(202, code, "{:?}", &response);
index.wait_task(0).await;
// create key with access on `products` index only.
@ -347,17 +379,17 @@ async fn list_authorized_indexes_restricted_index() {
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
let key = response["key"].as_str().unwrap();
server.use_api_key(&key);
let (response, code) = server.list_indexes().await;
assert_eq!(code, 200);
let (response, code) = server.list_indexes(None, None).await;
assert_eq!(200, code, "{:?}", &response);
let response = response.as_array().unwrap();
let response = response["results"].as_array().unwrap();
// key should have access on `products` index.
assert!(response.iter().any(|index| index["uid"] == "products"));
@ -369,16 +401,16 @@ async fn list_authorized_indexes_restricted_index() {
#[cfg_attr(target_os = "windows", ignore)]
async fn list_authorized_indexes_no_index_restriction() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
let (_, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create index `products`
let index = server.index("products");
let (_, code) = index.create(Some("product_id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("product_id")).await;
assert_eq!(202, code, "{:?}", &response);
index.wait_task(0).await;
// create key with access on all indexes.
@ -388,17 +420,17 @@ async fn list_authorized_indexes_no_index_restriction() {
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
let key = response["key"].as_str().unwrap();
server.use_api_key(&key);
let (response, code) = server.list_indexes().await;
assert_eq!(code, 200);
let (response, code) = server.list_indexes(None, None).await;
assert_eq!(200, code, "{:?}", &response);
let response = response.as_array().unwrap();
let response = response["results"].as_array().unwrap();
// key should have access on `products` index.
assert!(response.iter().any(|index| index["uid"] == "products"));
@ -409,16 +441,16 @@ async fn list_authorized_indexes_no_index_restriction() {
#[actix_rt::test]
async fn list_authorized_tasks_restricted_index() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
let (_, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create index `products`
let index = server.index("products");
let (_, code) = index.create(Some("product_id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("product_id")).await;
assert_eq!(202, code, "{:?}", &response);
index.wait_task(0).await;
// create key with access on `products` index only.
@ -428,7 +460,7 @@ async fn list_authorized_tasks_restricted_index() {
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
@ -436,7 +468,7 @@ async fn list_authorized_tasks_restricted_index() {
server.use_api_key(&key);
let (response, code) = server.service.get("/tasks").await;
assert_eq!(code, 200);
assert_eq!(200, code, "{:?}", &response);
println!("{}", response);
let response = response["results"].as_array().unwrap();
// key should have access on `products` index.
@ -449,16 +481,16 @@ async fn list_authorized_tasks_restricted_index() {
#[actix_rt::test]
async fn list_authorized_tasks_no_index_restriction() {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
let (_, code) = index.create(Some("id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create index `products`
let index = server.index("products");
let (_, code) = index.create(Some("product_id")).await;
assert_eq!(code, 202);
let (response, code) = index.create(Some("product_id")).await;
assert_eq!(202, code, "{:?}", &response);
index.wait_task(0).await;
// create key with access on all indexes.
@ -468,7 +500,7 @@ async fn list_authorized_tasks_no_index_restriction() {
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
@ -476,7 +508,7 @@ async fn list_authorized_tasks_no_index_restriction() {
server.use_api_key(&key);
let (response, code) = server.service.get("/tasks").await;
assert_eq!(code, 200);
assert_eq!(200, code, "{:?}", &response);
let response = response["results"].as_array().unwrap();
// key should have access on `products` index.
@ -499,7 +531,7 @@ async fn error_creating_index_without_action() {
"expiresAt": "2050-11-13T00:00:00Z"
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
@ -523,8 +555,8 @@ async fn error_creating_index_without_action() {
]);
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202, "{:?}", response);
let task_id = response["uid"].as_u64().unwrap();
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
let response = index.wait_task(task_id).await;
assert_eq!(response["status"], "failed");
@ -534,8 +566,8 @@ async fn error_creating_index_without_action() {
let settings = json!({ "distinctAttribute": "test"});
let (response, code) = index.update_settings(settings).await;
assert_eq!(code, 202);
let task_id = response["uid"].as_u64().unwrap();
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
let response = index.wait_task(task_id).await;
@ -544,8 +576,8 @@ async fn error_creating_index_without_action() {
// try to create a index via add specialized settings route
let (response, code) = index.update_distinct_attribute(json!("test")).await;
assert_eq!(code, 202);
let task_id = response["uid"].as_u64().unwrap();
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
let response = index.wait_task(task_id).await;
@ -566,7 +598,7 @@ async fn lazy_create_index() {
});
let (response, code) = server.add_api_key(content).await;
assert_eq!(code, 201);
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
@ -583,13 +615,13 @@ async fn lazy_create_index() {
]);
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202, "{:?}", response);
let task_id = response["uid"].as_u64().unwrap();
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
index.wait_task(task_id).await;
let (response, code) = index.get_task(task_id).await;
assert_eq!(code, 200);
assert_eq!(200, code, "{:?}", &response);
assert_eq!(response["status"], "succeeded");
// try to create a index via add settings route
@ -597,24 +629,24 @@ async fn lazy_create_index() {
let settings = json!({ "distinctAttribute": "test"});
let (response, code) = index.update_settings(settings).await;
assert_eq!(code, 202);
let task_id = response["uid"].as_u64().unwrap();
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
index.wait_task(task_id).await;
let (response, code) = index.get_task(task_id).await;
assert_eq!(code, 200);
assert_eq!(200, code, "{:?}", &response);
assert_eq!(response["status"], "succeeded");
// try to create a index via add specialized settings route
let index = server.index("test2");
let (response, code) = index.update_distinct_attribute(json!("test")).await;
assert_eq!(code, 202);
let task_id = response["uid"].as_u64().unwrap();
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
index.wait_task(task_id).await;
let (response, code) = index.get_task(task_id).await;
assert_eq!(code, 200);
assert_eq!(200, code, "{:?}", &response);
assert_eq!(response["status"], "succeeded");
}

View File

@ -13,6 +13,15 @@ impl Server {
self.service.api_key = Some(api_key.as_ref().to_string());
}
/// Fetch and use the default admin key for the next HTTP requests.
pub async fn use_admin_key(&mut self, master_key: impl AsRef<str>) {
self.use_api_key(master_key);
let (response, code) = self.list_api_keys().await;
assert_eq!(200, code, "{:?}", response);
let admin_key = &response["results"][1]["key"];
self.use_api_key(admin_key.as_str().unwrap());
}
pub async fn add_api_key(&self, content: Value) -> (Value, StatusCode) {
let url = "/keys";
self.service.post(url, content).await

View File

@ -8,11 +8,15 @@ use time::{Duration, OffsetDateTime};
use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
fn generate_tenant_token(parent_key: impl AsRef<str>, mut body: HashMap<&str, Value>) -> String {
fn generate_tenant_token(
parent_uid: impl AsRef<str>,
parent_key: impl AsRef<str>,
mut body: HashMap<&str, Value>,
) -> String {
use jsonwebtoken::{encode, EncodingKey, Header};
let key_id = &parent_key.as_ref()[..8];
body.insert("apiKeyPrefix", json!(key_id));
let parent_uid = parent_uid.as_ref();
body.insert("apiKeyUid", json!(parent_uid));
encode(
&Header::default(),
&body,
@ -114,7 +118,7 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
macro_rules! compute_autorized_search {
($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -130,9 +134,10 @@ macro_rules! compute_autorized_search {
let (response, code) = server.add_api_key(key_content.clone()).await;
assert_eq!(code, 201);
let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();
for tenant_token in $tenant_tokens.iter() {
let web_token = generate_tenant_token(&key, tenant_token.clone());
let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
server.use_api_key(&web_token);
let index = server.index("sales");
index
@ -160,7 +165,7 @@ macro_rules! compute_autorized_search {
macro_rules! compute_forbidden_search {
($tenant_tokens:expr, $parent_keys:expr) => {
let mut server = Server::new_auth().await;
server.use_api_key("MASTER_KEY");
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
let documents = DOCUMENTS.clone();
index.add_documents(documents, None).await;
@ -172,9 +177,10 @@ macro_rules! compute_forbidden_search {
let (response, code) = server.add_api_key(key_content.clone()).await;
assert_eq!(code, 201, "{:?}", response);
let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();
for tenant_token in $tenant_tokens.iter() {
let web_token = generate_tenant_token(&key, tenant_token.clone());
let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
server.use_api_key(&web_token);
let index = server.index("sales");
index
@ -461,12 +467,13 @@ async fn error_access_forbidden_routes() {
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();
let tenant_token = hashmap! {
"searchRules" => json!(["*"]),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
};
let web_token = generate_tenant_token(&key, tenant_token);
let web_token = generate_tenant_token(&uid, &key, tenant_token);
server.use_api_key(&web_token);
for ((method, route), actions) in AUTHORIZATIONS.iter() {
@ -496,12 +503,13 @@ async fn error_access_expired_parent_key() {
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();
let tenant_token = hashmap! {
"searchRules" => json!(["*"]),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
};
let web_token = generate_tenant_token(&key, tenant_token);
let web_token = generate_tenant_token(&uid, &key, tenant_token);
server.use_api_key(&web_token);
// test search request while parent_key is not expired
@ -538,12 +546,13 @@ async fn error_access_modified_token() {
assert!(response["key"].is_string());
let key = response["key"].as_str().unwrap();
let uid = response["uid"].as_str().unwrap();
let tenant_token = hashmap! {
"searchRules" => json!(["products"]),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
};
let web_token = generate_tenant_token(&key, tenant_token);
let web_token = generate_tenant_token(&uid, &key, tenant_token);
server.use_api_key(&web_token);
// test search request while web_token is valid
@ -558,7 +567,7 @@ async fn error_access_modified_token() {
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
};
let alt = generate_tenant_token(&key, tenant_token);
let alt = generate_tenant_token(&uid, &key, tenant_token);
let altered_token = [
web_token.split('.').next().unwrap(),
alt.split('.').nth(1).unwrap(),

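A standalone sketch of the new tenant-token claim layout, assuming the `jsonwebtoken` crate already imported by this file; the parent key uid and value below are made up (real ones come from `POST /keys`):
use jsonwebtoken::{encode, EncodingKey, Header};
use serde_json::{json, Value};
use std::collections::HashMap;

fn demo_tenant_token() -> String {
    // Hypothetical parent key uid and value.
    let parent_uid = "6062abda-a5aa-4414-ac91-ecd7944c0f8d";
    let parent_key = "fb6c6d8a2d48e5c283841030076c7b6a2658a74f9c532b5a11f33b7a83184b8b";

    let mut body: HashMap<&str, Value> = HashMap::new();
    body.insert("searchRules", json!(["*"]));
    body.insert("exp", json!(1893456000)); // hypothetical expiry: 2030-01-01T00:00:00Z
    // v0.28 identifies the parent key by its uid instead of an 8-char key prefix.
    body.insert("apiKeyUid", json!(parent_uid));

    encode(
        &Header::default(),
        &body,
        &EncodingKey::from_secret(parent_key.as_bytes()),
    )
    .unwrap()
}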
View File

@ -1,32 +1,16 @@
use std::{
fmt::Write,
panic::{catch_unwind, resume_unwind, UnwindSafe},
time::Duration,
};
use actix_web::http::StatusCode;
use paste::paste;
use serde_json::{json, Value};
use tokio::time::sleep;
use urlencoding::encode;
use super::service::Service;
macro_rules! make_settings_test_routes {
($($name:ident),+) => {
$(paste! {
pub async fn [<update_$name>](&self, value: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
self.service.post(url, value).await
}
pub async fn [<get_$name>](&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/{}", encode(self.uid.as_ref()).to_string(), stringify!($name).replace("_", "-"));
self.service.get(url).await
}
})*
};
}
pub struct Index<'a> {
pub uid: String,
pub service: &'a Service,
@ -46,7 +30,7 @@ impl Index<'_> {
.post_str(url, include_str!("../assets/test_set.json"))
.await;
assert_eq!(code, 202);
let update_id = response["uid"].as_i64().unwrap();
let update_id = response["taskUid"].as_i64().unwrap();
self.wait_task(update_id as u64).await;
update_id as u64
}
@ -65,7 +49,7 @@ impl Index<'_> {
});
let url = format!("/indexes/{}", encode(self.uid.as_ref()));
self.service.put(url, body).await
self.service.patch(url, body).await
}
pub async fn delete(&self) -> (Value, StatusCode) {
@ -106,55 +90,67 @@ impl Index<'_> {
}
pub async fn wait_task(&self, update_id: u64) -> Value {
// try 10 times to get status, or panic to not wait forever
// try several times to get the status, or panic so we don't wait forever
let url = format!("/tasks/{}", update_id);
for _ in 0..10 {
for _ in 0..100 {
let (response, status_code) = self.service.get(&url).await;
assert_eq!(status_code, 200, "response: {}", response);
assert_eq!(200, status_code, "response: {}", response);
if response["status"] == "succeeded" || response["status"] == "failed" {
return response;
}
sleep(Duration::from_secs(1)).await;
// wait 0.5 seconds between polls.
sleep(Duration::from_millis(500)).await;
}
panic!("Timeout waiting for update id");
}
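A hypothetical call site for the polling loop above; with 100 iterations of 500 ms each, `wait_task` gives a task roughly 50 seconds to reach a terminal status:
let (response, code) = index.add_documents(json!([{ "id": 1 }]), None).await;
assert_eq!(202, code, "{:?}", response);
let task = index.wait_task(response["taskUid"].as_u64().unwrap()).await;
assert_eq!(task["status"], "succeeded");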
pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
let url = format!("/indexes/{}/tasks/{}", self.uid, update_id);
let url = format!("/tasks/{}", update_id);
self.service.get(url).await
}
pub async fn list_tasks(&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/tasks", self.uid);
let url = format!("/tasks?indexUid={}", self.uid);
self.service.get(url).await
}
pub async fn filtered_tasks(&self, type_: &[&str], status: &[&str]) -> (Value, StatusCode) {
let mut url = format!("/tasks?indexUid={}", self.uid);
if !type_.is_empty() {
let _ = write!(url, "&type={}", type_.join(","));
}
if !status.is_empty() {
let _ = write!(url, "&status={}", status.join(","));
}
self.service.get(url).await
}
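A hypothetical usage of `filtered_tasks`; with these arguments it issues `GET /tasks?indexUid=test&type=documentAdditionOrUpdate&status=succeeded`:
let (response, code) = index
    .filtered_tasks(&["documentAdditionOrUpdate"], &["succeeded"])
    .await;
assert_eq!(200, code, "{:?}", response);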
pub async fn get_document(
&self,
id: u64,
_options: Option<GetDocumentOptions>,
options: Option<GetDocumentOptions>,
) -> (Value, StatusCode) {
let url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
let mut url = format!("/indexes/{}/documents/{}", encode(self.uid.as_ref()), id);
if let Some(fields) = options.and_then(|o| o.fields) {
let _ = write!(url, "?fields={}", fields.join(","));
}
self.service.get(url).await
}
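A hypothetical usage of the new `fields` option; this issues `GET /indexes/test/documents/0?fields=id,nested.content`:
let (response, code) = index
    .get_document(
        0,
        Some(GetDocumentOptions {
            fields: Some(vec!["id", "nested.content"]),
        }),
    )
    .await;
assert_eq!(200, code, "{:?}", response);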
pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
let mut url = format!("/indexes/{}/documents?", encode(self.uid.as_ref()));
if let Some(limit) = options.limit {
url.push_str(&format!("limit={}&", limit));
let _ = write!(url, "limit={}&", limit);
}
if let Some(offset) = options.offset {
url.push_str(&format!("offset={}&", offset));
let _ = write!(url, "offset={}&", offset);
}
if let Some(attributes_to_retrieve) = options.attributes_to_retrieve {
url.push_str(&format!(
"attributesToRetrieve={}&",
attributes_to_retrieve.join(",")
));
let _ = write!(url, "fields={}&", attributes_to_retrieve.join(","));
}
self.service.get(url).await
@ -187,7 +183,7 @@ impl Index<'_> {
pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", encode(self.uid.as_ref()));
self.service.post(url, settings).await
self.service.patch(url, settings).await
}
pub async fn delete_settings(&self) -> (Value, StatusCode) {
@ -226,15 +222,33 @@ impl Index<'_> {
}
pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
let params = serde_url_params::to_string(&query).unwrap();
let params = yaup::to_string(&query).unwrap();
let url = format!("/indexes/{}/search?{}", encode(self.uid.as_ref()), params);
self.service.get(url).await
}
make_settings_test_routes!(distinct_attribute);
pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/settings/{}",
encode(self.uid.as_ref()),
"distinct-attribute"
);
self.service.put(url, value).await
}
pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
let url = format!(
"/indexes/{}/settings/{}",
encode(self.uid.as_ref()),
"distinct-attribute"
);
self.service.get(url).await
}
}
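A hypothetical round trip through the two helpers above, which now hit the specialized settings route with PUT instead of POST:
let (response, code) = index.update_distinct_attribute(json!("sku")).await;
assert_eq!(202, code, "{:?}", response);
index.wait_task(response["taskUid"].as_u64().unwrap()).await;
let (value, code) = index.get_distinct_attribute().await;
assert_eq!(200, code, "{:?}", value);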
pub struct GetDocumentOptions;
pub struct GetDocumentOptions {
pub fields: Option<Vec<&'static str>>,
}
#[derive(Debug, Default)]
pub struct GetAllDocumentsOptions {

View File

@ -3,7 +3,7 @@ pub mod server;
pub mod service;
pub use index::{GetAllDocumentsOptions, GetDocumentOptions};
pub use server::Server;
pub use server::{default_settings, Server};
/// Performs a search test on both post and get routes
#[macro_export]

View File

@ -52,16 +52,13 @@ impl Server {
}
}
pub async fn new_auth() -> Self {
let dir = TempDir::new().unwrap();
pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
if cfg!(windows) {
std::env::set_var("TMP", TEST_TEMP_DIR.path());
} else {
std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
}
let mut options = default_settings(dir.path());
options.master_key = Some("MASTER_KEY".to_string());
let meilisearch = setup_meilisearch(&options).unwrap();
@ -79,9 +76,15 @@ impl Server {
}
}
pub async fn new_with_options(options: Opt) -> Self {
let meilisearch = setup_meilisearch(&options).unwrap();
let auth = AuthController::new(&options.db_path, &options.master_key).unwrap();
pub async fn new_auth() -> Self {
let dir = TempDir::new().unwrap();
let options = default_settings(dir.path());
Self::new_auth_with_options(options, dir).await
}
pub async fn new_with_options(options: Opt) -> Result<Self, anyhow::Error> {
let meilisearch = setup_meilisearch(&options)?;
let auth = AuthController::new(&options.db_path, &options.master_key)?;
let service = Service {
meilisearch,
auth,
@ -89,10 +92,10 @@ impl Server {
api_key: None,
};
Server {
Ok(Server {
service,
_dir: None,
}
})
}
/// Returns a view to an index. There is no guarantee that the index exists.
@ -103,8 +106,27 @@ impl Server {
}
}
pub async fn list_indexes(&self) -> (Value, StatusCode) {
self.service.get("/indexes").await
pub async fn list_indexes(
&self,
offset: Option<usize>,
limit: Option<usize>,
) -> (Value, StatusCode) {
let (offset, limit) = (
offset.map(|offset| format!("offset={offset}")),
limit.map(|limit| format!("limit={limit}")),
);
let query_parameter = offset
.as_ref()
.zip(limit.as_ref())
.map(|(offset, limit)| format!("{offset}&{limit}"))
.or_else(|| offset.xor(limit));
if let Some(query_parameter) = query_parameter {
self.service
.get(format!("/indexes?{query_parameter}"))
.await
} else {
self.service.get("/indexes").await
}
}
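A standalone sketch of the query-string combination used above, making the three cases explicit (both parameters set, exactly one set, or none):
fn index_list_query(offset: Option<usize>, limit: Option<usize>) -> Option<String> {
    let offset = offset.map(|offset| format!("offset={offset}"));
    let limit = limit.map(|limit| format!("limit={limit}"));
    // zip joins both when present; xor keeps whichever one is set on its own.
    offset
        .as_ref()
        .zip(limit.as_ref())
        .map(|(offset, limit)| format!("{offset}&{limit}"))
        .or_else(|| offset.xor(limit))
}

assert_eq!(index_list_query(Some(5), Some(10)).as_deref(), Some("offset=5&limit=10"));
assert_eq!(index_list_query(None, Some(10)).as_deref(), Some("limit=10"));
assert_eq!(index_list_query(None, None), None);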
pub async fn version(&self) -> (Value, StatusCode) {
@ -131,8 +153,8 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
env: "development".to_owned(),
#[cfg(all(not(debug_assertions), feature = "analytics"))]
no_analytics: true,
max_index_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
max_task_db_size: Byte::from_unit(4.0, ByteUnit::GiB).unwrap(),
max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(),
max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(),
http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(),
snapshot_dir: ".".into(),
indexer_options: IndexerOpts {

View File

@ -7,23 +7,45 @@ use actix_web::test;
use meilisearch_http::{analytics, create_app};
use serde_json::{json, Value};
enum HttpVerb {
Put,
Patch,
Post,
Get,
Delete,
}
impl HttpVerb {
fn test_request(&self) -> test::TestRequest {
match self {
HttpVerb::Put => test::TestRequest::put(),
HttpVerb::Patch => test::TestRequest::patch(),
HttpVerb::Post => test::TestRequest::post(),
HttpVerb::Get => test::TestRequest::get(),
HttpVerb::Delete => test::TestRequest::delete(),
}
}
}
#[actix_rt::test]
async fn error_json_bad_content_type() {
use HttpVerb::{Patch, Post, Put};
let routes = [
// all the POST routes except the dumps that can be created without any body or content-type
// all the routes except the dumps, which can be created without any body or content-type,
// and the search route, which does not take strict JSON
"/indexes",
"/indexes/doggo/documents/delete-batch",
"/indexes/doggo/search",
"/indexes/doggo/settings",
"/indexes/doggo/settings/displayed-attributes",
"/indexes/doggo/settings/distinct-attribute",
"/indexes/doggo/settings/filterable-attributes",
"/indexes/doggo/settings/ranking-rules",
"/indexes/doggo/settings/searchable-attributes",
"/indexes/doggo/settings/sortable-attributes",
"/indexes/doggo/settings/stop-words",
"/indexes/doggo/settings/synonyms",
(Post, "/indexes"),
(Post, "/indexes/doggo/documents/delete-batch"),
(Post, "/indexes/doggo/search"),
(Patch, "/indexes/doggo/settings"),
(Put, "/indexes/doggo/settings/displayed-attributes"),
(Put, "/indexes/doggo/settings/distinct-attribute"),
(Put, "/indexes/doggo/settings/filterable-attributes"),
(Put, "/indexes/doggo/settings/ranking-rules"),
(Put, "/indexes/doggo/settings/searchable-attributes"),
(Put, "/indexes/doggo/settings/sortable-attributes"),
(Put, "/indexes/doggo/settings/stop-words"),
(Put, "/indexes/doggo/settings/synonyms"),
];
let bad_content_types = [
"application/csv",
@ -45,10 +67,11 @@ async fn error_json_bad_content_type() {
analytics::MockAnalytics::new(&server.service.options).0
))
.await;
for route in routes {
for (verb, route) in routes {
// Good content-type; we probably get an error since we didn't send any JSON body,
// so we only ensure we didn't get a bad media type error.
let req = test::TestRequest::post()
let req = verb
.test_request()
.uri(route)
.set_payload(document)
.insert_header(("content-type", "application/json"))
@ -59,7 +82,8 @@ async fn error_json_bad_content_type() {
"calling the route `{}` with a content-type of json isn't supposed to throw a bad media type error", route);
// No content-type.
let req = test::TestRequest::post()
let req = verb
.test_request()
.uri(route)
.set_payload(document)
.to_request();
@ -82,7 +106,8 @@ async fn error_json_bad_content_type() {
for bad_content_type in bad_content_types {
// Always bad content-type
let req = test::TestRequest::post()
let req = verb
.test_request()
.uri(route)
.set_payload(document.to_string())
.insert_header(("content-type", bad_content_type))

View File

@ -35,7 +35,7 @@ async fn add_documents_test_json_content_types() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 202);
assert_eq!(response["uid"], 0);
assert_eq!(response["taskUid"], 0);
// put
let req = test::TestRequest::put()
@ -48,7 +48,7 @@ async fn add_documents_test_json_content_types() {
let body = test::read_body(res).await;
let response: Value = serde_json::from_slice(&body).unwrap_or_default();
assert_eq!(status_code, 202);
assert_eq!(response["uid"], 1);
assert_eq!(response["taskUid"], 1);
}
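For reference, an illustrative shape of the 202 Accepted body these assertions poke at (field values are made up); v0.28 exposes `taskUid` on the summarized task where earlier versions used `uid`:
let summarized_task = json!({
    "taskUid": 0,
    "indexUid": "test",
    "status": "enqueued",
    "type": "documentAdditionOrUpdate",
    "enqueuedAt": "2022-07-11T12:00:00.000000Z" // illustrative timestamp
});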
/// any other content-type must be refused
@ -599,7 +599,7 @@ async fn add_documents_no_index_creation() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["uid"], 0);
assert_eq!(response["taskUid"], 0);
/*
* currently we don't check these fields to stay ISO with meilisearch
* assert_eq!(response["status"], "pending");
@ -615,7 +615,7 @@ async fn add_documents_no_index_creation() {
assert_eq!(code, 200);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["uid"], 0);
assert_eq!(response["type"], "documentAddition");
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["details"]["receivedDocuments"], 1);
assert_eq!(response["details"]["indexedDocuments"], 1);
@ -638,7 +638,7 @@ async fn error_document_add_create_index_bad_uid() {
let (response, code) = index.add_documents(json!([{"id": 1}]), None).await;
let expected_response = json!({
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
@ -655,7 +655,7 @@ async fn error_document_update_create_index_bad_uid() {
let (response, code) = index.update_documents(json!([{"id": 1}]), None).await;
let expected_response = json!({
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
@ -685,7 +685,7 @@ async fn document_addition_with_primary_key() {
assert_eq!(code, 200);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["uid"], 0);
assert_eq!(response["type"], "documentAddition");
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["details"]["receivedDocuments"], 1);
assert_eq!(response["details"]["indexedDocuments"], 1);
@ -714,7 +714,7 @@ async fn document_update_with_primary_key() {
assert_eq!(code, 200);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["uid"], 0);
assert_eq!(response["type"], "documentPartial");
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["details"]["indexedDocuments"], 1);
assert_eq!(response["details"]["receivedDocuments"], 1);
@ -818,7 +818,7 @@ async fn add_larger_dataset() {
let (response, code) = index.get_task(update_id).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "documentAddition");
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["details"]["indexedDocuments"], 77);
assert_eq!(response["details"]["receivedDocuments"], 77);
let (response, code) = index
@ -827,8 +827,8 @@ async fn add_larger_dataset() {
..Default::default()
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 77);
assert_eq!(code, 200, "failed with `{}`", response);
assert_eq!(response["results"].as_array().unwrap().len(), 77);
}
#[actix_rt::test]
@ -840,7 +840,7 @@ async fn update_larger_dataset() {
index.wait_task(0).await;
let (response, code) = index.get_task(0).await;
assert_eq!(code, 200);
assert_eq!(response["type"], "documentPartial");
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["details"]["indexedDocuments"], 77);
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
@ -849,7 +849,7 @@ async fn update_larger_dataset() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 77);
assert_eq!(response["results"].as_array().unwrap().len(), 77);
}
#[actix_rt::test]
@ -868,7 +868,12 @@ async fn error_add_documents_bad_document_id() {
let (response, code) = index.get_task(1).await;
assert_eq!(code, 200);
assert_eq!(response["status"], json!("failed"));
assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
assert_eq!(
response["error"]["message"],
json!(
r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."#
)
);
assert_eq!(response["error"]["code"], json!("invalid_document_id"));
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(
@ -891,7 +896,12 @@ async fn error_update_documents_bad_document_id() {
index.update_documents(documents, None).await;
let response = index.wait_task(1).await;
assert_eq!(response["status"], json!("failed"));
assert_eq!(response["error"]["message"], json!("Document identifier `foo & bar` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."));
assert_eq!(
response["error"]["message"],
json!(
r#"Document identifier `"foo & bar"` is invalid. A document identifier can be of type integer or string, only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and underscores (_)."#
)
);
assert_eq!(response["error"]["code"], json!("invalid_document_id"));
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(

View File

@ -72,7 +72,7 @@ async fn clear_all_documents() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert!(response.as_array().unwrap().is_empty());
assert!(response["results"].as_array().unwrap().is_empty());
}
#[actix_rt::test]
@ -89,7 +89,7 @@ async fn clear_all_documents_empty_index() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert!(response.as_array().unwrap().is_empty());
assert!(response["results"].as_array().unwrap().is_empty());
}
#[actix_rt::test]
@ -125,8 +125,8 @@ async fn delete_batch() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 1);
assert_eq!(response.as_array().unwrap()[0]["id"], 3);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
assert_eq!(response["results"][0]["id"], json!(3));
}
#[actix_rt::test]
@ -143,5 +143,5 @@ async fn delete_no_document_batch() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 3);
assert_eq!(response["results"].as_array().unwrap().len(), 3);
}
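An illustrative shape of the new document-list envelope these tests assert against (values made up); documents now sit under `results` next to pagination metadata:
let page = json!({
    "results": [ { "id": 3 } ],
    "offset": 0,
    "limit": 20,
    "total": 1
});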

View File

@ -1,5 +1,4 @@
use crate::common::GetAllDocumentsOptions;
use crate::common::Server;
use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server};
use serde_json::json;
@ -39,19 +38,51 @@ async fn get_document() {
let documents = serde_json::json!([
{
"id": 0,
"content": "foobar",
"nested": { "content": "foobar" },
}
]);
let (_, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(0).await;
index.wait_task(1).await;
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(
response,
serde_json::json!( {
serde_json::json!({
"id": 0,
"content": "foobar",
"nested": { "content": "foobar" },
})
);
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["id"]),
}),
)
.await;
assert_eq!(code, 200);
assert_eq!(
response,
serde_json::json!({
"id": 0,
})
);
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["nested.content"]),
}),
)
.await;
assert_eq!(code, 200);
assert_eq!(
response,
serde_json::json!({
"nested": { "content": "foobar" },
})
);
}
@ -88,7 +119,7 @@ async fn get_no_document() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert!(response.as_array().unwrap().is_empty());
assert!(response["results"].as_array().unwrap().is_empty());
}
#[actix_rt::test]
@ -101,7 +132,7 @@ async fn get_all_documents_no_options() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
let arr = response.as_array().unwrap();
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 20);
let first = serde_json::json!({
"id":0,
@ -137,8 +168,11 @@ async fn test_get_all_documents_limit() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 5);
assert_eq!(response.as_array().unwrap()[0]["id"], 0);
assert_eq!(response["results"].as_array().unwrap().len(), 5);
assert_eq!(response["results"][0]["id"], json!(0));
assert_eq!(response["offset"], json!(0));
assert_eq!(response["limit"], json!(5));
assert_eq!(response["total"], json!(77));
}
#[actix_rt::test]
@ -154,8 +188,11 @@ async fn test_get_all_documents_offset() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(response.as_array().unwrap()[0]["id"], 5);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
assert_eq!(response["results"][0]["id"], json!(5));
assert_eq!(response["offset"], json!(5));
assert_eq!(response["limit"], json!(20));
assert_eq!(response["total"], json!(77));
}
#[actix_rt::test]
@ -171,20 +208,14 @@ async fn test_get_all_documents_attributes_to_retrieve() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
1
);
assert!(response.as_array().unwrap()[0]
.as_object()
.unwrap()
.get("name")
.is_some());
assert_eq!(response["results"].as_array().unwrap().len(), 20);
for results in response["results"].as_array().unwrap() {
assert_eq!(results.as_object().unwrap().keys().count(), 1);
assert!(results["name"] != json!(null));
}
assert_eq!(response["offset"], json!(0));
assert_eq!(response["limit"], json!(20));
assert_eq!(response["total"], json!(77));
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
@ -193,15 +224,13 @@ async fn test_get_all_documents_attributes_to_retrieve() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
0
);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
for results in response["results"].as_array().unwrap() {
assert_eq!(results.as_object().unwrap().keys().count(), 0);
}
assert_eq!(response["offset"], json!(0));
assert_eq!(response["limit"], json!(20));
assert_eq!(response["total"], json!(77));
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
@ -210,15 +239,13 @@ async fn test_get_all_documents_attributes_to_retrieve() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
0
);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
for results in response["results"].as_array().unwrap() {
assert_eq!(results.as_object().unwrap().keys().count(), 0);
}
assert_eq!(response["offset"], json!(0));
assert_eq!(response["limit"], json!(20));
assert_eq!(response["total"], json!(77));
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
@ -227,15 +254,12 @@ async fn test_get_all_documents_attributes_to_retrieve() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
2
);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
for results in response["results"].as_array().unwrap() {
assert_eq!(results.as_object().unwrap().keys().count(), 2);
assert!(results["name"] != json!(null));
assert!(results["tags"] != json!(null));
}
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
@ -244,15 +268,10 @@ async fn test_get_all_documents_attributes_to_retrieve() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
16
);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
for results in response["results"].as_array().unwrap() {
assert_eq!(results.as_object().unwrap().keys().count(), 16);
}
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
@ -261,19 +280,99 @@ async fn test_get_all_documents_attributes_to_retrieve() {
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
for results in response["results"].as_array().unwrap() {
assert_eq!(results.as_object().unwrap().keys().count(), 16);
}
}
#[actix_rt::test]
async fn get_document_s_nested_attributes_to_retrieve() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
let documents = json!([
{
"id": 0,
"content.truc": "foobar",
},
{
"id": 1,
"content": {
"truc": "foobar",
"machin": "bidule",
},
},
]);
let (_, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
index.wait_task(1).await;
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["content"]),
}),
)
.await;
assert_eq!(code, 200);
assert_eq!(response, json!({}));
let (response, code) = index
.get_document(
1,
Some(GetDocumentOptions {
fields: Some(vec!["content"]),
}),
)
.await;
assert_eq!(code, 200);
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
16
response,
json!({
"content": {
"truc": "foobar",
"machin": "bidule",
},
})
);
let (response, code) = index
.get_document(
0,
Some(GetDocumentOptions {
fields: Some(vec!["content.truc"]),
}),
)
.await;
assert_eq!(code, 200);
assert_eq!(
response,
json!({
"content.truc": "foobar",
})
);
let (response, code) = index
.get_document(
1,
Some(GetDocumentOptions {
fields: Some(vec!["content.truc"]),
}),
)
.await;
assert_eq!(code, 200);
assert_eq!(
response,
json!({
"content": {
"truc": "foobar",
},
})
);
}
#[actix_rt::test]
async fn get_documents_displayed_attributes() {
async fn get_documents_displayed_attributes_is_ignored() {
let server = Server::new().await;
let index = server.index("test");
index
@ -285,23 +384,19 @@ async fn get_documents_displayed_attributes() {
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(response["results"].as_array().unwrap().len(), 20);
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
1
response["results"][0].as_object().unwrap().keys().count(),
16
);
assert!(response.as_array().unwrap()[0]
.as_object()
.unwrap()
.get("gender")
.is_some());
assert!(response["results"][0]["gender"] != json!(null));
assert_eq!(response["offset"], json!(0));
assert_eq!(response["limit"], json!(20));
assert_eq!(response["total"], json!(77));
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(response.as_object().unwrap().keys().count(), 1);
assert_eq!(response.as_object().unwrap().keys().count(), 16);
assert!(response.as_object().unwrap().get("gender").is_some());
}

View File

@ -1,22 +0,0 @@
#![allow(dead_code)]
mod common;
use crate::common::Server;
use serde_json::json;
#[actix_rt::test]
async fn get_unexisting_dump_status() {
let server = Server::new().await;
let (response, code) = server.get_dump_status("foobar").await;
assert_eq!(code, 404);
let expected_response = json!({
"message": "Dump `foobar` not found.",
"code": "dump_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#dump_not_found"
});
assert_eq!(response, expected_response);
}

View File

@ -0,0 +1,73 @@
use std::path::PathBuf;
use manifest_dir_macros::exist_relative_path;
pub enum GetDump {
MoviesRawV1,
MoviesWithSettingsV1,
RubyGemsWithSettingsV1,
MoviesRawV2,
MoviesWithSettingsV2,
RubyGemsWithSettingsV2,
MoviesRawV3,
MoviesWithSettingsV3,
RubyGemsWithSettingsV3,
MoviesRawV4,
MoviesWithSettingsV4,
RubyGemsWithSettingsV4,
TestV5,
}
impl GetDump {
pub fn path(&self) -> PathBuf {
match self {
GetDump::MoviesRawV1 => {
exist_relative_path!("tests/assets/v1_v0.20.0_movies.dump").into()
}
GetDump::MoviesWithSettingsV1 => {
exist_relative_path!("tests/assets/v1_v0.20.0_movies_with_settings.dump").into()
}
GetDump::RubyGemsWithSettingsV1 => {
exist_relative_path!("tests/assets/v1_v0.20.0_rubygems_with_settings.dump").into()
}
GetDump::MoviesRawV2 => {
exist_relative_path!("tests/assets/v2_v0.21.1_movies.dump").into()
}
GetDump::MoviesWithSettingsV2 => {
exist_relative_path!("tests/assets/v2_v0.21.1_movies_with_settings.dump").into()
}
GetDump::RubyGemsWithSettingsV2 => {
exist_relative_path!("tests/assets/v2_v0.21.1_rubygems_with_settings.dump").into()
}
GetDump::MoviesRawV3 => {
exist_relative_path!("tests/assets/v3_v0.24.0_movies.dump").into()
}
GetDump::MoviesWithSettingsV3 => {
exist_relative_path!("tests/assets/v3_v0.24.0_movies_with_settings.dump").into()
}
GetDump::RubyGemsWithSettingsV3 => {
exist_relative_path!("tests/assets/v3_v0.24.0_rubygems_with_settings.dump").into()
}
GetDump::MoviesRawV4 => {
exist_relative_path!("tests/assets/v4_v0.25.2_movies.dump").into()
}
GetDump::MoviesWithSettingsV4 => {
exist_relative_path!("tests/assets/v4_v0.25.2_movies_with_settings.dump").into()
}
GetDump::RubyGemsWithSettingsV4 => {
exist_relative_path!("tests/assets/v4_v0.25.2_rubygems_with_settings.dump").into()
}
GetDump::TestV5 => {
exist_relative_path!("tests/assets/v5_v0.28.0_test_dump.dump").into()
}
}
}
}

View File

@ -0,0 +1,677 @@
mod data;
use crate::common::{default_settings, GetAllDocumentsOptions, Server};
use meilisearch_http::Opt;
use serde_json::json;
use self::data::GetDump;
// all the following tests are ignored on Windows. See #2364
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v1() {
let temp = tempfile::tempdir().unwrap();
for path in [
GetDump::MoviesRawV1.path(),
GetDump::MoviesWithSettingsV1.path(),
GetDump::RubyGemsWithSettingsV1.path(),
] {
let options = Opt {
import_dump: Some(path),
..default_settings(temp.path())
};
let error = Server::new_with_options(options)
.await
.map(|_| ())
.unwrap_err();
assert_eq!(error.to_string(), "The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.");
}
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v2_movie_raw() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesRawV2.path()),
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let index = server.index("indexUID");
let (stats, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(
stats,
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
);
let (settings, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(
settings,
json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
);
let (tasks, code) = index.list_tasks().await;
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
let (document, code) = index.get_document(100, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
);
let (document, code) = index.get_document(500, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
);
let (document, code) = index.get_document(10006, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
);
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v2_movie_with_settings() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesWithSettingsV2.path()),
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let index = server.index("indexUID");
let (stats, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(
stats,
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
);
let (settings, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(
settings,
json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
);
let (tasks, code) = index.list_tasks().await;
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
let (document, code) = index.get_document(100, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
);
let (document, code) = index.get_document(500, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
);
let (document, code) = index.get_document(10006, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
);
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v2_rubygems_with_settings() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::RubyGemsWithSettingsV2.path()),
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let index = server.index("rubygems");
let (stats, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(
stats,
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
);
let (settings, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(
settings,
json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 }})
);
let (tasks, code) = index.list_tasks().await;
assert_eq!(code, 200);
assert_eq!(
tasks["results"][0],
json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
);
// finally we're just going to check that we can still get a few documents by id
let (document, code) = index.get_document(188040, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
);
let (document, code) = index.get_document(191940, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
);
let (document, code) = index.get_document(159227, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
);
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v3_movie_raw() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesRawV3.path()),
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let index = server.index("indexUID");
let (stats, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(
stats,
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
);
let (settings, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(
settings,
json!({"displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
);
let (tasks, code) = index.list_tasks().await;
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
let (document, code) = index.get_document(100, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({"id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
);
let (document, code) = index.get_document(500, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({"id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
);
let (document, code) = index.get_document(10006, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({"id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
);
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v3_movie_with_settings() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesWithSettingsV3.path()),
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let index = server.index("indexUID");
let (stats, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(
stats,
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
);
let (settings, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(
settings,
json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
);
let (tasks, code) = index.list_tasks().await;
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
);
// finally we're just going to check that we can still get a few documents by id
let (document, code) = index.get_document(100, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
);
let (document, code) = index.get_document(500, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
);
let (document, code) = index.get_document(10006, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
);
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v3_rubygems_with_settings() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::RubyGemsWithSettingsV3.path()),
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let index = server.index("rubygems");
let (stats, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(
stats,
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
);
let (settings, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(
settings,
json!({"displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
);
let (tasks, code) = index.list_tasks().await;
assert_eq!(code, 200);
assert_eq!(
tasks["results"][0],
json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
);
// finally we're just going to check that we can still get a few documents by id
let (document, code) = index.get_document(188040, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
);
let (document, code) = index.get_document(191940, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
);
let (document, code) = index.get_document(159227, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
);
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v4_movie_raw() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesRawV4.path()),
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let index = server.index("indexUID");
let (stats, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(
stats,
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
);
let (settings, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(
settings,
json!({ "displayedAttributes": ["*"], "searchableAttributes": ["*"], "filterableAttributes": [], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
);
let (tasks, code) = index.list_tasks().await;
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit" : 20, "from": 0, "next": null })
);
// finally, check that we can still fetch a few documents by id
let (document, code) = index.get_document(100, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "genres": ["Comedy", "Crime"], "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000})
);
let (document, code) = index.get_document(500, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 500, "title": "Reservoir Dogs", "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "genres": ["Crime", "Thriller"], "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
);
let (document, code) = index.get_document(10006, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 10006, "title": "Wild Seven", "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "genres": ["Action", "Crime", "Drama"], "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
);
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v4_movie_with_settings() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::MoviesWithSettingsV4.path()),
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
assert_eq!(indexes["results"][0]["uid"], json!("indexUID"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let index = server.index("indexUID");
let (stats, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(
stats,
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"genres": 53, "id": 53, "overview": 53, "poster": 53, "release_date": 53, "title": 53 }})
);
let (settings, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(
settings,
json!({ "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "sortableAttributes": [], "rankingRules": ["words", "typo", "proximity", "attribute", "exactness"], "stopWords": ["of", "the"], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 }, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
);
let (tasks, code) = index.list_tasks().await;
assert_eq!(code, 200);
assert_eq!(
tasks,
json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null })
);
// finally, check that we can still fetch a few documents by id
let (document, code) = index.get_document(100, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 100, "title": "Lock, Stock and Two Smoking Barrels", "genres": ["Comedy", "Crime"], "overview": "A card shark and his unwillingly-enlisted friends need to make a lot of cash quick after losing a sketchy poker match. To do this they decide to pull a heist on a small-time gang who happen to be operating out of the flat next door.", "poster": "https://image.tmdb.org/t/p/w500/8kSerJrhrJWKLk1LViesGcnrUPE.jpg", "release_date": 889056000 })
);
let (document, code) = index.get_document(500, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 500, "title": "Reservoir Dogs", "genres": ["Crime", "Thriller"], "overview": "A botched robbery indicates a police informant, and the pressure mounts in the aftermath at a warehouse. Crime begets violence as the survivors -- veteran Mr. White, newcomer Mr. Orange, psychopathic parolee Mr. Blonde, bickering weasel Mr. Pink and Nice Guy Eddie -- unravel.", "poster": "https://image.tmdb.org/t/p/w500/AjTtJNumZyUDz33VtMlF1K8JPsE.jpg", "release_date": 715392000})
);
let (document, code) = index.get_document(10006, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "id": 10006, "title": "Wild Seven", "genres": ["Action", "Crime", "Drama"], "overview": "In this darkly karmic vision of Arizona, a man who breathes nothing but ill will begins a noxious domino effect as quickly as an uncontrollable virus kills. As he exits Arizona State Penn after twenty-one long years, Wilson has only one thing on the brain, leveling the score with career criminal, Mackey Willis.", "poster": "https://image.tmdb.org/t/p/w500/y114dTPoqn8k2Txps4P2tI95YCS.jpg", "release_date": 1136073600})
);
}
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v4_rubygems_with_settings() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::RubyGemsWithSettingsV4.path()),
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await.unwrap();
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(indexes["results"].as_array().unwrap().len(), 1);
assert_eq!(indexes["results"][0]["uid"], json!("rubygems"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let index = server.index("rubygems");
let (stats, code) = index.stats().await;
assert_eq!(code, 200);
assert_eq!(
stats,
json!({ "numberOfDocuments": 53, "isIndexing": false, "fieldDistribution": {"description": 53, "id": 53, "name": 53, "summary": 53, "total_downloads": 53, "version": 53 }})
);
let (settings, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(
settings,
json!({ "displayedAttributes": ["name", "summary", "description", "version", "total_downloads"], "searchableAttributes": ["name", "summary"], "filterableAttributes": ["version"], "sortableAttributes": [], "rankingRules": ["typo", "words", "fame:desc", "proximity", "attribute", "exactness", "total_downloads:desc"], "stopWords": [], "synonyms": {}, "distinctAttribute": null, "typoTolerance": {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [] }, "faceting": { "maxValuesPerFacet": 100 }, "pagination": { "maxTotalHits": 1000 } })
);
let (tasks, code) = index.list_tasks().await;
assert_eq!(code, 200);
assert_eq!(
tasks["results"][0],
json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"})
);
// finally, check that we can still fetch a few documents by id
let (document, code) = index.get_document(188040, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "name": "meilisearch", "summary": "An easy-to-use ruby client for Meilisearch API", "description": "An easy-to-use ruby client for Meilisearch API. See https://github.com/meilisearch/MeiliSearch", "id": "188040", "version": "0.15.2", "total_downloads": "7465"})
);
let (document, code) = index.get_document(191940, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "name": "doggo", "summary": "RSpec 3 formatter - documentation, with progress indication", "description": "Similar to \"rspec -f d\", but also indicates progress by showing the current test number and total test count on each line.", "id": "191940", "version": "1.1.0", "total_downloads": "9394"})
);
let (document, code) = index.get_document(159227, None).await;
assert_eq!(code, 200);
assert_eq!(
document,
json!({ "name": "vortex-of-agony", "summary": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "description": "You dont need to use nodejs or go, just install this plugin. It will crash your application at random", "id": "159227", "version": "0.1.0", "total_downloads": "1007"})
);
}
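// v5 dumps also contain API keys: the test below boots an auth-enabled server with a
// master key and checks that the dumped key is restored alongside the indexes.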
#[actix_rt::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn import_dump_v5() {
let temp = tempfile::tempdir().unwrap();
let options = Opt {
import_dump: Some(GetDump::TestV5.path()),
..default_settings(temp.path())
};
let mut server = Server::new_auth_with_options(options, temp).await;
server.use_api_key("MASTER_KEY");
let (indexes, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200, "{indexes}");
assert_eq!(indexes["results"].as_array().unwrap().len(), 2);
assert_eq!(indexes["results"][0]["uid"], json!("test"));
assert_eq!(indexes["results"][1]["uid"], json!("test2"));
assert_eq!(indexes["results"][0]["primaryKey"], json!("id"));
let expected_stats = json!({
"numberOfDocuments": 10,
"isIndexing": false,
"fieldDistribution": {
"cast": 10,
"director": 10,
"genres": 10,
"id": 10,
"overview": 10,
"popularity": 10,
"poster_path": 10,
"producer": 10,
"production_companies": 10,
"release_date": 10,
"tagline": 10,
"title": 10,
"vote_average": 10,
"vote_count": 10
}
});
let index1 = server.index("test");
let index2 = server.index("test2");
let (stats, code) = index1.stats().await;
assert_eq!(code, 200);
assert_eq!(stats, expected_stats);
let (docs, code) = index2
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(docs["results"].as_array().unwrap().len(), 10);
let (docs, code) = index1
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(docs["results"].as_array().unwrap().len(), 10);
let (stats, code) = index2.stats().await;
assert_eq!(code, 200);
assert_eq!(stats, expected_stats);
let (keys, code) = server.list_api_keys().await;
assert_eq!(code, 200);
let key = &keys["results"][0];
assert_eq!(key["name"], "my key");
}

View File

@ -102,7 +102,7 @@ async fn error_create_with_invalid_index_uid() {
let (response, code) = index.create(None).await;
let expected_response = json!({
"message": "`test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"message": "invalid index uid `test test#!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"

View File

@ -52,10 +52,10 @@ async fn loop_delete_add_documents() {
let mut tasks = Vec::new();
for _ in 0..50 {
let (response, code) = index.add_documents(documents.clone(), None).await;
tasks.push(response["uid"].as_u64().unwrap());
tasks.push(response["taskUid"].as_u64().unwrap());
assert_eq!(code, 202, "{}", response);
let (response, code) = index.delete().await;
tasks.push(response["uid"].as_u64().unwrap());
tasks.push(response["taskUid"].as_u64().unwrap());
assert_eq!(code, 202, "{}", response);
}

View File

@ -16,12 +16,11 @@ async fn create_and_get_index() {
assert_eq!(code, 200);
assert_eq!(response["uid"], "test");
assert_eq!(response["name"], "test");
assert!(response.get("createdAt").is_some());
assert!(response.get("updatedAt").is_some());
assert_eq!(response["createdAt"], response["updatedAt"]);
assert_eq!(response["primaryKey"], Value::Null);
assert_eq!(response.as_object().unwrap().len(), 5);
assert_eq!(response.as_object().unwrap().len(), 4);
}
#[actix_rt::test]
@ -45,10 +44,10 @@ async fn error_get_unexisting_index() {
#[actix_rt::test]
async fn no_index_return_empty_list() {
let server = Server::new().await;
let (response, code) = server.list_indexes().await;
let (response, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert!(response.is_array());
assert!(response.as_array().unwrap().is_empty());
assert!(response["results"].is_array());
assert!(response["results"].as_array().unwrap().is_empty());
}
#[actix_rt::test]
@ -59,10 +58,10 @@ async fn list_multiple_indexes() {
server.index("test").wait_task(1).await;
let (response, code) = server.list_indexes().await;
let (response, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert!(response.is_array());
let arr = response.as_array().unwrap();
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 2);
assert!(arr
.iter()
@ -72,6 +71,118 @@ async fn list_multiple_indexes() {
.any(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key"));
}
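// The index list is paginated through `offset` and `limit` query parameters; the
// response exposes `results`, `offset`, `limit` (default 20) and `total`, as the
// cases below exercise one by one.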
#[actix_rt::test]
async fn get_and_paginate_indexes() {
let server = Server::new().await;
const NB_INDEXES: usize = 50;
for i in 0..NB_INDEXES {
server.index(&format!("test_{i:02}")).create(None).await;
server
.index(&format!("test_{i:02}"))
.wait_task(i as u64)
.await;
}
// basic: no offset or limit, the defaults apply
let (response, code) = server.list_indexes(None, None).await;
assert_eq!(code, 200);
assert_eq!(response["limit"], json!(20));
assert_eq!(response["offset"], json!(0));
assert_eq!(response["total"], json!(NB_INDEXES));
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 20);
// ensure we get all the indexes in alphabetical order (the zero-padded uids make lexicographic order match numeric order)
assert!((0..20)
.map(|idx| format!("test_{idx:02}"))
.zip(arr)
.all(|(expected, entry)| entry["uid"] == expected));
// with an offset
let (response, code) = server.list_indexes(Some(15), None).await;
assert_eq!(code, 200);
assert_eq!(response["limit"], json!(20));
assert_eq!(response["offset"], json!(15));
assert_eq!(response["total"], json!(NB_INDEXES));
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 20);
assert!((15..35)
.map(|idx| format!("test_{idx:02}"))
.zip(arr)
.all(|(expected, entry)| entry["uid"] == expected));
// with an offset and not enough elements
let (response, code) = server.list_indexes(Some(45), None).await;
assert_eq!(code, 200);
assert_eq!(response["limit"], json!(20));
assert_eq!(response["offset"], json!(45));
assert_eq!(response["total"], json!(NB_INDEXES));
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 5);
assert!((45..50)
.map(|idx| format!("test_{idx:02}"))
.zip(arr)
.all(|(expected, entry)| entry["uid"] == expected));
// with a limit lower than the default
let (response, code) = server.list_indexes(None, Some(5)).await;
assert_eq!(code, 200);
assert_eq!(response["limit"], json!(5));
assert_eq!(response["offset"], json!(0));
assert_eq!(response["total"], json!(NB_INDEXES));
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 5);
assert!((0..5)
.map(|idx| format!("test_{idx:02}"))
.zip(arr)
.all(|(expected, entry)| entry["uid"] == expected));
// with a limit higher than the default
let (response, code) = server.list_indexes(None, Some(40)).await;
assert_eq!(code, 200);
assert_eq!(response["limit"], json!(40));
assert_eq!(response["offset"], json!(0));
assert_eq!(response["total"], json!(NB_INDEXES));
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 40);
assert!((0..40)
.map(|idx| format!("test_{idx:02}"))
.zip(arr)
.all(|(expected, entry)| entry["uid"] == expected));
// with a limit higher than the total number of indexes
let (response, code) = server.list_indexes(None, Some(80)).await;
assert_eq!(code, 200);
assert_eq!(response["limit"], json!(80));
assert_eq!(response["offset"], json!(0));
assert_eq!(response["total"], json!(NB_INDEXES));
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 50);
assert!((0..50)
.map(|idx| format!("test_{idx:02}"))
.zip(arr)
.all(|(expected, entry)| entry["uid"] == expected));
// with a limit and an offset
let (response, code) = server.list_indexes(Some(20), Some(10)).await;
assert_eq!(code, 200);
assert_eq!(response["limit"], json!(10));
assert_eq!(response["offset"], json!(20));
assert_eq!(response["total"], json!(NB_INDEXES));
assert!(response["results"].is_array());
let arr = response["results"].as_array().unwrap();
assert_eq!(arr.len(), 10);
assert!((20..30)
.map(|idx| format!("test_{idx:02}"))
.zip(arr)
.all(|(expected, entry)| entry["uid"] == expected));
}
#[actix_rt::test]
async fn get_invalid_index_uid() {
let server = Server::new().await;

View File

@ -35,7 +35,7 @@ async fn stats() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["uid"], 1);
assert_eq!(response["taskUid"], 1);
index.wait_task(1).await;

View File

@ -21,7 +21,6 @@ async fn update_primary_key() {
assert_eq!(code, 200);
assert_eq!(response["uid"], "test");
assert_eq!(response["name"], "test");
assert!(response.get("createdAt").is_some());
assert!(response.get("updatedAt").is_some());
@ -32,7 +31,7 @@ async fn update_primary_key() {
assert!(created_at < updated_at);
assert_eq!(response["primaryKey"], "primary");
assert_eq!(response.as_object().unwrap().len(), 5);
assert_eq!(response.as_object().unwrap().len(), 4);
}
#[actix_rt::test]

View File

@ -2,6 +2,7 @@ mod auth;
mod common;
mod dashboard;
mod documents;
mod dumps;
mod index;
mod search;
mod settings;

View File

@ -45,26 +45,18 @@ async fn search_invalid_highlight_and_crop_tags() {
for field in fields {
// object
index
.search(
json!({field.to_string(): {"marker": "<crop>"}}),
|response, code| {
assert_eq!(code, 400, "field {} passing object: {}", &field, response);
assert_eq!(response["code"], "bad_request");
},
)
let (response, code) = index
.search_post(json!({field.to_string(): {"marker": "<crop>"}}))
.await;
assert_eq!(code, 400, "field {} passing object: {}", &field, response);
assert_eq!(response["code"], "bad_request");
// array
index
.search(
json!({field.to_string(): ["marker", "<crop>"]}),
|response, code| {
assert_eq!(code, 400, "field {} passing array: {}", &field, response);
assert_eq!(response["code"], "bad_request");
},
)
let (response, code) = index
.search_post(json!({field.to_string(): ["marker", "<crop>"]}))
.await;
assert_eq!(code, 400, "field {} passing array: {}", &field, response);
assert_eq!(response["code"], "bad_request");
}
}
@ -115,7 +107,7 @@ async fn filter_invalid_syntax_array() {
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": [["title & Glass"]]}), |response, code| {
.search(json!({"filter": ["title & Glass"]}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
})
@ -172,7 +164,7 @@ async fn filter_invalid_attribute_array() {
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": [["many = Glass"]]}), |response, code| {
.search(json!({"filter": ["many = Glass"]}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
})
@ -226,7 +218,7 @@ async fn filter_reserved_geo_attribute_array() {
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": [["_geo = Glass"]]}), |response, code| {
.search(json!({"filter": ["_geo = Glass"]}), |response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);
})
@ -281,7 +273,7 @@ async fn filter_reserved_attribute_array() {
});
index
.search(
json!({"filter": [["_geoDistance = Glass"]]}),
json!({"filter": ["_geoDistance = Glass"]}),
|response, code| {
assert_eq!(response, expected_response);
assert_eq!(code, 400);

View File

@ -15,83 +15,100 @@ async fn formatted_contain_wildcard() {
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (response, code) = index
.search_post(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"id": "852",
"cattos": "<em>pesti</em>",
}
})
);
index.search(json!({ "q": "pesti", "attributesToRetrieve": ["father", "mother"], "attributesToHighlight": ["father", "mother", "*"], "attributesToCrop": ["doggos"], "showMatchesPosition": true }),
|response, code|
{
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"id": "852",
"cattos": "<em>pesti</em>",
},
"_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
})
);
}
)
.await;
let (response, code) = index
.search_post(json!({ "q": "pesti", "attributesToRetrieve": ["*"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
})
);
let (response, code) = index
.search_post(
json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"] }),
index
.search(
json!({ "q": "pesti", "attributesToRetrieve": ["*"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
})
);
},
)
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
"_formatted": {
"id": "852",
"cattos": "pesti",
}
})
);
let (response, code) = index
.search_post(
index
.search(
json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToHighlight": ["id"], "showMatchesPosition": true }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
"_formatted": {
"id": "852",
"cattos": "pesti",
},
"_matchesPosition": {"cattos": [{"start": 0, "length": 5}]},
})
);
}
)
.await;
index
.search(
json!({ "q": "pesti", "attributesToRetrieve": ["*"], "attributesToCrop": ["*"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
"_formatted": {
"id": "852",
"cattos": "pesti",
}
})
);
},
)
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
"_formatted": {
"id": "852",
"cattos": "pesti",
}
})
);
let (response, code) = index
.search_post(json!({ "q": "pesti", "attributesToCrop": ["*"] }))
index
.search(
json!({ "q": "pesti", "attributesToCrop": ["*"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
"_formatted": {
"id": "852",
"cattos": "pesti",
}
})
);
},
)
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"cattos": "pesti",
"_formatted": {
"id": "852",
"cattos": "pesti",
}
})
);
}
#[actix_rt::test]
@ -103,87 +120,122 @@ async fn format_nested() {
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (response, code) = index
.search_post(json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"doggos": [
{
"name": "bobby",
"age": 2,
},
{
"name": "buddy",
"age": 4,
},
],
})
);
let (response, code) = index
.search_post(json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"doggos": [
{
"name": "bobby",
},
{
"name": "buddy",
},
],
})
);
let (response, code) = index
.search_post(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.name"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"doggos": [
{
"name": "bobby",
},
{
"name": "buddy",
},
],
index
.search(
json!({ "q": "pesti", "attributesToRetrieve": ["doggos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"doggos": [
{
"name": "bobby",
"age": 2,
},
{
"name": "buddy",
"age": 4,
},
],
})
);
},
})
);
let (response, code) = index
.search_post(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToCrop": ["doggos.name"] }))
)
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"doggos": [
{
"name": "bobby",
},
{
"name": "buddy",
},
],
index
.search(
json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"doggos": [
{
"name": "bobby",
},
{
"name": "buddy",
},
],
})
);
},
})
);
let (response, code) = index
.search_post(json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"], "attributesToHighlight": ["doggos.age"] }))
)
.await;
index
.search(
json!({ "q": "bobby", "attributesToRetrieve": ["doggos.name"], "showMatchesPosition": true }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"doggos": [
{
"name": "bobby",
},
{
"name": "buddy",
},
],
"_matchesPosition": {"doggos.name": [{"start": 0, "length": 5}]},
})
);
}
)
.await;
index
.search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.name"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"doggos": [
{
"name": "bobby",
},
{
"name": "buddy",
},
],
},
})
);
})
.await;
index
.search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToCrop": ["doggos.name"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"doggos": [
{
"name": "bobby",
},
{
"name": "buddy",
},
],
},
})
);
})
.await;
index
.search(json!({ "q": "pesti", "attributesToRetrieve": ["doggos.name"], "attributesToHighlight": ["doggos.age"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
@ -210,11 +262,13 @@ async fn format_nested() {
},
})
);
let (response, code) = index
.search_post(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.age"], "attributesToCrop": ["doggos.name"] }))
})
.await;
assert_eq!(code, 200, "{}", response);
index
.search(json!({ "q": "pesti", "attributesToRetrieve": [], "attributesToHighlight": ["doggos.age"], "attributesToCrop": ["doggos.name"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
@ -232,6 +286,9 @@ async fn format_nested() {
},
})
);
}
)
.await;
}
#[actix_rt::test]
@ -248,9 +305,9 @@ async fn displayedattr_2_smol() {
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (response, code) = index
.search_post(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }))
.await;
index
.search(json!({ "attributesToRetrieve": ["father", "id"], "attributesToHighlight": ["mother"], "attributesToCrop": ["cattos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
@ -258,119 +315,157 @@ async fn displayedattr_2_smol() {
"id": 852,
})
);
let (response, code) = index
.search_post(json!({ "attributesToRetrieve": ["id"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
let (response, code) = index
.search_post(json!({ "attributesToHighlight": ["id"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"_formatted": {
"id": "852",
}
})
);
let (response, code) = index
.search_post(json!({ "attributesToCrop": ["id"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"_formatted": {
"id": "852",
}
})
);
let (response, code) = index
.search_post(json!({ "attributesToHighlight": ["id"], "attributesToCrop": ["id"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"_formatted": {
"id": "852",
}
})
);
let (response, code) = index
.search_post(json!({ "attributesToHighlight": ["cattos"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
let (response, code) = index
.search_post(json!({ "attributesToCrop": ["cattos"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
let (response, code) = index
.search_post(json!({ "attributesToRetrieve": ["cattos"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"][0], json!({}));
let (response, code) = index
.search_post(
json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["cattos"], "attributesToCrop": ["cattos"] }),
index
.search(
json!({ "attributesToRetrieve": ["id"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
},
)
.await;
index
.search(
json!({ "attributesToHighlight": ["id"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"_formatted": {
"id": "852",
}
})
);
},
)
.await;
index
.search(json!({ "attributesToCrop": ["id"] }), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"_formatted": {
"id": "852",
}
})
);
})
.await;
index
.search(
json!({ "attributesToHighlight": ["id"], "attributesToCrop": ["id"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
"_formatted": {
"id": "852",
}
})
);
},
)
.await;
index
.search(
json!({ "attributesToHighlight": ["cattos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
},
)
.await;
index
.search(
json!({ "attributesToCrop": ["cattos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"id": 852,
})
);
},
)
.await;
index
.search(
json!({ "attributesToRetrieve": ["cattos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"][0], json!({}));
},
)
.await;
index
.search(
json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["cattos"], "attributesToCrop": ["cattos"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"][0], json!({}));
let (response, code) = index
.search_post(json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["id"] }))
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"id": "852",
}
})
);
)
.await;
let (response, code) = index
.search_post(json!({ "attributesToRetrieve": ["cattos"], "attributesToCrop": ["id"] }))
index
.search(
json!({ "attributesToRetrieve": ["cattos"], "attributesToHighlight": ["id"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"id": "852",
}
})
);
},
)
.await;
index
.search(
json!({ "attributesToRetrieve": ["cattos"], "attributesToCrop": ["id"] }),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"id": "852",
}
})
);
},
)
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(
response["hits"][0],
json!({
"_formatted": {
"id": "852",
}
})
);
}

View File

@ -420,11 +420,11 @@ async fn search_facet_distribution() {
index
.search(
json!({
"facetsDistribution": ["title"]
"facets": ["title"]
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
let dist = response["facetsDistribution"].as_object().unwrap();
let dist = response["facetDistribution"].as_object().unwrap();
assert_eq!(dist.len(), 1);
assert!(dist.get("title").is_some());
},
@ -445,12 +445,12 @@ async fn search_facet_distribution() {
index
.search(
json!({
// "facetsDistribution": ["father", "doggos.name"]
"facetsDistribution": ["father"]
// "facets": ["father", "doggos.name"]
"facets": ["father"]
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
let dist = response["facetsDistribution"].as_object().unwrap();
let dist = response["facetDistribution"].as_object().unwrap();
assert_eq!(dist.len(), 1);
assert_eq!(
dist["father"],
@ -474,11 +474,11 @@ async fn search_facet_distribution() {
index
.search(
json!({
"facetsDistribution": ["doggos.name"]
"facets": ["doggos.name"]
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
let dist = response["facetsDistribution"].as_object().unwrap();
let dist = response["facetDistribution"].as_object().unwrap();
assert_eq!(dist.len(), 1);
assert_eq!(
dist["doggos.name"],
@ -491,12 +491,11 @@ async fn search_facet_distribution() {
index
.search(
json!({
"facetsDistribution": ["doggos"]
"facets": ["doggos"]
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
let dist = response["facetsDistribution"].as_object().unwrap();
dbg!(&dist);
let dist = response["facetDistribution"].as_object().unwrap();
assert_eq!(dist.len(), 3);
assert_eq!(
dist["doggos.name"],
@ -566,6 +565,36 @@ async fn placeholder_search_is_hard_limited() {
},
)
.await;
index
.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
.await;
index.wait_task(1).await;
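// with `maxTotalHits` raised above the corpus size, all 1200 documents become
// reachable (the default cap is 1000, as asserted in the settings tests below)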
index
.search(
json!({
"limit": 1500,
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
},
)
.await;
index
.search(
json!({
"offset": 1000,
"limit": 400,
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 200);
},
)
.await;
}
#[actix_rt::test]
@ -605,4 +634,85 @@ async fn search_is_hard_limited() {
},
)
.await;
index
.update_settings(json!({ "pagination": { "maxTotalHits": 10_000 } }))
.await;
index.wait_task(1).await;
index
.search(
json!({
"q": "unique",
"limit": 1500,
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 1200);
},
)
.await;
index
.search(
json!({
"q": "unique",
"offset": 1000,
"limit": 400,
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["hits"].as_array().unwrap().len(), 200);
},
)
.await;
}
#[actix_rt::test]
async fn faceting_max_values_per_facet() {
let server = Server::new().await;
let index = server.index("test");
index
.update_settings(json!({ "filterableAttributes": ["number"] }))
.await;
let documents: Vec<_> = (0..10_000)
.map(|id| json!({ "id": id, "number": id * 10 }))
.collect();
index.add_documents(json!(documents), None).await;
index.wait_task(1).await;
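// the 10_000 documents yield 10_000 distinct `number` values, but the default
// `maxValuesPerFacet` of 100 caps the returned distribution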
index
.search(
json!({
"facets": ["number"]
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
let numbers = response["facetDistribution"]["number"].as_object().unwrap();
assert_eq!(numbers.len(), 100);
},
)
.await;
index
.update_settings(json!({ "faceting": { "maxValuesPerFacet": 10_000 } }))
.await;
index.wait_task(2).await;
index
.search(
json!({
"facets": ["number"]
}),
|response, code| {
assert_eq!(code, 200, "{}", response);
let numbers = response["facetDistribution"]["number"]
.as_object()
.unwrap();
assert_eq!(numbers.len(), 10_000);
},
)
.await;
}

View File

@ -24,6 +24,18 @@ static DEFAULT_SETTINGS_VALUES: Lazy<HashMap<&'static str, Value>> = Lazy::new(|
);
map.insert("stop_words", json!([]));
map.insert("synonyms", json!({}));
map.insert(
"faceting",
json!({
"maxValuesPerFacet": json!(100),
}),
);
map.insert(
"pagination",
json!({
"maxTotalHits": json!(1000),
}),
);
map
});
@ -43,7 +55,7 @@ async fn get_settings() {
let (response, code) = index.settings().await;
assert_eq!(code, 200);
let settings = response.as_object().unwrap();
assert_eq!(settings.keys().len(), 9);
assert_eq!(settings.keys().len(), 11);
assert_eq!(settings["displayedAttributes"], json!(["*"]));
assert_eq!(settings["searchableAttributes"], json!(["*"]));
assert_eq!(settings["filterableAttributes"], json!([]));
@ -61,6 +73,18 @@ async fn get_settings() {
])
);
assert_eq!(settings["stopWords"], json!([]));
assert_eq!(
settings["faceting"],
json!({
"maxValuesPerFacet": 100,
})
);
assert_eq!(
settings["pagination"],
json!({
"maxTotalHits": 1000,
})
);
}
#[actix_rt::test]
@ -122,7 +146,7 @@ async fn reset_all_settings() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202);
assert_eq!(response["uid"], 0);
assert_eq!(response["taskUid"], 0);
index.wait_task(0).await;
index
@ -179,7 +203,7 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
assert_eq!(code, 400);
let expected = json!({
"message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"message": "invalid index uid `test##! `, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"});
@ -188,7 +212,7 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
}
macro_rules! test_setting_routes {
($($setting:ident), *) => {
($($setting:ident $write_method:ident), *) => {
$(
mod $setting {
use crate::common::Server;
@ -214,7 +238,7 @@ macro_rules! test_setting_routes {
.chars()
.map(|c| if c == '_' { '-' } else { c })
.collect::<String>());
let (response, code) = server.service.post(url, serde_json::Value::Null).await;
let (response, code) = server.service.$write_method(url, serde_json::Value::Null).await;
assert_eq!(code, 202, "{}", response);
server.index("").wait_task(0).await;
let (response, code) = server.index("test").get().await;
@ -258,13 +282,15 @@ macro_rules! test_setting_routes {
}
test_setting_routes!(
filterable_attributes,
displayed_attributes,
searchable_attributes,
distinct_attribute,
stop_words,
ranking_rules,
synonyms
filterable_attributes put,
displayed_attributes put,
searchable_attributes put,
distinct_attribute put,
stop_words put,
ranking_rules put,
synonyms put,
pagination patch,
faceting patch
);
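// The object-shaped settings (`pagination`, `faceting`) are updated with PATCH, while
// the scalar and array settings keep PUT; the macro now takes the HTTP verb per route.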
#[actix_rt::test]
@ -283,7 +309,7 @@ async fn error_set_invalid_ranking_rules() {
assert_eq!(response["status"], "failed");
let expected_error = json!({
"message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are Words, Typo, Sort, Proximity, Attribute, Exactness and custom ranking rules."#,
"message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules."#,
"code": "invalid_ranking_rule",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_ranking_rule"

View File

@ -41,7 +41,7 @@ async fn perform_snapshot() {
..default_settings(temp.path())
};
let server = Server::new_with_options(options).await;
let server = Server::new_with_options(options).await.unwrap();
let index = server.index("test");
index
@ -67,10 +67,10 @@ async fn perform_snapshot() {
..default_settings(temp.path())
};
let snapshot_server = Server::new_with_options(options).await;
let snapshot_server = Server::new_with_options(options).await.unwrap();
verify_snapshot!(server, snapshot_server, |server| =>
server.list_indexes(),
server.list_indexes(None, None),
// For some reason the db sizes differ; this may be due to the compaction options we have
// set when performing the snapshot.
//server.stats(),

View File

@ -54,7 +54,7 @@ async fn stats() {
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(code, 202, "{}", response);
assert_eq!(response["uid"], 1);
assert_eq!(response["taskUid"], 1);
index.wait_task(1).await;

View File

@ -3,22 +3,6 @@ use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
#[actix_rt::test]
async fn error_get_task_unexisting_index() {
let server = Server::new().await;
let (response, code) = server.service.get("/indexes/test/tasks").await;
let expected_response = json!({
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(response, expected_response);
assert_eq!(code, 404);
}
#[actix_rt::test]
async fn error_get_unexisting_task_status() {
let server = Server::new().await;
@ -58,22 +42,6 @@ async fn get_task_status() {
// TODO check response format, as per #48
}
#[actix_rt::test]
async fn error_list_tasks_unexisting_index() {
let server = Server::new().await;
let (response, code) = server.index("test").list_tasks().await;
let expected_response = json!({
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(response, expected_response);
assert_eq!(code, 404);
}
#[actix_rt::test]
async fn list_tasks() {
let server = Server::new().await;
@ -91,10 +59,140 @@ async fn list_tasks() {
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
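// `*` acts as a wildcard in task filters; the cases below mix it with concrete values
// for `indexUid`, `type` and `status` and expect the same unfiltered result each time.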
#[actix_rt::test]
async fn list_tasks_with_star_filters() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.await;
let (response, code) = index.service.get("/tasks?indexUid=test").await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index.service.get("/tasks?indexUid=*").await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index.service.get("/tasks?indexUid=*,pasteque").await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index.service.get("/tasks?type=*").await;
assert_eq!(code, 200);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?type=*,documentAdditionOrUpdate&status=*")
.await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test")
.await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
let (response, code) = index
.service
.get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test,*")
.await;
assert_eq!(code, 200, "{:?}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_status_filtered() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.await;
let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
// We can't be sure that the update isn't already processed, so we can't test this
// let (response, code) = index.filtered_tasks(&[], &["processing"]).await;
// assert_eq!(code, 200, "{}", response);
// assert_eq!(response["results"].as_array().unwrap().len(), 1);
index.wait_task(1).await;
let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_type_filtered() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) = index
.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[])
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_status_and_type_filtered() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 0);
let (response, code) = index
.filtered_tasks(
&["indexCreation", "documentAdditionOrUpdate"],
&["succeeded", "processing", "enqueued"],
)
.await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
macro_rules! assert_valid_summarized_task {
($response:expr, $task_type:literal, $index:literal) => {{
assert_eq!($response.as_object().unwrap().len(), 5);
assert!($response["uid"].as_u64().is_some());
assert!($response["taskUid"].as_u64().is_some());
assert_eq!($response["indexUid"], $index);
assert_eq!($response["status"], "enqueued");
assert_eq!($response["type"], $task_type);
@ -119,16 +217,16 @@ async fn test_summarized_task_view() {
assert_valid_summarized_task!(response, "settingsUpdate", "test");
let (response, _) = index.update_documents(json!([{"id": 1}]), None).await;
assert_valid_summarized_task!(response, "documentPartial", "test");
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
let (response, _) = index.add_documents(json!([{"id": 1}]), None).await;
assert_valid_summarized_task!(response, "documentAddition", "test");
assert_valid_summarized_task!(response, "documentAdditionOrUpdate", "test");
let (response, _) = index.delete_document(1).await;
assert_valid_summarized_task!(response, "documentDeletion", "test");
let (response, _) = index.clear_all_documents().await;
assert_valid_summarized_task!(response, "clearAll", "test");
assert_valid_summarized_task!(response, "documentDeletion", "test");
let (response, _) = index.delete().await;
assert_valid_summarized_task!(response, "indexDeletion", "test");

View File

@ -1,10 +1,8 @@
[package]
name = "meilisearch-lib"
version = "0.27.2"
version = "0.28.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
actix-web = { version = "4.0.1", default-features = false }
anyhow = { version = "1.0.56", features = ["backtrace"] }
@ -29,8 +27,8 @@ itertools = "0.10.3"
lazy_static = "1.4.0"
log = "0.4.14"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-error = { path = "../meilisearch-error" }
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.26.6" }
meilisearch-types = { path = "../meilisearch-types" }
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.31.1" }
mime = "0.3.16"
num_cpus = "1.13.1"
obkv = "0.2.0"
@ -41,6 +39,7 @@ rand = "0.8.5"
rayon = "1.5.1"
regex = "1.5.5"
reqwest = { version = "0.11.9", features = ["json", "rustls-tls"], default-features = false, optional = true }
roaring = "0.9.0"
rustls = "0.20.4"
serde = { version = "1.0.136", features = ["derive"] }
serde_json = { version = "1.0.79", features = ["preserve_order"] }
@ -52,15 +51,15 @@ tempfile = "3.3.0"
thiserror = "1.0.30"
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
tokio = { version = "1.17.0", features = ["full"] }
uuid = { version = "0.8.2", features = ["serde"] }
uuid = { version = "1.1.2", features = ["serde", "v4"] }
walkdir = "2.3.2"
whoami = { version = "1.2.1", optional = true }
[dev-dependencies]
actix-rt = "2.7.0"
meilisearch-error = { path = "../meilisearch-error", features = ["test-traits"] }
meilisearch-types = { path = "../meilisearch-types", features = ["test-traits"] }
mockall = "0.11.0"
nelson = { git = "https://github.com/MarinPostma/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
nelson = { git = "https://github.com/meilisearch/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
paste = "1.0.6"
proptest = "1.0.0"
proptest-derive = "0.3.0"

View File

@ -2,7 +2,8 @@ use std::borrow::Borrow;
use std::fmt::{self, Debug, Display};
use std::io::{self, BufRead, BufReader, BufWriter, Cursor, Read, Seek, Write};
use meilisearch_error::{internal_error, Code, ErrorCode};
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::internal_error;
use milli::documents::DocumentBatchBuilder;
type Result<T> = std::result::Result<T, DocumentFormatError>;

View File

@ -1,5 +1,6 @@
pub mod v2;
pub mod v3;
pub mod v4;
/// Parses the v1 version of the Asc ranking rules `asc(price)`and returns the field name.
pub fn asc_ranking_rule(text: &str) -> Option<&str> {

View File

@ -1,5 +1,5 @@
use anyhow::bail;
use meilisearch_error::Code;
use meilisearch_types::error::Code;
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;

View File

@ -1,12 +1,13 @@
use meilisearch_error::{Code, ResponseError};
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use super::v4::{Task, TaskContent, TaskEvent};
use crate::index::{Settings, Unchecked};
use crate::index_resolver::IndexUid;
use crate::tasks::task::{DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult};
use crate::tasks::task::{DocumentDeletion, TaskId, TaskResult};
use super::v2;
@ -58,9 +59,9 @@ pub enum Update {
ClearDocuments,
}
impl From<Update> for TaskContent {
fn from(other: Update) -> Self {
match other {
impl From<Update> for super::v4::TaskContent {
fn from(update: Update) -> Self {
match update {
Update::DeleteDocuments(ids) => {
TaskContent::DocumentDeletion(DocumentDeletion::Ids(ids))
}
@ -185,10 +186,10 @@ impl Failed {
impl From<(UpdateStatus, String, TaskId)> for Task {
fn from((update, uid, task_id): (UpdateStatus, String, TaskId)) -> Self {
// Dummy task
let mut task = Task {
let mut task = super::v4::Task {
id: task_id,
index_uid: IndexUid::new(uid).unwrap(),
content: TaskContent::IndexDeletion,
index_uid: IndexUid::new_unchecked(uid),
content: super::v4::TaskContent::IndexDeletion,
events: Vec::new(),
};

View File

@ -0,0 +1,145 @@
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use milli::update::IndexDocumentsMethod;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use uuid::Uuid;
use crate::index::{Settings, Unchecked};
use crate::tasks::batch::BatchId;
use crate::tasks::task::{
DocumentDeletion, TaskContent as NewTaskContent, TaskEvent as NewTaskEvent, TaskId, TaskResult,
};
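// Compatibility types mirroring the task format serialized in v4 dumps; each converts
// into the current task model from `crate::tasks::task`.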
#[derive(Debug, Serialize, Deserialize)]
pub struct Task {
pub id: TaskId,
pub index_uid: IndexUid,
pub content: TaskContent,
pub events: Vec<TaskEvent>,
}
impl From<Task> for crate::tasks::task::Task {
fn from(other: Task) -> Self {
Self {
id: other.id,
content: NewTaskContent::from((other.index_uid, other.content)),
events: other.events.into_iter().map(Into::into).collect(),
}
}
}
#[derive(Debug, Serialize, Deserialize)]
pub enum TaskEvent {
Created(#[serde(with = "time::serde::rfc3339")] OffsetDateTime),
Batched {
#[serde(with = "time::serde::rfc3339")]
timestamp: OffsetDateTime,
batch_id: BatchId,
},
Processing(#[serde(with = "time::serde::rfc3339")] OffsetDateTime),
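// (sic) `Succeded` is presumably kept misspelled to match what v4 serialized;
// it is mapped to the corrected `Succeeded` in the conversion below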
Succeded {
result: TaskResult,
#[serde(with = "time::serde::rfc3339")]
timestamp: OffsetDateTime,
},
Failed {
error: ResponseError,
#[serde(with = "time::serde::rfc3339")]
timestamp: OffsetDateTime,
},
}
impl From<TaskEvent> for NewTaskEvent {
fn from(other: TaskEvent) -> Self {
match other {
TaskEvent::Created(x) => NewTaskEvent::Created(x),
TaskEvent::Batched {
timestamp,
batch_id,
} => NewTaskEvent::Batched {
timestamp,
batch_id,
},
TaskEvent::Processing(x) => NewTaskEvent::Processing(x),
TaskEvent::Succeded { result, timestamp } => {
NewTaskEvent::Succeeded { result, timestamp }
}
TaskEvent::Failed { error, timestamp } => NewTaskEvent::Failed { error, timestamp },
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
#[allow(clippy::large_enum_variant)]
pub enum TaskContent {
DocumentAddition {
content_uuid: Uuid,
merge_strategy: IndexDocumentsMethod,
primary_key: Option<String>,
documents_count: usize,
allow_index_creation: bool,
},
DocumentDeletion(DocumentDeletion),
SettingsUpdate {
settings: Settings<Unchecked>,
/// Indicates whether the task was a deletion
is_deletion: bool,
allow_index_creation: bool,
},
IndexDeletion,
IndexCreation {
primary_key: Option<String>,
},
IndexUpdate {
primary_key: Option<String>,
},
Dump {
uid: String,
},
}
impl From<(IndexUid, TaskContent)> for NewTaskContent {
fn from((index_uid, content): (IndexUid, TaskContent)) -> Self {
match content {
TaskContent::DocumentAddition {
content_uuid,
merge_strategy,
primary_key,
documents_count,
allow_index_creation,
} => NewTaskContent::DocumentAddition {
index_uid,
content_uuid,
merge_strategy,
primary_key,
documents_count,
allow_index_creation,
},
TaskContent::DocumentDeletion(deletion) => NewTaskContent::DocumentDeletion {
index_uid,
deletion,
},
TaskContent::SettingsUpdate {
settings,
is_deletion,
allow_index_creation,
} => NewTaskContent::SettingsUpdate {
index_uid,
settings,
is_deletion,
allow_index_creation,
},
TaskContent::IndexDeletion => NewTaskContent::IndexDeletion { index_uid },
TaskContent::IndexCreation { primary_key } => NewTaskContent::IndexCreation {
index_uid,
primary_key,
},
TaskContent::IndexUpdate { primary_key } => NewTaskContent::IndexUpdate {
index_uid,
primary_key,
},
TaskContent::Dump { uid } => NewTaskContent::Dump { uid },
}
}
}

View File

@ -0,0 +1,36 @@
use meilisearch_auth::error::AuthControllerError;
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::internal_error;
use crate::{index_resolver::error::IndexResolverError, tasks::error::TaskError};
pub type Result<T> = std::result::Result<T, DumpError>;
#[derive(thiserror::Error, Debug)]
pub enum DumpError {
#[error("An internal error has occurred. `{0}`.")]
Internal(Box<dyn std::error::Error + Send + Sync + 'static>),
#[error("{0}")]
IndexResolver(#[from] IndexResolverError),
}
internal_error!(
DumpError: milli::heed::Error,
std::io::Error,
tokio::task::JoinError,
tokio::sync::oneshot::error::RecvError,
serde_json::error::Error,
tempfile::PersistError,
fs_extra::error::Error,
AuthControllerError,
TaskError
);
impl ErrorCode for DumpError {
fn error_code(&self) -> Code {
match self {
DumpError::Internal(_) => Code::Internal,
DumpError::IndexResolver(e) => e.error_code(),
}
}
}

View File

@ -0,0 +1,188 @@
#[cfg(not(test))]
pub use real::DumpHandler;
#[cfg(test)]
pub use test::MockDumpHandler as DumpHandler;
use time::{macros::format_description, OffsetDateTime};
/// Generates a uid from the creation date.
pub fn generate_uid() -> String {
OffsetDateTime::now_utc()
.format(format_description!(
"[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
))
.unwrap()
}
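// A minimal sketch (hypothetical test, not part of this diff) of the shape the
// format description above produces: eight date digits, a dash, then nine time
// digits including the three subsecond digits.
#[cfg(test)]
mod generate_uid_test {
    use super::generate_uid;

    #[test]
    fn uid_has_expected_shape() {
        let uid = generate_uid();
        let (date, time) = uid.split_once('-').unwrap();
        assert_eq!(date.len(), 8); // [year][month][day]
        assert_eq!(time.len(), 9); // [hour][minute][second][subsecond x3]
        assert!(date.chars().chain(time.chars()).all(|c| c.is_ascii_digit()));
    }
}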
mod real {
use std::path::PathBuf;
use std::sync::Arc;
use log::{info, trace};
use meilisearch_auth::AuthController;
use milli::heed::Env;
use tokio::fs::create_dir_all;
use tokio::io::AsyncWriteExt;
use crate::analytics;
use crate::compression::to_tar_gz;
use crate::dump::error::{DumpError, Result};
use crate::dump::{MetadataVersion, META_FILE_NAME};
use crate::index_resolver::{
index_store::IndexStore, meta_store::IndexMetaStore, IndexResolver,
};
use crate::tasks::TaskStore;
use crate::update_file_store::UpdateFileStore;
pub struct DumpHandler<U, I> {
dump_path: PathBuf,
db_path: PathBuf,
update_file_store: UpdateFileStore,
task_store_size: usize,
index_db_size: usize,
env: Arc<Env>,
index_resolver: Arc<IndexResolver<U, I>>,
}
impl<U, I> DumpHandler<U, I>
where
U: IndexMetaStore + Sync + Send + 'static,
I: IndexStore + Sync + Send + 'static,
{
pub fn new(
dump_path: PathBuf,
db_path: PathBuf,
update_file_store: UpdateFileStore,
task_store_size: usize,
index_db_size: usize,
env: Arc<Env>,
index_resolver: Arc<IndexResolver<U, I>>,
) -> Self {
Self {
dump_path,
db_path,
update_file_store,
task_store_size,
index_db_size,
env,
index_resolver,
}
}
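    // `run` below assembles the dump in a temporary directory before persisting it:
    // write metadata.json (v5), copy the analytics user id, dump the auth store,
    // the task store, and every index, then tar-gzip the result into
    // `<dump_path>/<uid>.dump`.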
pub async fn run(&self, uid: String) -> Result<()> {
trace!("Performing dump.");
create_dir_all(&self.dump_path).await?;
let temp_dump_dir = tokio::task::spawn_blocking(tempfile::TempDir::new).await??;
let temp_dump_path = temp_dump_dir.path().to_owned();
let meta = MetadataVersion::new_v5(self.index_db_size, self.task_store_size);
let meta_path = temp_dump_path.join(META_FILE_NAME);
let meta_bytes = serde_json::to_vec(&meta)?;
let mut meta_file = tokio::fs::File::create(&meta_path).await?;
meta_file.write_all(&meta_bytes).await?;
analytics::copy_user_id(&self.db_path, &temp_dump_path);
create_dir_all(&temp_dump_path.join("indexes")).await?;
let db_path = self.db_path.clone();
let temp_dump_path_clone = temp_dump_path.clone();
tokio::task::spawn_blocking(move || -> Result<()> {
AuthController::dump(db_path, temp_dump_path_clone)?;
Ok(())
})
.await??;
TaskStore::dump(
self.env.clone(),
&temp_dump_path,
self.update_file_store.clone(),
)
.await?;
self.index_resolver.dump(&temp_dump_path).await?;
let dump_path = self.dump_path.clone();
let dump_path = tokio::task::spawn_blocking(move || -> Result<PathBuf> {
// for now we simply copy the updates/updates_files
// FIXME: We may copy more files than necessary, if new files are added while we are
// performing the dump. We need a way to filter them out.
let temp_dump_file = tempfile::NamedTempFile::new_in(&dump_path)?;
to_tar_gz(temp_dump_path, temp_dump_file.path())
.map_err(|e| DumpError::Internal(e.into()))?;
let dump_path = dump_path.join(uid).with_extension("dump");
temp_dump_file.persist(&dump_path)?;
Ok(dump_path)
})
.await??;
info!("Created dump in {:?}.", dump_path);
Ok(())
}
}
}
#[cfg(test)]
mod test {
use std::path::PathBuf;
use std::sync::Arc;
use milli::heed::Env;
use nelson::Mocker;
use crate::dump::error::Result;
use crate::index_resolver::IndexResolver;
use crate::index_resolver::{index_store::IndexStore, meta_store::IndexMetaStore};
use crate::update_file_store::UpdateFileStore;
use super::*;
pub enum MockDumpHandler<U, I> {
Real(super::real::DumpHandler<U, I>),
Mock(Mocker),
}
impl<U, I> MockDumpHandler<U, I> {
pub fn mock(mocker: Mocker) -> Self {
Self::Mock(mocker)
}
}
impl<U, I> MockDumpHandler<U, I>
where
U: IndexMetaStore + Sync + Send + 'static,
I: IndexStore + Sync + Send + 'static,
{
pub fn new(
dump_path: PathBuf,
db_path: PathBuf,
update_file_store: UpdateFileStore,
task_store_size: usize,
index_db_size: usize,
env: Arc<Env>,
index_resolver: Arc<IndexResolver<U, I>>,
) -> Self {
Self::Real(super::real::DumpHandler::new(
dump_path,
db_path,
update_file_store,
task_store_size,
index_db_size,
env,
index_resolver,
))
}
pub async fn run(&self, uid: String) -> Result<()> {
match self {
DumpHandler::Real(real) => real.run(uid).await,
DumpHandler::Mock(mocker) => unsafe { mocker.get("run").call(uid) },
}
}
}
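// A usage sketch (hypothetical, assuming nelson's `when`/`then` registration API):
//
//     let mocker = Mocker::default();
//     mocker.when::<String, Result<()>>("run").then(|_uid| Ok(()));
//     let handler = MockDumpHandler::<MetaStore, Store>::mock(mocker);
//     // `handler.run(uid).await` now resolves with the stubbed `Ok(())`.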
}

View File

@ -1,3 +1,4 @@
pub mod v2;
pub mod v3;
pub mod v4;
pub mod v5;

View File

@ -5,8 +5,8 @@ use std::path::{Path, PathBuf};
use serde_json::{Deserializer, Value};
use tempfile::NamedTempFile;
use crate::index_controller::dump_actor::compat::{self, v2, v3};
use crate::index_controller::dump_actor::Metadata;
use crate::dump::compat::{self, v2, v3};
use crate::dump::Metadata;
use crate::options::IndexerOpts;
/// The dump v2 reads the dump folder and patches all the needed files to make it compatible with a

View File

@ -9,11 +9,11 @@ use log::info;
use tempfile::tempdir;
use uuid::Uuid;
use crate::index_controller::dump_actor::compat::v3;
use crate::index_controller::dump_actor::Metadata;
use crate::dump::compat::{self, v3};
use crate::dump::Metadata;
use crate::index_resolver::meta_store::{DumpEntry, IndexMeta};
use crate::options::IndexerOpts;
use crate::tasks::task::{Task, TaskId};
use crate::tasks::task::TaskId;
/// dump structure for V3:
/// .
@ -124,7 +124,7 @@ fn patch_updates(
.clone();
serde_json::to_writer(
&mut dst_file,
&Task::from((entry.update, name, task_id as TaskId)),
&compat::v4::Task::from((entry.update, name, task_id as TaskId)),
)?;
dst_file.write_all(b"\n")?;
Ok(())

View File

@ -0,0 +1,103 @@
use std::fs::{self, create_dir_all, File};
use std::io::{BufReader, Write};
use std::path::Path;
use fs_extra::dir::{self, CopyOptions};
use log::info;
use serde_json::{Deserializer, Map, Value};
use tempfile::tempdir;
use uuid::Uuid;
use crate::dump::{compat, Metadata};
use crate::options::IndexerOpts;
use crate::tasks::task::Task;
pub fn load_dump(
meta: Metadata,
src: impl AsRef<Path>,
dst: impl AsRef<Path>,
index_db_size: usize,
meta_env_size: usize,
indexing_options: &IndexerOpts,
) -> anyhow::Result<()> {
info!("Patching dump V4 to dump V5...");
let patched_dir = tempdir()?;
let options = CopyOptions::default();
// Indexes
dir::copy(src.as_ref().join("indexes"), &patched_dir, &options)?;
// Index uuids
dir::copy(src.as_ref().join("index_uuids"), &patched_dir, &options)?;
// Metadata
fs::copy(
src.as_ref().join("metadata.json"),
patched_dir.path().join("metadata.json"),
)?;
// Updates
patch_updates(&src, &patched_dir)?;
// Keys
patch_keys(&src, &patched_dir)?;
super::v5::load_dump(
meta,
&patched_dir,
dst,
index_db_size,
meta_env_size,
indexing_options,
)
}
fn patch_updates(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> anyhow::Result<()> {
let updates_path = src.as_ref().join("updates/data.jsonl");
let output_updates_path = dst.as_ref().join("updates/data.jsonl");
create_dir_all(output_updates_path.parent().unwrap())?;
let updates_file = File::open(updates_path)?;
let mut output_update_file = File::create(output_updates_path)?;
serde_json::Deserializer::from_reader(updates_file)
.into_iter::<compat::v4::Task>()
.try_for_each(|task| -> anyhow::Result<()> {
let task: Task = task?.into();
serde_json::to_writer(&mut output_update_file, &task)?;
output_update_file.write_all(b"\n")?;
Ok(())
})?;
output_update_file.flush()?;
Ok(())
}
fn patch_keys(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> anyhow::Result<()> {
let keys_file_src = src.as_ref().join("keys");
if !keys_file_src.exists() {
return Ok(());
}
fs::create_dir_all(&dst)?;
let keys_file_dst = dst.as_ref().join("keys");
let mut writer = File::create(&keys_file_dst)?;
let reader = BufReader::new(File::open(&keys_file_src)?);
for key in Deserializer::from_reader(reader).into_iter() {
let mut key: Map<String, Value> = key?;
// Generate a new UUID v4 and insert it into the key.
let uid = serde_json::to_value(Uuid::new_v4()).unwrap();
key.insert("uid".to_string(), uid);
serde_json::to_writer(&mut writer, &key)?;
writer.write_all(b"\n")?;
}
Ok(())
}
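// A sketch of the key patching above (field names and values illustrative): a v4 key such as
//   {"description":"...","actions":["*"],"indexes":["*"],"expiresAt":null}
// is rewritten with a freshly generated identifier:
//   {"description":"...","actions":["*"],"indexes":["*"],"expiresAt":null,"uid":"<new uuid v4>"}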

View File

@ -1,12 +1,11 @@
use std::path::Path;
use std::sync::Arc;
use std::{path::Path, sync::Arc};
use log::info;
use meilisearch_auth::AuthController;
use milli::heed::EnvOpenOptions;
use crate::analytics;
use crate::index_controller::dump_actor::Metadata;
use crate::dump::Metadata;
use crate::index_resolver::IndexResolver;
use crate::options::IndexerOpts;
use crate::tasks::TaskStore;
@ -21,7 +20,7 @@ pub fn load_dump(
indexing_options: &IndexerOpts,
) -> anyhow::Result<()> {
info!(
"Loading dump from {}, dump database version: {}, dump version: V4",
"Loading dump from {}, dump database version: {}, dump version: V5",
meta.dump_date, meta.db_version
);

View File

@ -0,0 +1,262 @@
use std::fs::File;
use std::path::Path;
use anyhow::bail;
use log::info;
use serde::{Deserialize, Serialize};
use time::OffsetDateTime;
use tempfile::TempDir;
use crate::compression::from_tar_gz;
use crate::options::IndexerOpts;
use self::loaders::{v2, v3, v4, v5};
pub use handler::{generate_uid, DumpHandler};
mod compat;
pub mod error;
mod handler;
mod loaders;
const META_FILE_NAME: &str = "metadata.json";
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
db_version: String,
index_db_size: usize,
update_db_size: usize,
#[serde(with = "time::serde::rfc3339")]
dump_date: OffsetDateTime,
}
impl Metadata {
pub fn new(index_db_size: usize, update_db_size: usize) -> Self {
Self {
db_version: env!("CARGO_PKG_VERSION").to_string(),
index_db_size,
update_db_size,
dump_date: OffsetDateTime::now_utc(),
}
}
}
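// A serialization sketch (hypothetical test, values illustrative): with the camelCase
// rename and RFC 3339 dates, `metadata.json` looks roughly like
// {"dbVersion":"0.28.0","indexDbSize":...,"updateDbSize":...,"dumpDate":"2022-07-11T14:37:21Z"}.
#[cfg(test)]
mod metadata_serde_test {
    use super::Metadata;

    #[test]
    fn metadata_serializes_to_camel_case() {
        let meta = Metadata::new(100 * 1024 * 1024, 100 * 1024 * 1024);
        let json = serde_json::to_string(&meta).unwrap();
        assert!(json.contains("\"dbVersion\""));
        assert!(json.contains("\"indexDbSize\""));
        assert!(json.contains("\"updateDbSize\""));
        assert!(json.contains("\"dumpDate\""));
    }
}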
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct MetadataV1 {
pub db_version: String,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "dumpVersion")]
pub enum MetadataVersion {
V1(MetadataV1),
V2(Metadata),
V3(Metadata),
V4(Metadata),
// V5 is forward compatible with V4 but not backward compatible.
V5(Metadata),
}
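// With the internal `dumpVersion` tag above, the version marker is embedded directly in
// the metadata object, e.g. (illustrative): {"dumpVersion":"V5","dbVersion":"0.28.0",...}.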
impl MetadataVersion {
pub fn load_dump(
self,
src: impl AsRef<Path>,
dst: impl AsRef<Path>,
index_db_size: usize,
meta_env_size: usize,
indexing_options: &IndexerOpts,
) -> anyhow::Result<()> {
match self {
MetadataVersion::V1(_meta) => {
anyhow::bail!("The version 1 of the dumps is not supported anymore. You can re-export your dump from a version between 0.21 and 0.24, or start fresh from a version 0.25 onwards.")
}
MetadataVersion::V2(meta) => v2::load_dump(
meta,
src,
dst,
index_db_size,
meta_env_size,
indexing_options,
)?,
MetadataVersion::V3(meta) => v3::load_dump(
meta,
src,
dst,
index_db_size,
meta_env_size,
indexing_options,
)?,
MetadataVersion::V4(meta) => v4::load_dump(
meta,
src,
dst,
index_db_size,
meta_env_size,
indexing_options,
)?,
MetadataVersion::V5(meta) => v5::load_dump(
meta,
src,
dst,
index_db_size,
meta_env_size,
indexing_options,
)?,
}
Ok(())
}
pub fn new_v5(index_db_size: usize, update_db_size: usize) -> Self {
let meta = Metadata::new(index_db_size, update_db_size);
Self::V5(meta)
}
pub fn db_version(&self) -> &str {
match self {
Self::V1(meta) => &meta.db_version,
Self::V2(meta) | Self::V3(meta) | Self::V4(meta) | Self::V5(meta) => &meta.db_version,
}
}
pub fn version(&self) -> &'static str {
match self {
MetadataVersion::V1(_) => "V1",
MetadataVersion::V2(_) => "V2",
MetadataVersion::V3(_) => "V3",
MetadataVersion::V4(_) => "V4",
MetadataVersion::V5(_) => "V5",
}
}
pub fn dump_date(&self) -> Option<&OffsetDateTime> {
match self {
MetadataVersion::V1(_) => None,
MetadataVersion::V2(meta)
| MetadataVersion::V3(meta)
| MetadataVersion::V4(meta)
| MetadataVersion::V5(meta) => Some(&meta.dump_date),
}
}
}
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone)]
#[serde(rename_all = "snake_case")]
pub enum DumpStatus {
Done,
InProgress,
Failed,
}
pub fn load_dump(
dst_path: impl AsRef<Path>,
src_path: impl AsRef<Path>,
ignore_dump_if_db_exists: bool,
ignore_missing_dump: bool,
index_db_size: usize,
update_db_size: usize,
indexer_opts: &IndexerOpts,
) -> anyhow::Result<()> {
let empty_db = crate::is_empty_db(&dst_path);
let src_path_exists = src_path.as_ref().exists();
if empty_db && src_path_exists {
let (tmp_src, tmp_dst, meta) = extract_dump(&dst_path, &src_path)?;
meta.load_dump(
tmp_src.path(),
tmp_dst.path(),
index_db_size,
update_db_size,
indexer_opts,
)?;
persist_dump(&dst_path, tmp_dst)?;
Ok(())
} else if !empty_db && !ignore_dump_if_db_exists {
bail!(
"database already exists at {:?}, try to delete it or rename it",
dst_path
.as_ref()
.canonicalize()
.unwrap_or_else(|_| dst_path.as_ref().to_owned())
)
} else if !src_path_exists && !ignore_missing_dump {
bail!("dump doesn't exist at {:?}", src_path.as_ref())
} else {
// there is nothing to do
Ok(())
}
}
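// Branch summary for `load_dump` above:
//   empty db  + dump exists               -> extract, load, persist
//   non-empty + !ignore_dump_if_db_exists -> bail: database already exists
//   no dump   + !ignore_missing_dump      -> bail: dump doesn't exist
//   anything else                         -> no-op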
fn extract_dump(
dst_path: impl AsRef<Path>,
src_path: impl AsRef<Path>,
) -> anyhow::Result<(TempDir, TempDir, MetadataVersion)> {
// Set up a temp directory in the same location as the database, to prevent
// cross-device references.
let temp_path = dst_path
.as_ref()
.parent()
.map(ToOwned::to_owned)
.unwrap_or_else(|| ".".into());
let tmp_src = tempfile::tempdir_in(temp_path)?;
let tmp_src_path = tmp_src.path();
from_tar_gz(&src_path, tmp_src_path)?;
let meta_path = tmp_src_path.join(META_FILE_NAME);
let mut meta_file = File::open(&meta_path)?;
let meta: MetadataVersion = serde_json::from_reader(&mut meta_file)?;
if !dst_path.as_ref().exists() {
std::fs::create_dir_all(dst_path.as_ref())?;
}
let tmp_dst = tempfile::tempdir_in(dst_path.as_ref())?;
info!(
"Loading dump {}, dump database version: {}, dump version: {}",
meta.dump_date()
.map(|t| format!("from {}", t))
.unwrap_or_else(String::new),
meta.db_version(),
meta.version()
);
Ok((tmp_src, tmp_dst, meta))
}
fn persist_dump(dst_path: impl AsRef<Path>, tmp_dst: TempDir) -> anyhow::Result<()> {
let persisted_dump = tmp_dst.into_path();
// Delete everything in the `data.ms` except the tempdir.
if dst_path.as_ref().exists() {
for file in dst_path.as_ref().read_dir().unwrap() {
let file = file.unwrap().path();
if file.file_name() == persisted_dump.file_name() {
continue;
}
if file.is_file() {
std::fs::remove_file(&file)?;
} else {
std::fs::remove_dir_all(&file)?;
}
}
}
// Move the whole content of the tempdir into the `data.ms`.
for file in persisted_dump.read_dir().unwrap() {
let file = file.unwrap().path();
std::fs::rename(&file, &dst_path.as_ref().join(file.file_name().unwrap()))?;
}
// Delete the empty tempdir.
std::fs::remove_dir_all(&persisted_dump)?;
Ok(())
}

View File

@ -1,7 +1,7 @@
use std::error::Error;
use std::fmt;
use meilisearch_error::{Code, ErrorCode};
use meilisearch_types::error::{Code, ErrorCode};
use milli::UserError;
#[derive(Debug)]

View File

@ -27,7 +27,7 @@ const DATA_FILE_NAME: &str = "documents.jsonl";
impl Index {
pub fn dump(&self, path: impl AsRef<Path>) -> Result<()> {
// Acquire a write txn to make sure any ongoing write is finished before we start.
let txn = self.env.write_txn()?;
let txn = self.write_txn()?;
let path = path.as_ref().join(format!("indexes/{}", self.uuid));
create_dir_all(&path)?;

View File

@ -1,6 +1,7 @@
use std::error::Error;
use meilisearch_error::{internal_error, Code, ErrorCode};
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::internal_error;
use serde_json::Value;
use crate::{error::MilliError, update_file_store};

View File

@ -1,24 +1,25 @@
use std::collections::{BTreeSet, HashSet};
use std::collections::BTreeSet;
use std::fs::create_dir_all;
use std::marker::PhantomData;
use std::ops::Deref;
use std::path::Path;
use std::sync::Arc;
use walkdir::WalkDir;
use fst::IntoStreamer;
use milli::heed::{EnvOpenOptions, RoTxn};
use milli::heed::{CompactionOption, EnvOpenOptions, RoTxn};
use milli::update::{IndexerConfig, Setting};
use milli::{obkv_to_json, FieldDistribution, FieldId};
use milli::{obkv_to_json, FieldDistribution, DEFAULT_VALUES_PER_FACET};
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use time::OffsetDateTime;
use uuid::Uuid;
use crate::EnvSizer;
use crate::index::search::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
use super::error::IndexError;
use super::error::Result;
use super::updates::{MinWordSizeTyposSetting, TypoSettings};
use super::updates::{FacetingSettings, MinWordSizeTyposSetting, PaginationSettings, TypoSettings};
use super::{Checked, Settings};
pub type Document = Map<String, Value>;
@ -175,12 +176,10 @@ impl Index {
two_typos: Setting::Set(self.min_word_len_two_typos(txn)?),
};
let disabled_words = self
.exact_words(txn)?
.into_stream()
.into_strs()?
.into_iter()
.collect();
let disabled_words = match self.exact_words(txn)? {
Some(fst) => fst.into_stream().into_strs()?.into_iter().collect(),
None => BTreeSet::new(),
};
let disabled_attributes = self
.exact_attributes(txn)?
@ -195,6 +194,20 @@ impl Index {
disable_on_attributes: Setting::Set(disabled_attributes),
};
let faceting = FacetingSettings {
max_values_per_facet: Setting::Set(
self.max_values_per_facet(txn)?
.unwrap_or(DEFAULT_VALUES_PER_FACET),
),
};
let pagination = PaginationSettings {
max_total_hits: Setting::Set(
self.pagination_max_total_hits(txn)?
.unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS),
),
};
Ok(Settings {
displayed_attributes: match displayed_attributes {
Some(attrs) => Setting::Set(attrs),
@ -214,46 +227,55 @@ impl Index {
},
synonyms: Setting::Set(synonyms),
typo_tolerance: Setting::Set(typo_tolerance),
faceting: Setting::Set(faceting),
pagination: Setting::Set(pagination),
_kind: PhantomData,
})
}
/// Returns the total number of documents contained in the index along with the selected documents.
pub fn retrieve_documents<S: AsRef<str>>(
&self,
offset: usize,
limit: usize,
attributes_to_retrieve: Option<Vec<S>>,
) -> Result<Vec<Map<String, Value>>> {
) -> Result<(u64, Vec<Document>)> {
let txn = self.read_txn()?;
let fields_ids_map = self.fields_ids_map(&txn)?;
let fields_to_display =
self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?;
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
let iter = self.documents.range(&txn, &(..))?.skip(offset).take(limit);
let iter = self.all_documents(&txn)?.skip(offset).take(limit);
let mut documents = Vec::new();
for entry in iter {
let (_id, obkv) = entry?;
let object = obkv_to_json(&fields_to_display, &fields_ids_map, obkv)?;
documents.push(object);
let document = obkv_to_json(&all_fields, &fields_ids_map, obkv)?;
let document = match &attributes_to_retrieve {
Some(attributes_to_retrieve) => permissive_json_pointer::select_values(
&document,
attributes_to_retrieve.iter().map(|s| s.as_ref()),
),
None => document,
};
documents.push(document);
}
Ok(documents)
let number_of_documents = self.number_of_documents(&txn)?;
Ok((number_of_documents, documents))
}
pub fn retrieve_document<S: AsRef<str>>(
&self,
doc_id: String,
attributes_to_retrieve: Option<Vec<S>>,
) -> Result<Map<String, Value>> {
) -> Result<Document> {
let txn = self.read_txn()?;
let fields_ids_map = self.fields_ids_map(&txn)?;
let fields_to_display =
self.fields_to_display(&txn, &attributes_to_retrieve, &fields_ids_map)?;
let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
let internal_id = self
.external_documents_ids(&txn)?
@ -267,36 +289,25 @@ impl Index {
.map(|(_, d)| d)
.ok_or(IndexError::DocumentNotFound(doc_id))?;
let document = obkv_to_json(&fields_to_display, &fields_ids_map, document)?;
let document = obkv_to_json(&all_fields, &fields_ids_map, document)?;
let document = match &attributes_to_retrieve {
Some(attributes_to_retrieve) => permissive_json_pointer::select_values(
&document,
attributes_to_retrieve.iter().map(|s| s.as_ref()),
),
None => document,
};
Ok(document)
}
pub fn size(&self) -> u64 {
self.env.size()
}
fn fields_to_display<S: AsRef<str>>(
&self,
txn: &milli::heed::RoTxn,
attributes_to_retrieve: &Option<Vec<S>>,
fields_ids_map: &milli::FieldsIdsMap,
) -> Result<Vec<FieldId>> {
let mut displayed_fields_ids = match self.displayed_fields_ids(txn)? {
Some(ids) => ids.into_iter().collect::<Vec<_>>(),
None => fields_ids_map.iter().map(|(id, _)| id).collect(),
};
let attributes_to_retrieve_ids = match attributes_to_retrieve {
Some(attrs) => attrs
.iter()
.filter_map(|f| fields_ids_map.id(f.as_ref()))
.collect::<HashSet<_>>(),
None => fields_ids_map.iter().map(|(id, _)| id).collect(),
};
displayed_fields_ids.retain(|fid| attributes_to_retrieve_ids.contains(fid));
Ok(displayed_fields_ids)
WalkDir::new(self.inner.path())
.into_iter()
.filter_map(|entry| entry.ok())
.filter_map(|entry| entry.metadata().ok())
.filter(|metadata| metadata.is_file())
.fold(0, |acc, m| acc + m.len())
}
pub fn snapshot(&self, path: impl AsRef<Path>) -> Result<()> {
@ -304,9 +315,7 @@ impl Index {
create_dir_all(&dst)?;
dst.push("data.mdb");
let _txn = self.write_txn()?;
self.inner
.env
.copy_to_path(dst, milli::heed::CompactionOption::Enabled)?;
self.inner.copy_to_path(dst, CompactionOption::Enabled)?;
Ok(())
}
}

Some files were not shown because too many files have changed in this diff.