mirror of https://github.com/meilisearch/MeiliSearch
synced 2024-11-29 16:24:26 +01:00

commit 90bc230820
Merge remote-tracking branch 'origin/main' into search-refactor

Conflicts                              | resolution
---------------------------------------|-----------
Cargo.lock                             | added mimalloc
Cargo.toml                             | took origin/main version
milli/src/search/criteria/exactness.rs | deleted after checking it was only clippy changes
milli/src/search/query_tree.rs         | deleted after checking it was only clippy changes
@@ -2,3 +2,4 @@ target
 Dockerfile
 .dockerignore
 .gitignore
+**/.git
.github/uffizzi/Dockerfile (vendored, 19 lines deleted)
@@ -1,19 +0,0 @@
-# Run
-FROM uffizzi/ttyd:alpine
-
-ENV MEILI_HTTP_ADDR 0.0.0.0:7700
-ENV MEILI_SERVER_PROVIDER docker
-ENV MEILI_NO_ANALYTICS true
-
-RUN apk update --quiet \
-    && apk add -q --no-cache libgcc tini curl
-
-COPY target/x86_64-unknown-linux-musl/release/meilisearch /bin/meilisearch
-RUN ln -s /bin/meilisearch /meilisearch
-
-WORKDIR /meili_data
-
-EXPOSE 7700/tcp
-
-ENTRYPOINT ["tini", "--"]
-CMD ["ttyd", "/bin/zsh"]
.github/uffizzi/docker-compose.uffizzi.yml (vendored, 26 lines deleted)
@@ -1,26 +0,0 @@
-version: "3"
-
-x-uffizzi:
-  ingress:
-    service: nginx
-    port: 8081
-
-services:
-  meilisearch:
-    image: "${MEILISEARCH_IMAGE}"
-    restart: unless-stopped
-    ports:
-      - "7681:7681"
-      - "7700:7700"
-    deploy:
-      resources:
-        limits:
-          memory: 500M
-
-  nginx:
-    image: nginx:alpine
-    restart: unless-stopped
-    ports:
-      - "8081:8081"
-    volumes:
-      - ./.github/uffizzi/nginx:/etc/nginx
.github/uffizzi/nginx/nginx.conf (vendored, 28 lines deleted)
@@ -1,28 +0,0 @@
-
-events {
-  worker_connections 4096; ## Default: 1024
-}
-
-http {
-  map $http_upgrade $connection_upgrade {
-    default upgrade;
-    '' close;
-  }
-
-  server {
-    listen 8081;
-
-    location / {
-      proxy_pass http://localhost:7681;
-      proxy_http_version 1.1;
-      proxy_set_header Upgrade $http_upgrade;
-      proxy_set_header Connection $connection_upgrade;
-    }
-
-    location /meilisearch/ {
-      # rewrite /meilisearch/(.*) /$1 break;
-      proxy_pass http://localhost:7700/;
-    }
-  }
-}
-
.github/workflows/publish-docker-images.yml (vendored, 7 lines changed)
@@ -58,9 +58,13 @@ jobs:
 
       - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
+       with:
+         platforms: linux/amd64,linux/arm64
 
       - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
+       with:
+         platforms: linux/amd64,linux/arm64
 
       - name: Login to Docker Hub
        uses: docker/login-action@v2
@@ -88,10 +92,13 @@ jobs:
          push: true
          platforms: linux/amd64,linux/arm64
          tags: ${{ steps.meta.outputs.tags }}
+         builder: ${{ steps.buildx.outputs.name }}
          build-args: |
            COMMIT_SHA=${{ github.sha }}
            COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
            GIT_TAG=${{ github.ref_name }}
+         cache-from: type=gha
+         cache-to: type=gha,mode=max
 
      # /!\ Don't touch this without checking with Cloud team
      - name: Send CI information to Cloud team
.github/workflows/sdks-tests.yml (vendored, new file, 200 lines)
@@ -0,0 +1,200 @@
+# If any test fails, the engine team should ensure the "breaking" changes are expected and contact the integration team
+name: SDKs tests
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "0 6 * * MON" # Every Monday at 6:00AM
+
+env:
+  MEILI_MASTER_KEY: 'masterKey'
+  MEILI_NO_ANALYTICS: 'true'
+
+jobs:
+
+  meilisearch-js-tests:
+    name: JS SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:nightly
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-js
+      - name: Setup node
+        uses: actions/setup-node@v3
+        with:
+          cache: 'yarn'
+      - name: Install dependencies
+        run: yarn --dev
+      - name: Run tests
+        run: yarn test
+      - name: Build project
+        run: yarn build
+      - name: Run ESM env
+        run: yarn test:env:esm
+      - name: Run Node.js env
+        run: yarn test:env:nodejs
+      - name: Run node typescript env
+        run: yarn test:env:node-ts
+      - name: Run Browser env
+        run: yarn test:env:browser
+
+  instant-meilisearch-tests:
+    name: instant-meilisearch tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:nightly
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/instant-meilisearch
+      - name: Setup node
+        uses: actions/setup-node@v3
+        with:
+          cache: yarn
+      - name: Install dependencies
+        run: yarn install
+      - name: Run tests
+        run: yarn test
+      - name: Build all the playgrounds and the packages
+        run: yarn build
+
+  meilisearch-php-tests:
+    name: PHP SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:nightly
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-php
+      - name: Install PHP
+        uses: shivammathur/setup-php@v2
+        with:
+          coverage: none
+      - name: Validate composer.json and composer.lock
+        run: composer validate
+      - name: Install dependencies
+        run: |
+          composer remove --dev friendsofphp/php-cs-fixer --no-update --no-interaction
+          composer update --prefer-dist --no-progress
+      - name: Run test suite - default HTTP client (Guzzle 7)
+        run: |
+          sh scripts/tests.sh
+          composer remove --dev guzzlehttp/guzzle http-interop/http-factory-guzzle
+
+  meilisearch-python-tests:
+    name: Python SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:nightly
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-python
+      - name: Set up Python
+        uses: actions/setup-python@v4
+      - name: Install pipenv
+        uses: dschep/install-pipenv-action@v1
+      - name: Install dependencies
+        run: pipenv install --dev --python=${{ matrix.python-version }}
+      - name: Test with pytest
+        run: pipenv run pytest
+
+  meilisearch-go-tests:
+    name: Go SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:nightly
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - name: Set up Go
+        uses: actions/setup-go@v3
+        with:
+          go-version: stable
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-go
+      - name: Get dependencies
+        run: |
+          go get -v -t -d ./...
+          if [ -f Gopkg.toml ]; then
+            curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh
+            dep ensure
+          fi
+      - name: Run integration tests
+        run: go test -v ./...
+
+  meilisearch-ruby-tests:
+    name: Ruby SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:nightly
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-ruby
+      - name: Set up Ruby 3
+        uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: 3
+      - name: Install ruby dependencies
+        run: bundle install --with test
+      - name: Run test suite
+        run: bundle exec rspec
+
+  meilisearch-rust-tests:
+    name: Rust SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:nightly
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-rust
+      - name: Build
+        run: cargo build --verbose
+      - name: Run tests
+        run: cargo test --verbose
.github/workflows/test-suite.yml (vendored, 5 lines changed)
@@ -138,7 +138,7 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.67.0
+          toolchain: 1.69.0
           override: true
           components: clippy
       - name: Cache dependencies
@@ -147,8 +147,7 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
          command: clippy
-         # allow unlined_format_args https://github.com/rust-lang/rust-clippy/issues/10087
-         args: --all-targets -- --deny warnings --allow clippy::uninlined_format_args
+         args: --all-targets -- --deny warnings
 
   fmt:
     name: Run Rustfmt
.github/workflows/uffizzi-build.yml (vendored, 120 lines deleted)
@@ -1,120 +0,0 @@
-name: Uffizzi - Build PR Image
-on:
-  pull_request:
-    types: [opened,synchronize,reopened,closed]
-
-jobs:
-  build-meilisearch:
-    name: Build and push `meilisearch`
-    runs-on: ubuntu-latest
-    outputs:
-      tags: ${{ steps.meta.outputs.tags }}
-    if: ${{ github.event.action != 'closed' }}
-    steps:
-      - name: checkout
-        uses: actions/checkout@v3
-
-      - run: sudo apt-get install musl-tools
-
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
-          target: x86_64-unknown-linux-musl
-
-      - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.2.1
-
-      - name: Run cargo check without any default features
-        uses: actions-rs/cargo@v1
-        with:
-          command: build
-          args: --target x86_64-unknown-linux-musl --release
-
-      - name: Remove dockerignore so we can use the target folder in our docker build
-        run: rm -f .dockerignore
-
-      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-
-      - name: Generate UUID image name
-        id: uuid
-        run: echo "UUID_TAG=$(uuidgen)" >> $GITHUB_ENV
-
-      - name: Docker metadata
-        id: meta
-        uses: docker/metadata-action@v4
-        with:
-          images: registry.uffizzi.com/${{ env.UUID_TAG }}
-          tags: |
-            type=raw,value=60d
-
-      - name: Build Image
-        uses: docker/build-push-action@v4
-        with:
-          context: ./
-          file: .github/uffizzi/Dockerfile
-          tags: ${{ steps.meta.outputs.tags }}
-          labels: ${{ steps.meta.outputs.labels }}
-          push: true
-          cache-from: type=gha
-          cache-to: type=gha,mode=max
-
-  render-compose-file:
-    name: Render Docker Compose File
-    # Pass output of this workflow to another triggered by `workflow_run` event.
-    runs-on: ubuntu-latest
-    needs:
-      - build-meilisearch
-    outputs:
-      compose-file-cache-key: ${{ env.COMPOSE_FILE_HASH }}
-    steps:
-      - name: Checkout git repo
-        uses: actions/checkout@v3
-      - name: Render Compose File
-        run: |
-          MEILISEARCH_IMAGE=$(echo ${{ needs.build-meilisearch.outputs.tags }})
-          export MEILISEARCH_IMAGE
-          # Render simple template from environment variables.
-          envsubst < .github/uffizzi/docker-compose.uffizzi.yml > docker-compose.rendered.yml
-          cat docker-compose.rendered.yml
-      - name: Upload Rendered Compose File as Artifact
-        uses: actions/upload-artifact@v3
-        with:
-          name: preview-spec
-          path: docker-compose.rendered.yml
-          retention-days: 2
-      - name: Serialize PR Event to File
-        run: |
-          cat << EOF > event.json
-          ${{ toJSON(github.event) }}
-
-          EOF
-      - name: Upload PR Event as Artifact
-        uses: actions/upload-artifact@v3
-        with:
-          name: preview-spec
-          path: event.json
-          retention-days: 2
-
-  delete-preview:
-    name: Call for Preview Deletion
-    runs-on: ubuntu-latest
-    if: ${{ github.event.action == 'closed' }}
-    steps:
-      # If this PR is closing, we will not render a compose file nor pass it to the next workflow.
-      - name: Serialize PR Event to File
-        run: |
-          cat << EOF > event.json
-          ${{ toJSON(github.event) }}
-
-          EOF
-      - name: Upload PR Event as Artifact
-        uses: actions/upload-artifact@v3
-        with:
-          name: preview-spec
-          path: event.json
-          retention-days: 2
.github/workflows/uffizzi-preview-deploy.yml (vendored, 103 lines deleted)
@@ -1,103 +0,0 @@
-name: Uffizzi - Deploy Preview
-
-on:
-  workflow_run:
-    workflows:
-      - "Uffizzi - Build PR Image"
-    types:
-      - completed
-
-jobs:
-  cache-compose-file:
-    name: Cache Compose File
-    runs-on: ubuntu-latest
-    if: ${{ github.event.workflow_run.conclusion == 'success' }}
-    outputs:
-      compose-file-cache-key: ${{ env.COMPOSE_FILE_HASH }}
-      pr-number: ${{ env.PR_NUMBER }}
-      expected-url: ${{ env.EXPECTED_URL }}
-    steps:
-      - name: 'Download artifacts'
-        # Fetch output (zip archive) from the workflow run that triggered this workflow.
-        uses: actions/github-script@v6
-        with:
-          script: |
-            let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              run_id: context.payload.workflow_run.id,
-            });
-            let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
-              return artifact.name == "preview-spec"
-            })[0];
-            let download = await github.rest.actions.downloadArtifact({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              artifact_id: matchArtifact.id,
-              archive_format: 'zip',
-            });
-            let fs = require('fs');
-            fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/preview-spec.zip`, Buffer.from(download.data));
-
-      - name: 'Unzip artifact'
-        run: unzip preview-spec.zip
-
-      - name: Read Event into ENV
-        run: |
-          echo 'EVENT_JSON<<EOF' >> $GITHUB_ENV
-          cat event.json >> $GITHUB_ENV
-          echo 'EOF' >> $GITHUB_ENV
-
-      - name: Hash Rendered Compose File
-        id: hash
-        # If the previous workflow was triggered by a PR close event, we will not have a compose file artifact.
-        if: ${{ fromJSON(env.EVENT_JSON).action != 'closed' }}
-        run: echo "COMPOSE_FILE_HASH=$(md5sum docker-compose.rendered.yml | awk '{ print $1 }')" >> $GITHUB_ENV
-
-      - name: Cache Rendered Compose File
-        if: ${{ fromJSON(env.EVENT_JSON).action != 'closed' }}
-        uses: actions/cache@v3
-        with:
-          path: docker-compose.rendered.yml
-          key: ${{ env.COMPOSE_FILE_HASH }}
-
-      - name: Read PR Number From Event Object
-        id: pr
-        run: echo "PR_NUMBER=${{ fromJSON(env.EVENT_JSON).number }}" >> $GITHUB_ENV
-
-      - name: DEBUG - Print Job Outputs
-        if: ${{ runner.debug }}
-        run: |
-          echo "PR number: ${{ env.PR_NUMBER }}"
-          echo "Compose file hash: ${{ env.COMPOSE_FILE_HASH }}"
-          cat event.json
-
-      - name: Add expected URL env var
-        if: ${{ runner.debug }}
-        run: |
-          REPO=$(echo ${{ github.repository }} | sed 's/\./+/g')
-          echo "EXPECTED_URL=${{ inputs.server }}/github.com/$REPO/pull/${{ env.PR_NUMBER }}" >> $GITHUB_ENV
-
-  deploy-uffizzi-preview:
-    name: Use Remote Workflow to Preview on Uffizzi
-    needs:
-      - cache-compose-file
-    uses: UffizziCloud/preview-action/.github/workflows/reusable.yaml@v2
-    with:
-      # If this workflow was triggered by a PR close event, cache-key will be an empty string
-      # and this reusable workflow will delete the preview deployment.
-      compose-file-cache-key: ${{ needs.cache-compose-file.outputs.compose-file-cache-key }}
-      compose-file-cache-path: docker-compose.rendered.yml
-      server: https://app.uffizzi.com
-      pr-number: ${{ needs.cache-compose-file.outputs.pr-number }}
-      description: |
-        The meilisearch preview environment contains a web terminal from where you can run the
-        `meilisearch` command. You should be able to access this instance of meilisearch running in
-        the preview from the link Meilisearch Endpoint link given below.
-
-        Web Terminal Endpoint : <uffizzi-url>
-        Meilisearch Endpoint : <uffizzi-url>/meilisearch
-    permissions:
-      contents: read
-      pull-requests: write
-      id-token: write
@@ -18,7 +18,7 @@ If Meilisearch does not offer optimized support for your language, please consid
 
 ## Assumptions
 
-1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
+1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests (PR)](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests) workflow.**
 2. **You've read the Meilisearch [documentation](https://docs.meilisearch.com).**
 3. **You know about the [Meilisearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
    Please use this for help.**
Cargo.lock (generated, 1141 lines changed): file diff suppressed because it is too large.

Cargo.toml (12 lines changed)
@@ -13,15 +13,12 @@ members = [
     "filter-parser",
     "flatten-serde-json",
     "json-depth-checker",
-    "benchmarks",
+    "benchmarks"
 ]
 
 [workspace.package]
-version = "1.0.0"
-authors = [
-    "Quentin de Quelen <quentin@dequelen.me>",
-    "Clément Renault <clement@meilisearch.com>",
-]
+version = "1.1.1"
+authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"
 readme = "README.md"
@@ -29,8 +26,7 @@ edition = "2021"
 license = "MIT"
 
 [profile.release]
-# codegen-units = 1
-debug = true
+codegen-units = 1
 
 [profile.dev.package.flate2]
 opt-level = 3
@@ -1,3 +1,4 @@
+# syntax=docker/dockerfile:1.4
 # Compile
 FROM rust:alpine3.16 AS compiler
 
@@ -11,7 +12,7 @@ ARG GIT_TAG
 ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
 ENV RUSTFLAGS="-C target-feature=-crt-static"
 
-COPY . .
+COPY --link . .
 RUN set -eux; \
     apkArch="$(apk --print-arch)"; \
     if [ "$apkArch" = "aarch64" ]; then \
@@ -30,7 +31,7 @@ RUN apk update --quiet \
 
 # add meilisearch to the `/bin` so you can run it from anywhere and it's easy
 # to find.
-COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch
+COPY --from=compiler --link /meilisearch/target/release/meilisearch /bin/meilisearch
 # To stay compatible with the older version of the container (pre v0.27.0) we're
 # going to symlink the meilisearch binary in the path to `/meilisearch`
 RUN ln -s /bin/meilisearch /meilisearch
README.md (43 lines changed)
@@ -7,8 +7,8 @@
   <a href="https://www.meilisearch.com">Website</a> |
   <a href="https://roadmap.meilisearch.com/tabs/1-under-consideration">Roadmap</a> |
   <a href="https://blog.meilisearch.com">Blog</a> |
-  <a href="https://docs.meilisearch.com">Documentation</a> |
-  <a href="https://docs.meilisearch.com/faq/">FAQ</a> |
+  <a href="https://meilisearch.com/docs">Documentation</a> |
+  <a href="https://meilisearch.com/docs/faq">FAQ</a> |
   <a href="https://discord.meilisearch.com">Discord</a>
 </h4>
 
@@ -36,27 +36,27 @@ Meilisearch helps you shape a delightful search experience in a snap, offering f
 ## ✨ Features
 
 - **Search-as-you-type:** find search results in less than 50 milliseconds
-- **[Typo tolerance](https://docs.meilisearch.com/learn/getting_started/customizing_relevancy.html#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
+- **[Typo tolerance](https://meilisearch.com/docs/learn/getting_started/customizing_relevancy#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
-- **[Filtering and faceted search](https://docs.meilisearch.com/learn/advanced/filtering_and_faceted_search.html):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
+- **[Filtering](https://meilisearch.com/docs/learn/advanced/filtering) and [faceted search](https://meilisearch.com/docs/learn/advanced/faceted_search):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
-- **[Sorting](https://docs.meilisearch.com/learn/advanced/sorting.html):** sort results based on price, date, or pretty much anything else your users need
+- **[Sorting](https://meilisearch.com/docs/learn/advanced/sorting):** sort results based on price, date, or pretty much anything else your users need
-- **[Synonym support](https://docs.meilisearch.com/learn/getting_started/customizing_relevancy.html#synonyms):** configure synonyms to include more relevant content in your search results
+- **[Synonym support](https://meilisearch.com/docs/learn/getting_started/customizing_relevancy#synonyms):** configure synonyms to include more relevant content in your search results
-- **[Geosearch](https://docs.meilisearch.com/learn/advanced/geosearch.html):** filter and sort documents based on geographic data
+- **[Geosearch](https://meilisearch.com/docs/learn/advanced/geosearch):** filter and sort documents based on geographic data
-- **[Extensive language support](https://docs.meilisearch.com/learn/what_is_meilisearch/language.html):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
+- **[Extensive language support](https://meilisearch.com/docs/learn/what_is_meilisearch/language):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
-- **[Security management](https://docs.meilisearch.com/learn/security/master_api_keys.html):** control which users can access what data with API keys that allow fine-grained permissions handling
+- **[Security management](https://meilisearch.com/docs/learn/security/master_api_keys):** control which users can access what data with API keys that allow fine-grained permissions handling
-- **[Multi-Tenancy](https://docs.meilisearch.com/learn/security/tenant_tokens.html):** personalize search results for any number of application tenants
+- **[Multi-Tenancy](https://meilisearch.com/docs/learn/security/tenant_tokens):** personalize search results for any number of application tenants
 - **Highly Customizable:** customize Meilisearch to your specific needs or use our out-of-the-box and hassle-free presets
-- **[RESTful API](https://docs.meilisearch.com/reference/api/overview.html):** integrate Meilisearch in your technical stack with our plugins and SDKs
+- **[RESTful API](https://meilisearch.com/docs/reference/api/overview):** integrate Meilisearch in your technical stack with our plugins and SDKs
 - **Easy to install, deploy, and maintain**
 
 ## 📖 Documentation
 
-You can consult Meilisearch's documentation at [https://docs.meilisearch.com](https://docs.meilisearch.com/).
+You can consult Meilisearch's documentation at [https://meilisearch.com/docs](https://meilisearch.com/docs/).
 
 ## 🚀 Getting started
 
-For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://docs.meilisearch.com/learn/getting_started/quick_start.html) guide.
+For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://meilisearch.com/docs/learn/getting_started/quick_start) guide.
 
-You may also want to check out [Meilisearch 101](https://docs.meilisearch.com/learn/getting_started/filtering_and_sorting.html) for an introduction to some of Meilisearch's most popular features.
+You may also want to check out [Meilisearch 101](https://meilisearch.com/docs/learn/getting_started/filtering_and_sorting) for an introduction to some of Meilisearch's most popular features.
 
 ## ☁️ Meilisearch cloud
 
@@ -66,25 +66,25 @@ Let us manage your infrastructure so you can focus on integrating a great search
 
 Install one of our SDKs in your project for seamless integration between Meilisearch and your favorite language or framework!
 
-Take a look at the complete [Meilisearch integration list](https://docs.meilisearch.com/learn/what_is_meilisearch/sdks.html).
+Take a look at the complete [Meilisearch integration list](https://meilisearch.com/docs/learn/what_is_meilisearch/sdks).
 
-[![Logos belonging to different languages and frameworks supported by Meilisearch, including React, Ruby on Rails, Go, Rust, and PHP](assets/integrations.png)](https://docs.meilisearch.com/learn/what_is_meilisearch/sdks.html)
+[![Logos belonging to different languages and frameworks supported by Meilisearch, including React, Ruby on Rails, Go, Rust, and PHP](assets/integrations.png)](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks)
 
 ## ⚙️ Advanced usage
 
-Experienced users will want to keep our [API Reference](https://docs.meilisearch.com/reference/api) close at hand.
+Experienced users will want to keep our [API Reference](https://www.meilisearch.com/docs/reference/api/overview) close at hand.
 
-We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://docs.meilisearch.com/learn/advanced/filtering_and_faceted_search.html), [sorting](https://docs.meilisearch.com/learn/advanced/sorting.html), [geosearch](https://docs.meilisearch.com/learn/advanced/geosearch.html), [API keys](https://docs.meilisearch.com/learn/security/master_api_keys.html), and [tenant tokens](https://docs.meilisearch.com/learn/security/tenant_tokens.html).
+We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://meilisearch.com/docs/learn/advanced/filtering), [sorting](https://meilisearch.com/docs/learn/advanced/sorting), [geosearch](https://meilisearch.com/docs/learn/advanced/geosearch), [API keys](https://meilisearch.com/docs/learn/security/master_api_keys), and [tenant tokens](https://meilisearch.com/docs/learn/security/tenant_tokens).
 
-Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://docs.meilisearch.com/learn/core_concepts/documents.html) and [indexes](https://docs.meilisearch.com/learn/core_concepts/indexes.html).
+Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://meilisearch.com/docs/learn/core_concepts/documents) and [indexes](https://meilisearch.com/docs/learn/core_concepts/indexes).
 
 ## 📊 Telemetry
 
-Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html#how-to-disable-data-collection) whenever you want.
+Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://meilisearch.com/docs/learn/what_is_meilisearch/telemetry#how-to-disable-data-collection) whenever you want.
 
 To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.
 
-If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html) of our documentation.
+If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://meilisearch.com/docs/learn/what_is_meilisearch/telemetry) of our documentation.
 
 ## 📫 Get in touch!
 
@@ -97,7 +97,6 @@ Meilisearch is a search engine created by [Meili](https://www.welcometothejungle
 - For feature requests, please visit our [product repository](https://github.com/meilisearch/product/discussions)
 - Found a bug? Open an [issue](https://github.com/meilisearch/meilisearch/issues)!
 - Want to be part of our Discord community? [Join us!](https://discord.gg/meilisearch)
-- For everything else, please check [this page listing some of the other places where you can find us](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html)
 
 Thank you for your support!
 
@@ -11,11 +11,11 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.65"
+anyhow = "1.0.70"
-csv = "1.1.6"
+csv = "1.2.1"
 milli = { path = "../milli", default-features = false }
-mimalloc = { version = "0.1.29", default-features = false }
+mimalloc = { version = "0.1.36", default-features = false }
-serde_json = { version = "1.0.85", features = ["preserve_order"] }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
 
 [dev-dependencies]
 criterion = { version = "0.4.0", features = ["html_reports"] }
@@ -24,11 +24,11 @@ rand_chacha = "0.3.1"
 roaring = "0.10.1"
 
 [build-dependencies]
-anyhow = "1.0.65"
+anyhow = "1.0.70"
-bytes = "1.2.1"
+bytes = "1.4.0"
 convert_case = "0.6.0"
-flate2 = "1.0.24"
+flate2 = "1.0.25"
-reqwest = { version = "0.11.12", features = ["blocking", "rustls-tls"], default-features = false }
+reqwest = { version = "0.11.16", features = ["blocking", "rustls-tls"], default-features = false }
 
 [features]
 default = ["milli/default"]
@@ -103,7 +103,7 @@ not_available_failure_usage() {
     printf "$RED%s\n$DEFAULT" 'ERROR: Meilisearch binary is not available for your OS distribution or your architecture yet.'
     echo ''
     echo 'However, you can easily compile the binary from the source files.'
-    echo 'Follow the steps at the page ("Source" tab): https://docs.meilisearch.com/learn/getting_started/installation.html'
+    echo 'Follow the steps at the page ("Source" tab): https://www.meilisearch.com/docs/learn/getting_started/installation'
 }
 
 fetch_release_failure_usage() {
@@ -11,22 +11,22 @@ readme.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.65"
+anyhow = "1.0.70"
-flate2 = "1.0.22"
+flate2 = "1.0.25"
-http = "0.2.8"
+http = "0.2.9"
 log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-once_cell = "1.15.0"
+once_cell = "1.17.1"
-regex = "1.6.0"
+regex = "1.7.3"
-roaring = { version = "0.10.0", features = ["serde"] }
+roaring = { version = "0.10.1", features = ["serde"] }
-serde = { version = "1.0.136", features = ["derive"] }
+serde = { version = "1.0.160", features = ["derive"] }
-serde_json = { version = "1.0.85", features = ["preserve_order"] }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
 tar = "0.4.38"
-tempfile = "3.3.0"
+tempfile = "3.5.0"
-thiserror = "1.0.30"
+thiserror = "1.0.40"
-time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
-uuid = { version = "1.1.2", features = ["serde", "v4"] }
+uuid = { version = "1.3.1", features = ["serde", "v4"] }
 
 [dev-dependencies]
 big_s = "1.0.2"
@@ -25,7 +25,6 @@ impl CompatV2ToV3 {
             CompatV2ToV3::Compat(compat) => compat.index_uuid(),
         };
         v2_uuids
-            .into_iter()
             .into_iter()
             .map(|index| v3::meta::IndexUuid { uid: index.uid, uuid: index.uuid })
             .collect()
@@ -11,9 +11,9 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-tempfile = "3.3.0"
+tempfile = "3.5.0"
-thiserror = "1.0.30"
+thiserror = "1.0.40"
-uuid = { version = "1.1.2", features = ["serde", "v4"] }
+uuid = { version = "1.3.1", features = ["serde", "v4"] }
 
 [dev-dependencies]
-faux = "0.1.8"
+faux = "0.1.9"
@@ -12,8 +12,8 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-nom = "7.1.1"
+nom = "7.1.3"
-nom_locate = "4.0.0"
+nom_locate = "4.1.0"
 
 [dev-dependencies]
-insta = "1.21.0"
+insta = "1.29.0"
@@ -20,6 +20,8 @@ pub enum Condition<'a> {
     GreaterThanOrEqual(Token<'a>),
     Equal(Token<'a>),
     NotEqual(Token<'a>),
+    Null,
+    Empty,
     Exists,
     LowerThan(Token<'a>),
     LowerThanOrEqual(Token<'a>),
@@ -44,6 +46,38 @@ pub fn parse_condition(input: Span) -> IResult<FilterCondition> {
     Ok((input, condition))
 }
 
+/// null = value "IS" WS+ "NULL"
+pub fn parse_is_null(input: Span) -> IResult<FilterCondition> {
+    let (input, key) = parse_value(input)?;
+
+    let (input, _) = tuple((tag("IS"), multispace1, tag("NULL")))(input)?;
+    Ok((input, FilterCondition::Condition { fid: key, op: Null }))
+}
+
+/// null = value "IS" WS+ "NOT" WS+ "NULL"
+pub fn parse_is_not_null(input: Span) -> IResult<FilterCondition> {
+    let (input, key) = parse_value(input)?;
+
+    let (input, _) = tuple((tag("IS"), multispace1, tag("NOT"), multispace1, tag("NULL")))(input)?;
+    Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Null }))))
+}
+
+/// empty = value "IS" WS+ "EMPTY"
+pub fn parse_is_empty(input: Span) -> IResult<FilterCondition> {
+    let (input, key) = parse_value(input)?;
+
+    let (input, _) = tuple((tag("IS"), multispace1, tag("EMPTY")))(input)?;
+    Ok((input, FilterCondition::Condition { fid: key, op: Empty }))
+}
+
+/// empty = value "IS" WS+ "NOT" WS+ "EMPTY"
+pub fn parse_is_not_empty(input: Span) -> IResult<FilterCondition> {
+    let (input, key) = parse_value(input)?;
+
+    let (input, _) = tuple((tag("IS"), multispace1, tag("NOT"), multispace1, tag("EMPTY")))(input)?;
+    Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Empty }))))
+}
+
 /// exist = value "EXISTS"
 pub fn parse_exists(input: Span) -> IResult<FilterCondition> {
     let (input, key) = terminated(parse_value, tag("EXISTS"))(input)?;
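One detail worth calling out in the hunk above: `IS NOT NULL` and `IS NOT EMPTY` do not get dedicated `Condition` variants. The new parsers return the positive condition wrapped in `FilterCondition::Not`, which is why the snapshot tests further down render `subscribers IS NOT NULL` as `NOT ({subscribers} IS NULL)`. A minimal, self-contained sketch of that normalization (the type and function names here are illustrative stand-ins, not the crate's actual API):

// Illustrative sketch only: simplified stand-ins for the parser's condition types.
#[derive(Debug, PartialEq)]
enum Filter {
    IsNull { field: String },
    IsEmpty { field: String },
    Not(Box<Filter>),
}

// Normalize the negated forms the same way the patch does: wrap the positive
// condition in `Not` instead of introducing extra enum variants.
fn parse_is_clause(field: &str, clause: &str) -> Option<Filter> {
    let field = field.to_string();
    match clause.trim() {
        "IS NULL" => Some(Filter::IsNull { field }),
        "IS NOT NULL" => Some(Filter::Not(Box::new(Filter::IsNull { field }))),
        "IS EMPTY" => Some(Filter::IsEmpty { field }),
        "IS NOT EMPTY" => Some(Filter::Not(Box::new(Filter::IsEmpty { field }))),
        _ => None,
    }
}

fn main() {
    // Mirrors the snapshot `NOT ({subscribers} IS NULL)` for `subscribers IS NOT NULL`.
    println!("{:?}", parse_is_clause("subscribers", "IS NOT NULL"));
}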
@@ -143,11 +143,9 @@ impl<'a> Display for Error<'a> {
             ErrorKind::MissingClosingDelimiter(c) => {
                 writeln!(f, "Expression `{}` is missing the following closing delimiter: `{}`.", escaped_input, c)?
             }
-            ErrorKind::InvalidPrimary if input.trim().is_empty() => {
-                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.")?
-            }
             ErrorKind::InvalidPrimary => {
-                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `{}`.", escaped_input)?
+                let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
+                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
             }
             ErrorKind::ExpectedEof => {
                 writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
@@ -47,7 +47,10 @@ mod value;
 use std::fmt::Debug;
 
 pub use condition::{parse_condition, parse_to, Condition};
-use condition::{parse_exists, parse_not_exists};
+use condition::{
+    parse_exists, parse_is_empty, parse_is_not_empty, parse_is_not_null, parse_is_null,
+    parse_not_exists,
+};
 use error::{cut_with_err, ExpectedValueKind, NomErrorExt};
 pub use error::{Error, ErrorKind};
 use nom::branch::alt;
@@ -442,6 +445,10 @@ fn parse_primary(input: Span, depth: usize) -> IResult<FilterCondition> {
         parse_in,
         parse_not_in,
         parse_condition,
+        parse_is_null,
+        parse_is_not_null,
+        parse_is_empty,
+        parse_is_not_empty,
         parse_exists,
         parse_not_exists,
         parse_to,
@@ -526,14 +533,30 @@ pub mod tests {
         insta::assert_display_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}");
         insta::assert_display_snapshot!(p("subscribers 100 TO 1000"), @"{subscribers} {100} TO {1000}");
 
-        // Test NOT + EXISTS
-        insta::assert_display_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS");
+        // Test NOT
         insta::assert_display_snapshot!(p("NOT subscribers < 1000"), @"NOT ({subscribers} < {1000})");
+        insta::assert_display_snapshot!(p("NOT subscribers 100 TO 1000"), @"NOT ({subscribers} {100} TO {1000})");
+
+        // Test NULL + NOT NULL
+        insta::assert_display_snapshot!(p("subscribers IS NULL"), @"{subscribers} IS NULL");
+        insta::assert_display_snapshot!(p("NOT subscribers IS NULL"), @"NOT ({subscribers} IS NULL)");
+        insta::assert_display_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");
+        insta::assert_display_snapshot!(p("NOT subscribers IS NOT NULL"), @"{subscribers} IS NULL");
+        insta::assert_display_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");
+
+        // Test EMPTY + NOT EMPTY
+        insta::assert_display_snapshot!(p("subscribers IS EMPTY"), @"{subscribers} IS EMPTY");
+        insta::assert_display_snapshot!(p("NOT subscribers IS EMPTY"), @"NOT ({subscribers} IS EMPTY)");
+        insta::assert_display_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");
+        insta::assert_display_snapshot!(p("NOT subscribers IS NOT EMPTY"), @"{subscribers} IS EMPTY");
+        insta::assert_display_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");
+
+        // Test EXISTS + NOT EXITS
+        insta::assert_display_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS");
         insta::assert_display_snapshot!(p("NOT subscribers EXISTS"), @"NOT ({subscribers} EXISTS)");
         insta::assert_display_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");
         insta::assert_display_snapshot!(p("NOT subscribers NOT EXISTS"), @"{subscribers} EXISTS");
         insta::assert_display_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");
-        insta::assert_display_snapshot!(p("NOT subscribers 100 TO 1000"), @"NOT ({subscribers} {100} TO {1000})");
 
         // Test nested NOT
         insta::assert_display_snapshot!(p("NOT NOT NOT NOT x = 5"), @"{x} = {5}");
@@ -606,7 +629,7 @@ pub mod tests {
         "###);
 
         insta::assert_display_snapshot!(p("'OR'"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
         1:5 'OR'
         "###);
 
@@ -616,12 +639,12 @@ pub mod tests {
         "###);
 
         insta::assert_display_snapshot!(p("channel Ponce"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
         1:14 channel Ponce
         "###);
 
         insta::assert_display_snapshot!(p("channel = Ponce OR"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
         19:19 channel = Ponce OR
         "###);
 
@@ -706,12 +729,12 @@ pub mod tests {
         "###);
 
         insta::assert_display_snapshot!(p("colour NOT EXIST"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
         1:17 colour NOT EXIST
         "###);
 
         insta::assert_display_snapshot!(p("subscribers 100 TO1000"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
         1:23 subscribers 100 TO1000
         "###);
 
@ -772,6 +795,39 @@ pub mod tests {
|
|||||||
Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
|
Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
|
||||||
5:7 NOT OR EXISTS AND EXISTS NOT EXISTS
|
5:7 NOT OR EXISTS AND EXISTS NOT EXISTS
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_display_snapshot!(p(r#"value NULL"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`.
1:11 value NULL
"###);

insta::assert_display_snapshot!(p(r#"value NOT NULL"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`.
1:15 value NOT NULL
"###);

insta::assert_display_snapshot!(p(r#"value EMPTY"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`.
1:12 value EMPTY
"###);

insta::assert_display_snapshot!(p(r#"value NOT EMPTY"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`.
1:16 value NOT EMPTY
"###);

insta::assert_display_snapshot!(p(r#"value IS"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS`.
1:9 value IS
"###);

insta::assert_display_snapshot!(p(r#"value IS NOT"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`.
1:13 value IS NOT
"###);

insta::assert_display_snapshot!(p(r#"value IS EXISTS"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`.
1:16 value IS EXISTS
"###);

insta::assert_display_snapshot!(p(r#"value IS NOT EXISTS"#), @r###"
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`.
1:20 value IS NOT EXISTS
"###);
}
|
}
|
||||||
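For quick reference, a short illustrative summary of the spellings involved in this hunk (plain data only, no call into the parser; the field names are invented): the grammar now accepts the four `IS ...` forms listed in the expected-operation message, while the bare forms exercised above remain errors.

    fn main() {
        // Accepted after this change (they appear in the expected-operation list above).
        let valid = ["price IS NULL", "price IS NOT NULL", "tags IS EMPTY", "tags IS NOT EMPTY"];
        // Still rejected, exactly as the snapshots above assert: `IS` is mandatory
        // and cannot be combined with `EXISTS`.
        let invalid = ["value NULL", "value NOT NULL", "value EMPTY", "value IS EXISTS"];
        for filter in valid.iter().chain(invalid.iter()) {
            println!("{filter}");
        }
    }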
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -853,6 +909,8 @@ impl<'a> std::fmt::Display for Condition<'a> {
|
|||||||
Condition::GreaterThanOrEqual(token) => write!(f, ">= {token}"),
Condition::Equal(token) => write!(f, "= {token}"),
Condition::NotEqual(token) => write!(f, "!= {token}"),
Condition::Null => write!(f, "IS NULL"),
Condition::Empty => write!(f, "IS EMPTY"),
Condition::Exists => write!(f, "EXISTS"),
Condition::LowerThan(token) => write!(f, "< {token}"),
Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"),
@ -183,7 +183,20 @@ fn is_syntax_component(c: char) -> bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn is_keyword(s: &str) -> bool {
|
fn is_keyword(s: &str) -> bool {
|
||||||
matches!(s, "AND" | "OR" | "IN" | "NOT" | "TO" | "EXISTS" | "_geoRadius" | "_geoBoundingBox")
|
matches!(
|
||||||
|
s,
|
||||||
|
"AND"
|
||||||
|
| "OR"
|
||||||
|
| "IN"
|
||||||
|
| "NOT"
|
||||||
|
| "TO"
|
||||||
|
| "EXISTS"
|
||||||
|
| "IS"
|
||||||
|
| "NULL"
|
||||||
|
| "EMPTY"
|
||||||
|
| "_geoRadius"
|
||||||
|
| "_geoBoundingBox"
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
@ -4,51 +4,56 @@ use serde_json::{Map, Value};
|
|||||||
|
|
||||||
pub fn flatten(json: &Map<String, Value>) -> Map<String, Value> {
|
pub fn flatten(json: &Map<String, Value>) -> Map<String, Value> {
|
||||||
let mut obj = Map::new();
|
let mut obj = Map::new();
|
||||||
let mut all_keys = vec![];
|
let mut all_entries = vec![];
|
||||||
insert_object(&mut obj, None, json, &mut all_keys);
|
insert_object(&mut obj, None, json, &mut all_entries);
|
||||||
for key in all_keys {
|
for (key, old_val) in all_entries {
|
||||||
obj.entry(key).or_insert(Value::Array(vec![]));
|
obj.entry(key).or_insert(old_val.clone());
|
||||||
}
|
}
|
||||||
obj
|
obj
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert_object(
|
fn insert_object<'a>(
|
||||||
base_json: &mut Map<String, Value>,
|
base_json: &mut Map<String, Value>,
|
||||||
base_key: Option<&str>,
|
base_key: Option<&str>,
|
||||||
object: &Map<String, Value>,
|
object: &'a Map<String, Value>,
|
||||||
all_keys: &mut Vec<String>,
|
all_entries: &mut Vec<(String, &'a Value)>,
|
||||||
) {
|
) {
|
||||||
for (key, value) in object {
|
for (key, value) in object {
|
||||||
let new_key = base_key.map_or_else(|| key.clone(), |base_key| format!("{base_key}.{key}"));
|
let new_key = base_key.map_or_else(|| key.clone(), |base_key| format!("{base_key}.{key}"));
|
||||||
all_keys.push(new_key.clone());
|
all_entries.push((new_key.clone(), value));
|
||||||
if let Some(array) = value.as_array() {
|
if let Some(array) = value.as_array() {
|
||||||
insert_array(base_json, &new_key, array, all_keys);
|
insert_array(base_json, &new_key, array, all_entries);
|
||||||
} else if let Some(object) = value.as_object() {
|
} else if let Some(object) = value.as_object() {
|
||||||
insert_object(base_json, Some(&new_key), object, all_keys);
|
insert_object(base_json, Some(&new_key), object, all_entries);
|
||||||
} else {
|
} else {
|
||||||
insert_value(base_json, &new_key, value.clone());
|
insert_value(base_json, &new_key, value.clone(), false);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert_array(
|
fn insert_array<'a>(
|
||||||
base_json: &mut Map<String, Value>,
|
base_json: &mut Map<String, Value>,
|
||||||
base_key: &str,
|
base_key: &str,
|
||||||
array: &Vec<Value>,
|
array: &'a Vec<Value>,
|
||||||
all_keys: &mut Vec<String>,
|
all_entries: &mut Vec<(String, &'a Value)>,
|
||||||
) {
|
) {
|
||||||
for value in array {
|
for value in array {
|
||||||
if let Some(object) = value.as_object() {
|
if let Some(object) = value.as_object() {
|
||||||
insert_object(base_json, Some(base_key), object, all_keys);
|
insert_object(base_json, Some(base_key), object, all_entries);
|
||||||
} else if let Some(sub_array) = value.as_array() {
|
} else if let Some(sub_array) = value.as_array() {
|
||||||
insert_array(base_json, base_key, sub_array, all_keys);
|
insert_array(base_json, base_key, sub_array, all_entries);
|
||||||
} else {
|
} else {
|
||||||
insert_value(base_json, base_key, value.clone());
|
insert_value(base_json, base_key, value.clone(), true);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert_value(base_json: &mut Map<String, Value>, key: &str, to_insert: Value) {
|
fn insert_value(
|
||||||
|
base_json: &mut Map<String, Value>,
|
||||||
|
key: &str,
|
||||||
|
to_insert: Value,
|
||||||
|
came_from_array: bool,
|
||||||
|
) {
|
||||||
debug_assert!(!to_insert.is_object());
|
debug_assert!(!to_insert.is_object());
|
||||||
debug_assert!(!to_insert.is_array());
|
debug_assert!(!to_insert.is_array());
|
||||||
|
|
||||||
@ -63,6 +68,8 @@ fn insert_value(base_json: &mut Map<String, Value>, key: &str, to_insert: Value)
|
|||||||
base_json[key] = Value::Array(vec![value, to_insert]);
|
base_json[key] = Value::Array(vec![value, to_insert]);
|
||||||
}
|
}
|
||||||
// if it does not exist we can push the value untouched
|
// if it does not exist we can push the value untouched
|
||||||
|
} else if came_from_array {
|
||||||
|
base_json.insert(key.to_string(), Value::Array(vec![to_insert]));
|
||||||
} else {
|
} else {
|
||||||
base_json.insert(key.to_string(), to_insert);
|
base_json.insert(key.to_string(), to_insert);
|
||||||
}
|
}
|
||||||
@ -113,7 +120,11 @@ mod tests {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
&flat,
|
&flat,
|
||||||
json!({
|
json!({
|
||||||
"a": [],
|
"a": {
|
||||||
|
"b": "c",
|
||||||
|
"d": "e",
|
||||||
|
"f": "g"
|
||||||
|
},
|
||||||
"a.b": "c",
|
"a.b": "c",
|
||||||
"a.d": "e",
|
"a.d": "e",
|
||||||
"a.f": "g"
|
"a.f": "g"
|
||||||
@ -164,7 +175,7 @@ mod tests {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
&flat,
|
&flat,
|
||||||
json!({
|
json!({
|
||||||
"a": 42,
|
"a": [42],
|
||||||
"a.b": ["c", "d", "e"],
|
"a.b": ["c", "d", "e"],
|
||||||
})
|
})
|
||||||
.as_object()
|
.as_object()
|
||||||
@ -186,7 +197,7 @@ mod tests {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
&flat,
|
&flat,
|
||||||
json!({
|
json!({
|
||||||
"a": null,
|
"a": [null],
|
||||||
"a.b": ["c", "d", "e"],
|
"a.b": ["c", "d", "e"],
|
||||||
})
|
})
|
||||||
.as_object()
|
.as_object()
|
||||||
@ -208,7 +219,9 @@ mod tests {
|
|||||||
assert_eq!(
|
assert_eq!(
|
||||||
&flat,
|
&flat,
|
||||||
json!({
|
json!({
|
||||||
"a": [],
|
"a": {
|
||||||
|
"b": "c"
|
||||||
|
},
|
||||||
"a.b": ["c", "d"],
|
"a.b": ["c", "d"],
|
||||||
})
|
})
|
||||||
.as_object()
|
.as_object()
|
||||||
@ -234,7 +247,7 @@ mod tests {
|
|||||||
json!({
|
json!({
|
||||||
"a.b": ["c", "d", "f"],
|
"a.b": ["c", "d", "f"],
|
||||||
"a.c": "e",
|
"a.c": "e",
|
||||||
"a": 35,
|
"a": [35],
|
||||||
})
|
})
|
||||||
.as_object()
|
.as_object()
|
||||||
.unwrap()
|
.unwrap()
|
||||||
@ -302,4 +315,53 @@ mod tests {
|
|||||||
.unwrap()
|
.unwrap()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn flatten_nested_values_keep_original_values() {
|
||||||
|
let mut base: Value = json!({
|
||||||
|
"tags": {
|
||||||
|
"t1": "v1"
|
||||||
|
},
|
||||||
|
"prices": {
|
||||||
|
"p1": [null],
|
||||||
|
"p1000": {"tamo": {"le": {}}}
|
||||||
|
},
|
||||||
|
"kiki": [[]]
|
||||||
|
});
|
||||||
|
let json = std::mem::take(base.as_object_mut().unwrap());
|
||||||
|
let flat = flatten(&json);
|
||||||
|
|
||||||
|
println!("{}", serde_json::to_string_pretty(&flat).unwrap());
|
||||||
|
|
||||||
|
assert_eq!(
|
||||||
|
&flat,
|
||||||
|
json!({
|
||||||
|
"prices": {
|
||||||
|
"p1": [null],
|
||||||
|
"p1000": {
|
||||||
|
"tamo": {
|
||||||
|
"le": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"prices.p1": [null],
|
||||||
|
"prices.p1000": {
|
||||||
|
"tamo": {
|
||||||
|
"le": {}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"prices.p1000.tamo": {
|
||||||
|
"le": {}
|
||||||
|
},
|
||||||
|
"prices.p1000.tamo.le": {},
|
||||||
|
"tags": {
|
||||||
|
"t1": "v1"
|
||||||
|
},
|
||||||
|
"tags.t1": "v1",
|
||||||
|
"kiki": [[]]
|
||||||
|
})
|
||||||
|
.as_object()
|
||||||
|
.unwrap()
|
||||||
|
);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
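To make the flattening change above concrete, here is a small self-contained sketch (serde_json only; the input document and keys are invented, and the real call would presumably be `flatten(doc.as_object().unwrap())` per the signature above) of the behaviour the updated tests assert: nested objects are now kept under their original key alongside the dotted keys, and scalars met inside arrays stay wrapped in arrays.

    use serde_json::json;

    fn main() {
        // Hypothetical input document: a nested object plus an array mixing a scalar and an object.
        let doc = json!({ "a": { "b": "c" }, "mixed": [35, { "b": "d" }] });

        // What the updated flattener is expected to return for it, following the tests above.
        let expected = json!({
            "a": { "b": "c" },  // the original nested value is preserved (new behaviour)
            "a.b": "c",         // ...alongside the flattened dotted key
            "mixed": [35],      // a scalar found inside an array stays wrapped in an array
            "mixed.b": "d"      // objects inside arrays are flattened under the array's key
        });

        println!("doc = {doc}");
        println!("expected flattened form = {expected}");
    }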
|
@ -11,29 +11,29 @@ edition.workspace = true
|
|||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow = "1.0.64"
|
anyhow = "1.0.70"
|
||||||
bincode = "1.3.3"
|
bincode = "1.3.3"
|
||||||
csv = "1.1.6"
|
csv = "1.2.1"
|
||||||
derive_builder = "0.11.2"
|
derive_builder = "0.12.0"
|
||||||
dump = { path = "../dump" }
|
dump = { path = "../dump" }
|
||||||
enum-iterator = "1.1.3"
|
enum-iterator = "1.4.0"
|
||||||
file-store = { path = "../file-store" }
|
file-store = { path = "../file-store" }
|
||||||
log = "0.4.14"
|
log = "0.4.17"
|
||||||
meilisearch-auth = { path = "../meilisearch-auth" }
|
meilisearch-auth = { path = "../meilisearch-auth" }
|
||||||
meilisearch-types = { path = "../meilisearch-types" }
|
meilisearch-types = { path = "../meilisearch-types" }
|
||||||
page_size = "0.5.0"
|
page_size = "0.5.0"
|
||||||
roaring = { version = "0.10.0", features = ["serde"] }
|
roaring = { version = "0.10.1", features = ["serde"] }
|
||||||
serde = { version = "1.0.136", features = ["derive"] }
|
serde = { version = "1.0.160", features = ["derive"] }
|
||||||
serde_json = { version = "1.0.85", features = ["preserve_order"] }
|
serde_json = { version = "1.0.95", features = ["preserve_order"] }
|
||||||
synchronoise = "1.0.1"
|
synchronoise = "1.0.1"
|
||||||
tempfile = "3.3.0"
|
tempfile = "3.5.0"
|
||||||
thiserror = "1.0.30"
|
thiserror = "1.0.40"
|
||||||
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
|
time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
|
||||||
uuid = { version = "1.1.2", features = ["serde", "v4"] }
|
uuid = { version = "1.3.1", features = ["serde", "v4"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
big_s = "1.0.2"
|
big_s = "1.0.2"
|
||||||
crossbeam = "0.8.2"
|
crossbeam = "0.8.2"
|
||||||
insta = { version = "1.19.1", features = ["json", "redactions"] }
|
insta = { version = "1.29.0", features = ["json", "redactions"] }
|
||||||
meili-snap = { path = "../meili-snap" }
|
meili-snap = { path = "../meili-snap" }
|
||||||
nelson = { git = "https://github.com/meilisearch/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
|
nelson = { git = "https://github.com/meilisearch/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
|
||||||
|
@ -311,18 +311,9 @@ impl BatchKind {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
(
|
(
|
||||||
BatchKind::DocumentOperation { method, allow_index_creation, primary_key, mut operation_ids },
|
this @ BatchKind::DocumentOperation { .. },
|
||||||
K::DocumentDeletion,
|
K::DocumentDeletion,
|
||||||
) => {
|
) => Break(this),
|
||||||
operation_ids.push(id);
|
|
||||||
|
|
||||||
Continue(BatchKind::DocumentOperation {
|
|
||||||
method,
|
|
||||||
allow_index_creation,
|
|
||||||
primary_key,
|
|
||||||
operation_ids,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
// but we can't autobatch documents if it's not the same kind
|
// but we can't autobatch documents if it's not the same kind
|
||||||
// this match branch MUST be AFTER the previous one
|
// this match branch MUST be AFTER the previous one
|
||||||
(
|
(
|
||||||
@ -345,35 +336,7 @@ impl BatchKind {
|
|||||||
deletion_ids.push(id);
|
deletion_ids.push(id);
|
||||||
Continue(BatchKind::DocumentClear { ids: deletion_ids })
|
Continue(BatchKind::DocumentClear { ids: deletion_ids })
|
||||||
}
|
}
|
||||||
// we can autobatch the deletion and import if the index already exists
|
// we can't autobatch a deletion and an import
|
||||||
(
|
|
||||||
BatchKind::DocumentDeletion { mut deletion_ids },
|
|
||||||
K::DocumentImport { method, allow_index_creation, primary_key }
|
|
||||||
) if index_already_exists => {
|
|
||||||
deletion_ids.push(id);
|
|
||||||
|
|
||||||
Continue(BatchKind::DocumentOperation {
|
|
||||||
method,
|
|
||||||
allow_index_creation,
|
|
||||||
primary_key,
|
|
||||||
operation_ids: deletion_ids,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
// we can autobatch the deletion and import if both can't create an index
|
|
||||||
(
|
|
||||||
BatchKind::DocumentDeletion { mut deletion_ids },
|
|
||||||
K::DocumentImport { method, allow_index_creation, primary_key }
|
|
||||||
) if !allow_index_creation => {
|
|
||||||
deletion_ids.push(id);
|
|
||||||
|
|
||||||
Continue(BatchKind::DocumentOperation {
|
|
||||||
method,
|
|
||||||
allow_index_creation,
|
|
||||||
primary_key,
|
|
||||||
operation_ids: deletion_ids,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
// we can't autobatch a deletion and an import if the index does not exist but would be created by an addition
|
|
||||||
(
|
(
|
||||||
this @ BatchKind::DocumentDeletion { .. },
|
this @ BatchKind::DocumentDeletion { .. },
|
||||||
K::DocumentImport { .. }
|
K::DocumentImport { .. }
|
||||||
@ -674,36 +637,36 @@ mod tests {
|
|||||||
debug_snapshot!(autobatch_from(false,None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false,None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
||||||
|
|
||||||
// We can autobatch document addition with document deletion
|
// We can't autobatch document addition with document deletion
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
|
||||||
// And the other way around
|
// we also can't do it the other way around
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
}
|
}
|
||||||
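To summarise the behavioural change these snapshots pin down (illustrative types only, not the crate's real `BatchKind` machinery): a batch of document operations is now closed as soon as a document deletion arrives, and a pending deletion batch is closed as soon as an import arrives, instead of the two being merged together.

    // Minimal sketch of the new "never merge imports and deletions" rule.
    #[derive(PartialEq)]
    enum Kind { Import, Deletion }

    // Returns true when `next` may join a batch currently made of `current` tasks.
    fn can_join_batch(current: Kind, next: Kind) -> bool {
        // Previously an Import batch could also absorb a Deletion (and, under some
        // conditions, the other way around); now only same-kind tasks are batched.
        current == next
    }

    fn main() {
        assert!(can_join_batch(Kind::Import, Kind::Import));
        assert!(!can_join_batch(Kind::Import, Kind::Deletion)); // Break: the deletion starts its own batch
        assert!(!can_join_batch(Kind::Deletion, Kind::Import)); // Break: the import waits for the next batch
    }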
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -61,6 +61,8 @@ pub enum Error {
|
|||||||
SwapDuplicateIndexesFound(Vec<String>),
|
SwapDuplicateIndexesFound(Vec<String>),
|
||||||
#[error("Index `{0}` not found.")]
|
#[error("Index `{0}` not found.")]
|
||||||
SwapIndexNotFound(String),
|
SwapIndexNotFound(String),
|
||||||
#[error("Meilisearch cannot receive write operations because the limit of the task database has been reached. Please delete tasks to continue performing write operations.")]
NoSpaceLeftInTaskQueue,
#[error(
|
#[error(
|
||||||
"Indexes {} not found.",
|
"Indexes {} not found.",
|
||||||
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
|
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
|
||||||
@ -152,6 +154,8 @@ impl ErrorCode for Error {
|
|||||||
Error::TaskNotFound(_) => Code::TaskNotFound,
|
Error::TaskNotFound(_) => Code::TaskNotFound,
|
||||||
Error::TaskDeletionWithEmptyQuery => Code::MissingTaskFilters,
|
Error::TaskDeletionWithEmptyQuery => Code::MissingTaskFilters,
|
||||||
Error::TaskCancelationWithEmptyQuery => Code::MissingTaskFilters,
|
Error::TaskCancelationWithEmptyQuery => Code::MissingTaskFilters,
|
||||||
// TODO: not sure of the Code to use
Error::NoSpaceLeftInTaskQueue => Code::NoSpaceLeftOnDevice,
Error::Dump(e) => e.error_code(),
|
Error::Dump(e) => e.error_code(),
|
||||||
Error::Milli(e) => e.error_code(),
|
Error::Milli(e) => e.error_code(),
|
||||||
Error::ProcessBatchPanicked => Code::Internal,
|
Error::ProcessBatchPanicked => Code::Internal,
|
||||||
|
@ -828,6 +828,13 @@ impl IndexScheduler {
|
|||||||
pub fn register(&self, kind: KindWithContent) -> Result<Task> {
|
pub fn register(&self, kind: KindWithContent) -> Result<Task> {
|
||||||
let mut wtxn = self.env.write_txn()?;
|
let mut wtxn = self.env.write_txn()?;
|
||||||
|
|
||||||
// if the task doesn't delete anything and 50% of the task queue is full, we must refuse to enqueue the incoming task
if !matches!(&kind, KindWithContent::TaskDeletion { tasks, .. } if !tasks.is_empty())
    && (self.env.non_free_pages_size()? * 100) / self.env.map_size()? as u64 > 50
{
    return Err(Error::NoSpaceLeftInTaskQueue);
}
|
|
||||||
let mut task = Task {
|
let mut task = Task {
|
||||||
uid: self.next_task_id(&wtxn)?,
|
uid: self.next_task_id(&wtxn)?,
|
||||||
enqueued_at: OffsetDateTime::now_utc(),
|
enqueued_at: OffsetDateTime::now_utc(),
|
||||||
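As a rough illustration of the guard added to `register` above (plain integers here; the real check reads LMDB page accounting through `non_free_pages_size()` and `map_size()`), the refusal boils down to a simple percentage test:

    // Sketch only: mirrors the arithmetic of the check above.
    fn task_queue_is_too_full(non_free_pages_size: u64, map_size: u64) -> bool {
        // Non-deletion tasks are refused once more than half of the task database is used.
        (non_free_pages_size * 100) / map_size > 50
    }

    fn main() {
        assert!(!task_queue_is_too_full(400, 1_000)); // 40% used: the task is enqueued
        assert!(task_queue_is_too_full(600, 1_000));  // 60% used: Error::NoSpaceLeftInTaskQueue
    }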
@ -1936,105 +1943,6 @@ mod tests {
|
|||||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "both_task_succeeded");
|
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "both_task_succeeded");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn document_addition_and_document_deletion() {
|
|
||||||
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
|
||||||
|
|
||||||
let content = r#"[
|
|
||||||
{ "id": 1, "doggo": "jean bob" },
|
|
||||||
{ "id": 2, "catto": "jorts" },
|
|
||||||
{ "id": 3, "doggo": "bork" }
|
|
||||||
]"#;
|
|
||||||
|
|
||||||
let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap();
|
|
||||||
let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
|
|
||||||
file.persist().unwrap();
|
|
||||||
index_scheduler
|
|
||||||
.register(KindWithContent::DocumentAdditionOrUpdate {
|
|
||||||
index_uid: S("doggos"),
|
|
||||||
primary_key: Some(S("id")),
|
|
||||||
method: ReplaceDocuments,
|
|
||||||
content_file: uuid,
|
|
||||||
documents_count,
|
|
||||||
allow_index_creation: true,
|
|
||||||
})
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task");
|
|
||||||
index_scheduler
|
|
||||||
.register(KindWithContent::DocumentDeletion {
|
|
||||||
index_uid: S("doggos"),
|
|
||||||
documents_ids: vec![S("1"), S("2")],
|
|
||||||
})
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task");
|
|
||||||
|
|
||||||
handle.advance_one_successful_batch(); // The addition AND deletion should've been batched together
|
|
||||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_processing_the_batch");
|
|
||||||
|
|
||||||
let index = index_scheduler.index("doggos").unwrap();
|
|
||||||
let rtxn = index.read_txn().unwrap();
|
|
||||||
let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
|
||||||
let field_ids = field_ids_map.ids().collect::<Vec<_>>();
|
|
||||||
let documents = index
|
|
||||||
.all_documents(&rtxn)
|
|
||||||
.unwrap()
|
|
||||||
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn document_deletion_and_document_addition() {
|
|
||||||
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
|
||||||
index_scheduler
|
|
||||||
.register(KindWithContent::DocumentDeletion {
|
|
||||||
index_uid: S("doggos"),
|
|
||||||
documents_ids: vec![S("1"), S("2")],
|
|
||||||
})
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task");
|
|
||||||
|
|
||||||
let content = r#"[
|
|
||||||
{ "id": 1, "doggo": "jean bob" },
|
|
||||||
{ "id": 2, "catto": "jorts" },
|
|
||||||
{ "id": 3, "doggo": "bork" }
|
|
||||||
]"#;
|
|
||||||
|
|
||||||
let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap();
|
|
||||||
let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
|
|
||||||
file.persist().unwrap();
|
|
||||||
index_scheduler
|
|
||||||
.register(KindWithContent::DocumentAdditionOrUpdate {
|
|
||||||
index_uid: S("doggos"),
|
|
||||||
primary_key: Some(S("id")),
|
|
||||||
method: ReplaceDocuments,
|
|
||||||
content_file: uuid,
|
|
||||||
documents_count,
|
|
||||||
allow_index_creation: true,
|
|
||||||
})
|
|
||||||
.unwrap();
|
|
||||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task");
|
|
||||||
|
|
||||||
// The deletion should have failed because it can't create an index
|
|
||||||
handle.advance_one_failed_batch();
|
|
||||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_failing_the_deletion");
|
|
||||||
|
|
||||||
// The addition should work
|
|
||||||
handle.advance_one_successful_batch();
|
|
||||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_last_successful_addition");
|
|
||||||
|
|
||||||
let index = index_scheduler.index("doggos").unwrap();
|
|
||||||
let rtxn = index.read_txn().unwrap();
|
|
||||||
let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
|
||||||
let field_ids = field_ids_map.ids().collect::<Vec<_>>();
|
|
||||||
let documents = index
|
|
||||||
.all_documents(&rtxn)
|
|
||||||
.unwrap()
|
|
||||||
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn do_not_batch_task_of_different_indexes() {
|
fn do_not_batch_task_of_different_indexes() {
|
||||||
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
||||||
|
@ -1,43 +0,0 @@
|
|||||||
---
|
|
||||||
source: index-scheduler/src/lib.rs
|
|
||||||
---
|
|
||||||
### Autobatching Enabled = true
|
|
||||||
### Processing Tasks:
|
|
||||||
[]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### All Tasks:
|
|
||||||
0 {uid: 0, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
|
|
||||||
1 {uid: 1, status: succeeded, details: { received_document_ids: 2, deleted_documents: Some(2) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Status:
|
|
||||||
enqueued []
|
|
||||||
succeeded [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Kind:
|
|
||||||
"documentAdditionOrUpdate" [0,]
|
|
||||||
"documentDeletion" [1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Tasks:
|
|
||||||
doggos [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Mapper:
|
|
||||||
doggos: { number_of_documents: 1, field_distribution: {"doggo": 1, "id": 1} }
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Canceled By:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Enqueued At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
[timestamp] [1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Started At:
|
|
||||||
[timestamp] [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Finished At:
|
|
||||||
[timestamp] [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### File Store:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
@ -1,9 +0,0 @@
|
|||||||
---
|
|
||||||
source: index-scheduler/src/lib.rs
|
|
||||||
---
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"doggo": "bork"
|
|
||||||
}
|
|
||||||
]
|
|
@ -1,37 +0,0 @@
|
|||||||
---
|
|
||||||
source: index-scheduler/src/lib.rs
|
|
||||||
---
|
|
||||||
### Autobatching Enabled = true
|
|
||||||
### Processing Tasks:
|
|
||||||
[]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### All Tasks:
|
|
||||||
0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Status:
|
|
||||||
enqueued [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Kind:
|
|
||||||
"documentAdditionOrUpdate" [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Tasks:
|
|
||||||
doggos [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Mapper:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Canceled By:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Enqueued At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Started At:
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Finished At:
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### File Store:
|
|
||||||
00000000-0000-0000-0000-000000000000
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
@ -1,40 +0,0 @@
|
|||||||
---
|
|
||||||
source: index-scheduler/src/lib.rs
|
|
||||||
---
|
|
||||||
### Autobatching Enabled = true
|
|
||||||
### Processing Tasks:
|
|
||||||
[]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### All Tasks:
|
|
||||||
0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
|
|
||||||
1 {uid: 1, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Status:
|
|
||||||
enqueued [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Kind:
|
|
||||||
"documentAdditionOrUpdate" [0,]
|
|
||||||
"documentDeletion" [1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Tasks:
|
|
||||||
doggos [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Mapper:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Canceled By:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Enqueued At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
[timestamp] [1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Started At:
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Finished At:
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### File Store:
|
|
||||||
00000000-0000-0000-0000-000000000000
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
@ -1,43 +0,0 @@
|
|||||||
---
|
|
||||||
source: index-scheduler/src/lib.rs
|
|
||||||
---
|
|
||||||
### Autobatching Enabled = true
|
|
||||||
### Processing Tasks:
|
|
||||||
[]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### All Tasks:
|
|
||||||
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
|
|
||||||
1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Status:
|
|
||||||
enqueued [1,]
|
|
||||||
failed [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Kind:
|
|
||||||
"documentAdditionOrUpdate" [1,]
|
|
||||||
"documentDeletion" [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Tasks:
|
|
||||||
doggos [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Mapper:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Canceled By:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Enqueued At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
[timestamp] [1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Started At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Finished At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### File Store:
|
|
||||||
00000000-0000-0000-0000-000000000000
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
@ -1,46 +0,0 @@
|
|||||||
---
|
|
||||||
source: index-scheduler/src/lib.rs
|
|
||||||
---
|
|
||||||
### Autobatching Enabled = true
|
|
||||||
### Processing Tasks:
|
|
||||||
[]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### All Tasks:
|
|
||||||
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
|
|
||||||
1 {uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Status:
|
|
||||||
enqueued []
|
|
||||||
succeeded [1,]
|
|
||||||
failed [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Kind:
|
|
||||||
"documentAdditionOrUpdate" [1,]
|
|
||||||
"documentDeletion" [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Tasks:
|
|
||||||
doggos [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Mapper:
|
|
||||||
doggos: { number_of_documents: 3, field_distribution: {"catto": 1, "doggo": 2, "id": 3} }
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Canceled By:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Enqueued At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
[timestamp] [1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Started At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
[timestamp] [1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Finished At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
[timestamp] [1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### File Store:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
@ -1,17 +0,0 @@
|
|||||||
---
|
|
||||||
source: index-scheduler/src/lib.rs
|
|
||||||
---
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"id": 1,
|
|
||||||
"doggo": "jean bob"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 2,
|
|
||||||
"catto": "jorts"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"doggo": "bork"
|
|
||||||
}
|
|
||||||
]
|
|
@ -1,36 +0,0 @@
|
|||||||
---
|
|
||||||
source: index-scheduler/src/lib.rs
|
|
||||||
---
|
|
||||||
### Autobatching Enabled = true
|
|
||||||
### Processing Tasks:
|
|
||||||
[]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### All Tasks:
|
|
||||||
0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Status:
|
|
||||||
enqueued [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Kind:
|
|
||||||
"documentDeletion" [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Tasks:
|
|
||||||
doggos [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Mapper:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Canceled By:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Enqueued At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Started At:
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Finished At:
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### File Store:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
@ -1,40 +0,0 @@
|
|||||||
---
|
|
||||||
source: index-scheduler/src/lib.rs
|
|
||||||
---
|
|
||||||
### Autobatching Enabled = true
|
|
||||||
### Processing Tasks:
|
|
||||||
[]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### All Tasks:
|
|
||||||
0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
|
|
||||||
1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Status:
|
|
||||||
enqueued [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Kind:
|
|
||||||
"documentAdditionOrUpdate" [1,]
|
|
||||||
"documentDeletion" [0,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Tasks:
|
|
||||||
doggos [0,1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Index Mapper:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Canceled By:
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Enqueued At:
|
|
||||||
[timestamp] [0,]
|
|
||||||
[timestamp] [1,]
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Started At:
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### Finished At:
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
### File Store:
|
|
||||||
00000000-0000-0000-0000-000000000000
|
|
||||||
|
|
||||||
----------------------------------------------------------------------
|
|
||||||
|
|
@ -11,6 +11,6 @@ edition.workspace = true
|
|||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
insta = { version = "^1.19.1", features = ["json", "redactions"] }
|
insta = { version = "^1.29.0", features = ["json", "redactions"] }
|
||||||
md5 = "0.7.0"
|
md5 = "0.7.0"
|
||||||
once_cell = "1.15"
|
once_cell = "1.17"
|
||||||
|
@ -11,16 +11,16 @@ edition.workspace = true
|
|||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
base64 = "0.13.1"
|
base64 = "0.21.0"
|
||||||
enum-iterator = "1.1.3"
|
enum-iterator = "1.4.0"
|
||||||
hmac = "0.12.1"
|
hmac = "0.12.1"
|
||||||
maplit = "1.0.2"
|
maplit = "1.0.2"
|
||||||
meilisearch-types = { path = "../meilisearch-types" }
|
meilisearch-types = { path = "../meilisearch-types" }
|
||||||
rand = "0.8.5"
|
rand = "0.8.5"
|
||||||
roaring = { version = "0.10.0", features = ["serde"] }
|
roaring = { version = "0.10.1", features = ["serde"] }
|
||||||
serde = { version = "1.0.145", features = ["derive"] }
|
serde = { version = "1.0.160", features = ["derive"] }
|
||||||
serde_json = { version = "1.0.85", features = ["preserve_order"] }
|
serde_json = { version = "1.0.95", features = ["preserve_order"] }
|
||||||
sha2 = "0.10.6"
|
sha2 = "0.10.6"
|
||||||
thiserror = "1.0.37"
|
thiserror = "1.0.40"
|
||||||
time = { version = "0.3.15", features = ["serde-well-known", "formatting", "parsing", "macros"] }
|
time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
|
||||||
uuid = { version = "1.1.2", features = ["serde", "v4"] }
|
uuid = { version = "1.3.1", features = ["serde", "v4"] }
|
||||||
|
@ -310,6 +310,7 @@ pub const MASTER_KEY_MIN_SIZE: usize = 16;
|
|||||||
const MASTER_KEY_GEN_SIZE: usize = 32;
|
const MASTER_KEY_GEN_SIZE: usize = 32;
|
||||||
|
|
||||||
pub fn generate_master_key() -> String {
|
pub fn generate_master_key() -> String {
|
||||||
|
use base64::Engine;
|
||||||
use rand::rngs::OsRng;
|
use rand::rngs::OsRng;
|
||||||
use rand::RngCore;
|
use rand::RngCore;
|
||||||
|
|
||||||
@ -320,5 +321,5 @@ pub fn generate_master_key() -> String {
|
|||||||
|
|
||||||
// let's encode the random bytes to base64 to make them human-readable and not too long.
|
// let's encode the random bytes to base64 to make them human-readable and not too long.
|
||||||
// We're using the URL_SAFE alphabet that will produce keys without =, / or other unusual characters.
|
// We're using the URL_SAFE alphabet that will produce keys without =, / or other unusual characters.
|
||||||
base64::encode_config(buf, base64::URL_SAFE_NO_PAD)
|
base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(buf)
|
||||||
}
|
}
|
||||||
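For reference, a minimal self-contained sketch of the base64 0.21 `Engine` API used by the new `generate_master_key` above (assumes `base64 = "0.21"` and `rand = "0.8"`; the helper name is illustrative):

    use base64::Engine as _;
    use rand::RngCore;

    fn generate_key() -> String {
        // 32 random bytes, matching MASTER_KEY_GEN_SIZE above.
        let mut buf = [0u8; 32];
        rand::rngs::OsRng.fill_bytes(&mut buf);
        // URL_SAFE_NO_PAD avoids `=`, `/` and `+`, keeping the key easy to copy around.
        base64::engine::general_purpose::URL_SAFE_NO_PAD.encode(buf)
    }

    fn main() {
        println!("{}", generate_key());
    }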
|
@ -11,31 +11,31 @@ edition.workspace = true
|
|||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
actix-web = { version = "4.2.1", default-features = false }
|
actix-web = { version = "4.3.1", default-features = false }
|
||||||
anyhow = "1.0.65"
|
anyhow = "1.0.70"
|
||||||
convert_case = "0.6.0"
|
convert_case = "0.6.0"
|
||||||
csv = "1.1.6"
|
csv = "1.2.1"
|
||||||
deserr = "0.5.0"
|
deserr = "0.5.0"
|
||||||
either = { version = "1.6.1", features = ["serde"] }
|
either = { version = "1.8.1", features = ["serde"] }
|
||||||
enum-iterator = "1.1.3"
|
enum-iterator = "1.4.0"
|
||||||
file-store = { path = "../file-store" }
|
file-store = { path = "../file-store" }
|
||||||
flate2 = "1.0.24"
|
flate2 = "1.0.25"
|
||||||
fst = "0.4.7"
|
fst = "0.4.7"
|
||||||
memmap2 = "0.5.7"
|
memmap2 = "0.5.10"
|
||||||
milli = { path = "../milli", default-features = false }
|
milli = { path = "../milli", default-features = false }
|
||||||
roaring = { version = "0.10.0", features = ["serde"] }
|
roaring = { version = "0.10.1", features = ["serde"] }
|
||||||
serde = { version = "1.0.145", features = ["derive"] }
|
serde = { version = "1.0.160", features = ["derive"] }
|
||||||
serde-cs = "0.2.4"
|
serde-cs = "0.2.4"
|
||||||
serde_json = "1.0.85"
|
serde_json = "1.0.95"
|
||||||
tar = "0.4.38"
|
tar = "0.4.38"
|
||||||
tempfile = "3.3.0"
|
tempfile = "3.5.0"
|
||||||
thiserror = "1.0.30"
|
thiserror = "1.0.40"
|
||||||
time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] }
|
time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
|
||||||
tokio = "1.24"
|
tokio = "1.27"
|
||||||
uuid = { version = "1.1.2", features = ["serde", "v4"] }
|
uuid = { version = "1.3.1", features = ["serde", "v4"] }
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
insta = "1.19.1"
|
insta = "1.29.0"
|
||||||
meili-snap = { path = "../meili-snap" }
|
meili-snap = { path = "../meili-snap" }
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
@ -50,3 +50,6 @@ hebrew = ["milli/hebrew"]
|
|||||||
japanese = ["milli/japanese"]
|
japanese = ["milli/japanese"]
|
||||||
# thai specialized tokenization
|
# thai specialized tokenization
|
||||||
thai = ["milli/thai"]
|
thai = ["milli/thai"]
|
||||||
|
|
||||||
# allow greek specialized tokenization
greek = ["milli/greek"]
@@ -46,7 +46,7 @@ pub fn check_version_file(db_path: &Path) -> anyhow::Result<()> {
 pub enum VersionFileError {
     #[error(
         "Meilisearch (v{}) failed to infer the version of the database.
-To update Meilisearch please follow our guide on https://docs.meilisearch.com/learn/update_and_migration/updating.html.",
+To update Meilisearch please follow our guide on https://www.meilisearch.com/docs/learn/update_and_migration/updating.",
         env!("CARGO_PKG_VERSION").to_string()
     )]
     MissingVersionFile,
@@ -54,7 +54,7 @@ pub enum VersionFileError {
     MalformedVersionFile,
     #[error(
         "Your database version ({major}.{minor}.{patch}) is incompatible with your current engine version ({}).\n\
-To migrate data between Meilisearch versions, please follow our guide on https://docs.meilisearch.com/learn/update_and_migration/updating.html.",
+To migrate data between Meilisearch versions, please follow our guide on https://www.meilisearch.com/docs/learn/update_and_migration/updating.",
         env!("CARGO_PKG_VERSION").to_string()
     )]
     VersionMismatch { major: String, minor: String, patch: String },
@@ -13,97 +13,97 @@ license.workspace = true
 default-run = "meilisearch"
 
 [dependencies]
-actix-cors = "0.6.3"
+actix-cors = "0.6.4"
-actix-http = { version = "3.2.2", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] }
+actix-http = { version = "3.3.1", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] }
-actix-web = { version = "4.2.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
+actix-web = { version = "4.3.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
 actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
-anyhow = { version = "1.0.65", features = ["backtrace"] }
+anyhow = { version = "1.0.70", features = ["backtrace"] }
-async-stream = "0.3.3"
+async-stream = "0.3.5"
-async-trait = "0.1.57"
+async-trait = "0.1.68"
-bstr = "1.0.1"
+bstr = "1.4.0"
-byte-unit = { version = "4.0.14", default-features = false, features = ["std", "serde"] }
+byte-unit = { version = "4.0.19", default-features = false, features = ["std", "serde"] }
-bytes = "1.2.1"
+bytes = "1.4.0"
-clap = { version = "4.0.9", features = ["derive", "env"] }
+clap = { version = "4.2.1", features = ["derive", "env"] }
-crossbeam-channel = "0.5.6"
+crossbeam-channel = "0.5.8"
 deserr = "0.5.0"
 dump = { path = "../dump" }
-either = "1.8.0"
+either = "1.8.1"
-env_logger = "0.9.1"
+env_logger = "0.10.0"
 file-store = { path = "../file-store" }
-flate2 = "1.0.24"
+flate2 = "1.0.25"
 fst = "0.4.7"
-futures = "0.3.24"
+futures = "0.3.28"
-futures-util = "0.3.24"
+futures-util = "0.3.28"
-http = "0.2.8"
+http = "0.2.9"
 index-scheduler = { path = "../index-scheduler" }
-indexmap = { version = "1.9.1", features = ["serde-1"] }
+indexmap = { version = "1.9.3", features = ["serde-1"] }
 itertools = "0.10.5"
-jsonwebtoken = "8.1.1"
+jsonwebtoken = "8.3.0"
 lazy_static = "1.4.0"
 log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-mimalloc = { version = "0.1.29", default-features = false }
+mimalloc = { version = "0.1.36", default-features = false }
-mime = "0.3.16"
+mime = "0.3.17"
-num_cpus = "1.13.1"
+num_cpus = "1.15.0"
 obkv = "0.2.0"
-once_cell = "1.15.0"
+once_cell = "1.17.1"
 parking_lot = "0.12.1"
 permissive-json-pointer = { path = "../permissive-json-pointer" }
 pin-project-lite = "0.2.9"
 platform-dirs = "0.3.0"
-prometheus = { version = "0.13.2", features = ["process"] }
+prometheus = { version = "0.13.3", features = ["process"] }
 rand = "0.8.5"
-rayon = "1.5.3"
+rayon = "1.7.0"
-regex = "1.6.0"
+regex = "1.7.3"
-reqwest = { version = "0.11.12", features = ["rustls-tls", "json"], default-features = false }
+reqwest = { version = "0.11.16", features = ["rustls-tls", "json"], default-features = false }
-rustls = "0.20.6"
+rustls = "0.20.8"
-rustls-pemfile = "1.0.1"
+rustls-pemfile = "1.0.2"
-segment = { version = "0.2.1", optional = true }
+segment = { version = "0.2.2", optional = true }
-serde = { version = "1.0.145", features = ["derive"] }
+serde = { version = "1.0.160", features = ["derive"] }
-serde_json = { version = "1.0.85", features = ["preserve_order"] }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
 sha2 = "0.10.6"
 siphasher = "0.3.10"
 slice-group-by = "0.3.0"
 static-files = { version = "0.2.3", optional = true }
-sysinfo = "0.26.4"
+sysinfo = "0.28.4"
 tar = "0.4.38"
-tempfile = "3.3.0"
+tempfile = "3.5.0"
-thiserror = "1.0.37"
+thiserror = "1.0.40"
-time = { version = "0.3.15", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
-tokio = { version = "1.24.2", features = ["full"] }
+tokio = { version = "1.27.0", features = ["full"] }
-tokio-stream = "0.1.10"
+tokio-stream = "0.1.12"
-toml = "0.5.9"
+toml = "0.7.3"
-uuid = { version = "1.1.2", features = ["serde", "v4"] }
+uuid = { version = "1.3.1", features = ["serde", "v4"] }
-walkdir = "2.3.2"
+walkdir = "2.3.3"
-yaup = "0.2.0"
+yaup = "0.2.1"
 serde_urlencoded = "0.7.1"
 actix-utils = "3.0.1"
 atty = "0.2.14"
-termcolor = "1.1.3"
+termcolor = "1.2.0"
 
 [dev-dependencies]
-actix-rt = "2.7.0"
+actix-rt = "2.8.0"
 assert-json-diff = "2.0.2"
 brotli = "3.3.4"
-insta = "1.19.1"
+insta = "1.29.0"
 manifest-dir-macros = "0.1.16"
 maplit = "1.0.2"
 meili-snap = {path = "../meili-snap"}
-temp-env = "0.3.1"
+temp-env = "0.3.3"
 urlencoding = "2.1.2"
 yaup = "0.2.1"
 
 [build-dependencies]
-anyhow = { version = "1.0.65", optional = true }
+anyhow = { version = "1.0.70", optional = true }
-cargo_toml = { version = "0.14.0", optional = true }
+cargo_toml = { version = "0.15.2", optional = true }
 hex = { version = "0.4.3", optional = true }
-reqwest = { version = "0.11.12", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
+reqwest = { version = "0.11.16", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
-sha-1 = { version = "0.10.0", optional = true }
+sha-1 = { version = "0.10.1", optional = true }
 static-files = { version = "0.2.3", optional = true }
-tempfile = { version = "3.3.0", optional = true }
+tempfile = { version = "3.5.0", optional = true }
-vergen = { version = "7.4.2", default-features = false, features = ["git"] }
+vergen = { version = "7.5.1", default-features = false, features = ["git"] }
-zip = { version = "0.6.2", optional = true }
+zip = { version = "0.6.4", optional = true }
 
 [features]
 default = ["analytics", "meilisearch-types/default", "mini-dashboard"]
@@ -113,6 +113,7 @@ chinese = ["meilisearch-types/chinese"]
 hebrew = ["meilisearch-types/hebrew"]
 japanese = ["meilisearch-types/japanese"]
 thai = ["meilisearch-types/thai"]
+greek = ["meilisearch-types/greek"]
 
 [package.metadata.mini-dashboard]
 assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.7/build.zip"
@@ -149,7 +149,7 @@ pub fn print_launch_resume(
         "
 Thank you for using Meilisearch!
 
-\nWe collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://docs.meilisearch.com/learn/what_is_meilisearch/telemetry.html
+\nWe collect anonymized analytics to improve our product and your experience. To learn more, including how to turn off analytics, visit our dedicated documentation page: https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry
 
 Anonymous telemetry:\t\"Enabled\""
     );
@@ -68,7 +68,7 @@ const DEFAULT_LOG_EVERY_N: usize = 100_000;
 // The actual size of the virtual address space is computed at startup to determine how many 2TiB indexes can be
 // opened simultaneously.
 pub const INDEX_SIZE: u64 = 2 * 1024 * 1024 * 1024 * 1024; // 2 TiB
-pub const TASK_DB_SIZE: u64 = 10 * 1024 * 1024 * 1024; // 10 GiB
+pub const TASK_DB_SIZE: u64 = 20 * 1024 * 1024 * 1024; // 20 GiB
 
 #[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)]
 #[serde(rename_all = "UPPERCASE")]
@@ -323,10 +323,10 @@ impl Opt {
             .clone()
             .unwrap_or_else(|| PathBuf::from(DEFAULT_CONFIG_FILE_PATH));

-        match std::fs::read(&config_file_path) {
+        match std::fs::read_to_string(&config_file_path) {
             Ok(config) => {
                 // If the file is successfully read, we deserialize it with `toml`.
-                let opt_from_config = toml::from_slice::<Opt>(&config)?;
+                let opt_from_config = toml::from_str::<Opt>(&config)?;
                 // Return an error if config file contains 'config_file_path'
                 // Using that key in the config file doesn't make sense bc it creates a logical loop (config file referencing itself)
                 if opt_from_config.config_file_path.is_some() {
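
The `toml` 0.5 → 0.7 upgrade in this change removes `toml::from_slice`, which is why the configuration file is now read with `std::fs::read_to_string` and parsed with `toml::from_str`. A minimal sketch of the same pattern, with a hypothetical trimmed-down struct standing in for the real `Opt`:

    use serde::Deserialize;

    // Hypothetical stand-in for the real `Opt` struct; for illustration only.
    #[derive(Debug, Deserialize)]
    struct Config {
        db_path: Option<String>,
        http_addr: Option<String>,
    }

    fn load_config(path: &str) -> anyhow::Result<Config> {
        // toml 0.7 only parses from &str, so read the file into a String first.
        let raw = std::fs::read_to_string(path)?;
        Ok(toml::from_str(&raw)?)
    }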
@@ -60,7 +60,7 @@ async fn create_api_key_bad_uid() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Invalid value at `.uid`: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-zA-Z], found `o` at 2",
+      "message": "Invalid value at `.uid`: invalid character: expected an optional prefix of `urn:uuid:` followed by [0-9a-fA-F-], found `o` at 2",
       "code": "invalid_api_key_uid",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_api_key_uid"
@@ -1773,7 +1773,7 @@ async fn error_add_documents_payload_size() {
             "content": "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec metus erat, consequat in blandit venenatis, ultricies eu ipsum. Etiam luctus elit et mollis ultrices. Nam turpis risus, dictum non eros in, eleifend feugiat elit. Morbi non dolor pulvinar, sagittis mi sed, ultricies lorem. Nulla ultricies sem metus. Donec at suscipit quam, sed elementum mi. Suspendisse potenti. Fusce pharetra turpis tortor, sed eleifend odio dapibus ut. Nulla facilisi. Suspendisse elementum, dui eget aliquet dignissim, ex tellus aliquam nisl, at eleifend nisl metus tempus diam. Mauris fermentum sollicitudin efficitur. Donec dignissim est vitae elit finibus faucibus"
         }
     );
-    let documents: Vec<_> = (0..16000).into_iter().map(|_| document.clone()).collect();
+    let documents: Vec<_> = (0..16000).map(|_| document.clone()).collect();
     let documents = json!(documents);
     let (response, code) = index.add_documents(documents, None).await;
 
@@ -1934,7 +1934,6 @@ async fn batch_several_documents_addition() {
     let index = server.index("test");
 
     let mut documents: Vec<_> = (0..150usize)
-        .into_iter()
         .map(|id| {
             json!(
                 {
@@ -547,7 +547,7 @@ async fn filter_invalid_syntax_object() {
     index.wait_task(1).await;
 
     let expected_response = json!({
-        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
+        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
         "code": "invalid_search_filter",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -572,7 +572,7 @@ async fn filter_invalid_syntax_array() {
     index.wait_task(1).await;
 
     let expected_response = json!({
-        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
+        "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `title & Glass`.\n1:14 title & Glass",
         "code": "invalid_search_filter",
         "type": "invalid_request",
         "link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -1,11 +1,14 @@
 mod errors;
 
+use byte_unit::{Byte, ByteUnit};
 use meili_snap::insta::assert_json_snapshot;
+use meili_snap::{json_string, snapshot};
 use serde_json::json;
+use tempfile::TempDir;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;
 
-use crate::common::Server;
+use crate::common::{default_settings, Server};
 
 #[actix_rt::test]
 async fn error_get_unexisting_task_status() {
@@ -1000,3 +1003,117 @@ async fn test_summarized_dump_creation() {
     }
     "###);
 }
+
+#[actix_web::test]
+async fn test_task_queue_is_full() {
+    let dir = TempDir::new().unwrap();
+    let mut options = default_settings(dir.path());
+    options.max_task_db_size = Byte::from_unit(500.0, ByteUnit::B).unwrap();
+
+    let server = Server::new_with_options(options).await.unwrap();
+
+    // the first task should be enqueued without issue
+    let (result, code) = server.create_index(json!({ "uid": "doggo" })).await;
+    snapshot!(code, @"202 Accepted");
+    snapshot!(json_string!(result, { ".enqueuedAt" => "[date]" }), @r###"
+    {
+      "taskUid": 0,
+      "indexUid": "doggo",
+      "status": "enqueued",
+      "type": "indexCreation",
+      "enqueuedAt": "[date]"
+    }
+    "###);
+
+    loop {
+        let (res, code) = server.create_index(json!({ "uid": "doggo" })).await;
+        if code == 422 {
+            break;
+        }
+        if res["taskUid"] == json!(null) {
+            panic!(
+                "Encountered the strange case:\n{}",
+                serde_json::to_string_pretty(&res).unwrap()
+            );
+        }
+    }
+
+    let (result, code) = server.create_index(json!({ "uid": "doggo" })).await;
+    snapshot!(code, @"422 Unprocessable Entity");
+    snapshot!(json_string!(result), @r###"
+    {
+      "message": "Meilisearch cannot receive write operations because the limit of the task database has been reached. Please delete tasks to continue performing write operations.",
+      "code": "no_space_left_on_device",
+      "type": "system",
+      "link": "https://docs.meilisearch.com/errors#no_space_left_on_device"
+    }
+    "###);
+
+    // But we should still be able to register tasks deletion IF they delete something
+    let (result, code) = server.delete_tasks("uids=*").await;
+    snapshot!(code, @"200 OK");
+    snapshot!(json_string!(result, { ".enqueuedAt" => "[date]", ".taskUid" => "uid" }), @r###"
+    {
+      "taskUid": "uid",
+      "indexUid": null,
+      "status": "enqueued",
+      "type": "taskDeletion",
+      "enqueuedAt": "[date]"
+    }
+    "###);
+
+    let result = server.wait_task(result["taskUid"].as_u64().unwrap()).await;
+    snapshot!(json_string!(result["status"]), @r###""succeeded""###);
+
+    // Now we should be able to register tasks again
+    let (result, code) = server.create_index(json!({ "uid": "doggo" })).await;
+    snapshot!(code, @"202 Accepted");
+    snapshot!(json_string!(result, { ".enqueuedAt" => "[date]", ".taskUid" => "uid" }), @r###"
+    {
+      "taskUid": "uid",
+      "indexUid": "doggo",
+      "status": "enqueued",
+      "type": "indexCreation",
+      "enqueuedAt": "[date]"
+    }
+    "###);
+
+    // we're going to fill up the queue once again
+    loop {
+        let (res, code) = server.delete_tasks("uids=0").await;
+        if code == 422 {
+            break;
+        }
+        if res["taskUid"] == json!(null) {
+            panic!(
+                "Encountered the strange case:\n{}",
+                serde_json::to_string_pretty(&res).unwrap()
+            );
+        }
+    }
+
+    // But we should NOT be able to register this task because it doesn't match any tasks
+    let (result, code) = server.delete_tasks("uids=0").await;
+    snapshot!(code, @"422 Unprocessable Entity");
+    snapshot!(json_string!(result), @r###"
+    {
+      "message": "Meilisearch cannot receive write operations because the limit of the task database has been reached. Please delete tasks to continue performing write operations.",
+      "code": "no_space_left_on_device",
+      "type": "system",
+      "link": "https://docs.meilisearch.com/errors#no_space_left_on_device"
+    }
+    "###);
+
+    // The deletion still works
+    let (result, code) = server.delete_tasks("uids=*").await;
+    snapshot!(code, @"200 OK");
+    snapshot!(json_string!(result, { ".enqueuedAt" => "[date]", ".taskUid" => "uid" }), @r###"
+    {
+      "taskUid": "uid",
+      "indexUid": null,
+      "status": "enqueued",
+      "type": "taskDeletion",
+      "enqueuedAt": "[date]"
+    }
+    "###);
+}
@@ -12,40 +12,40 @@ readme.workspace = true
 license.workspace = true
 
 [dependencies]
-bimap = { version = "0.6.2", features = ["serde"] }
+bimap = { version = "0.6.3", features = ["serde"] }
 bincode = "1.3.3"
-bstr = "1.0.1"
+bstr = "1.4.0"
 byteorder = "1.4.3"
-charabia = { version = "0.7.1", default-features = false }
+charabia = { version = "0.7.2", default-features = false }
 concat-arrays = "0.1.2"
-crossbeam-channel = "0.5.6"
+crossbeam-channel = "0.5.8"
 deserr = "0.5.0"
-either = "1.8.0"
+either = "1.8.1"
 flatten-serde-json = { path = "../flatten-serde-json" }
 fst = "0.4.7"
 fxhash = "0.2.1"
 geoutils = "0.5.1"
-grenad = { version = "0.4.3", default-features = false, features = ["tempfile"] }
+grenad = { version = "0.4.4", default-features = false, features = ["tempfile"] }
 heed = { git = "https://github.com/meilisearch/heed", tag = "v0.12.5", default-features = false, features = ["lmdb", "sync-read-txn"] }
 json-depth-checker = { path = "../json-depth-checker" }
 levenshtein_automata = { version = "0.2.1", features = ["fst_automaton"] }
-memmap2 = "0.5.7"
+memmap2 = "0.5.10"
 obkv = "0.2.0"
-once_cell = "1.15.0"
+once_cell = "1.17.1"
-ordered-float = "3.2.0"
+ordered-float = "3.6.0"
-rayon = "1.5.3"
+rayon = "1.7.0"
 roaring = "0.10.1"
-rstar = { version = "0.9.3", features = ["serde"] }
+rstar = { version = "0.10.0", features = ["serde"] }
-serde = { version = "1.0.145", features = ["derive"] }
+serde = { version = "1.0.160", features = ["derive"] }
-serde_json = { version = "1.0.85", features = ["preserve_order"] }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
 slice-group-by = "0.3.0"
 smallstr = { version = "0.3.0", features = ["serde"] }
 smallvec = "1.10.0"
 smartstring = "1.0.1"
-tempfile = "3.3.0"
+tempfile = "3.5.0"
-thiserror = "1.0.37"
+thiserror = "1.0.40"
-time = { version = "0.3.15", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
-uuid = { version = "1.1.2", features = ["v4"] }
+uuid = { version = "1.3.1", features = ["v4"] }
 
 filter-parser = { path = "../filter-parser" }
@@ -55,12 +55,12 @@ itertools = "0.10.5"
 # logging
 log = "0.4.17"
 logging_timer = "1.1.0"
-csv = "1.1.6"
+csv = "1.2.1"
 
 [dev-dependencies]
 mimalloc = { version = "0.1.29", default-features = false }
 big_s = "1.0.2"
-insta = "1.21.0"
+insta = "1.29.0"
 maplit = "1.0.2"
 md5 = "0.7.0"
 rand = {version = "0.8.5", features = ["small_rng"] }
@@ -90,3 +90,6 @@ korean = ["charabia/korean"]
 
 # allow thai specialized tokenization
 thai = ["charabia/thai"]
+
+# allow greek specialized tokenization
+greek = ["charabia/greek"]
@@ -82,6 +82,8 @@ pub mod db_name {
     pub const FIELD_ID_WORD_COUNT_DOCIDS: &str = "field-id-word-count-docids";
     pub const FACET_ID_F64_DOCIDS: &str = "facet-id-f64-docids";
     pub const FACET_ID_EXISTS_DOCIDS: &str = "facet-id-exists-docids";
+    pub const FACET_ID_IS_NULL_DOCIDS: &str = "facet-id-is-null-docids";
+    pub const FACET_ID_IS_EMPTY_DOCIDS: &str = "facet-id-is-empty-docids";
     pub const FACET_ID_STRING_DOCIDS: &str = "facet-id-string-docids";
     pub const FIELD_ID_DOCID_FACET_F64S: &str = "field-id-docid-facet-f64s";
     pub const FIELD_ID_DOCID_FACET_STRINGS: &str = "field-id-docid-facet-strings";
@@ -136,6 +138,10 @@ pub struct Index {
 
     /// Maps the facet field id and the docids for which this field exists
     pub facet_id_exists_docids: Database<FieldIdCodec, CboRoaringBitmapCodec>,
+    /// Maps the facet field id and the docids for which this field is set as null
+    pub facet_id_is_null_docids: Database<FieldIdCodec, CboRoaringBitmapCodec>,
+    /// Maps the facet field id and the docids for which this field is considered empty
+    pub facet_id_is_empty_docids: Database<FieldIdCodec, CboRoaringBitmapCodec>,
 
     /// Maps the facet field id and ranges of numbers with the docids that corresponds to them.
     pub facet_id_f64_docids: Database<FacetGroupKeyCodec<OrderedF64Codec>, FacetGroupValueCodec>,
@@ -184,6 +190,8 @@ impl Index {
         let facet_id_f64_docids = env.create_database(Some(FACET_ID_F64_DOCIDS))?;
         let facet_id_string_docids = env.create_database(Some(FACET_ID_STRING_DOCIDS))?;
         let facet_id_exists_docids = env.create_database(Some(FACET_ID_EXISTS_DOCIDS))?;
+        let facet_id_is_null_docids = env.create_database(Some(FACET_ID_IS_NULL_DOCIDS))?;
+        let facet_id_is_empty_docids = env.create_database(Some(FACET_ID_IS_EMPTY_DOCIDS))?;
 
         let field_id_docid_facet_f64s = env.create_database(Some(FIELD_ID_DOCID_FACET_F64S))?;
         let field_id_docid_facet_strings =
@@ -212,6 +220,8 @@ impl Index {
             facet_id_f64_docids,
             facet_id_string_docids,
             facet_id_exists_docids,
+            facet_id_is_null_docids,
+            facet_id_is_empty_docids,
             field_id_docid_facet_f64s,
             field_id_docid_facet_strings,
             documents,
@@ -844,6 +854,30 @@ impl Index {
         }
     }
 
+    /// Retrieve all the documents which contain this field id set as null
+    pub fn null_faceted_documents_ids(
+        &self,
+        rtxn: &RoTxn,
+        field_id: FieldId,
+    ) -> heed::Result<RoaringBitmap> {
+        match self.facet_id_is_null_docids.get(rtxn, &BEU16::new(field_id))? {
+            Some(docids) => Ok(docids),
+            None => Ok(RoaringBitmap::new()),
+        }
+    }
+
+    /// Retrieve all the documents which contain this field id and that is considered empty
+    pub fn empty_faceted_documents_ids(
+        &self,
+        rtxn: &RoTxn,
+        field_id: FieldId,
+    ) -> heed::Result<RoaringBitmap> {
+        match self.facet_id_is_empty_docids.get(rtxn, &BEU16::new(field_id))? {
+            Some(docids) => Ok(docids),
+            None => Ok(RoaringBitmap::new()),
+        }
+    }
+
     /// Retrieve all the documents which contain this field id
     pub fn exists_faceted_documents_ids(
         &self,
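
A hedged sketch of how these two new accessors could be used on the read side; the index handle and the "colour" field are assumptions made for the example, not part of this change:

    // Assumes an `Index` whose documents have a filterable "colour" field.
    fn count_null_and_empty(index: &Index) -> heed::Result<()> {
        let rtxn = index.read_txn()?;
        let fields = index.fields_ids_map(&rtxn)?;
        if let Some(fid) = fields.id("colour") {
            let null_docs = index.null_faceted_documents_ids(&rtxn, fid)?;
            let empty_docs = index.empty_faceted_documents_ids(&rtxn, fid)?;
            println!("{} documents have a null colour, {} an empty one", null_docs.len(), empty_docs.len());
        }
        Ok(())
    }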
@@ -211,6 +211,14 @@ impl<'a> Filter<'a> {
             Condition::Between { from, to } => {
                 (Included(from.parse_finite_float()?), Included(to.parse_finite_float()?))
             }
+            Condition::Null => {
+                let is_null = index.null_faceted_documents_ids(rtxn, field_id)?;
+                return Ok(is_null);
+            }
+            Condition::Empty => {
+                let is_empty = index.empty_faceted_documents_ids(rtxn, field_id)?;
+                return Ok(is_empty);
+            }
             Condition::Exists => {
                 let exist = index.exists_faceted_documents_ids(rtxn, field_id)?;
                 return Ok(exist);
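
These two arms back the new `IS NULL` and `IS EMPTY` filter operators (together with their `NOT` variants, per the updated error messages in the search tests above). A hedged example of a search payload exercising them, in the same `json!` style as the test suite; the field names are illustrative, not taken from this change:

    use serde_json::json;

    fn main() {
        // "colour" and "tags" are made-up filterable fields used only for illustration.
        let search_payload = json!({
            "q": "",
            "filter": "colour IS NULL OR tags IS EMPTY"
        });
        println!("{search_payload}");
    }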
@@ -276,6 +276,16 @@ pub fn snap_facet_id_exists_docids(index: &Index) -> String {
         &format!("{facet_id:<3} {}", display_bitmap(&docids))
     })
 }
+pub fn snap_facet_id_is_null_docids(index: &Index) -> String {
+    make_db_snap_from_iter!(index, facet_id_is_null_docids, |(facet_id, docids)| {
+        &format!("{facet_id:<3} {}", display_bitmap(&docids))
+    })
+}
+pub fn snap_facet_id_is_empty_docids(index: &Index) -> String {
+    make_db_snap_from_iter!(index, facet_id_is_empty_docids, |(facet_id, docids)| {
+        &format!("{facet_id:<3} {}", display_bitmap(&docids))
+    })
+}
 pub fn snap_facet_id_string_docids(index: &Index) -> String {
     make_db_snap_from_iter!(index, facet_id_string_docids, |(
         FacetGroupKey { field_id, level, left_bound },
@@ -503,6 +513,12 @@ macro_rules! full_snap_of_db {
     ($index:ident, facet_id_exists_docids) => {{
         $crate::snapshot_tests::snap_facet_id_exists_docids(&$index)
     }};
+    ($index:ident, facet_id_is_null_docids) => {{
+        $crate::snapshot_tests::snap_facet_id_is_null_docids(&$index)
+    }};
+    ($index:ident, facet_id_is_empty_docids) => {{
+        $crate::snapshot_tests::snap_facet_id_is_empty_docids(&$index)
+    }};
     ($index:ident, documents_ids) => {{
         $crate::snapshot_tests::snap_documents_ids(&$index)
     }};
@@ -36,6 +36,8 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
             facet_id_f64_docids,
             facet_id_string_docids,
             facet_id_exists_docids,
+            facet_id_is_null_docids,
+            facet_id_is_empty_docids,
             field_id_docid_facet_f64s,
             field_id_docid_facet_strings,
             documents,
@@ -90,6 +92,8 @@ impl<'t, 'u, 'i> ClearDocuments<'t, 'u, 'i> {
         script_language_docids.clear(self.wtxn)?;
         facet_id_f64_docids.clear(self.wtxn)?;
         facet_id_exists_docids.clear(self.wtxn)?;
+        facet_id_is_null_docids.clear(self.wtxn)?;
+        facet_id_is_empty_docids.clear(self.wtxn)?;
         facet_id_string_docids.clear(self.wtxn)?;
         field_id_docid_facet_f64s.clear(self.wtxn)?;
         field_id_docid_facet_strings.clear(self.wtxn)?;
@@ -247,6 +247,8 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
             field_id_docid_facet_strings: _,
             script_language_docids,
             facet_id_exists_docids,
+            facet_id_is_null_docids,
+            facet_id_is_empty_docids,
             documents,
         } = self.index;
 
@@ -445,12 +447,26 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
             &self.to_delete_docids,
         )?;
         // We delete the documents ids that are under the facet field id values.
-        remove_docids_from_facet_id_exists_docids(
+        remove_docids_from_facet_id_docids(
             self.wtxn,
             facet_id_exists_docids,
             &self.to_delete_docids,
         )?;
+
+        // We delete the documents ids that are under the facet field id values.
+        remove_docids_from_facet_id_docids(
+            self.wtxn,
+            facet_id_is_null_docids,
+            &self.to_delete_docids,
+        )?;
+
+        // We delete the documents ids that are under the facet field id values.
+        remove_docids_from_facet_id_docids(
+            self.wtxn,
+            facet_id_is_empty_docids,
+            &self.to_delete_docids,
+        )?;
 
         self.index.put_soft_deleted_documents_ids(self.wtxn, &RoaringBitmap::new())?;
 
         Ok(DetailedDocumentDeletionResult {
@@ -577,7 +593,7 @@ fn remove_docids_from_field_id_docid_facet_value(
     Ok(all_affected_facet_values)
 }
 
-fn remove_docids_from_facet_id_exists_docids<'a, C>(
+fn remove_docids_from_facet_id_docids<'a, C>(
     wtxn: &'a mut heed::RwTxn,
     db: &heed::Database<C, CboRoaringBitmapCodec>,
     to_remove: &RoaringBitmap,
@@ -181,7 +181,7 @@ fn json_to_string<'a>(value: &'a Value, buffer: &'a mut String) -> Option<&'a str> {
     fn inner(value: &Value, output: &mut String) -> bool {
         use std::fmt::Write;
         match value {
-            Value::Null => false,
+            Value::Null | Value::Object(_) => false,
             Value::Bool(boolean) => write!(output, "{}", boolean).is_ok(),
             Value::Number(number) => write!(output, "{}", number).is_ok(),
             Value::String(string) => write!(output, "{}", string).is_ok(),
@@ -196,23 +196,6 @@ fn json_to_string<'a>(value: &'a Value, buffer: &'a mut String) -> Option<&'a str> {
                 // check that at least one value was written
                 count != 0
             }
-            Value::Object(object) => {
-                let mut buffer = String::new();
-                let mut count = 0;
-                for (key, value) in object {
-                    buffer.clear();
-                    let _ = write!(&mut buffer, "{}: ", key);
-                    if inner(value, &mut buffer) {
-                        buffer.push_str(". ");
-                        // We write the "key: value. " pair only when
-                        // we are sure that the value can be written.
-                        output.push_str(&buffer);
-                        count += 1;
-                    }
-                }
-                // check that at least one value was written
-                count != 0
-            }
         }
     }
 
@@ -7,7 +7,7 @@ use std::mem::size_of;
 use heed::zerocopy::AsBytes;
 use heed::BytesEncode;
 use roaring::RoaringBitmap;
-use serde_json::Value;
+use serde_json::{from_slice, Value};
 
 use super::helpers::{create_sorter, keep_first, sorter_into_reader, GrenadParameters};
 use crate::error::InternalError;
@@ -15,6 +15,15 @@ use crate::facet::value_encoding::f64_into_bytes;
 use crate::update::index_documents::{create_writer, writer_into_reader};
 use crate::{CboRoaringBitmapCodec, DocumentId, FieldId, Result, BEU32, MAX_FACET_VALUE_LENGTH};
 
+/// The extracted facet values stored in grenad files by type.
+pub struct ExtractedFacetValues {
+    pub docid_fid_facet_numbers_chunk: grenad::Reader<File>,
+    pub docid_fid_facet_strings_chunk: grenad::Reader<File>,
+    pub fid_facet_is_null_docids_chunk: grenad::Reader<File>,
+    pub fid_facet_is_empty_docids_chunk: grenad::Reader<File>,
+    pub fid_facet_exists_docids_chunk: grenad::Reader<File>,
+}
+
 /// Extracts the facet values of each faceted field of each document.
 ///
 /// Returns the generated grenad reader containing the docid the fid and the orginal value as key
@@ -24,7 +33,7 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
     obkv_documents: grenad::Reader<R>,
     indexer: GrenadParameters,
     faceted_fields: &HashSet<FieldId>,
-) -> Result<(grenad::Reader<File>, grenad::Reader<File>, grenad::Reader<File>)> {
+) -> Result<ExtractedFacetValues> {
     let max_memory = indexer.max_memory_by_thread();
 
     let mut fid_docid_facet_numbers_sorter = create_sorter(
@@ -46,6 +55,8 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
     );
 
     let mut facet_exists_docids = BTreeMap::<FieldId, RoaringBitmap>::new();
+    let mut facet_is_null_docids = BTreeMap::<FieldId, RoaringBitmap>::new();
+    let mut facet_is_empty_docids = BTreeMap::<FieldId, RoaringBitmap>::new();
 
     let mut key_buffer = Vec::new();
     let mut cursor = obkv_documents.into_cursor()?;
@@ -69,11 +80,16 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
                 // For the other extraction tasks, prefix the key with the field_id and the document_id
                 key_buffer.extend_from_slice(docid_bytes);
 
-                let value =
-                    serde_json::from_slice(field_bytes).map_err(InternalError::SerdeJson)?;
+                let value = from_slice(field_bytes).map_err(InternalError::SerdeJson)?;
 
-                let (numbers, strings) = extract_facet_values(&value);
+                match extract_facet_values(&value) {
+                    FilterableValues::Null => {
+                        facet_is_null_docids.entry(field_id).or_default().insert(document);
+                    }
+                    FilterableValues::Empty => {
+                        facet_is_empty_docids.entry(field_id).or_default().insert(document);
+                    }
+                    FilterableValues::Values { numbers, strings } => {
                         // insert facet numbers in sorter
                         for number in numbers {
                             key_buffer.truncate(size_of::<FieldId>() + size_of::<DocumentId>());
@@ -81,21 +97,27 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
                             key_buffer.extend_from_slice(&value_bytes);
                             key_buffer.extend_from_slice(&number.to_be_bytes());
 
-                    fid_docid_facet_numbers_sorter.insert(&key_buffer, ().as_bytes())?;
+                            fid_docid_facet_numbers_sorter
+                                .insert(&key_buffer, ().as_bytes())?;
                         }
                     }
 
                         // insert normalized and original facet string in sorter
-                for (normalized, original) in strings.into_iter().filter(|(n, _)| !n.is_empty()) {
-                    let normalised_truncated_value: String = normalized
+                        for (normalized, original) in
+                            strings.into_iter().filter(|(n, _)| !n.is_empty())
+                        {
+                            let normalized_truncated_value: String = normalized
                                 .char_indices()
                                 .take_while(|(idx, _)| idx + 4 < MAX_FACET_VALUE_LENGTH)
                                 .map(|(_, c)| c)
                                 .collect();
 
                             key_buffer.truncate(size_of::<FieldId>() + size_of::<DocumentId>());
-                    key_buffer.extend_from_slice(normalised_truncated_value.as_bytes());
-                    fid_docid_facet_strings_sorter.insert(&key_buffer, original.as_bytes())?;
+                            key_buffer.extend_from_slice(normalized_truncated_value.as_bytes());
+                            fid_docid_facet_strings_sorter
+                                .insert(&key_buffer, original.as_bytes())?;
+                        }
+                    }
                 }
             }
         }
@@ -112,14 +134,48 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
     }
     let facet_exists_docids_reader = writer_into_reader(facet_exists_docids_writer)?;
 
-    Ok((
-        sorter_into_reader(fid_docid_facet_numbers_sorter, indexer)?,
-        sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?,
-        facet_exists_docids_reader,
-    ))
+    let mut facet_is_null_docids_writer = create_writer(
+        indexer.chunk_compression_type,
+        indexer.chunk_compression_level,
+        tempfile::tempfile()?,
+    );
+    for (fid, bitmap) in facet_is_null_docids.into_iter() {
+        let bitmap_bytes = CboRoaringBitmapCodec::bytes_encode(&bitmap).unwrap();
+        facet_is_null_docids_writer.insert(fid.to_be_bytes(), &bitmap_bytes)?;
+    }
+    let facet_is_null_docids_reader = writer_into_reader(facet_is_null_docids_writer)?;
+
+    let mut facet_is_empty_docids_writer = create_writer(
+        indexer.chunk_compression_type,
+        indexer.chunk_compression_level,
+        tempfile::tempfile()?,
+    );
+    for (fid, bitmap) in facet_is_empty_docids.into_iter() {
+        let bitmap_bytes = CboRoaringBitmapCodec::bytes_encode(&bitmap).unwrap();
+        facet_is_empty_docids_writer.insert(fid.to_be_bytes(), &bitmap_bytes)?;
+    }
+    let facet_is_empty_docids_reader = writer_into_reader(facet_is_empty_docids_writer)?;
+
+    Ok(ExtractedFacetValues {
+        docid_fid_facet_numbers_chunk: sorter_into_reader(fid_docid_facet_numbers_sorter, indexer)?,
+        docid_fid_facet_strings_chunk: sorter_into_reader(fid_docid_facet_strings_sorter, indexer)?,
+        fid_facet_is_null_docids_chunk: facet_is_null_docids_reader,
+        fid_facet_is_empty_docids_chunk: facet_is_empty_docids_reader,
+        fid_facet_exists_docids_chunk: facet_exists_docids_reader,
+    })
 }
 
-fn extract_facet_values(value: &Value) -> (Vec<f64>, Vec<(String, String)>) {
+/// Represent what a document field contains.
+enum FilterableValues {
+    /// Corresponds to the JSON `null` value.
+    Null,
+    /// Corresponds to either, an empty string `""`, an empty array `[]`, or an empty object `{}`.
+    Empty,
+    /// Represents all the numbers and strings values found in this document field.
+    Values { numbers: Vec<f64>, strings: Vec<(String, String)> },
+}
+
+fn extract_facet_values(value: &Value) -> FilterableValues {
     fn inner_extract_facet_values(
         value: &Value,
         can_recurse: bool,
@@ -149,9 +205,16 @@ fn extract_facet_values(value: &Value) -> (Vec<f64>, Vec<(String, String)>) {
         }
     }
 
-    let mut facet_number_values = Vec::new();
-    let mut facet_string_values = Vec::new();
-    inner_extract_facet_values(value, true, &mut facet_number_values, &mut facet_string_values);
-
-    (facet_number_values, facet_string_values)
+    match value {
+        Value::Null => FilterableValues::Null,
+        Value::String(s) if s.is_empty() => FilterableValues::Empty,
+        Value::Array(a) if a.is_empty() => FilterableValues::Empty,
+        Value::Object(o) if o.is_empty() => FilterableValues::Empty,
+        otherwise => {
+            let mut numbers = Vec::new();
+            let mut strings = Vec::new();
+            inner_extract_facet_values(otherwise, true, &mut numbers, &mut strings);
+            FilterableValues::Values { numbers, strings }
+        }
+    }
 }
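
This classification decides which of the three facet bitmaps a document lands in. Restated as a small, self-contained sketch (same rule as the match above; names are illustrative): `null` maps to `Null`, an empty string, array, or object maps to `Empty`, and everything else is flattened into numbers and strings.

    use serde_json::{json, Value};

    // Mirrors the classification in `extract_facet_values`; for illustration only.
    fn classify(value: &Value) -> &'static str {
        match value {
            Value::Null => "null",
            Value::String(s) if s.is_empty() => "empty",
            Value::Array(a) if a.is_empty() => "empty",
            Value::Object(o) if o.is_empty() => "empty",
            _ => "values",
        }
    }

    fn main() {
        assert_eq!(classify(&json!(null)), "null");
        assert_eq!(classify(&json!([])), "empty");
        // A non-empty object is not "empty"; its contents are extracted as values.
        assert_eq!(classify(&json!({"green": 1})), "values");
    }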
@@ -19,7 +19,7 @@ use rayon::prelude::*;
 use self::extract_docid_word_positions::extract_docid_word_positions;
 use self::extract_facet_number_docids::extract_facet_number_docids;
 use self::extract_facet_string_docids::extract_facet_string_docids;
-use self::extract_fid_docid_facet_values::extract_fid_docid_facet_values;
+use self::extract_fid_docid_facet_values::{extract_fid_docid_facet_values, ExtractedFacetValues};
 use self::extract_fid_word_count_docids::extract_fid_word_count_docids;
 use self::extract_geo_points::extract_geo_points;
 use self::extract_word_docids::extract_word_docids;
@@ -57,7 +57,8 @@ pub(crate) fn data_from_obkv_documents(
         .collect::<Result<()>>()?;
 
     #[allow(clippy::type_complexity)]
-    let result: Result<(Vec<_>, (Vec<_>, (Vec<_>, Vec<_>)))> = flattened_obkv_chunks
+    let result: Result<(Vec<_>, (Vec<_>, (Vec<_>, (Vec<_>, (Vec<_>, Vec<_>)))))> =
+        flattened_obkv_chunks
             .par_bridge()
             .map(|flattened_obkv_chunks| {
                 send_and_extract_flattened_documents_data(
@@ -78,7 +79,13 @@ pub(crate) fn data_from_obkv_documents(
         docid_word_positions_chunks,
         (
             docid_fid_facet_numbers_chunks,
-            (docid_fid_facet_strings_chunks, facet_exists_docids_chunks),
+            (
+                docid_fid_facet_strings_chunks,
+                (
+                    facet_is_null_docids_chunks,
+                    (facet_is_empty_docids_chunks, facet_exists_docids_chunks),
+                ),
+            ),
         ),
     ) = result?;
 
@@ -98,6 +105,38 @@ pub(crate) fn data_from_obkv_documents(
             });
         }
 
+        // merge facet_is_null_docids and send them as a typed chunk
+        {
+            let lmdb_writer_sx = lmdb_writer_sx.clone();
+            rayon::spawn(move || {
+                debug!("merge {} database", "facet-id-is-null-docids");
+                match facet_is_null_docids_chunks.merge(merge_cbo_roaring_bitmaps, &indexer) {
+                    Ok(reader) => {
+                        let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsNullDocids(reader)));
+                    }
+                    Err(e) => {
+                        let _ = lmdb_writer_sx.send(Err(e));
+                    }
+                }
+            });
+        }
+
+        // merge facet_is_empty_docids and send them as a typed chunk
+        {
+            let lmdb_writer_sx = lmdb_writer_sx.clone();
+            rayon::spawn(move || {
+                debug!("merge {} database", "facet-id-is-empty-docids");
+                match facet_is_empty_docids_chunks.merge(merge_cbo_roaring_bitmaps, &indexer) {
+                    Ok(reader) => {
+                        let _ = lmdb_writer_sx.send(Ok(TypedChunk::FieldIdFacetIsEmptyDocids(reader)));
+                    }
+                    Err(e) => {
+                        let _ = lmdb_writer_sx.send(Err(e));
+                    }
+                }
+            });
+        }
+
     spawn_extraction_task::<_, _, Vec<grenad::Reader<File>>>(
         docid_word_positions_chunks.clone(),
         indexer,
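
The two blocks just added follow the same merge-and-forward pattern already used for the other facet databases: merge the per-chunk grenad readers on the rayon pool, then push the result (or the error) through the LMDB writer channel. A generalized, hedged sketch of that pattern, where the closure stands in for the `.merge(...)` call and the `TypedChunk` wrapping:

    use crossbeam_channel::Sender;

    // Sketch only: run a merge closure on the rayon thread pool and forward its outcome.
    fn spawn_merge_task<T, E, F>(tx: Sender<Result<T, E>>, merge: F)
    where
        T: Send + 'static,
        E: Send + 'static,
        F: FnOnce() -> Result<T, E> + Send + 'static,
    {
        rayon::spawn(move || {
            // Ignore a closed channel, exactly like the `let _ = ... .send(...)` calls above.
            let _ = tx.send(merge());
        });
    }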
@@ -246,7 +285,10 @@ fn send_and_extract_flattened_documents_data(
     grenad::Reader<CursorClonableMmap>,
     (
         grenad::Reader<CursorClonableMmap>,
-        (grenad::Reader<CursorClonableMmap>, grenad::Reader<File>),
+        (
+            grenad::Reader<CursorClonableMmap>,
+            (grenad::Reader<File>, (grenad::Reader<File>, grenad::Reader<File>)),
+        ),
     ),
 )> {
     let flattened_documents_chunk =
@@ -292,11 +334,13 @@ fn send_and_extract_flattened_documents_data(
                 Ok(docid_word_positions_chunk)
             },
             || {
-                let (
+                let ExtractedFacetValues {
                     docid_fid_facet_numbers_chunk,
                     docid_fid_facet_strings_chunk,
+                    fid_facet_is_null_docids_chunk,
+                    fid_facet_is_empty_docids_chunk,
                     fid_facet_exists_docids_chunk,
-                ) = extract_fid_docid_facet_values(
+                } = extract_fid_docid_facet_values(
                     flattened_documents_chunk.clone(),
                     indexer,
                     faceted_fields,
@@ -320,7 +364,13 @@ fn send_and_extract_flattened_documents_data(
 
                 Ok((
                     docid_fid_facet_numbers_chunk,
-                    (docid_fid_facet_strings_chunk, fid_facet_exists_docids_chunk),
+                    (
+                        docid_fid_facet_strings_chunk,
+                        (
+                            fid_facet_is_null_docids_chunk,
+                            (fid_facet_is_empty_docids_chunk, fid_facet_exists_docids_chunk),
+                        ),
+                    ),
                 ))
             },
         );
@@ -1779,6 +1779,187 @@ mod tests {
         check_ok(&index);
     }
 
+    #[test]
+    fn index_documents_check_is_null_database() {
+        let content = || {
+            documents!([
+                {
+                    "id": 0,
+                    "colour": null,
+                },
+                {
+                    "id": 1,
+                    "colour": [null], // must not be returned
+                },
+                {
+                    "id": 6,
+                    "colour": {
+                        "green": null
+                    }
+                },
+                {
+                    "id": 7,
+                    "colour": {
+                        "green": {
+                            "blue": null
+                        }
+                    }
+                },
+                {
+                    "id": 8,
+                    "colour": 0,
+                },
+                {
+                    "id": 9,
+                    "colour": []
+                },
+                {
+                    "id": 10,
+                    "colour": {}
+                },
+                {
+                    "id": 12,
+                    "colour": [1]
+                },
+                {
+                    "id": 13
+                },
+                {
+                    "id": 14,
+                    "colour": {
+                        "green": 1
+                    }
+                },
+                {
+                    "id": 15,
+                    "colour": {
+                        "green": {
+                            "blue": []
+                        }
+                    }
+                }
+            ])
+        };
+
+        let check_ok = |index: &Index| {
+            let rtxn = index.read_txn().unwrap();
+            let facets = index.faceted_fields(&rtxn).unwrap();
+            assert_eq!(facets, hashset!(S("colour"), S("colour.green"), S("colour.green.blue")));
+
+            let colour_id = index.fields_ids_map(&rtxn).unwrap().id("colour").unwrap();
+            let colour_green_id = index.fields_ids_map(&rtxn).unwrap().id("colour.green").unwrap();
+            let colour_blue_id =
+                index.fields_ids_map(&rtxn).unwrap().id("colour.green.blue").unwrap();
+
+            let bitmap_null_colour =
+                index.facet_id_is_null_docids.get(&rtxn, &BEU16::new(colour_id)).unwrap().unwrap();
+            assert_eq!(bitmap_null_colour.into_iter().collect::<Vec<_>>(), vec![0]);
+
+            let bitmap_colour_green = index
+                .facet_id_is_null_docids
+                .get(&rtxn, &BEU16::new(colour_green_id))
+                .unwrap()
+                .unwrap();
+            assert_eq!(bitmap_colour_green.into_iter().collect::<Vec<_>>(), vec![2]);
+
+            let bitmap_colour_blue = index
+                .facet_id_is_null_docids
+                .get(&rtxn, &BEU16::new(colour_blue_id))
+                .unwrap()
+                .unwrap();
+            assert_eq!(bitmap_colour_blue.into_iter().collect::<Vec<_>>(), vec![3]);
+        };
+
+        let faceted_fields = hashset!(S("colour"));
+
+        let index = TempIndex::new();
+        index.add_documents(content()).unwrap();
+        index
+            .update_settings(|settings| {
+                settings.set_filterable_fields(faceted_fields.clone());
+            })
+            .unwrap();
+        check_ok(&index);
+
+        let index = TempIndex::new();
+        index
+            .update_settings(|settings| {
+                settings.set_filterable_fields(faceted_fields.clone());
+            })
+            .unwrap();
+        index.add_documents(content()).unwrap();
+        check_ok(&index);
+    }
+
+    #[test]
+    fn index_documents_check_is_empty_database() {
+        let content = || {
+            documents!([
+                {"id": 0, "tags": null },
+                {"id": 1, "tags": [null] },
+                {"id": 2, "tags": [] },
+                {"id": 3, "tags": ["hello","world"] },
+                {"id": 4, "tags": [""] },
+                {"id": 5 },
+                {"id": 6, "tags": {} },
+                {"id": 7, "tags": {"green": "cool"} },
+                {"id": 8, "tags": {"green": ""} },
+                {"id": 9, "tags": "" },
+                {"id": 10, "tags": { "green": null } },
+                {"id": 11, "tags": { "green": { "blue": null } } },
+                {"id": 12, "tags": { "green": { "blue": [] } } }
+            ])
+        };
+
+        let check_ok = |index: &Index| {
+            let rtxn = index.read_txn().unwrap();
+            let facets = index.faceted_fields(&rtxn).unwrap();
+            assert_eq!(facets, hashset!(S("tags"), S("tags.green"), S("tags.green.blue")));
+
+            let tags_id = index.fields_ids_map(&rtxn).unwrap().id("tags").unwrap();
+            let tags_green_id = index.fields_ids_map(&rtxn).unwrap().id("tags.green").unwrap();
+            let tags_blue_id = index.fields_ids_map(&rtxn).unwrap().id("tags.green.blue").unwrap();
+
+            let bitmap_empty_tags =
+                index.facet_id_is_empty_docids.get(&rtxn, &BEU16::new(tags_id)).unwrap().unwrap();
+            assert_eq!(bitmap_empty_tags.into_iter().collect::<Vec<_>>(), vec![2, 6, 9]);
+
+            let bitmap_tags_green = index
+                .facet_id_is_empty_docids
+                .get(&rtxn, &BEU16::new(tags_green_id))
+                .unwrap()
+                .unwrap();
+            assert_eq!(bitmap_tags_green.into_iter().collect::<Vec<_>>(), vec![8]);
+
+            let bitmap_tags_blue = index
+                .facet_id_is_empty_docids
+                .get(&rtxn, &BEU16::new(tags_blue_id))
+                .unwrap()
+                .unwrap();
+            assert_eq!(bitmap_tags_blue.into_iter().collect::<Vec<_>>(), vec![12]);
+        };
+
+        let faceted_fields = hashset!(S("tags"));
+
+        let index = TempIndex::new();
+        index.add_documents(content()).unwrap();
+        index
+            .update_settings(|settings| {
+                settings.set_filterable_fields(faceted_fields.clone());
+            })
+            .unwrap();
+        check_ok(&index);
+
+        let index = TempIndex::new();
+        index
+            .update_settings(|settings| {
+                settings.set_filterable_fields(faceted_fields.clone());
+            })
+            .unwrap();
+        index.add_documents(content()).unwrap();
+        check_ok(&index);
+    }
+
     #[test]
     fn primary_key_must_not_contain_floats() {
         let index = TempIndex::new_with_map_size(4096 * 100);
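Note on the two tests added above: a document enters the is-null bitmap of a facet key only when it stores a literal JSON null at exactly that key — `[null]` must not be returned for `colour`, and nulls nested under objects are tracked on the deeper keys (`colour.green`, `colour.green.blue`) instead. A minimal sketch of that classification using serde_json; the helper name `is_literal_null` is ours for illustration and does not appear in the diff:

    use serde_json::{json, Value};

    // Hypothetical helper mirroring what the test expectations imply:
    // a facet value is "null" only when the JSON value at that key is literally null.
    fn is_literal_null(v: &Value) -> bool {
        v.is_null()
    }

    fn main() {
        assert!(is_literal_null(&json!(null)));                // "colour": null   -> counted for "colour"
        assert!(!is_literal_null(&json!([null])));             // "colour": [null] -> must not be returned
        assert!(!is_literal_null(&json!({ "green": null })));  // counted for "colour.green", not "colour"
        assert!(!is_literal_null(&json!(0)));                  // plain values are never null
    }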
@@ -40,6 +40,8 @@ pub(crate) enum TypedChunk {
     FieldIdFacetStringDocids(grenad::Reader<File>),
     FieldIdFacetNumberDocids(grenad::Reader<File>),
     FieldIdFacetExistsDocids(grenad::Reader<File>),
+    FieldIdFacetIsNullDocids(grenad::Reader<File>),
+    FieldIdFacetIsEmptyDocids(grenad::Reader<File>),
     GeoPoints(grenad::Reader<File>),
     ScriptLanguageDocids(HashMap<(Script, Language), RoaringBitmap>),
 }
@@ -173,6 +175,28 @@ pub(crate) fn write_typed_chunk_into_index(
             )?;
             is_merged_database = true;
         }
+        TypedChunk::FieldIdFacetIsNullDocids(facet_id_is_null_docids) => {
+            append_entries_into_database(
+                facet_id_is_null_docids,
+                &index.facet_id_is_null_docids,
+                wtxn,
+                index_is_empty,
+                |value, _buffer| Ok(value),
+                merge_cbo_roaring_bitmaps,
+            )?;
+            is_merged_database = true;
+        }
+        TypedChunk::FieldIdFacetIsEmptyDocids(facet_id_is_empty_docids) => {
+            append_entries_into_database(
+                facet_id_is_empty_docids,
+                &index.facet_id_is_empty_docids,
+                wtxn,
+                index_is_empty,
+                |value, _buffer| Ok(value),
+                merge_cbo_roaring_bitmaps,
+            )?;
+            is_merged_database = true;
+        }
         TypedChunk::WordPairProximityDocids(word_pair_proximity_docids_iter) => {
             append_entries_into_database(
                 word_pair_proximity_docids_iter,
@@ -82,10 +82,23 @@ test_filter!(
     vec![Left(vec!["tag=red", "tag=green"]), Left(vec!["asc_desc_rank<3", "asc_desc_rank<1"])]
 );
 test_filter!(exists_filter_1, vec![Right("opt1 EXISTS")]);
+test_filter!(exists_filter_2, vec![Right("opt1.opt2 EXISTS")]);
 test_filter!(exists_filter_1_not, vec![Right("opt1 NOT EXISTS")]);
 test_filter!(exists_filter_1_not_alt, vec![Right("NOT opt1 EXISTS")]);
 test_filter!(exists_filter_1_double_not, vec![Right("NOT opt1 NOT EXISTS")]);
 
+test_filter!(null_filter_1, vec![Right("opt1 IS NULL")]);
+test_filter!(null_filter_2, vec![Right("opt1.opt2 IS NULL")]);
+test_filter!(null_filter_1_not, vec![Right("opt1 IS NOT NULL")]);
+test_filter!(null_filter_1_not_alt, vec![Right("NOT opt1 IS NULL")]);
+test_filter!(null_filter_1_double_not, vec![Right("NOT opt1 IS NOT NULL")]);
+
+test_filter!(empty_filter_1, vec![Right("opt1 IS EMPTY")]);
+test_filter!(empty_filter_2, vec![Right("opt1.opt2 IS EMPTY")]);
+test_filter!(empty_filter_1_not, vec![Right("opt1 IS NOT EMPTY")]);
+test_filter!(empty_filter_1_not_alt, vec![Right("NOT opt1 IS EMPTY")]);
+test_filter!(empty_filter_1_double_not, vec![Right("NOT opt1 IS NOT EMPTY")]);
+
 test_filter!(in_filter, vec![Right("tag_in IN[1, 2, 3, four, five]")]);
 test_filter!(not_in_filter, vec![Right("tag_in NOT IN[1, 2, 3, four, five]")]);
 test_filter!(not_not_in_filter, vec![Right("NOT tag_in NOT IN[1, 2, 3, four, five]")]);
@@ -201,6 +201,30 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
         } else if let Some(opt1) = &document.opt1 {
             id = contains_key_rec(opt1, "opt2").then(|| document.id.clone());
         }
+    } else if matches!(filter, "opt1 IS NULL" | "NOT opt1 IS NOT NULL") {
+        id = document.opt1.as_ref().map_or(false, |v| v.is_null()).then(|| document.id.clone());
+    } else if matches!(filter, "NOT opt1 IS NULL" | "opt1 IS NOT NULL") {
+        id = document.opt1.as_ref().map_or(true, |v| !v.is_null()).then(|| document.id.clone());
+    } else if matches!(filter, "opt1.opt2 IS NULL") {
+        if document.opt1opt2.as_ref().map_or(false, |v| v.is_null()) {
+            id = Some(document.id.clone());
+        } else if let Some(opt1) = &document.opt1 {
+            if !opt1.is_null() {
+                id = contains_null_rec(opt1, "opt2").then(|| document.id.clone());
+            }
+        }
+    } else if matches!(filter, "opt1 IS EMPTY" | "NOT opt1 IS NOT EMPTY") {
+        id = document.opt1.as_ref().map_or(false, is_empty_value).then(|| document.id.clone());
+    } else if matches!(filter, "NOT opt1 IS EMPTY" | "opt1 IS NOT EMPTY") {
+        id = document
+            .opt1
+            .as_ref()
+            .map_or(true, |v| !is_empty_value(v))
+            .then(|| document.id.clone());
+    } else if matches!(filter, "opt1.opt2 IS EMPTY") {
+        if document.opt1opt2.as_ref().map_or(false, is_empty_value) {
+            id = Some(document.id.clone());
+        }
     } else if matches!(
         filter,
         "tag_in IN[1, 2, 3, four, five]" | "NOT tag_in NOT IN[1, 2, 3, four, five]"
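The branches added above rely on two helpers introduced further down in this diff (`is_empty_value` and `contains_null_rec`). As a rough, self-contained illustration of the `opt1 IS NULL` case — with a stripped-down, hypothetical stand-in for the test suite's `TestDocument`, which in reality carries more fields — a document matches only when `opt1` is present and literally null:

    use serde_json::{json, Value};

    // Hypothetical, minimal stand-in for the test dataset's documents.
    struct Doc {
        id: String,
        opt1: Option<Value>,
    }

    // Mirrors the `opt1 IS NULL` branch above: keep the id only when
    // opt1 is present and literally null.
    fn matches_opt1_is_null(doc: &Doc) -> Option<String> {
        doc.opt1.as_ref().map_or(false, |v| v.is_null()).then(|| doc.id.clone())
    }

    fn main() {
        let docs = vec![
            Doc { id: "A".into(), opt1: Some(json!(null)) },
            Doc { id: "B".into(), opt1: Some(json!([null])) },
            Doc { id: "C".into(), opt1: None },
        ];
        let hits: Vec<_> = docs.iter().filter_map(matches_opt1_is_null).collect();
        assert_eq!(hits, vec!["A".to_string()]);
    }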
@@ -214,6 +238,15 @@ fn execute_filter(filter: &str, document: &TestDocument) -> Option<String> {
     id
 }
 
+pub fn is_empty_value(v: &serde_json::Value) -> bool {
+    match v {
+        serde_json::Value::String(s) => s.is_empty(),
+        serde_json::Value::Array(a) => a.is_empty(),
+        serde_json::Value::Object(o) => o.is_empty(),
+        _ => false,
+    }
+}
+
 pub fn contains_key_rec(v: &serde_json::Value, key: &str) -> bool {
     match v {
         serde_json::Value::Array(v) => {
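A quick, standalone sanity check of the helper added above (the function logic is copied from the hunk; the import and `main` wrapper are only for the demo): empty strings, arrays and objects count as empty, everything else does not:

    use serde_json::{json, Value};

    // Same logic as the `is_empty_value` helper in the hunk above.
    pub fn is_empty_value(v: &Value) -> bool {
        match v {
            Value::String(s) => s.is_empty(),
            Value::Array(a) => a.is_empty(),
            Value::Object(o) => o.is_empty(),
            _ => false,
        }
    }

    fn main() {
        // Empty string, empty array and empty object all count as "empty"...
        assert!(is_empty_value(&json!("")));
        assert!(is_empty_value(&json!([])));
        assert!(is_empty_value(&json!({})));
        // ...while null, numbers and non-empty containers do not.
        assert!(!is_empty_value(&json!(null)));
        assert!(!is_empty_value(&json!(0)));
        assert!(!is_empty_value(&json!([null])));
    }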
@@ -236,6 +269,28 @@ pub fn contains_key_rec(v: &serde_json::Value, key: &str) -> bool {
     }
 }
 
+pub fn contains_null_rec(v: &serde_json::Value, key: &str) -> bool {
+    match v {
+        serde_json::Value::Object(v) => {
+            for (k, v) in v.iter() {
+                if k == key && v.is_null() || contains_null_rec(v, key) {
+                    return true;
+                }
+            }
+            false
+        }
+        serde_json::Value::Array(v) => {
+            for v in v.iter() {
+                if contains_null_rec(v, key) {
+                    return true;
+                }
+            }
+            false
+        }
+        _ => false,
+    }
+}
+
 pub fn expected_filtered_ids(filters: Vec<Either<Vec<&str>, &str>>) -> HashSet<String> {
     let dataset: Vec<TestDocument> =
         serde_json::Deserializer::from_str(CONTENT).into_iter().map(|r| r.unwrap()).collect();
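Similarly for `contains_null_rec`, which recursively walks objects and arrays and reports whether the given key maps to a literal null anywhere below. A standalone sketch, again with the function body copied from the hunk above and only the demo scaffolding added:

    use serde_json::{json, Value};

    // Copy of the recursive helper from the hunk above.
    pub fn contains_null_rec(v: &Value, key: &str) -> bool {
        match v {
            Value::Object(v) => {
                for (k, v) in v.iter() {
                    if k == key && v.is_null() || contains_null_rec(v, key) {
                        return true;
                    }
                }
                false
            }
            Value::Array(v) => {
                for v in v.iter() {
                    if contains_null_rec(v, key) {
                        return true;
                    }
                }
                false
            }
            _ => false,
        }
    }

    fn main() {
        // "opt2" maps to null one level down: found.
        assert!(contains_null_rec(&json!({ "opt2": null }), "opt2"));
        // Also found when nested deeper or inside arrays.
        assert!(contains_null_rec(&json!({ "a": [{ "opt2": null }] }), "opt2"));
        // Not found when the key holds a non-null value or is absent.
        assert!(!contains_null_rec(&json!({ "opt2": 1 }), "opt2"));
        assert!(!contains_null_rec(&json!({ "other": null }), "opt2"));
    }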