From 457a473b72dce0d2de55e47ef69046b79a5ef22e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9mentine=20Urquizar=20-=20curqui?= Date: Mon, 28 Nov 2022 16:27:41 +0100 Subject: [PATCH 1/2] Bring back `release-v0.30.0` into `release-v0.30.0-temp` (final: into `main`) (#3145) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix error code of the "duplicate index found" error * Use the content of the ProcessingTasks in the tasks cancelation system * Change the missing_filters error code into missing_task_filters * WIP Introduce the invalid_task_uid error code * Use more precise error codes/message for the task routes + Allow star operator in delete/cancel tasks + rename originalQuery to originalFilters + Display error/canceled_by in task view even when they are = null + Rename task filter fields by using their plural forms + Prepare an error code for canceledBy filter + Only return global tasks if the API key action `index.*` is there * Add canceledBy task filter * Update tests following task API changes * Rename original_query to original_filters everywhere * Update more insta-snap tests * Make clippy happy They're a happy clip now. 
* Make rustfmt happy >:-( * Fix Index name parsing error message to fit the specification * Bump milli version to 0.35.1 * Fix the new error messages * fix the error messages and add tests * rename the error codes for the sake of consistency * refactor the way we send the cli information + add the analytics for the config file and ssl usage * Apply suggestions from code review Co-authored-by: Clément Renault * add a comment over the new infos structure * reformat, sorry @kero * Store analytics for the documents deletions * Add analytics on all the settings * Spawn threads with names * Spawn rayon threads with names * update the distinct attributes to the spec update * update the analytics on the search route * implements the analytics on the health and version routes * Fix task details serialization * Add the question mark to the task deletion query filter * Add the question mark to the task cancelation query filter * Fix tests * add analytics on the task route * Add all the missing fields of the new task query type * Create a new analytics for the task deletion * Create a new analytics for the task creation * batch the tasks seen events * Update the finite pagination analytics * add the analytics of the swap-indexes route * Stop removing the DB when failing to read it * Rename originalQuery into originalFilters * Rename matchedDocuments into providedIds * Add `workflow_dispatch` to flaky.yml * Bump grenad to 0.4.4 * Bump milli to version v0.37.0 * Don't multiply total memory returned by sysinfo anymore sysinfo now returns bytes rather than KB * Add a dispatch to the publish binaries workflow * Fix publish release CI * Don't use gold but the default linker * Always display details for the indexDeletion task * Fix the insta tests * refactorize the whole test suite 1. Make a call to assert_internally_consistent automatically when snapshotting the scheduler. There is no point in snapshotting something broken and expecting the dumb humans to notice. 2. 
Replace every possible call to assert_internally_consistent by a snapshot of the scheduler. It takes as many lines and ensures we never change something without noticing in any tests ever. 3. Name every snapshot: it's easier to debug when something goes wrong and easier to review in general. 4. Stop skipping breakpoints, it's too easy to miss something. Now you must explicitly show which path is the scheduler supposed to use. 5. Add a timeout on the channel.recv, it eases the process of writing tests, now when something fails you get a failure instead of a deadlock. * rebase on release-v0.30 * makes clippy happy * update the snapshots after a rebase * try to remove the flakiness of the failing test * Add more analytics on the ranking rules positions * Update the dump test to check for the dumpUid dumpCreation task details * send the ranking rules as a string because amplitude is too dumb to process an array as a single value * Display a null dumpUid until we computed the dump itself on disk * Update tests * Check if the master key is missing before returning an error Co-authored-by: Loïc Lecrenier Co-authored-by: bors[bot] <26634292+bors[bot]@users.noreply.github.com> Co-authored-by: Kerollmops Co-authored-by: ManyTheFish Co-authored-by: Tamo Co-authored-by: Louis Dureuil --- .github/workflows/publish-binaries.yml | 11 +- Cargo.lock | 32 +- dump/src/lib.rs | 7 +- dump/src/reader/compat/v5_to_v6.rs | 15 +- index-scheduler/src/batch.rs | 105 ++- index-scheduler/src/error.rs | 45 +- index-scheduler/src/index_mapper.rs | 35 +- index-scheduler/src/insta_snapshot.rs | 30 +- index-scheduler/src/lib.rs | 873 ++++++++++-------- .../cancel_processed.snap | 9 +- .../initial_tasks_enqueued.snap | 5 +- .../aborted_indexation.snap | 50 + .../cancel_mix_of_tasks/cancel_processed.snap | 12 +- .../first_task_processed.snap | 3 + ...rocessing_second_task_cancel_enqueued.snap | 5 +- .../aborted_indexation.snap | 40 + .../cancel_processed.snap | 10 +- .../cancel_task_registered.snap | 
40 + .../initial_task_processing.snap | 3 + .../registered_the_first_task.snap | 37 + .../cancel_processed.snap | 6 +- .../initial_task_processed.snap | 3 + .../registered_the_first_task.snap | 37 + .../all_tasks_processed.snap | 3 + .../{1.snap => after_register.snap} | 3 + .../{2.snap => after_the_batch_creation.snap} | 3 + ...snap => once_everything_is_processed.snap} | 3 + .../before_index_creation.snap | 46 + .../{2.snap => both_task_succeeded.snap} | 3 + .../registered_the_first_task.snap | 36 + ...1.snap => registered_the_second_task.snap} | 10 +- .../registered_the_third_task.snap | 43 + .../1.snap | 5 +- .../2.snap | 3 + .../document_addition_batch_created.snap | 3 + .../document_addition_failed.snap | 5 +- .../registered_the_first_task.snap | 37 + .../after_register.snap} | 5 +- .../index_creation_failed.snap | 3 + .../after_batch_succeeded.snap | 37 + .../after_failing_to_commit.snap | 37 + ...eeded_but_index_scheduler_not_updated.snap | 3 + .../registered_the_first_task.snap | 37 + ....snap => task_successfully_processed.snap} | 3 + .../after_batch_creation.snap | 36 + .../registered_the_first_task.snap | 36 + ...1.snap => registered_the_second_task.snap} | 10 +- .../registered_the_third_task.snap | 42 + .../index_creation_failed.snap | 3 + .../registered_the_first_task.snap | 36 + .../processed_the_first_task.snap | 45 + .../processed_the_second_task.snap | 47 + ...sed.snap => processed_the_third_task.snap} | 3 + .../registered_the_first_task.snap | 36 + .../registered_the_second_task.snap | 39 + .../registered_the_third_task.snap | 42 + .../first.snap | 46 + .../{all_tasks_processed.snap => fourth.snap} | 3 + .../registered_the_first_task.snap | 36 + .../registered_the_fourth_task.snap | 43 + .../registered_the_second_task.snap | 39 + .../registered_the_third_task.snap | 41 + .../second.snap | 48 + .../third.snap | 50 + .../lib.rs/query_tasks_canceled_by/start.snap | 53 ++ ...finished.snap => processed_all_tasks.snap} | 3 + 
.../registered_the_first_task.snap | 36 + .../registered_the_second_task.snap | 39 + ...rt.snap => registered_the_third_task.snap} | 3 + .../lib.rs/query_tasks_simple/end.snap | 3 + .../lib.rs/query_tasks_simple/start.snap | 3 + .../query_tasks_special_rules/start.snap | 3 + ...everything_is_succesfully_registered.snap} | 3 + .../lib.rs/swap_indexes/create_a.snap | 48 + .../lib.rs/swap_indexes/create_b.snap | 50 + .../lib.rs/swap_indexes/create_c.snap | 52 ++ ...ial_tasks_processed.snap => create_d.snap} | 3 + .../swap_indexes/first_swap_processed.snap | 3 + .../swap_indexes/first_swap_registered.snap | 57 ++ .../swap_indexes/second_swap_processed.snap | 3 + .../third_empty_swap_processed.snap | 3 + .../swap_indexes/two_swaps_registered.snap | 3 + .../after_the_index_creation.snap | 54 ++ .../first_swap_failed.snap | 3 + .../initial_tasks_processed.snap | 3 + .../initial_tasks_enqueued.snap | 3 + .../initial_tasks_processed.snap | 3 + .../task_deletion_processed.snap | 7 +- .../after_registering_the_task_deletion.snap | 46 + .../initial_tasks_enqueued.snap | 3 + .../initial_tasks_processed.snap | 3 + .../task_deletion_processed.snap | 5 +- .../initial_tasks_enqueued.snap | 3 + .../task_deletion_done.snap | 5 +- .../task_deletion_enqueued.snap | 5 +- .../task_deletion_processing.snap | 5 +- .../1.snap | 106 +-- .../3.snap | 45 - ...nap => after_processing_the_10_tasks.snap} | 3 + .../after_registering_the_10_tasks.snap | 70 ++ .../processed_the_first_task.snap | 39 + .../registered_the_first_task.snap | 36 + .../1.snap | 106 +-- .../4.snap | 45 - .../after_registering_the_10_tasks.snap | 70 ++ .../{3.snap => all_tasks_processed.snap} | 3 + .../{2.snap => five_tasks_processed.snap} | 3 + .../processed_the_first_task.snap | 39 + .../registered_the_first_task.snap | 36 + ...nap => after_processing_the_10_tasks.snap} | 23 +- ...ap => after_registering_the_10_tasks.snap} | 3 + ...ap => after_registering_the_10_tasks.snap} | 3 + .../{3.snap => all_tasks_processed.snap} | 
23 +- .../{2.snap => five_tasks_processed.snap} | 13 +- .../1.snap | 96 +- .../4.snap | 41 - .../after_registering_the_10_tasks.snap | 64 ++ .../{3.snap => all_tasks_processed.snap} | 5 +- .../{2.snap => only_first_task_failed.snap} | 5 +- .../1.snap | 106 +-- .../3.snap | 45 - .../after_registering_the_10_tasks.snap | 70 ++ .../{2.snap => all_tasks_processed.snap} | 3 + .../processed_the_first_task.snap | 39 + .../registered_the_first_task.snap | 36 + .../lib.rs/test_document_replace/1.snap | 3 + .../lib.rs/test_document_replace/2.snap | 3 + .../1.snap | 100 +- .../4.snap | 45 - .../after_registering_the_10_tasks.snap | 64 ++ .../{3.snap => all_tasks_processed.snap} | 3 + .../{2.snap => five_tasks_processed.snap} | 3 + .../lib.rs/test_document_update/1.snap | 3 + .../lib.rs/test_document_update/2.snap | 3 + .../1.snap | 100 +- .../4.snap | 45 - .../after_registering_the_10_tasks.snap | 64 ++ .../{3.snap => all_tasks_processed.snap} | 3 + .../{2.snap => five_tasks_processed.snap} | 3 + .../test_mixed_document_addition/1.snap | 100 +- .../test_mixed_document_addition/3.snap | 45 - .../after_registering_the_10_tasks.snap | 64 ++ .../all_tasks_processed.snap | 75 ++ .../{2.snap => five_tasks_processed.snap} | 3 + index-scheduler/src/utils.rs | 32 +- .../src/analytics/mock_analytics.rs | 6 +- meilisearch-http/src/analytics/mod.rs | 18 + .../src/analytics/segment_analytics.rs | 435 ++++++++- .../src/extractors/authentication/mod.rs | 3 + meilisearch-http/src/lib.rs | 39 +- meilisearch-http/src/option.rs | 31 +- meilisearch-http/src/routes/dump.rs | 9 - .../src/routes/indexes/documents.rs | 15 +- .../src/routes/indexes/settings.rs | 112 ++- meilisearch-http/src/routes/mod.rs | 15 +- meilisearch-http/src/routes/swap_indexes.rs | 13 +- meilisearch-http/src/routes/tasks.rs | 836 +++++++++++------ meilisearch-http/tests/auth/api_keys.rs | 13 +- meilisearch-http/tests/common/index.rs | 14 +- meilisearch-http/tests/common/server.rs | 8 +- .../tests/documents/add_documents.rs | 
2 +- .../tests/documents/update_documents.rs | 2 +- meilisearch-http/tests/dumps/mod.rs | 18 +- meilisearch-http/tests/index/create_index.rs | 2 +- meilisearch-http/tests/search/errors.rs | 4 +- .../tests/settings/get_settings.rs | 2 +- meilisearch-http/tests/tasks/mod.rs | 205 +++- meilisearch-types/Cargo.toml | 2 +- meilisearch-types/src/error.rs | 24 +- meilisearch-types/src/index_uid.rs | 6 +- meilisearch-types/src/keys.rs | 29 +- meilisearch-types/src/tasks.rs | 81 +- 172 files changed, 5107 insertions(+), 1712 deletions(-) create mode 100644 index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/aborted_indexation.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/cancel_processing_task/aborted_indexation.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/cancel_processing_task/cancel_task_registered.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/cancel_processing_task/registered_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/registered_the_first_task.snap rename index-scheduler/src/snapshots/lib.rs/document_addition/{1.snap => after_register.snap} (93%) rename index-scheduler/src/snapshots/lib.rs/document_addition/{2.snap => after_the_batch_creation.snap} (93%) rename index-scheduler/src/snapshots/lib.rs/document_addition/{3.snap => once_everything_is_processed.snap} (93%) create mode 100644 index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/before_index_creation.snap rename index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/{2.snap => both_task_succeeded.snap} (95%) create mode 100644 index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_first_task.snap rename index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/{1.snap => registered_the_second_task.snap} (91%) create mode 100644 
index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_third_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/registered_the_first_task.snap rename index-scheduler/src/snapshots/lib.rs/{fail_in_create_batch_for_index_creation/1.snap => fail_in_process_batch_for_index_creation/after_register.snap} (92%) create mode 100644 index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/after_batch_succeeded.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/after_failing_to_commit.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/registered_the_first_task.snap rename index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/{second_iteration.snap => task_successfully_processed.snap} (93%) create mode 100644 index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/after_batch_creation.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_first_task.snap rename index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/{1.snap => registered_the_second_task.snap} (89%) create mode 100644 index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_third_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/panic_in_process_batch_for_index_creation/registered_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_second_task.snap rename 
index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/{all_tasks_processed.snap => processed_the_third_task.snap} (94%) create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_second_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_third_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/first.snap rename index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/{all_tasks_processed.snap => fourth.snap} (95%) create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_fourth_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_second_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_third_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/second.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/third.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/query_tasks_canceled_by/start.snap rename index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/{finished.snap => processed_all_tasks.snap} (94%) create mode 100644 index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_second_task.snap rename index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/{start.snap => 
registered_the_third_task.snap} (94%) rename index-scheduler/src/snapshots/lib.rs/register/{1.snap => everything_is_succesfully_registered.snap} (96%) create mode 100644 index-scheduler/src/snapshots/lib.rs/swap_indexes/create_a.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/swap_indexes/create_b.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/swap_indexes/create_c.snap rename index-scheduler/src/snapshots/lib.rs/swap_indexes/{initial_tasks_processed.snap => create_d.snap} (95%) create mode 100644 index-scheduler/src/snapshots/lib.rs/swap_indexes/first_swap_registered.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/after_the_index_creation.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/after_registering_the_task_deletion.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/3.snap rename index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/{2.snap => after_processing_the_10_tasks.snap} (98%) create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/after_registering_the_10_tasks.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/processed_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/registered_the_first_task.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/4.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/after_registering_the_10_tasks.snap rename index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/{3.snap => all_tasks_processed.snap} (98%) rename 
index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/{2.snap => five_tasks_processed.snap} (98%) create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/processed_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/registered_the_first_task.snap rename index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/{2.snap => after_processing_the_10_tasks.snap} (60%) rename index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/{1.snap => after_registering_the_10_tasks.snap} (98%) rename index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/{1.snap => after_registering_the_10_tasks.snap} (98%) rename index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/{3.snap => all_tasks_processed.snap} (61%) rename index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/{2.snap => five_tasks_processed.snap} (76%) delete mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/4.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/after_registering_the_10_tasks.snap rename index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/{3.snap => all_tasks_processed.snap} (92%) rename index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/{2.snap => only_first_task_failed.snap} (93%) delete mode 100644 
index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/3.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/after_registering_the_10_tasks.snap rename index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/{2.snap => all_tasks_processed.snap} (98%) create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/processed_the_first_task.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/registered_the_first_task.snap delete mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/4.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/after_registering_the_10_tasks.snap rename index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/{3.snap => all_tasks_processed.snap} (98%) rename index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/{2.snap => five_tasks_processed.snap} (98%) delete mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/4.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/after_registering_the_10_tasks.snap rename index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/{3.snap => all_tasks_processed.snap} (98%) rename index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/{2.snap => five_tasks_processed.snap} (98%) delete mode 100644 index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/3.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/after_registering_the_10_tasks.snap create mode 100644 index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/all_tasks_processed.snap rename 
index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/{2.snap => five_tasks_processed.snap} (98%) diff --git a/.github/workflows/publish-binaries.yml b/.github/workflows/publish-binaries.yml index 98c912db8..9d06c35bc 100644 --- a/.github/workflows/publish-binaries.yml +++ b/.github/workflows/publish-binaries.yml @@ -18,7 +18,7 @@ jobs: # If yes, it means we are publishing an official release. # If no, we are releasing a RC, so no need to check the version. - name: Check tag format - if: github.event_name != 'schedule' + if: github.event_name == 'release' id: check-tag-format run: | escaped_tag=$(printf "%q" ${{ github.ref_name }}) @@ -29,7 +29,7 @@ jobs: echo "stable=false" >> $GITHUB_OUTPUT fi - name: Check release validity - if: github.event_name != 'schedule' && steps.check-tag-format.outputs.stable == 'true' + if: github.event_name == 'release' && steps.check-tag-format.outputs.stable == 'true' run: bash .github/scripts/check-release.sh publish: @@ -60,7 +60,7 @@ jobs: run: cargo build --release --locked # No need to upload binaries for dry run (cron) - name: Upload binaries to release - if: github.event_name != 'schedule' + if: github.event_name == 'release' uses: svenstaro/upload-release-action@v1-release with: repo_token: ${{ secrets.MEILI_BOT_GH_PAT }} @@ -98,7 +98,7 @@ jobs: args: --release --target ${{ matrix.target }} - name: Upload the binary to release # No need to upload binaries for dry run (cron) - if: github.event_name != 'schedule' + if: github.event_name == 'release' uses: svenstaro/upload-release-action@v1-release with: repo_token: ${{ secrets.MEILI_BOT_GH_PAT }} @@ -154,7 +154,6 @@ jobs: echo '[target.aarch64-unknown-linux-gnu]' >> ~/.cargo/config echo 'linker = "aarch64-linux-gnu-gcc"' >> ~/.cargo/config echo 'JEMALLOC_SYS_WITH_LG_PAGE=16' >> $GITHUB_ENV - echo RUSTFLAGS="-Clink-arg=-fuse-ld=gold" >> $GITHUB_ENV - name: Cargo build uses: actions-rs/cargo@v1 @@ -168,7 +167,7 @@ jobs: - name: Upload the binary to release # No need 
to upload binaries for dry run (cron) - if: github.event_name != 'schedule' + if: github.event_name == 'release' uses: svenstaro/upload-release-action@v1-release with: repo_token: ${{ secrets.MEILI_BOT_GH_PAT }} diff --git a/Cargo.lock b/Cargo.lock index 01eb9cd36..525b92976 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1332,8 +1332,8 @@ dependencies = [ [[package]] name = "filter-parser" -version = "0.35.0" -source = "git+https://github.com/meilisearch/milli.git#2e539249cb16f5e88be9f21ab712f8b4266cad36" +version = "0.37.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.0#57c9f03e514436a2cca799b2a28cd89247682be0" dependencies = [ "nom", "nom_locate", @@ -1351,8 +1351,8 @@ dependencies = [ [[package]] name = "flatten-serde-json" -version = "0.35.0" -source = "git+https://github.com/meilisearch/milli.git#2e539249cb16f5e88be9f21ab712f8b4266cad36" +version = "0.37.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.0#57c9f03e514436a2cca799b2a28cd89247682be0" dependencies = [ "serde_json", ] @@ -1542,9 +1542,9 @@ checksum = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574" [[package]] name = "grenad" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e46ef6273921c5c0cced57632b48c02a968a57f9af929ef78f980409c2e26f2" +checksum = "5232b2d157b7bf63d7abe1b12177039e58db2f29e377517c0cdee1578cca4c93" dependencies = [ "bytemuck", "byteorder", @@ -1616,8 +1616,8 @@ checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" [[package]] name = "heed" -version = "0.12.2" -source = "git+https://github.com/meilisearch/heed?tag=v0.12.3#076971765f4ce09591ed7e19e45ea817580a53e3" +version = "0.12.4" +source = "git+https://github.com/meilisearch/heed?tag=v0.12.4#7a4542bc72dd60ef0f508c89900ea292218223fb" dependencies = [ "byteorder", "heed-traits", @@ -1634,12 +1634,12 @@ dependencies = [ [[package]] name = "heed-traits" version = "0.7.0" -source = 
"git+https://github.com/meilisearch/heed?tag=v0.12.3#076971765f4ce09591ed7e19e45ea817580a53e3" +source = "git+https://github.com/meilisearch/heed?tag=v0.12.4#7a4542bc72dd60ef0f508c89900ea292218223fb" [[package]] name = "heed-types" version = "0.7.2" -source = "git+https://github.com/meilisearch/heed?tag=v0.12.3#076971765f4ce09591ed7e19e45ea817580a53e3" +source = "git+https://github.com/meilisearch/heed?tag=v0.12.4#7a4542bc72dd60ef0f508c89900ea292218223fb" dependencies = [ "bincode", "heed-traits", @@ -1897,8 +1897,8 @@ dependencies = [ [[package]] name = "json-depth-checker" -version = "0.35.0" -source = "git+https://github.com/meilisearch/milli.git#2e539249cb16f5e88be9f21ab712f8b4266cad36" +version = "0.37.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.0#57c9f03e514436a2cca799b2a28cd89247682be0" dependencies = [ "serde_json", ] @@ -2154,8 +2154,8 @@ checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d" [[package]] name = "lmdb-rkv-sys" -version = "0.15.0" -source = "git+https://github.com/meilisearch/lmdb-rs#8f0fe377a98d177cabbd056e777778f559df2bb6" +version = "0.15.1" +source = "git+https://github.com/meilisearch/lmdb-rs#5592bf5a812905cf0c633404ef8f8f4057112c65" dependencies = [ "cc", "libc", @@ -2416,8 +2416,8 @@ dependencies = [ [[package]] name = "milli" -version = "0.35.0" -source = "git+https://github.com/meilisearch/milli.git#2e539249cb16f5e88be9f21ab712f8b4266cad36" +version = "0.37.0" +source = "git+https://github.com/meilisearch/milli.git?tag=v0.37.0#57c9f03e514436a2cca799b2a28cd89247682be0" dependencies = [ "bimap", "bincode", diff --git a/dump/src/lib.rs b/dump/src/lib.rs index 0b34b16ba..5be680c12 100644 --- a/dump/src/lib.rs +++ b/dump/src/lib.rs @@ -87,7 +87,7 @@ pub struct TaskDump { pub finished_at: Option, } -// A `Kind` specific version made for the dump. If modified you may break the dump. +// A `Kind` specific version made for the dump. If modified you may break the dump. 
#[derive(Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub enum KindDump { @@ -125,7 +125,6 @@ pub enum KindDump { tasks: RoaringBitmap, }, DumpCreation { - dump_uid: String, keys: Vec, instance_uid: Option, }, @@ -188,8 +187,8 @@ impl From for KindDump { KindWithContent::TaskDeletion { query, tasks } => { KindDump::TasksDeletion { query, tasks } } - KindWithContent::DumpCreation { dump_uid, keys, instance_uid } => { - KindDump::DumpCreation { dump_uid, keys, instance_uid } + KindWithContent::DumpCreation { keys, instance_uid } => { + KindDump::DumpCreation { keys, instance_uid } } KindWithContent::SnapshotCreation => KindDump::SnapshotCreation, } diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs index 0030ad0d2..4d81e0552 100644 --- a/dump/src/reader/compat/v5_to_v6.rs +++ b/dump/src/reader/compat/v5_to_v6.rs @@ -119,11 +119,10 @@ impl CompatV5ToV6 { allow_index_creation, settings: Box::new(settings.into()), }, - v5::tasks::TaskContent::Dump { uid } => v6::Kind::DumpCreation { - dump_uid: uid, - keys: keys.clone(), - instance_uid, - }, + v5::tasks::TaskContent::Dump { uid: _ } => { + // in v6 we compute the dump_uid from the started_at processing time + v6::Kind::DumpCreation { keys: keys.clone(), instance_uid } + } }, canceled_by: None, details: task_view.details.map(|details| match details { @@ -143,13 +142,15 @@ impl CompatV5ToV6 { received_document_ids, deleted_documents, } => v6::Details::DocumentDeletion { - matched_documents: received_document_ids, + provided_ids: received_document_ids, deleted_documents, }, v5::Details::ClearAll { deleted_documents } => { v6::Details::ClearAll { deleted_documents } } - v5::Details::Dump { dump_uid } => v6::Details::Dump { dump_uid }, + v5::Details::Dump { dump_uid } => { + v6::Details::Dump { dump_uid: Some(dump_uid) } + } }), error: task_view.error.map(|e| e.into()), enqueued_at: task_view.enqueued_at, diff --git a/index-scheduler/src/batch.rs 
b/index-scheduler/src/batch.rs index 01ad50d54..02cfdb178 100644 --- a/index-scheduler/src/batch.rs +++ b/index-scheduler/src/batch.rs @@ -36,12 +36,13 @@ use meilisearch_types::settings::{apply_settings_to_builder, Settings, Unchecked use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status, Task}; use meilisearch_types::{compression, Index, VERSION_FILE_NAME}; use roaring::RoaringBitmap; +use time::macros::format_description; use time::OffsetDateTime; use uuid::Uuid; use crate::autobatcher::{self, BatchKind}; use crate::utils::{self, swap_index_uid_in_task}; -use crate::{Error, IndexScheduler, Result, TaskId}; +use crate::{Error, IndexScheduler, ProcessingTasks, Result, TaskId}; /// Represents a combination of tasks that can all be processed at the same time. /// @@ -50,15 +51,39 @@ use crate::{Error, IndexScheduler, Result, TaskId}; /// be processed. #[derive(Debug)] pub(crate) enum Batch { - TaskCancelation(Task), + TaskCancelation { + /// The task cancelation itself. + task: Task, + /// The date and time at which the previously processing tasks started. + previous_started_at: OffsetDateTime, + /// The list of tasks that were processing when this task cancelation appeared. 
+ previous_processing_tasks: RoaringBitmap, + }, TaskDeletion(Task), SnapshotCreation(Vec), Dump(Task), - IndexOperation { op: IndexOperation, must_create_index: bool }, - IndexCreation { index_uid: String, primary_key: Option, task: Task }, - IndexUpdate { index_uid: String, primary_key: Option, task: Task }, - IndexDeletion { index_uid: String, tasks: Vec, index_has_been_created: bool }, - IndexSwap { task: Task }, + IndexOperation { + op: IndexOperation, + must_create_index: bool, + }, + IndexCreation { + index_uid: String, + primary_key: Option, + task: Task, + }, + IndexUpdate { + index_uid: String, + primary_key: Option, + task: Task, + }, + IndexDeletion { + index_uid: String, + tasks: Vec, + index_has_been_created: bool, + }, + IndexSwap { + task: Task, + }, } /// A [batch](Batch) that combines multiple tasks operating on an index. @@ -115,7 +140,7 @@ impl Batch { /// Return the task ids associated with this batch. pub fn ids(&self) -> Vec { match self { - Batch::TaskCancelation(task) + Batch::TaskCancelation { task, .. } | Batch::TaskDeletion(task) | Batch::Dump(task) | Batch::IndexCreation { task, .. } @@ -394,9 +419,15 @@ impl IndexScheduler { // 1. we get the last task to cancel. if let Some(task_id) = to_cancel.max() { - return Ok(Some(Batch::TaskCancelation( - self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?, - ))); + // We retrieve the tasks that were processing before this tasks cancelation started. + // We must *not* reset the processing tasks before calling this method. + let ProcessingTasks { started_at, processing } = + &*self.processing_tasks.read().unwrap(); + return Ok(Some(Batch::TaskCancelation { + task: self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?, + previous_started_at: *started_at, + previous_processing_tasks: processing.clone(), + })); } // 2. 
we get the next task to delete @@ -482,7 +513,7 @@ impl IndexScheduler { self.breakpoint(crate::Breakpoint::InsideProcessBatch); } match batch { - Batch::TaskCancelation(mut task) => { + Batch::TaskCancelation { mut task, previous_started_at, previous_processing_tasks } => { // 1. Retrieve the tasks that matched the query at enqueue-time. let matched_tasks = if let KindWithContent::TaskCancelation { tasks, query: _ } = &task.kind { @@ -492,15 +523,20 @@ impl IndexScheduler { }; let mut wtxn = self.env.write_txn()?; - let canceled_tasks_content_uuids = - self.cancel_matched_tasks(&mut wtxn, task.uid, matched_tasks)?; + let canceled_tasks_content_uuids = self.cancel_matched_tasks( + &mut wtxn, + task.uid, + matched_tasks, + previous_started_at, + &previous_processing_tasks, + )?; task.status = Status::Succeeded; match &mut task.details { Some(Details::TaskCancelation { matched_tasks: _, canceled_tasks, - original_query: _, + original_filter: _, }) => { *canceled_tasks = Some(canceled_tasks_content_uuids.len() as u64); } @@ -544,7 +580,7 @@ impl IndexScheduler { Some(Details::TaskDeletion { matched_tasks: _, deleted_tasks, - original_query: _, + original_filter: _, }) => { *deleted_tasks = Some(deleted_tasks_count); } @@ -645,11 +681,9 @@ impl IndexScheduler { } Batch::Dump(mut task) => { let started_at = OffsetDateTime::now_utc(); - let (keys, instance_uid, dump_uid) = - if let KindWithContent::DumpCreation { keys, instance_uid, dump_uid } = - &task.kind - { - (keys, instance_uid, dump_uid) + let (keys, instance_uid) = + if let KindWithContent::DumpCreation { keys, instance_uid } = &task.kind { + (keys, instance_uid) } else { unreachable!(); }; @@ -736,12 +770,17 @@ impl IndexScheduler { index_dumper.settings(&settings)?; } + let dump_uid = started_at.format(format_description!( + "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]" + )).unwrap(); + let path = 
self.dumps_path.join(format!("{}.dump", dump_uid)); let file = File::create(path)?; dump.persist_to(BufWriter::new(file))?; // if we reached this step we can tell the scheduler we succeeded to dump ourselves. task.status = Status::Succeeded; + task.details = Some(Details::Dump { dump_uid: Some(dump_uid) }); Ok(vec![task]) } Batch::IndexOperation { op, must_create_index } => { @@ -1040,7 +1079,7 @@ impl IndexScheduler { for (task, documents) in tasks.iter_mut().zip(documents) { task.status = Status::Succeeded; task.details = Some(Details::DocumentDeletion { - matched_documents: documents.len(), + provided_ids: documents.len(), deleted_documents: Some(deleted_documents.min(documents.len() as u64)), }); } @@ -1152,6 +1191,7 @@ impl IndexScheduler { let mut affected_indexes = HashSet::new(); let mut affected_statuses = HashSet::new(); let mut affected_kinds = HashSet::new(); + let mut affected_canceled_by = RoaringBitmap::new(); for task_id in to_delete_tasks.iter() { let task = self.get_task(wtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?; @@ -1170,6 +1210,9 @@ impl IndexScheduler { if let Some(finished_at) = task.finished_at { utils::remove_task_datetime(wtxn, self.finished_at, finished_at, task.uid)?; } + if let Some(canceled_by) = task.canceled_by { + affected_canceled_by.insert(canceled_by); + } } for index in affected_indexes { @@ -1187,6 +1230,17 @@ impl IndexScheduler { for task in to_delete_tasks.iter() { self.all_tasks.delete(wtxn, &BEU32::new(task))?; } + for canceled_by in affected_canceled_by { + let canceled_by = BEU32::new(canceled_by); + if let Some(mut tasks) = self.canceled_by.get(wtxn, &canceled_by)? 
{ + tasks -= &to_delete_tasks; + if tasks.is_empty() { + self.canceled_by.delete(wtxn, &canceled_by)?; + } else { + self.canceled_by.put(wtxn, &canceled_by, &tasks)?; + } + } + } Ok(to_delete_tasks.len()) } @@ -1199,6 +1253,8 @@ impl IndexScheduler { wtxn: &mut RwTxn, cancel_task_id: TaskId, matched_tasks: &RoaringBitmap, + previous_started_at: OffsetDateTime, + previous_processing_tasks: &RoaringBitmap, ) -> Result> { let now = OffsetDateTime::now_utc(); @@ -1214,11 +1270,16 @@ impl IndexScheduler { if let Some(uuid) = task.content_uuid() { content_files_to_delete.push(uuid); } + if previous_processing_tasks.contains(task.uid) { + task.started_at = Some(previous_started_at); + } task.status = Status::Canceled; task.canceled_by = Some(cancel_task_id); task.finished_at = Some(now); + task.details = task.details.map(|d| d.to_failed()); self.update_task(wtxn, &task)?; } + self.canceled_by.put(wtxn, &BEU32::new(cancel_task_id), &tasks_to_cancel)?; Ok(content_files_to_delete) } diff --git a/index-scheduler/src/error.rs b/index-scheduler/src/error.rs index c31e3e97f..cfbf7a25e 100644 --- a/index-scheduler/src/error.rs +++ b/index-scheduler/src/error.rs @@ -1,4 +1,5 @@ use meilisearch_types::error::{Code, ErrorCode}; +use meilisearch_types::tasks::{Kind, Status}; use meilisearch_types::{heed, milli}; use thiserror::Error; @@ -27,11 +28,41 @@ pub enum Error { SwapDuplicateIndexesFound(Vec), #[error("Corrupted dump.")] CorruptedDump, + #[error( + "Task `{field}` `{date}` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format." + )] + InvalidTaskDate { field: String, date: String }, + #[error("Task uid `{task_uid}` is invalid. It should only contain numeric characters.")] + InvalidTaskUids { task_uid: String }, + #[error( + "Task status `{status}` is invalid. 
Available task statuses are {}.", + enum_iterator::all::() + .map(|s| format!("`{s}`")) + .collect::>() + .join(", ") + )] + InvalidTaskStatuses { status: String }, + #[error( + "Task type `{type_}` is invalid. Available task types are {}", + enum_iterator::all::() + .map(|s| format!("`{s}`")) + .collect::>() + .join(", ") + )] + InvalidTaskTypes { type_: String }, + #[error( + "Task canceledBy `{canceled_by}` is invalid. It should only contains numeric characters separated by `,` character." + )] + InvalidTaskCanceledBy { canceled_by: String }, + #[error( + "{index_uid} is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_)." + )] + InvalidIndexUid { index_uid: String }, #[error("Task `{0}` not found.")] TaskNotFound(TaskId), - #[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uid`, `indexUid`, `status`, `type`.")] + #[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")] TaskDeletionWithEmptyQuery, - #[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uid`, `indexUid`, `status`, `type`.")] + #[error("Query parameters to filter the tasks to cancel are missing. 
Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")] TaskCancelationWithEmptyQuery, #[error(transparent)] @@ -69,8 +100,14 @@ impl ErrorCode for Error { Error::IndexNotFound(_) => Code::IndexNotFound, Error::IndexesNotFound(_) => Code::IndexNotFound, Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists, - Error::SwapDuplicateIndexesFound(_) => Code::BadRequest, - Error::SwapDuplicateIndexFound(_) => Code::BadRequest, + Error::SwapDuplicateIndexesFound(_) => Code::DuplicateIndexFound, + Error::SwapDuplicateIndexFound(_) => Code::DuplicateIndexFound, + Error::InvalidTaskDate { .. } => Code::InvalidTaskDateFilter, + Error::InvalidTaskUids { .. } => Code::InvalidTaskUidsFilter, + Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatusesFilter, + Error::InvalidTaskTypes { .. } => Code::InvalidTaskTypesFilter, + Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledByFilter, + Error::InvalidIndexUid { .. } => Code::InvalidIndexUid, Error::TaskNotFound(_) => Code::TaskNotFound, Error::TaskDeletionWithEmptyQuery => Code::TaskDeletionWithEmptyQuery, Error::TaskCancelationWithEmptyQuery => Code::TaskCancelationWithEmptyQuery, diff --git a/index-scheduler/src/index_mapper.rs b/index-scheduler/src/index_mapper.rs index b75267927..8869591f7 100644 --- a/index-scheduler/src/index_mapper.rs +++ b/index-scheduler/src/index_mapper.rs @@ -126,24 +126,27 @@ impl IndexMapper { let index_map = self.index_map.clone(); let index_path = self.base_path.join(uuid.to_string()); let index_name = name.to_string(); - thread::spawn(move || { - // We first wait to be sure that the previously opened index is effectively closed. - // This can take a lot of time, this is why we do that in a seperate thread. 
- if let Some(closing_event) = closing_event { - closing_event.wait(); - } + thread::Builder::new() + .name(String::from("index_deleter")) + .spawn(move || { + // We first wait to be sure that the previously opened index is effectively closed. + // This can take a lot of time, this is why we do that in a seperate thread. + if let Some(closing_event) = closing_event { + closing_event.wait(); + } - // Then we remove the content from disk. - if let Err(e) = fs::remove_dir_all(&index_path) { - error!( - "An error happened when deleting the index {} ({}): {}", - index_name, uuid, e - ); - } + // Then we remove the content from disk. + if let Err(e) = fs::remove_dir_all(&index_path) { + error!( + "An error happened when deleting the index {} ({}): {}", + index_name, uuid, e + ); + } - // Finally we remove the entry from the index map. - assert!(matches!(index_map.write().unwrap().remove(&uuid), Some(BeingDeleted))); - }); + // Finally we remove the entry from the index map. + assert!(matches!(index_map.write().unwrap().remove(&uuid), Some(BeingDeleted))); + }) + .unwrap(); Ok(()) } diff --git a/index-scheduler/src/insta_snapshot.rs b/index-scheduler/src/insta_snapshot.rs index 50846c555..0f0c9953a 100644 --- a/index-scheduler/src/insta_snapshot.rs +++ b/index-scheduler/src/insta_snapshot.rs @@ -10,6 +10,8 @@ use crate::index_mapper::IndexMapper; use crate::{IndexScheduler, Kind, Status, BEI128}; pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String { + scheduler.assert_internally_consistent(); + let IndexScheduler { autobatching_enabled, must_stop_processing: _, @@ -20,6 +22,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String { status, kind, index_tasks, + canceled_by, enqueued_at, started_at, finished_at, @@ -64,6 +67,10 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String { snap.push_str(&snapshot_index_mapper(&rtxn, index_mapper)); 
snap.push_str("\n----------------------------------------------------------------------\n"); + snap.push_str("### Canceled By:\n"); + snap.push_str(&snapshot_canceled_by(&rtxn, *canceled_by)); + snap.push_str("\n----------------------------------------------------------------------\n"); + snap.push_str("### Enqueued At:\n"); snap.push_str(&snapshot_date_db(&rtxn, *enqueued_at)); snap.push_str("----------------------------------------------------------------------\n"); @@ -170,7 +177,7 @@ fn snapshot_details(d: &Details) -> String { format!("{{ primary_key: {primary_key:?} }}") } Details::DocumentDeletion { - matched_documents: received_document_ids, + provided_ids: received_document_ids, deleted_documents, } => format!("{{ received_document_ids: {received_document_ids}, deleted_documents: {deleted_documents:?} }}"), Details::ClearAll { deleted_documents } => { @@ -179,16 +186,16 @@ fn snapshot_details(d: &Details) -> String { Details::TaskCancelation { matched_tasks, canceled_tasks, - original_query, + original_filter, } => { - format!("{{ matched_tasks: {matched_tasks:?}, canceled_tasks: {canceled_tasks:?}, original_query: {original_query:?} }}") + format!("{{ matched_tasks: {matched_tasks:?}, canceled_tasks: {canceled_tasks:?}, original_filter: {original_filter:?} }}") } Details::TaskDeletion { matched_tasks, deleted_tasks, - original_query, + original_filter, } => { - format!("{{ matched_tasks: {matched_tasks:?}, deleted_tasks: {deleted_tasks:?}, original_query: {original_query:?} }}") + format!("{{ matched_tasks: {matched_tasks:?}, deleted_tasks: {deleted_tasks:?}, original_filter: {original_filter:?} }}") }, Details::Dump { dump_uid } => { format!("{{ dump_uid: {dump_uid:?} }}") @@ -231,7 +238,18 @@ pub fn snapshot_index_tasks(rtxn: &RoTxn, db: Database) } snap } - +pub fn snapshot_canceled_by( + rtxn: &RoTxn, + db: Database, RoaringBitmapCodec>, +) -> String { + let mut snap = String::new(); + let iter = db.iter(rtxn).unwrap(); + for next in iter { + let 
(kind, task_ids) = next.unwrap(); + writeln!(snap, "{kind} {}", snapshot_bitmap(&task_ids)).unwrap(); + } + snap +} pub fn snapshot_index_mapper(rtxn: &RoTxn, mapper: &IndexMapper) -> String { let names = mapper.indexes(rtxn).unwrap().into_iter().map(|(n, _)| n).collect::>(); format!("{names:?}") diff --git a/index-scheduler/src/lib.rs b/index-scheduler/src/lib.rs index 1807bdb40..9e32a40b8 100644 --- a/index-scheduler/src/lib.rs +++ b/index-scheduler/src/lib.rs @@ -71,7 +71,7 @@ pub struct Query { /// The minimum [task id](`meilisearch_types::tasks::Task::uid`) to be matched pub from: Option, /// The allowed [statuses](`meilisearch_types::tasks::Task::status`) of the matched tasls - pub status: Option>, + pub statuses: Option>, /// The allowed [kinds](meilisearch_types::tasks::Kind) of the matched tasks. /// /// The kind of a task is given by: @@ -81,12 +81,14 @@ pub struct Query { /// task.kind.as_kind() /// # } /// ``` - pub kind: Option>, + pub types: Option>, /// The allowed [index ids](meilisearch_types::tasks::Task::index_uid) of the matched tasks - pub index_uid: Option>, + pub index_uids: Option>, /// The [task ids](`meilisearch_types::tasks::Task::uid`) to be matched - pub uid: Option>, - + pub uids: Option>, + /// The [task ids](`meilisearch_types::tasks::Task::uid`) of the [`TaskCancelation`](meilisearch_types::tasks::Task::Kind::TaskCancelation) tasks + /// that canceled the matched tasks. + pub canceled_by: Option>, /// Exclusive upper bound of the matched tasks' [`enqueued_at`](meilisearch_types::tasks::Task::enqueued_at) field. pub before_enqueued_at: Option, /// Exclusive lower bound of the matched tasks' [`enqueued_at`](meilisearch_types::tasks::Task::enqueued_at) field. 
@@ -110,10 +112,11 @@ impl Query { Query { limit: None, from: None, - status: None, - kind: None, - index_uid: None, - uid: None, + statuses: None, + types: None, + index_uids: None, + uids: None, + canceled_by: None, before_enqueued_at: None, after_enqueued_at: None, before_started_at: None, @@ -126,9 +129,9 @@ impl Query { /// Add an [index id](meilisearch_types::tasks::Task::index_uid) to the list of permitted indexes. pub fn with_index(self, index_uid: String) -> Self { - let mut index_vec = self.index_uid.unwrap_or_default(); + let mut index_vec = self.index_uids.unwrap_or_default(); index_vec.push(index_uid); - Self { index_uid: Some(index_vec), ..self } + Self { index_uids: Some(index_vec), ..self } } } @@ -152,13 +155,12 @@ impl ProcessingTasks { self.processing = processing; } - /// Set the processing tasks to an empty list. - fn stop_processing_at(&mut self, stopped_at: OffsetDateTime) { - self.started_at = stopped_at; + /// Set the processing tasks to an empty list + fn stop_processing(&mut self) { self.processing = RoaringBitmap::new(); } - /// Returns `true` if there, at least, is one task that is currently processing we must stop. + /// Returns `true` if there, at least, is one task that is currently processing that we must stop. fn must_cancel_processing_tasks(&self, canceled_tasks: &RoaringBitmap) -> bool { !self.processing.is_disjoint(canceled_tasks) } @@ -187,6 +189,7 @@ mod db_name { pub const STATUS: &str = "status"; pub const KIND: &str = "kind"; pub const INDEX_TASKS: &str = "index-tasks"; + pub const CANCELED_BY: &str = "canceled_by"; pub const ENQUEUED_AT: &str = "enqueued-at"; pub const STARTED_AT: &str = "started-at"; pub const FINISHED_AT: &str = "finished-at"; @@ -195,6 +198,9 @@ mod db_name { #[cfg(test)] #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum Breakpoint { + // this state is only encountered while creating the scheduler in the test suite. 
+ Init, + Start, BatchCreated, BeforeProcessing, @@ -258,6 +264,9 @@ pub struct IndexScheduler { /// Store the tasks associated to an index. pub(crate) index_tasks: Database, + /// Store the tasks that were canceled by a task uid + pub(crate) canceled_by: Database, RoaringBitmapCodec>, + /// Store the task ids of tasks which were enqueued at a specific date pub(crate) enqueued_at: Database, CboRoaringBitmapCodec>, @@ -318,6 +327,7 @@ impl IndexScheduler { status: self.status, kind: self.kind, index_tasks: self.index_tasks, + canceled_by: self.canceled_by, enqueued_at: self.enqueued_at, started_at: self.started_at, finished_at: self.finished_at, @@ -351,7 +361,7 @@ impl IndexScheduler { std::fs::create_dir_all(&options.dumps_path)?; let env = heed::EnvOpenOptions::new() - .max_dbs(9) + .max_dbs(10) .map_size(options.task_db_size) .open(options.tasks_path)?; let file_store = FileStore::new(&options.update_file_path)?; @@ -365,6 +375,7 @@ impl IndexScheduler { status: env.create_database(Some(db_name::STATUS))?, kind: env.create_database(Some(db_name::KIND))?, index_tasks: env.create_database(Some(db_name::INDEX_TASKS))?, + canceled_by: env.create_database(Some(db_name::CANCELED_BY))?, enqueued_at: env.create_database(Some(db_name::ENQUEUED_AT))?, started_at: env.create_database(Some(db_name::STARTED_AT))?, finished_at: env.create_database(Some(db_name::FINISHED_AT))?, @@ -405,28 +416,36 @@ impl IndexScheduler { /// only once per index scheduler. fn run(&self) { let run = self.private_clone(); + std::thread::Builder::new() + .name(String::from("scheduler")) + .spawn(move || { + #[cfg(test)] + run.breakpoint(Breakpoint::Init); - std::thread::spawn(move || loop { - run.wake_up.wait(); + loop { + run.wake_up.wait(); - match run.tick() { - Ok(0) => (), - Ok(_) => run.wake_up.signal(), - Err(e) => { - log::error!("{}", e); - // Wait one second when an irrecoverable error occurs. 
- if matches!( - e, - Error::CorruptedTaskQueue - | Error::TaskDatabaseUpdate(_) - | Error::HeedTransaction(_) - | Error::CreateBatch(_) - ) { - std::thread::sleep(Duration::from_secs(1)); + match run.tick() { + Ok(0) => (), + Ok(_) => run.wake_up.signal(), + Err(e) => { + log::error!("{}", e); + // Wait one second when an irrecoverable error occurs. + if matches!( + e, + Error::CorruptedTaskQueue + | Error::TaskDatabaseUpdate(_) + | Error::HeedTransaction(_) + | Error::CreateBatch(_) + ) { + std::thread::sleep(Duration::from_secs(1)); + } + run.wake_up.signal(); + } } } - } - }); + }) + .unwrap(); } pub fn indexer_config(&self) -> &IndexerConfig { @@ -450,8 +469,9 @@ impl IndexScheduler { /// Return the task ids matched by the given query from the index scheduler's point of view. pub(crate) fn get_task_ids(&self, rtxn: &RoTxn, query: &Query) -> Result { - let ProcessingTasks { started_at: started_at_processing, processing: processing_tasks } = - self.processing_tasks.read().unwrap().clone(); + let ProcessingTasks { + started_at: started_at_processing, processing: processing_tasks, .. + } = self.processing_tasks.read().unwrap().clone(); let mut tasks = self.all_task_ids(rtxn)?; @@ -459,7 +479,7 @@ impl IndexScheduler { tasks.remove_range(from.saturating_add(1)..); } - if let Some(status) = &query.status { + if let Some(status) = &query.statuses { let mut status_tasks = RoaringBitmap::new(); for status in status { match status { @@ -476,12 +496,22 @@ impl IndexScheduler { tasks &= status_tasks; } - if let Some(uids) = &query.uid { + if let Some(uids) = &query.uids { let uids = RoaringBitmap::from_iter(uids); tasks &= &uids; } - if let Some(kind) = &query.kind { + if let Some(canceled_by) = &query.canceled_by { + for cancel_task_uid in canceled_by { + if let Some(canceled_by_uid) = + self.canceled_by.get(rtxn, &BEU32::new(*cancel_task_uid))? 
+ { + tasks &= canceled_by_uid; + } + } + } + + if let Some(kind) = &query.types { let mut kind_tasks = RoaringBitmap::new(); for kind in kind { kind_tasks |= self.get_kind(rtxn, *kind)?; @@ -489,7 +519,7 @@ impl IndexScheduler { tasks &= &kind_tasks; } - if let Some(index) = &query.index_uid { + if let Some(index) = &query.index_uids { let mut index_tasks = RoaringBitmap::new(); for index in index { index_tasks |= self.index_tasks(rtxn, index)?; @@ -591,9 +621,9 @@ impl IndexScheduler { ) -> Result { let mut tasks = self.get_task_ids(rtxn, query)?; - // If the query contains a list of `index_uid`, then we must exclude all the kind that - // arn't associated to one and only one index. - if query.index_uid.is_some() { + // If the query contains a list of index uid or there is a finite list of authorized indexes, + // then we must exclude all the kinds that aren't associated to one and only one index. + if query.index_uids.is_some() || authorized_indexes.is_some() { for kind in enum_iterator::all::().filter(|kind| !kind.related_to_one_index()) { tasks -= self.get_kind(rtxn, kind)?; } @@ -805,8 +835,8 @@ impl IndexScheduler { KindDump::TasksDeletion { query, tasks } => { KindWithContent::TaskDeletion { query, tasks } } - KindDump::DumpCreation { dump_uid, keys, instance_uid } => { - KindWithContent::DumpCreation { dump_uid, keys, instance_uid } + KindDump::DumpCreation { keys, instance_uid } => { + KindWithContent::DumpCreation { keys, instance_uid } } KindDump::SnapshotCreation => KindWithContent::SnapshotCreation, }, @@ -907,7 +937,10 @@ impl IndexScheduler { // 2. 
Process the tasks let res = { let cloned_index_scheduler = self.private_clone(); - let handle = std::thread::spawn(move || cloned_index_scheduler.process_batch(batch)); + let handle = std::thread::Builder::new() + .name(String::from("batch-operation")) + .spawn(move || cloned_index_scheduler.process_batch(batch)) + .unwrap(); handle.join().unwrap_or(Err(Error::ProcessBatchPanicked)) }; @@ -921,6 +954,7 @@ impl IndexScheduler { Ok(tasks) => { #[cfg(test)] self.breakpoint(Breakpoint::ProcessBatchSucceeded); + #[allow(unused_variables)] for (i, mut task) in tasks.into_iter().enumerate() { task.started_at = Some(started_at); @@ -948,6 +982,12 @@ impl IndexScheduler { #[cfg(test)] self.breakpoint(Breakpoint::AbortedIndexation); wtxn.abort().map_err(Error::HeedTransaction)?; + + // We make sure that we don't call `stop_processing` on the `processing_tasks`, + // this is because we want to let the next tick call `create_next_batch` and keep + // the `started_at` date times and `processings` of the current processing tasks. + // This date time is used by the task cancelation to store the right `started_at` + // date in the task on disk. return Ok(0); } // In case of a failure we must get back and patch all the tasks with the error. 
@@ -964,6 +1004,7 @@ impl IndexScheduler { task.finished_at = Some(finished_at); task.status = Status::Failed; task.error = Some(error.clone()); + task.details = task.details.map(|d| d.to_failed()); #[cfg(test)] self.maybe_fail(tests::FailureLocation::UpdatingTaskAfterProcessBatchFailure)?; @@ -977,7 +1018,7 @@ impl IndexScheduler { } } - self.processing_tasks.write().unwrap().stop_processing_at(finished_at); + self.processing_tasks.write().unwrap().stop_processing(); #[cfg(test)] self.maybe_fail(tests::FailureLocation::CommittingWtxn)?; @@ -1027,6 +1068,7 @@ mod tests { use std::time::Instant; use big_s::S; + use crossbeam::channel::RecvTimeoutError; use file_store::File; use meili_snap::snapshot; use meilisearch_types::milli::obkv_to_json; @@ -1038,6 +1080,7 @@ mod tests { use tempfile::TempDir; use time::Duration; use uuid::Uuid; + use Breakpoint::*; use super::*; use crate::insta_snapshot::{snapshot_bitmap, snapshot_index_scheduler}; @@ -1077,8 +1120,21 @@ mod tests { let index_scheduler = Self::new(options, sender, planned_failures).unwrap(); - let index_scheduler_handle = - IndexSchedulerHandle { _tempdir: tempdir, test_breakpoint_rcv: receiver }; + // To be 100% consistent between all test we're going to start the scheduler right now + // and ensure it's in the expected starting state. 
+ let breakpoint = match receiver.recv_timeout(std::time::Duration::from_secs(1)) { + Ok(b) => b, + Err(RecvTimeoutError::Timeout) => { + panic!("The scheduler seems to be waiting for a new task while your test is waiting for a breakpoint.") + } + Err(RecvTimeoutError::Disconnected) => panic!("The scheduler crashed."), + }; + assert_eq!(breakpoint, (Init, false)); + let index_scheduler_handle = IndexSchedulerHandle { + _tempdir: tempdir, + test_breakpoint_rcv: receiver, + last_breakpoint: breakpoint.0, + }; (index_scheduler, index_scheduler_handle) } @@ -1155,26 +1211,127 @@ mod tests { pub struct IndexSchedulerHandle { _tempdir: TempDir, test_breakpoint_rcv: crossbeam::channel::Receiver<(Breakpoint, bool)>, + last_breakpoint: Breakpoint, } impl IndexSchedulerHandle { - /// Wait until the provided breakpoint is reached. - fn wait_till(&self, breakpoint: Breakpoint) { - self.test_breakpoint_rcv.iter().find(|b| *b == (breakpoint, false)); + /// Advance the scheduler to the next tick. + /// Panic + /// * If the scheduler is waiting for a task to be registered. + /// * If the breakpoint queue is in a bad state. + #[track_caller] + fn advance(&mut self) -> Breakpoint { + let (breakpoint_1, b) = match self + .test_breakpoint_rcv + .recv_timeout(std::time::Duration::from_secs(5)) + { + Ok(b) => b, + Err(RecvTimeoutError::Timeout) => { + panic!("The scheduler seems to be waiting for a new task while your test is waiting for a breakpoint.") + } + Err(RecvTimeoutError::Disconnected) => panic!("The scheduler crashed."), + }; + // if we've already encountered a breakpoint we're supposed to be stuck on the false + // and we expect the same variant with the true to come now. + assert_eq!( + (breakpoint_1, b), + (self.last_breakpoint, true), + "Internal error in the test suite. 
In the previous iteration I got `({:?}, false)` and now I got `({:?}, {:?})`.", + self.last_breakpoint, + breakpoint_1, + b, + ); + + let (breakpoint_2, b) = match self + .test_breakpoint_rcv + .recv_timeout(std::time::Duration::from_secs(5)) + { + Ok(b) => b, + Err(RecvTimeoutError::Timeout) => { + panic!("The scheduler seems to be waiting for a new task while your test is waiting for a breakpoint.") + } + Err(RecvTimeoutError::Disconnected) => panic!("The scheduler crashed."), + }; + assert!(!b, "Found the breakpoint handle in a bad state. Check your test suite"); + + self.last_breakpoint = breakpoint_2; + + breakpoint_2 } - /// Wait for `n` tasks. - fn advance_n_batch(&self, n: usize) { - for _ in 0..n { - self.wait_till(Breakpoint::AfterProcessing); + /// Advance the scheduler until all the provided breakpoints are reached in order. + #[track_caller] + fn advance_till(&mut self, breakpoints: impl IntoIterator) { + for breakpoint in breakpoints { + let b = self.advance(); + assert_eq!( + b, breakpoint, + "Was expecting the breakpoint `{:?}` but instead got `{:?}`.", + breakpoint, b + ); } } + + /// Wait for `n` successful batches. + #[track_caller] + fn advance_n_successful_batches(&mut self, n: usize) { + for _ in 0..n { + self.advance_one_successful_batch(); + } + } + + /// Wait for `n` failed batches. + #[track_caller] + fn advance_n_failed_batches(&mut self, n: usize) { + for _ in 0..n { + self.advance_one_failed_batch(); + } + } + + // Wait for one successful batch. + #[track_caller] + fn advance_one_successful_batch(&mut self) { + self.advance_till([Start, BatchCreated]); + loop { + match self.advance() { + // the process_batch function can call itself recursively, thus we need to + // accept as may InsideProcessBatch as possible before moving to the next state. + InsideProcessBatch => (), + // the batch went successfully, we can stop the loop and go on with the next states. 
+ ProcessBatchSucceeded => break, + AbortedIndexation => panic!("The batch was aborted."), + ProcessBatchFailed => panic!("The batch failed."), + breakpoint => panic!("Encountered an impossible breakpoint `{:?}`, this is probably an issue with the test suite.", breakpoint), + } + } + + self.advance_till([AfterProcessing]); + } + + // Wait for one failed batch. + #[track_caller] + fn advance_one_failed_batch(&mut self) { + self.advance_till([Start, BatchCreated]); + loop { + match self.advance() { + // the process_batch function can call itself recursively, thus we need to + // accept as may InsideProcessBatch as possible before moving to the next state. + InsideProcessBatch => (), + // the batch went failed, we can stop the loop and go on with the next states. + ProcessBatchFailed => break, + ProcessBatchSucceeded => panic!("The batch succeeded. (and it wasn't supposed to sorry)"), + AbortedIndexation => panic!("The batch was aborted."), + breakpoint => panic!("Encountered an impossible breakpoint `{:?}`, this is probably an issue with the test suite.", breakpoint), + } + } + self.advance_till([AfterProcessing]); + } } #[test] fn register() { // In this test, the handle doesn't make any progress, we only check that the tasks are registered - let (index_scheduler, _handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut _handle) = IndexScheduler::test(true, vec![]); let kinds = [ index_creation_task("catto", "mouse"), @@ -1199,109 +1356,100 @@ mod tests { assert_eq!(task.kind.as_kind(), k); } - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "everything_is_succesfully_registered"); } #[test] fn insert_task_while_another_task_is_processing() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); index_scheduler.register(index_creation_task("index_a", "id")).unwrap(); - 
index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); - handle.wait_till(Breakpoint::BatchCreated); - index_scheduler.assert_internally_consistent(); + handle.advance_till([Start, BatchCreated]); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_batch_creation"); // while the task is processing can we register another task? index_scheduler.register(index_creation_task("index_b", "id")).unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); index_scheduler .register(KindWithContent::IndexDeletion { index_uid: S("index_a") }) .unwrap(); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_third_task"); } /// We send a lot of tasks but notify the tasks scheduler only once as /// we send them very fast, we must make sure that they are all processed. 
#[test] fn process_tasks_inserted_without_new_signal() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); index_scheduler .register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); index_scheduler .register(KindWithContent::IndexCreation { index_uid: S("cattos"), primary_key: None }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); index_scheduler .register(KindWithContent::IndexDeletion { index_uid: S("doggos") }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_third_task"); - handle.wait_till(Breakpoint::Start); - index_scheduler.assert_internally_consistent(); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "processed_the_first_task"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "processed_the_second_task"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_tasks_processed"); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "processed_the_third_task"); } #[test] fn process_tasks_without_autobatching() { - let (index_scheduler, handle) = IndexScheduler::test(false, vec![]); + let (index_scheduler, mut handle) = 
IndexScheduler::test(false, vec![]); index_scheduler .register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); index_scheduler .register(KindWithContent::DocumentClear { index_uid: S("doggos") }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); index_scheduler .register(KindWithContent::DocumentClear { index_uid: S("doggos") }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_third_task"); index_scheduler .register(KindWithContent::DocumentClear { index_uid: S("doggos") }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_fourth_task"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_tasks_processed"); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fourth"); } #[test] fn task_deletion_undeleteable() { - let 
(index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let (file0, documents_count0) = sample_documents(&index_scheduler, 0, 0); let (file1, documents_count1) = sample_documents(&index_scheduler, 1, 1); @@ -1329,21 +1477,16 @@ mod tests { tasks: RoaringBitmap::from_iter([0, 1]), }) .unwrap(); - index_scheduler.assert_internally_consistent(); - // again, no progress made at all, but one more task is registered snapshot!(snapshot_index_scheduler(&index_scheduler), name: "task_deletion_enqueued"); // now we create the first batch - handle.wait_till(Breakpoint::BatchCreated); - index_scheduler.assert_internally_consistent(); + handle.advance_till([Start, BatchCreated]); // the task deletion should now be "processing" snapshot!(snapshot_index_scheduler(&index_scheduler), name: "task_deletion_processing"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - + handle.advance_till([InsideProcessBatch, ProcessBatchSucceeded, AfterProcessing]); // after the task deletion is processed, no task should actually have been deleted, // because the tasks with ids 0 and 1 were still "enqueued", and thus undeleteable // the "task deletion" task should be marked as "succeeded" and, in its details, the @@ -1353,7 +1496,7 @@ mod tests { #[test] fn task_deletion_deleteable() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let (file0, documents_count0) = sample_documents(&index_scheduler, 0, 0); let (file1, documents_count1) = sample_documents(&index_scheduler, 1, 1); @@ -1369,12 +1512,9 @@ mod tests { let _ = index_scheduler.register(task).unwrap(); index_scheduler.assert_internally_consistent(); } - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "initial_tasks_enqueued"); - handle.wait_till(Breakpoint::AfterProcessing); - 
index_scheduler.assert_internally_consistent(); - + handle.advance_one_successful_batch(); // first addition of documents should be successful snapshot!(snapshot_index_scheduler(&index_scheduler), name: "initial_tasks_processed"); @@ -1385,17 +1525,15 @@ mod tests { tasks: RoaringBitmap::from_iter([0]), }) .unwrap(); - index_scheduler.assert_internally_consistent(); - - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_task_deletion"); + handle.advance_one_successful_batch(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "task_deletion_processed"); } #[test] fn task_deletion_delete_same_task_twice() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let (file0, documents_count0) = sample_documents(&index_scheduler, 0, 0); let (file1, documents_count1) = sample_documents(&index_scheduler, 1, 1); @@ -1411,12 +1549,9 @@ mod tests { let _ = index_scheduler.register(task).unwrap(); index_scheduler.assert_internally_consistent(); } - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "initial_tasks_enqueued"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - + handle.advance_one_successful_batch(); // first addition of documents should be successful snapshot!(snapshot_index_scheduler(&index_scheduler), name: "initial_tasks_processed"); @@ -1431,7 +1566,7 @@ mod tests { index_scheduler.assert_internally_consistent(); } for _ in 0..2 { - handle.wait_till(Breakpoint::AfterProcessing); + handle.advance_one_successful_batch(); index_scheduler.assert_internally_consistent(); } @@ -1440,7 +1575,7 @@ mod tests { #[test] fn document_addition() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, 
vec![]); let content = r#" { @@ -1463,25 +1598,18 @@ mod tests { allow_index_creation: true, }) .unwrap(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_register"); - index_scheduler.assert_internally_consistent(); + handle.advance_till([Start, BatchCreated]); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_the_batch_creation"); - snapshot!(snapshot_index_scheduler(&index_scheduler)); - - handle.wait_till(Breakpoint::BatchCreated); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); - - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_till([InsideProcessBatch, ProcessBatchSucceeded, AfterProcessing]); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "once_everything_is_processed"); } #[test] fn document_addition_and_index_deletion() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let content = r#" { @@ -1492,7 +1620,7 @@ mod tests { index_scheduler .register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap(); let documents_count = @@ -1509,25 +1637,22 @@ mod tests { allow_index_creation: true, }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); index_scheduler .register(KindWithContent::IndexDeletion { index_uid: S("doggos") }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_third_task"); 
- snapshot!(snapshot_index_scheduler(&index_scheduler)); - - handle.wait_till(Breakpoint::Start); // The index creation. - handle.wait_till(Breakpoint::Start); // before anything happens. - handle.wait_till(Breakpoint::Start); // after the execution of the two tasks in a single batch. - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_one_successful_batch(); // The index creation. + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "before_index_creation"); + handle.advance_one_successful_batch(); // after the execution of the two tasks in a single batch. + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "both_task_succeeded"); } #[test] fn do_not_batch_task_of_different_indexes() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let index_names = ["doggos", "cattos", "girafos"]; for name in index_names { @@ -1548,7 +1673,7 @@ mod tests { } for _ in 0..(index_names.len() * 2) { - handle.wait_till(Breakpoint::AfterProcessing); + handle.advance_one_successful_batch(); index_scheduler.assert_internally_consistent(); } @@ -1557,7 +1682,7 @@ mod tests { #[test] fn swap_indexes() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let to_enqueue = [ index_creation_task("a", "id"), @@ -1571,19 +1696,14 @@ mod tests { index_scheduler.assert_internally_consistent(); } - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler), name: 
"initial_tasks_processed"); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "create_a"); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "create_b"); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "create_c"); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "create_d"); index_scheduler .register(KindWithContent::IndexSwap { @@ -1593,31 +1713,28 @@ mod tests { ], }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_swap_registered"); index_scheduler .register(KindWithContent::IndexSwap { swaps: vec![IndexSwap { indexes: ("a".to_owned(), "c".to_owned()) }], }) .unwrap(); - index_scheduler.assert_internally_consistent(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "two_swaps_registered"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); + handle.advance_one_successful_batch(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_swap_processed"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); + handle.advance_one_successful_batch(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_swap_processed"); index_scheduler.register(KindWithContent::IndexSwap { swaps: vec![] }).unwrap(); - handle.wait_till(Breakpoint::AfterProcessing); + handle.advance_one_successful_batch(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_empty_swap_processed"); } #[test] fn swap_indexes_errors() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let to_enqueue = [ index_creation_task("a", "id"), @@ -1630,8 +1747,8 @@ mod tests { let _ = 
index_scheduler.register(task).unwrap(); index_scheduler.assert_internally_consistent(); } - handle.advance_n_batch(4); - index_scheduler.assert_internally_consistent(); + handle.advance_n_successful_batches(4); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_the_index_creation"); let first_snap = snapshot_index_scheduler(&index_scheduler); snapshot!(first_snap, name: "initial_tasks_processed"); @@ -1646,7 +1763,6 @@ mod tests { .unwrap_err(); snapshot!(format!("{err}"), @"Indexes must be declared only once during a swap. `a`, `b` were specified several times."); - index_scheduler.assert_internally_consistent(); let second_snap = snapshot_index_scheduler(&index_scheduler); assert_eq!(first_snap, second_snap); @@ -1660,15 +1776,14 @@ mod tests { ], }) .unwrap(); - handle.advance_n_batch(1); + handle.advance_one_failed_batch(); // Now the first swap should have an error message saying `e` and `f` do not exist - index_scheduler.assert_internally_consistent(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_swap_failed"); } #[test] fn document_addition_and_index_deletion_on_unexisting_index() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let content = r#" { @@ -1697,15 +1812,14 @@ mod tests { snapshot!(snapshot_index_scheduler(&index_scheduler)); - handle.wait_till(Breakpoint::Start); // before anything happens. - handle.wait_till(Breakpoint::Start); // after the execution of the two tasks in a single batch. 
+ handle.advance_n_successful_batches(1); snapshot!(snapshot_index_scheduler(&index_scheduler)); } #[test] fn cancel_enqueued_task() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let (file0, documents_count0) = sample_documents(&index_scheduler, 0, 0); file0.persist().unwrap(); @@ -1723,16 +1837,13 @@ mod tests { } snapshot!(snapshot_index_scheduler(&index_scheduler), name: "initial_tasks_enqueued"); - - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - + handle.advance_one_successful_batch(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "cancel_processed"); } #[test] fn cancel_succeeded_task() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let (file0, documents_count0) = sample_documents(&index_scheduler, 0, 0); file0.persist().unwrap(); @@ -1740,9 +1851,9 @@ mod tests { let _ = index_scheduler .register(replace_document_import_task("catto", None, 0, documents_count0)) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); - handle.wait_till(Breakpoint::AfterProcessing); + handle.advance_one_successful_batch(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "initial_task_processed"); index_scheduler @@ -1752,15 +1863,13 @@ mod tests { }) .unwrap(); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - + handle.advance_one_successful_batch(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "cancel_processed"); } #[test] fn cancel_processing_task() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let (file0, documents_count0) = 
sample_documents(&index_scheduler, 0, 0); file0.persist().unwrap(); @@ -1768,9 +1877,9 @@ mod tests { let _ = index_scheduler .register(replace_document_import_task("catto", None, 0, documents_count0)) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); - handle.wait_till(Breakpoint::InsideProcessBatch); + handle.advance_till([Start, BatchCreated, InsideProcessBatch]); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "initial_task_processing"); index_scheduler @@ -1779,22 +1888,20 @@ mod tests { tasks: RoaringBitmap::from_iter([0]), }) .unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "cancel_task_registered"); // Now we check that we can reach the AbortedIndexation error handling - handle.wait_till(Breakpoint::AbortedIndexation); - index_scheduler.assert_internally_consistent(); - - handle.wait_till(Breakpoint::AfterProcessing); - - index_scheduler.assert_internally_consistent(); + handle.advance_till([AbortedIndexation]); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "aborted_indexation"); + // handle.advance_till([Start, BatchCreated, BeforeProcessing, AfterProcessing]); + handle.advance_one_successful_batch(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "cancel_processed"); } #[test] fn cancel_mix_of_tasks() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let (file0, documents_count0) = sample_documents(&index_scheduler, 0, 0); file0.persist().unwrap(); @@ -1812,30 +1919,28 @@ mod tests { let _ = index_scheduler.register(task).unwrap(); index_scheduler.assert_internally_consistent(); } - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); + handle.advance_one_successful_batch(); 
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_task_processed"); - handle.wait_till(Breakpoint::InsideProcessBatch); + handle.advance_till([Start, BatchCreated, InsideProcessBatch]); index_scheduler .register(KindWithContent::TaskCancelation { query: "test_query".to_owned(), tasks: RoaringBitmap::from_iter([0, 1, 2]), }) .unwrap(); - index_scheduler.assert_internally_consistent(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "processing_second_task_cancel_enqueued"); - handle.wait_till(Breakpoint::AbortedIndexation); - index_scheduler.assert_internally_consistent(); + handle.advance_till([AbortedIndexation]); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "aborted_indexation"); - handle.wait_till(Breakpoint::AfterProcessing); + handle.advance_one_successful_batch(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "cancel_processed"); } #[test] fn test_document_replace() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); for i in 0..10 { let content = format!( @@ -1865,14 +1970,10 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - snapshot!(snapshot_index_scheduler(&index_scheduler)); - index_scheduler.assert_internally_consistent(); // everything should be batched together. - handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); - + handle.advance_n_successful_batches(1); snapshot!(snapshot_index_scheduler(&index_scheduler)); // has everything being pushed successfully in milli? 
@@ -1890,7 +1991,7 @@ mod tests { #[test] fn test_document_update() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); for i in 0..10 { let content = format!( @@ -1920,14 +2021,10 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - snapshot!(snapshot_index_scheduler(&index_scheduler)); - index_scheduler.assert_internally_consistent(); // everything should be batched together. - handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); - + handle.advance_n_successful_batches(1); snapshot!(snapshot_index_scheduler(&index_scheduler)); // has everything being pushed successfully in milli? @@ -1945,7 +2042,7 @@ mod tests { #[test] fn test_mixed_document_addition() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); for i in 0..10 { let method = if i % 2 == 0 { UpdateDocuments } else { ReplaceDocuments }; @@ -1977,17 +2074,14 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_10_tasks"); // Only half of the task should've been processed since we can't autobatch replace and update together. - handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); + handle.advance_n_successful_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "five_tasks_processed"); - snapshot!(snapshot_index_scheduler(&index_scheduler)); - - handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); + handle.advance_n_successful_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_tasks_processed"); // has everything being pushed successfully in milli? 
let index = index_scheduler.index("doggos").unwrap(); @@ -2004,7 +2098,7 @@ mod tests { #[test] fn test_document_replace_without_autobatching() { - let (index_scheduler, handle) = IndexScheduler::test(false, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(false, vec![]); for i in 0..10 { let content = format!( @@ -2034,20 +2128,15 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_10_tasks"); // Nothing should be batched thus half of the tasks are processed. - handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_n_successful_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "five_tasks_processed"); // Everything is processed. - handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_n_successful_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_tasks_processed"); // has everything being pushed successfully in milli? let index = index_scheduler.index("doggos").unwrap(); @@ -2064,7 +2153,7 @@ mod tests { #[test] fn test_document_update_without_autobatching() { - let (index_scheduler, handle) = IndexScheduler::test(false, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(false, vec![]); for i in 0..10 { let content = format!( @@ -2094,20 +2183,15 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_10_tasks"); // Nothing should be batched thus half of the tasks are processed. 
- handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_n_successful_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "five_tasks_processed"); // Everything is processed. - handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_n_successful_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_tasks_processed"); // has everything being pushed successfully in milli? let index = index_scheduler.index("doggos").unwrap(); @@ -2137,24 +2221,20 @@ mod tests { #[test] fn query_tasks_from_and_limit() { - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); let kind = index_creation_task("doggo", "bone"); let _task = index_scheduler.register(kind).unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); let kind = index_creation_task("whalo", "plankton"); let _task = index_scheduler.register(kind).unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task"); let kind = index_creation_task("catto", "his_own_vomit"); let _task = index_scheduler.register(kind).unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_third_task"); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "start"); - - handle.advance_n_batch(3); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "finished"); + handle.advance_n_successful_batches(3); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "processed_all_tasks"); let 
rtxn = index_scheduler.env.read_txn().unwrap(); let query = Query { limit: Some(0), ..Default::default() }; @@ -2197,7 +2277,7 @@ mod tests { fn query_tasks_simple() { let start_time = OffsetDateTime::now_utc(); - let (index_scheduler, handle) = + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![(3, FailureLocation::InsideProcessBatch)]); let kind = index_creation_task("catto", "mouse"); @@ -2209,22 +2289,22 @@ mod tests { snapshot!(snapshot_index_scheduler(&index_scheduler), name: "start"); - handle.wait_till(Breakpoint::BatchCreated); + handle.advance_till([Start, BatchCreated]); let rtxn = index_scheduler.env.read_txn().unwrap(); - let query = Query { status: Some(vec![Status::Processing]), ..Default::default() }; + let query = Query { statuses: Some(vec![Status::Processing]), ..Default::default() }; let tasks = index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[0,]"); // only the processing tasks in the first tick - let query = Query { status: Some(vec![Status::Enqueued]), ..Default::default() }; + let query = Query { statuses: Some(vec![Status::Enqueued]), ..Default::default() }; let tasks = index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); // only the enqueued tasks in the first tick let query = Query { - status: Some(vec![Status::Enqueued, Status::Processing]), + statuses: Some(vec![Status::Enqueued, Status::Processing]), ..Default::default() }; let tasks = @@ -2232,7 +2312,7 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]"); // both enqueued and processing tasks in the first tick let query = Query { - status: Some(vec![Status::Enqueued, Status::Processing]), + statuses: Some(vec![Status::Enqueued, Status::Processing]), after_started_at: Some(start_time), ..Default::default() }; @@ -2243,7 +2323,7 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[0,]"); let query = Query { 
- status: Some(vec![Status::Enqueued, Status::Processing]), + statuses: Some(vec![Status::Enqueued, Status::Processing]), before_started_at: Some(start_time), ..Default::default() }; @@ -2254,7 +2334,7 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[]"); let query = Query { - status: Some(vec![Status::Enqueued, Status::Processing]), + statuses: Some(vec![Status::Enqueued, Status::Processing]), after_started_at: Some(start_time), before_started_at: Some(start_time + Duration::minutes(1)), ..Default::default() @@ -2266,14 +2346,21 @@ mod tests { // which should exclude the enqueued tasks and include the only processing task snapshot!(snapshot_bitmap(&tasks), @"[0,]"); - handle.wait_till(Breakpoint::BatchCreated); + handle.advance_till([ + InsideProcessBatch, + InsideProcessBatch, + ProcessBatchSucceeded, + AfterProcessing, + Start, + BatchCreated, + ]); let rtxn = index_scheduler.env.read_txn().unwrap(); let second_start_time = OffsetDateTime::now_utc(); let query = Query { - status: Some(vec![Status::Succeeded, Status::Processing]), + statuses: Some(vec![Status::Succeeded, Status::Processing]), after_started_at: Some(start_time), before_started_at: Some(start_time + Duration::minutes(1)), ..Default::default() @@ -2286,7 +2373,7 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[0,1,]"); let query = Query { - status: Some(vec![Status::Succeeded, Status::Processing]), + statuses: Some(vec![Status::Succeeded, Status::Processing]), before_started_at: Some(start_time), ..Default::default() }; @@ -2297,7 +2384,7 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[]"); let query = Query { - status: Some(vec![Status::Enqueued, Status::Succeeded, Status::Processing]), + statuses: Some(vec![Status::Enqueued, Status::Succeeded, Status::Processing]), after_started_at: Some(second_start_time), before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() @@ -2310,7 +2397,14 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[]"); // now we make 
one more batch, the started_at field of the new tasks will be past `second_start_time` - handle.wait_till(Breakpoint::BatchCreated); + handle.advance_till([ + InsideProcessBatch, + InsideProcessBatch, + ProcessBatchSucceeded, + AfterProcessing, + Start, + BatchCreated, + ]); let rtxn = index_scheduler.env.read_txn().unwrap(); @@ -2320,7 +2414,7 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[2,]"); let query = Query { - status: Some(vec![Status::Enqueued, Status::Succeeded, Status::Processing]), + statuses: Some(vec![Status::Enqueued, Status::Succeeded, Status::Processing]), after_started_at: Some(second_start_time), before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() @@ -2331,7 +2425,7 @@ mod tests { // again only return the last task snapshot!(snapshot_bitmap(&tasks), @"[2,]"); - handle.wait_till(Breakpoint::AfterProcessing); + handle.advance_till([ProcessBatchFailed, AfterProcessing]); let rtxn = index_scheduler.read_txn().unwrap(); // now the last task should have failed @@ -2342,7 +2436,7 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[]"); let query = Query { - status: Some(vec![Status::Failed]), + statuses: Some(vec![Status::Failed]), after_started_at: Some(second_start_time), before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() @@ -2353,7 +2447,7 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[2,]"); let query = Query { - status: Some(vec![Status::Failed]), + statuses: Some(vec![Status::Failed]), after_started_at: Some(second_start_time), before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() @@ -2364,8 +2458,8 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[2,]"); let query = Query { - status: Some(vec![Status::Failed]), - uid: Some(vec![1]), + statuses: Some(vec![Status::Failed]), + uids: Some(vec![1]), after_started_at: Some(second_start_time), before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() 
@@ -2376,8 +2470,8 @@ mod tests { snapshot!(snapshot_bitmap(&tasks), @"[]"); let query = Query { - status: Some(vec![Status::Failed]), - uid: Some(vec![2]), + statuses: Some(vec![Status::Failed]), + uids: Some(vec![2]), after_started_at: Some(second_start_time), before_started_at: Some(second_start_time + Duration::minutes(1)), ..Default::default() @@ -2390,7 +2484,7 @@ mod tests { #[test] fn query_tasks_special_rules() { - let (index_scheduler, handle) = + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![(3, FailureLocation::InsideProcessBatch)]); let kind = index_creation_task("catto", "mouse"); @@ -2408,17 +2502,17 @@ mod tests { snapshot!(snapshot_index_scheduler(&index_scheduler), name: "start"); - handle.wait_till(Breakpoint::BatchCreated); + handle.advance_till([Start, BatchCreated]); let rtxn = index_scheduler.env.read_txn().unwrap(); - let query = Query { index_uid: Some(vec!["catto".to_owned()]), ..Default::default() }; + let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() }; let tasks = index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); // only the first task associated with catto is returned, the indexSwap tasks are excluded! 
snapshot!(snapshot_bitmap(&tasks), @"[0,]"); - let query = Query { index_uid: Some(vec!["catto".to_owned()]), ..Default::default() }; + let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() }; let tasks = index_scheduler .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()])) .unwrap(); @@ -2444,7 +2538,7 @@ mod tests { .unwrap(); // we asked for all the tasks, but we are only authorized to retrieve the doggo and catto tasks // -> all tasks except the swap of catto with whalo are returned - snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]"); + snapshot!(snapshot_bitmap(&tasks), @"[0,1,]"); let query = Query::default(); let tasks = @@ -2454,47 +2548,65 @@ mod tests { } #[test] - fn fail_in_create_batch_for_index_creation() { - let (index_scheduler, handle) = - IndexScheduler::test(true, vec![(1, FailureLocation::InsideCreateBatch)]); + fn query_tasks_canceled_by() { + let (index_scheduler, mut handle) = + IndexScheduler::test(true, vec![(3, FailureLocation::InsideProcessBatch)]); - let kinds = [index_creation_task("catto", "mouse")]; + let kind = index_creation_task("catto", "mouse"); + let _ = index_scheduler.register(kind).unwrap(); + let kind = index_creation_task("doggo", "sheep"); + let _ = index_scheduler.register(kind).unwrap(); + let kind = KindWithContent::IndexSwap { + swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()) }], + }; + let _task = index_scheduler.register(kind).unwrap(); - for kind in kinds { - let _task = index_scheduler.register(kind).unwrap(); - index_scheduler.assert_internally_consistent(); - } - handle.wait_till(Breakpoint::BatchCreated); + handle.advance_n_successful_batches(1); + let kind = KindWithContent::TaskCancelation { + query: "test_query".to_string(), + tasks: [0, 1, 2, 3].into_iter().collect(), + }; + let task_cancelation = index_scheduler.register(kind).unwrap(); + handle.advance_n_successful_batches(1); - // We skipped an iteration of `tick` to 
reach BatchCreated - assert_eq!(*index_scheduler.run_loop_iteration.read().unwrap(), 2); - // Otherwise nothing weird happened - index_scheduler.assert_internally_consistent(); - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "start"); + + let rtxn = index_scheduler.read_txn().unwrap(); + let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() }; + let tasks = + index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap(); + // 0 is not returned because it was not canceled, 3 is not returned because it is the uid of the + // taskCancelation itself + snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); + + let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() }; + let tasks = index_scheduler + .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_string()])) + .unwrap(); + // Return only 1 because the user is not authorized to see task 2 + snapshot!(snapshot_bitmap(&tasks), @"[1,]"); } #[test] fn fail_in_process_batch_for_index_creation() { - let (index_scheduler, handle) = + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![(1, FailureLocation::InsideProcessBatch)]); let kind = index_creation_task("catto", "mouse"); let _task = index_scheduler.register(kind).unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_register"); - handle.wait_till(Breakpoint::AfterProcessing); + handle.advance_one_failed_batch(); // Still in the first iteration assert_eq!(*index_scheduler.run_loop_iteration.read().unwrap(), 1); - // No matter what happens in process_batch, the index_scheduler should be internally consistent - index_scheduler.assert_internally_consistent(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "index_creation_failed"); } #[test] fn fail_in_process_batch_for_document_addition() { - let 
(index_scheduler, handle) = + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![(1, FailureLocation::InsideProcessBatch)]); let content = r#" @@ -2518,23 +2630,21 @@ mod tests { allow_index_creation: true, }) .unwrap(); - index_scheduler.assert_internally_consistent(); - handle.wait_till(Breakpoint::BatchCreated); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); + handle.advance_till([Start, BatchCreated]); snapshot!( snapshot_index_scheduler(&index_scheduler), name: "document_addition_batch_created" ); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - + handle.advance_till([ProcessBatchFailed, AfterProcessing]); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "document_addition_failed"); } #[test] fn fail_in_update_task_after_process_batch_success_for_document_addition() { - let (index_scheduler, handle) = IndexScheduler::test( + let (index_scheduler, mut handle) = IndexScheduler::test( true, vec![(1, FailureLocation::UpdatingTaskAfterProcessBatchSuccess { task_uid: 0 })], ); @@ -2560,22 +2670,30 @@ mod tests { allow_index_creation: true, }) .unwrap(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); - // This tests that the index scheduler pauses for one second when an irrecoverable failure occurs - let start_time = Instant::now(); - - index_scheduler.assert_internally_consistent(); - handle.wait_till(Breakpoint::Start); - - index_scheduler.assert_internally_consistent(); + handle.advance_till([Start]); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "document_addition_succeeded_but_index_scheduler_not_updated"); - handle.wait_till(Breakpoint::AfterProcessing); - index_scheduler.assert_internally_consistent(); - snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_iteration"); + handle.advance_till([BatchCreated, InsideProcessBatch, ProcessBatchSucceeded]); + 
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_batch_succeeded"); - let test_duration = start_time.elapsed(); - assert!(test_duration.as_millis() > 1000); + // At this point the next time the scheduler will try to progress it should encounter + // a critical failure and have to wait for 1s before retrying anything. + + let before_failure = Instant::now(); + handle.advance_till([Start]); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_failing_to_commit"); + let failure_duration = before_failure.elapsed(); + assert!(failure_duration.as_millis() >= 1000); + + handle.advance_till([ + BatchCreated, + InsideProcessBatch, + ProcessBatchSucceeded, + AfterProcessing, + ]); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "task_successfully_processed"); } #[test] @@ -2584,7 +2702,7 @@ mod tests { // the right to create an index while there is no index currently. // Thus, everything should be batched together and a IndexDoesNotExists // error should be throwed. - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); for i in 0..10 { let content = format!( @@ -2614,14 +2732,17 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_10_tasks"); // Everything should be batched together. - handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_till([ + Start, + BatchCreated, + InsideProcessBatch, + ProcessBatchFailed, + AfterProcessing, + ]); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_processing_the_10_tasks"); // The index should not exists. 
snapshot!(format!("{}", index_scheduler.index("doggos").map(|_| ()).unwrap_err()), @"Index `doggos` not found."); @@ -2633,7 +2754,7 @@ mod tests { // the right to create an index while there is no index currently. // Since the autobatching is disabled, every tasks should be processed // sequentially and throw an IndexDoesNotExists. - let (index_scheduler, handle) = IndexScheduler::test(false, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(false, vec![]); for i in 0..10 { let content = format!( @@ -2663,20 +2784,15 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_10_tasks"); // Nothing should be batched thus half of the tasks are processed. - handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_n_failed_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "five_tasks_processed"); // Everything is processed. - handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_n_failed_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_tasks_processed"); // The index should not exists. snapshot!(format!("{}", index_scheduler.index("doggos").map(|_| ()).unwrap_err()), @"Index `doggos` not found."); @@ -2688,15 +2804,15 @@ mod tests { // the right to create an index while there is already an index. // Thus, everything should be batched together and no error should be // throwed. - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); // Create the index. 
index_scheduler .register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None }) .unwrap(); - index_scheduler.assert_internally_consistent(); - handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "processed_the_first_task"); for i in 0..10 { let content = format!( @@ -2726,14 +2842,11 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_10_tasks"); // Everything should be batched together. - handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_n_successful_batches(1); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_processing_the_10_tasks"); // Has everything being pushed successfully in milli? let index = index_scheduler.index("doggos").unwrap(); @@ -2754,14 +2867,15 @@ mod tests { // the right to create an index while there is no index currently. // Since the autobatching is disabled, every tasks should be processed // sequentially and throw an IndexDoesNotExists. - let (index_scheduler, handle) = IndexScheduler::test(false, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(false, vec![]); // Create the index. 
index_scheduler .register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None }) .unwrap(); - handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "processed_the_first_task"); for i in 0..10 { let content = format!( @@ -2791,20 +2905,15 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_10_tasks"); // Nothing should be batched thus half of the tasks are processed. - handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_n_successful_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "five_tasks_processed"); // Everything is processed. - handle.advance_n_batch(5); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_n_successful_batches(5); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_tasks_processed"); // Has everything being pushed successfully in milli? let index = index_scheduler.index("doggos").unwrap(); @@ -2825,14 +2934,15 @@ mod tests { // - The index already exists // - The first document addition don't have the right to create an index // can it batch with the other one? - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); // Create the index. 
index_scheduler .register(KindWithContent::IndexCreation { index_uid: S("doggos"), primary_key: None }) .unwrap(); - handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "processed_the_first_task"); for i in 0..10 { let content = format!( @@ -2863,14 +2973,11 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_10_tasks"); // Everything should be batched together. - handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_tasks_processed"); // Has everything being pushed successfully in milli? let index = index_scheduler.index("doggos").unwrap(); @@ -2892,7 +2999,7 @@ mod tests { // - The first document addition don't have the right to create an index // - The second do. They should not batch together. // - The second should batch with everything else as it's going to create an index. - let (index_scheduler, handle) = IndexScheduler::test(true, vec![]); + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]); for i in 0..10 { let content = format!( @@ -2923,20 +3030,15 @@ mod tests { .unwrap(); index_scheduler.assert_internally_consistent(); } - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_10_tasks"); // A first batch should be processed with only the first documentAddition that's going to fail. 
- handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_one_failed_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "only_first_task_failed"); // Everything else should be batched together. - handle.advance_n_batch(1); - index_scheduler.assert_internally_consistent(); - - snapshot!(snapshot_index_scheduler(&index_scheduler)); + handle.advance_one_successful_batch(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_tasks_processed"); // Has everything being pushed successfully in milli? let index = index_scheduler.index("doggos").unwrap(); @@ -2953,20 +3055,19 @@ mod tests { #[test] fn panic_in_process_batch_for_index_creation() { - let (index_scheduler, handle) = + let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![(1, FailureLocation::PanicInsideProcessBatch)]); let kind = index_creation_task("catto", "mouse"); let _task = index_scheduler.register(kind).unwrap(); - index_scheduler.assert_internally_consistent(); + snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task"); - handle.wait_till(Breakpoint::AfterProcessing); + handle.advance_till([Start, BatchCreated, ProcessBatchFailed, AfterProcessing]); // Still in the first iteration assert_eq!(*index_scheduler.run_loop_iteration.read().unwrap(), 1); // No matter what happens in process_batch, the index_scheduler should be internally consistent - index_scheduler.assert_internally_consistent(); snapshot!(snapshot_index_scheduler(&index_scheduler), name: "index_creation_failed"); } } diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_enqueued_task/cancel_processed.snap b/index-scheduler/src/snapshots/lib.rs/cancel_enqueued_task/cancel_processed.snap index 659a325c5..a06b82c74 100644 --- a/index-scheduler/src/snapshots/lib.rs/cancel_enqueued_task/cancel_processed.snap +++ 
b/index-scheduler/src/snapshots/lib.rs/cancel_enqueued_task/cancel_processed.snap @@ -1,13 +1,14 @@ --- source: index-scheduler/src/lib.rs +assertion_line: 1755 --- ### Autobatching Enabled = true ### Processing Tasks: [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: canceled, canceled_by: 1, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(1), original_query: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} +0 {uid: 0, status: canceled, canceled_by: 1, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(1), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} ---------------------------------------------------------------------- ### Status: enqueued [] @@ -23,6 +24,10 @@ catto [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: +1 [0,] + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_enqueued_task/initial_tasks_enqueued.snap b/index-scheduler/src/snapshots/lib.rs/cancel_enqueued_task/initial_tasks_enqueued.snap index 6b44b0acc..743e74a14 100644 --- 
a/index-scheduler/src/snapshots/lib.rs/cancel_enqueued_task/initial_tasks_enqueued.snap +++ b/index-scheduler/src/snapshots/lib.rs/cancel_enqueued_task/initial_tasks_enqueued.snap @@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_query: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} +1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} ---------------------------------------------------------------------- ### Status: enqueued [0,1,] @@ -21,6 +21,9 @@ catto [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/aborted_indexation.snap b/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/aborted_indexation.snap new file mode 100644 index 000000000..5c6078b51 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/aborted_indexation.snap @@ -0,0 +1,50 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[1,] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 
1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "beavero", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "wolfo", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { matched_tasks: 3, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,3,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,] +"taskCancelation" [3,] +---------------------------------------------------------------------- +### Index Tasks: +beavero [1,] +catto [0,] +wolfo [2,] +---------------------------------------------------------------------- +### Index Mapper: +["beavero", "catto"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] 
+---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/cancel_processed.snap b/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/cancel_processed.snap index e398ab205..f67fff59f 100644 --- a/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/cancel_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/cancel_processed.snap @@ -1,5 +1,6 @@ --- source: index-scheduler/src/lib.rs +assertion_line: 1859 --- ### Autobatching Enabled = true ### Processing Tasks: @@ -7,9 +8,9 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: canceled, canceled_by: 3, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "beavero", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: canceled, canceled_by: 3, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "wolfo", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: succeeded, details: { matched_tasks: 3, canceled_tasks: Some(2), original_query: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }} +1 
{uid: 1, status: canceled, canceled_by: 3, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "beavero", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: canceled, canceled_by: 3, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "wolfo", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: succeeded, details: { matched_tasks: 3, canceled_tasks: Some(2), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }} ---------------------------------------------------------------------- ### Status: enqueued [] @@ -27,6 +28,10 @@ wolfo [2,] ---------------------------------------------------------------------- ### Index Mapper: ["beavero", "catto"] +---------------------------------------------------------------------- +### Canceled By: +3 [1,2,] + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] @@ -36,6 +41,7 @@ wolfo [2,] ---------------------------------------------------------------------- ### Started At: [timestamp] [0,] +[timestamp] [1,] [timestamp] [3,] ---------------------------------------------------------------------- ### Finished At: diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/first_task_processed.snap b/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/first_task_processed.snap index 8e3ef1692..36d34ff93 100644 --- a/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/first_task_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/first_task_processed.snap @@ -24,6 +24,9 @@ wolfo [2,] 
---------------------------------------------------------------------- ### Index Mapper: ["catto"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/processing_second_task_cancel_enqueued.snap b/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/processing_second_task_cancel_enqueued.snap index 219ea9968..30da295f9 100644 --- a/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/processing_second_task_cancel_enqueued.snap +++ b/index-scheduler/src/snapshots/lib.rs/cancel_mix_of_tasks/processing_second_task_cancel_enqueued.snap @@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "beavero", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "wolfo", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: enqueued, details: { matched_tasks: 3, canceled_tasks: None, original_query: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }} +3 {uid: 3, status: enqueued, details: { matched_tasks: 3, canceled_tasks: None, original_filter: "test_query" }, 
kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }} ---------------------------------------------------------------------- ### Status: enqueued [1,2,3,] @@ -26,6 +26,9 @@ wolfo [2,] ---------------------------------------------------------------------- ### Index Mapper: ["catto"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/aborted_indexation.snap b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/aborted_indexation.snap new file mode 100644 index 000000000..6074673e3 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/aborted_indexation.snap @@ -0,0 +1,40 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[0,] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +"taskCancelation" [1,] +---------------------------------------------------------------------- +### Index Tasks: +catto [0,] +---------------------------------------------------------------------- +### Index Mapper: +["catto"] 
+---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/cancel_processed.snap b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/cancel_processed.snap index f0706934b..f2035c7fe 100644 --- a/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/cancel_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/cancel_processed.snap @@ -1,13 +1,14 @@ --- source: index-scheduler/src/lib.rs +assertion_line: 1818 --- ### Autobatching Enabled = true ### Processing Tasks: [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: canceled, canceled_by: 1, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(1), original_query: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} +0 {uid: 0, status: canceled, canceled_by: 1, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, 
allow_index_creation: true }} +1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(1), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} ---------------------------------------------------------------------- ### Status: enqueued [] @@ -23,12 +24,17 @@ catto [0,] ---------------------------------------------------------------------- ### Index Mapper: ["catto"] +---------------------------------------------------------------------- +### Canceled By: +1 [0,] + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] [timestamp] [1,] ---------------------------------------------------------------------- ### Started At: +[timestamp] [0,] [timestamp] [1,] ---------------------------------------------------------------------- ### Finished At: diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/cancel_task_registered.snap b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/cancel_task_registered.snap new file mode 100644 index 000000000..061f334c8 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/cancel_task_registered.snap @@ -0,0 +1,40 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[0,] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} +---------------------------------------------------------------------- +### 
Status: +enqueued [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +"taskCancelation" [1,] +---------------------------------------------------------------------- +### Index Tasks: +catto [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/initial_task_processing.snap b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/initial_task_processing.snap index 9bcfbd2b3..905cec451 100644 --- a/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/initial_task_processing.snap +++ b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/initial_task_processing.snap @@ -19,6 +19,9 @@ catto [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/registered_the_first_task.snap new file mode 100644 index 000000000..d454b501e --- /dev/null +++ 
b/index-scheduler/src/snapshots/lib.rs/cancel_processing_task/registered_the_first_task.snap @@ -0,0 +1,37 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +---------------------------------------------------------------------- +### Index Tasks: +catto [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/cancel_processed.snap b/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/cancel_processed.snap index 7f071b2f2..b3842cc12 100644 --- a/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/cancel_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/cancel_processed.snap @@ -7,7 +7,7 @@ source: 
index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(0), original_query: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} +1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(0), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }} ---------------------------------------------------------------------- ### Status: enqueued [] @@ -22,6 +22,10 @@ catto [0,] ---------------------------------------------------------------------- ### Index Mapper: ["catto"] +---------------------------------------------------------------------- +### Canceled By: +1 [] + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/initial_task_processed.snap b/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/initial_task_processed.snap index d16658b72..e52a80fae 100644 --- a/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/initial_task_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/initial_task_processed.snap @@ -20,6 +20,9 @@ catto [0,] ---------------------------------------------------------------------- ### Index Mapper: ["catto"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git 
a/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/registered_the_first_task.snap new file mode 100644 index 000000000..d454b501e --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/cancel_succeeded_task/registered_the_first_task.snap @@ -0,0 +1,37 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +---------------------------------------------------------------------- +### Index Tasks: +catto [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/do_not_batch_task_of_different_indexes/all_tasks_processed.snap 
b/index-scheduler/src/snapshots/lib.rs/do_not_batch_task_of_different_indexes/all_tasks_processed.snap index 8541c7c1b..f9195857a 100644 --- a/index-scheduler/src/snapshots/lib.rs/do_not_batch_task_of_different_indexes/all_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/do_not_batch_task_of_different_indexes/all_tasks_processed.snap @@ -28,6 +28,9 @@ girafos [2,5,] ---------------------------------------------------------------------- ### Index Mapper: ["cattos", "doggos", "girafos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition/1.snap b/index-scheduler/src/snapshots/lib.rs/document_addition/after_register.snap similarity index 93% rename from index-scheduler/src/snapshots/lib.rs/document_addition/1.snap rename to index-scheduler/src/snapshots/lib.rs/document_addition/after_register.snap index 6abb00f81..3e654a0e2 100644 --- a/index-scheduler/src/snapshots/lib.rs/document_addition/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/document_addition/after_register.snap @@ -19,6 +19,9 @@ doggos [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition/2.snap b/index-scheduler/src/snapshots/lib.rs/document_addition/after_the_batch_creation.snap similarity index 93% rename from index-scheduler/src/snapshots/lib.rs/document_addition/2.snap rename to index-scheduler/src/snapshots/lib.rs/document_addition/after_the_batch_creation.snap index b9e745cf0..10291b206 100644 --- a/index-scheduler/src/snapshots/lib.rs/document_addition/2.snap +++ 
b/index-scheduler/src/snapshots/lib.rs/document_addition/after_the_batch_creation.snap @@ -19,6 +19,9 @@ doggos [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition/3.snap b/index-scheduler/src/snapshots/lib.rs/document_addition/once_everything_is_processed.snap similarity index 93% rename from index-scheduler/src/snapshots/lib.rs/document_addition/3.snap rename to index-scheduler/src/snapshots/lib.rs/document_addition/once_everything_is_processed.snap index 2bcc9368d..6079a4317 100644 --- a/index-scheduler/src/snapshots/lib.rs/document_addition/3.snap +++ b/index-scheduler/src/snapshots/lib.rs/document_addition/once_everything_is_processed.snap @@ -20,6 +20,9 @@ doggos [0,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/before_index_creation.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/before_index_creation.snap new file mode 100644 index 000000000..379e90120 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/before_index_creation.snap @@ -0,0 +1,46 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", 
primary_key: None }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [1,] +"indexCreation" [0,] +"indexDeletion" [2,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/2.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/both_task_succeeded.snap similarity index 95% rename from index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/2.snap rename to index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/both_task_succeeded.snap index 
6954d37e0..2ff82bfd2 100644 --- a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/both_task_succeeded.snap @@ -24,6 +24,9 @@ doggos [0,1,2,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_first_task.snap new file mode 100644 index 000000000..e23cd648f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: 
+---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/1.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_second_task.snap similarity index 91% rename from index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/1.snap rename to index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_second_task.snap index 448988c8c..86674ccd0 100644 --- a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_second_task.snap @@ -8,26 +8,26 @@ source: index-scheduler/src/lib.rs ### All Tasks: 0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: enqueued, kind: IndexDeletion { index_uid: "doggos" }} ---------------------------------------------------------------------- ### Status: -enqueued [0,1,2,] +enqueued [0,1,] ---------------------------------------------------------------------- ### Kind: "documentAdditionOrUpdate" [1,] "indexCreation" [0,] -"indexDeletion" [2,] ---------------------------------------------------------------------- ### Index Tasks: -doggos [0,1,2,] +doggos [0,1,] ---------------------------------------------------------------------- ### Index Mapper: [] 
+---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] [timestamp] [1,] -[timestamp] [2,] ---------------------------------------------------------------------- ### Started At: ---------------------------------------------------------------------- diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_third_task.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_third_task.snap new file mode 100644 index 000000000..f4d3a8190 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion/registered_the_third_task.snap @@ -0,0 +1,43 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [1,] +"indexCreation" [0,] +"indexDeletion" [2,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: 
+[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion_on_unexisting_index/1.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion_on_unexisting_index/1.snap index 3f921934d..e0813f109 100644 --- a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion_on_unexisting_index/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion_on_unexisting_index/1.snap @@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: enqueued, kind: IndexDeletion { index_uid: "doggos" }} +1 {uid: 1, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }} ---------------------------------------------------------------------- ### Status: enqueued [0,1,] @@ -21,6 +21,9 @@ doggos [0,1,] ---------------------------------------------------------------------- ### Index Mapper: [] 
+---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion_on_unexisting_index/2.snap b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion_on_unexisting_index/2.snap index 2abd3e4cf..f8586b7b8 100644 --- a/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion_on_unexisting_index/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/document_addition_and_index_deletion_on_unexisting_index/2.snap @@ -22,6 +22,9 @@ doggos [0,1,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/document_addition_batch_created.snap b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/document_addition_batch_created.snap index b9e745cf0..10291b206 100644 --- a/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/document_addition_batch_created.snap +++ b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/document_addition_batch_created.snap @@ -19,6 +19,9 @@ doggos [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/document_addition_failed.snap 
b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/document_addition_failed.snap index 750edbbf2..c1bfd7db9 100644 --- a/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/document_addition_failed.snap +++ b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/document_addition_failed.snap @@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Corrupted task queue.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Corrupted task queue.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} ---------------------------------------------------------------------- ### Status: enqueued [] @@ -20,6 +20,9 @@ doggos [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git 
a/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/registered_the_first_task.snap new file mode 100644 index 000000000..3e654a0e2 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_addition/registered_the_first_task.snap @@ -0,0 +1,37 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git 
a/index-scheduler/src/snapshots/lib.rs/fail_in_create_batch_for_index_creation/1.snap b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_index_creation/after_register.snap similarity index 92% rename from index-scheduler/src/snapshots/lib.rs/fail_in_create_batch_for_index_creation/1.snap rename to index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_index_creation/after_register.snap index b78d63444..63a2d606e 100644 --- a/index-scheduler/src/snapshots/lib.rs/fail_in_create_batch_for_index_creation/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_index_creation/after_register.snap @@ -3,7 +3,7 @@ source: index-scheduler/src/lib.rs --- ### Autobatching Enabled = true ### Processing Tasks: -[0,] +[] ---------------------------------------------------------------------- ### All Tasks: 0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }} @@ -19,6 +19,9 @@ catto [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_index_creation/index_creation_failed.snap b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_index_creation/index_creation_failed.snap index 11bfb09c1..252ae082e 100644 --- a/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_index_creation/index_creation_failed.snap +++ b/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_index_creation/index_creation_failed.snap @@ -20,6 +20,9 @@ catto [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + 
---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/after_batch_succeeded.snap b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/after_batch_succeeded.snap new file mode 100644 index 000000000..bdda4e086 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/after_batch_succeeded.snap @@ -0,0 +1,37 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[0,] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: 
+00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/after_failing_to_commit.snap b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/after_failing_to_commit.snap new file mode 100644 index 000000000..bdda4e086 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/after_failing_to_commit.snap @@ -0,0 +1,37 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[0,] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File 
Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/document_addition_succeeded_but_index_scheduler_not_updated.snap b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/document_addition_succeeded_but_index_scheduler_not_updated.snap index 6abb00f81..3e654a0e2 100644 --- a/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/document_addition_succeeded_but_index_scheduler_not_updated.snap +++ b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/document_addition_succeeded_but_index_scheduler_not_updated.snap @@ -19,6 +19,9 @@ doggos [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/registered_the_first_task.snap new file mode 100644 index 000000000..3e654a0e2 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/registered_the_first_task.snap @@ -0,0 +1,37 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: 
DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/second_iteration.snap b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/task_successfully_processed.snap similarity index 93% rename from index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/second_iteration.snap rename to index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/task_successfully_processed.snap index 2bcc9368d..6079a4317 100644 --- a/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/second_iteration.snap +++ 
b/index-scheduler/src/snapshots/lib.rs/fail_in_update_task_after_process_batch_success_for_document_addition/task_successfully_processed.snap @@ -20,6 +20,9 @@ doggos [0,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/after_batch_creation.snap b/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/after_batch_creation.snap new file mode 100644 index 000000000..c75a3b87e --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/after_batch_creation.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[0,] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +index_a [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### 
Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_first_task.snap new file mode 100644 index 000000000..656b06ad3 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +index_a [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/1.snap 
b/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_second_task.snap similarity index 89% rename from index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/1.snap rename to index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_second_task.snap index ddac65249..0cf82317b 100644 --- a/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_second_task.snap @@ -8,26 +8,26 @@ source: index-scheduler/src/lib.rs ### All Tasks: 0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }} 1 {uid: 1, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_b", primary_key: Some("id") }} -2 {uid: 2, status: enqueued, kind: IndexDeletion { index_uid: "index_a" }} ---------------------------------------------------------------------- ### Status: -enqueued [0,1,2,] +enqueued [0,1,] ---------------------------------------------------------------------- ### Kind: "indexCreation" [0,1,] -"indexDeletion" [2,] ---------------------------------------------------------------------- ### Index Tasks: -index_a [0,2,] +index_a [0,] index_b [1,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] [timestamp] [1,] -[timestamp] [2,] ---------------------------------------------------------------------- ### Started At: ---------------------------------------------------------------------- diff --git 
a/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_third_task.snap b/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_third_task.snap new file mode 100644 index 000000000..8b73d12c2 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/insert_task_while_another_task_is_processing/registered_the_third_task.snap @@ -0,0 +1,42 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[0,] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }} +1 {uid: 1, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_b", primary_key: Some("id") }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "index_a" }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,] +"indexDeletion" [2,] +---------------------------------------------------------------------- +### Index Tasks: +index_a [0,2,] +index_b [1,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + 
+---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/panic_in_process_batch_for_index_creation/index_creation_failed.snap b/index-scheduler/src/snapshots/lib.rs/panic_in_process_batch_for_index_creation/index_creation_failed.snap index 211c67326..60d8c4cdb 100644 --- a/index-scheduler/src/snapshots/lib.rs/panic_in_process_batch_for_index_creation/index_creation_failed.snap +++ b/index-scheduler/src/snapshots/lib.rs/panic_in_process_batch_for_index_creation/index_creation_failed.snap @@ -20,6 +20,9 @@ catto [0,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/panic_in_process_batch_for_index_creation/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/panic_in_process_batch_for_index_creation/registered_the_first_task.snap new file mode 100644 index 000000000..63a2d606e --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/panic_in_process_batch_for_index_creation/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +catto [0,] 
+---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_first_task.snap new file mode 100644 index 000000000..3a4705635 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_first_task.snap @@ -0,0 +1,45 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,] +"indexDeletion" [2,] +---------------------------------------------------------------------- +### Index Tasks: +cattos [1,] +doggos [0,2,] 
+---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_second_task.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_second_task.snap new file mode 100644 index 000000000..979ec8af6 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_second_task.snap @@ -0,0 +1,47 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [2,] +succeeded [0,1,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,] +"indexDeletion" [2,] 
+---------------------------------------------------------------------- +### Index Tasks: +cattos [1,] +doggos [0,2,] +---------------------------------------------------------------------- +### Index Mapper: +["cattos", "doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/all_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_third_task.snap similarity index 94% rename from index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/all_tasks_processed.snap rename to index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_third_task.snap index c75964581..c7190dd8b 100644 --- a/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/all_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/processed_the_third_task.snap @@ -24,6 +24,9 @@ doggos [0,2,] ---------------------------------------------------------------------- ### Index Mapper: ["cattos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git 
a/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_first_task.snap new file mode 100644 index 000000000..e23cd648f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_second_task.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_second_task.snap new file mode 100644 index 000000000..82cc517cb --- /dev/null +++ 
b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_second_task.snap @@ -0,0 +1,39 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,] +---------------------------------------------------------------------- +### Index Tasks: +cattos [1,] +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_third_task.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_third_task.snap new file mode 100644 index 000000000..76a6b3f08 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_inserted_without_new_signal/registered_the_third_task.snap @@ -0,0 +1,42 @@ +--- 
+source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,] +"indexDeletion" [2,] +---------------------------------------------------------------------- +### Index Tasks: +cattos [1,] +doggos [0,2,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/first.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/first.snap new file mode 100644 index 000000000..fa09eba28 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/first.snap @@ -0,0 +1,46 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching 
Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +3 {uid: 3, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,3,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentDeletion" [1,2,3,] +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/all_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/fourth.snap similarity index 95% rename from 
index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/all_tasks_processed.snap rename to index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/fourth.snap index 44ce75ebb..e52c36718 100644 --- a/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/all_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/fourth.snap @@ -24,6 +24,9 @@ doggos [0,1,2,3,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_first_task.snap new file mode 100644 index 000000000..52866bed6 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + 
+---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_fourth_task.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_fourth_task.snap new file mode 100644 index 000000000..6ac8aa79f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_fourth_task.snap @@ -0,0 +1,43 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +3 {uid: 3, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,3,] +---------------------------------------------------------------------- +### Kind: +"documentDeletion" [1,2,3,] +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,] +---------------------------------------------------------------------- +### Index Mapper: +[] 
+---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_second_task.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_second_task.snap new file mode 100644 index 000000000..32d32daaf --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_second_task.snap @@ -0,0 +1,39 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentDeletion" [1,] +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + 
+---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_third_task.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_third_task.snap new file mode 100644 index 000000000..75ceef14d --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/registered_the_third_task.snap @@ -0,0 +1,41 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,] +---------------------------------------------------------------------- +### Kind: +"documentDeletion" [1,2,] +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + 
+---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/second.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/second.snap new file mode 100644 index 000000000..4b1577aa6 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/second.snap @@ -0,0 +1,48 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: succeeded, details: { deleted_documents: Some(0) }, kind: DocumentClear { index_uid: "doggos" }} +2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +3 {uid: 3, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [2,3,] +succeeded [0,1,] +---------------------------------------------------------------------- +### Kind: +"documentDeletion" [1,2,3,] +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] 
+---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/third.snap b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/third.snap new file mode 100644 index 000000000..2ac3b141f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/process_tasks_without_autobatching/third.snap @@ -0,0 +1,50 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: succeeded, details: { deleted_documents: Some(0) }, kind: DocumentClear { index_uid: "doggos" }} +2 {uid: 2, status: succeeded, details: { deleted_documents: Some(0) }, kind: DocumentClear { index_uid: "doggos" }} +3 {uid: 3, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }} +---------------------------------------------------------------------- +### Status: +enqueued [3,] +succeeded [0,1,2,] +---------------------------------------------------------------------- +### Kind: +"documentDeletion" [1,2,3,] +"indexCreation" [0,] 
+---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/query_tasks_canceled_by/start.snap b/index-scheduler/src/snapshots/lib.rs/query_tasks_canceled_by/start.snap new file mode 100644 index 000000000..624606ba9 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/query_tasks_canceled_by/start.snap @@ -0,0 +1,53 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }} +1 {uid: 1, status: canceled, canceled_by: 3, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }} +2 {uid: 2, status: canceled, canceled_by: 3, details: { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }} +3 {uid: 3, status: succeeded, details: { matched_tasks: 3, canceled_tasks: Some(0), 
original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [0,3,] +canceled [1,2,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,] +"indexSwap" [2,] +"taskCancelation" [3,] +---------------------------------------------------------------------- +### Index Tasks: +catto [0,2,] +doggo [1,2,] +---------------------------------------------------------------------- +### Index Mapper: +["catto"] +---------------------------------------------------------------------- +### Canceled By: +3 [1,2,] + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/finished.snap b/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/processed_all_tasks.snap similarity index 94% rename from index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/finished.snap rename to index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/processed_all_tasks.snap index dff6707f4..694bbff26 100644 --- a/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/finished.snap +++ b/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/processed_all_tasks.snap @@ -24,6 +24,9 @@ whalo [1,] 
---------------------------------------------------------------------- ### Index Mapper: ["catto", "doggo", "whalo"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_first_task.snap new file mode 100644 index 000000000..c1a0899cd --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggo [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git 
a/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_second_task.snap b/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_second_task.snap new file mode 100644 index 000000000..6daa6bce2 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_second_task.snap @@ -0,0 +1,39 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }} +1 {uid: 1, status: enqueued, details: { primary_key: Some("plankton") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,] +---------------------------------------------------------------------- +### Index Tasks: +doggo [0,] +whalo [1,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/start.snap 
b/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_third_task.snap similarity index 94% rename from index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/start.snap rename to index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_third_task.snap index 2717569f4..8427679e7 100644 --- a/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/start.snap +++ b/index-scheduler/src/snapshots/lib.rs/query_tasks_from_and_limit/registered_the_third_task.snap @@ -23,6 +23,9 @@ whalo [1,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/query_tasks_simple/end.snap b/index-scheduler/src/snapshots/lib.rs/query_tasks_simple/end.snap index 6b7ec2a2a..65838db64 100644 --- a/index-scheduler/src/snapshots/lib.rs/query_tasks_simple/end.snap +++ b/index-scheduler/src/snapshots/lib.rs/query_tasks_simple/end.snap @@ -25,6 +25,9 @@ whalo [2,] ---------------------------------------------------------------------- ### Index Mapper: ["catto", "doggo"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/query_tasks_simple/start.snap b/index-scheduler/src/snapshots/lib.rs/query_tasks_simple/start.snap index 60c8de558..aed5aed8c 100644 --- a/index-scheduler/src/snapshots/lib.rs/query_tasks_simple/start.snap +++ b/index-scheduler/src/snapshots/lib.rs/query_tasks_simple/start.snap @@ -23,6 +23,9 @@ whalo [2,] ---------------------------------------------------------------------- ### Index Mapper: [] 
+---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/query_tasks_special_rules/start.snap b/index-scheduler/src/snapshots/lib.rs/query_tasks_special_rules/start.snap index caab362d7..2bb4f7590 100644 --- a/index-scheduler/src/snapshots/lib.rs/query_tasks_special_rules/start.snap +++ b/index-scheduler/src/snapshots/lib.rs/query_tasks_special_rules/start.snap @@ -25,6 +25,9 @@ whalo [3,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/register/1.snap b/index-scheduler/src/snapshots/lib.rs/register/everything_is_succesfully_registered.snap similarity index 96% rename from index-scheduler/src/snapshots/lib.rs/register/1.snap rename to index-scheduler/src/snapshots/lib.rs/register/everything_is_succesfully_registered.snap index 95eaa11c5..360752bc6 100644 --- a/index-scheduler/src/snapshots/lib.rs/register/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/register/everything_is_succesfully_registered.snap @@ -24,6 +24,9 @@ doggo [3,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_a.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_a.snap new file mode 100644 index 000000000..2c009ef1a --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_a.snap @@ -0,0 
+1,48 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "a", primary_key: Some("id") }} +1 {uid: 1, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }} +2 {uid: 2, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }} +3 {uid: 3, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,3,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,2,3,] +---------------------------------------------------------------------- +### Index Tasks: +a [0,] +b [1,] +c [2,] +d [3,] +---------------------------------------------------------------------- +### Index Mapper: +["a"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_b.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_b.snap new file mode 100644 index 
000000000..6d6e89c5f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_b.snap @@ -0,0 +1,50 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "a", primary_key: Some("id") }} +1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }} +2 {uid: 2, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }} +3 {uid: 3, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }} +---------------------------------------------------------------------- +### Status: +enqueued [2,3,] +succeeded [0,1,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,2,3,] +---------------------------------------------------------------------- +### Index Tasks: +a [0,] +b [1,] +c [2,] +d [3,] +---------------------------------------------------------------------- +### Index Mapper: +["a", "b"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git 
a/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_c.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_c.snap new file mode 100644 index 000000000..c12334ecf --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_c.snap @@ -0,0 +1,52 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "a", primary_key: Some("id") }} +1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }} +2 {uid: 2, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }} +3 {uid: 3, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }} +---------------------------------------------------------------------- +### Status: +enqueued [3,] +succeeded [0,1,2,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,2,3,] +---------------------------------------------------------------------- +### Index Tasks: +a [0,] +b [1,] +c [2,] +d [3,] +---------------------------------------------------------------------- +### Index Mapper: +["a", "b", "c"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] 
+[timestamp] [2,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes/initial_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_d.snap similarity index 95% rename from index-scheduler/src/snapshots/lib.rs/swap_indexes/initial_tasks_processed.snap rename to index-scheduler/src/snapshots/lib.rs/swap_indexes/create_d.snap index 073f280f3..b20b3b320 100644 --- a/index-scheduler/src/snapshots/lib.rs/swap_indexes/initial_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes/create_d.snap @@ -26,6 +26,9 @@ d [3,] ---------------------------------------------------------------------- ### Index Mapper: ["a", "b", "c", "d"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes/first_swap_processed.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes/first_swap_processed.snap index ec2c10e95..17e8936f0 100644 --- a/index-scheduler/src/snapshots/lib.rs/swap_indexes/first_swap_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes/first_swap_processed.snap @@ -29,6 +29,9 @@ d [2,4,] ---------------------------------------------------------------------- ### Index Mapper: ["a", "b", "c", "d"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes/first_swap_registered.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes/first_swap_registered.snap new file mode 100644 index 000000000..f2c74f676 --- /dev/null +++ 
b/index-scheduler/src/snapshots/lib.rs/swap_indexes/first_swap_registered.snap @@ -0,0 +1,57 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "a", primary_key: Some("id") }} +1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }} +2 {uid: 2, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }} +3 {uid: 3, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }} +4 {uid: 4, status: enqueued, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "d") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "d") }] }} +---------------------------------------------------------------------- +### Status: +enqueued [4,] +succeeded [0,1,2,3,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,2,3,] +"indexSwap" [4,] +---------------------------------------------------------------------- +### Index Tasks: +a [0,4,] +b [1,4,] +c [2,4,] +d [3,4,] +---------------------------------------------------------------------- +### Index Mapper: +["a", "b", "c", "d"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] 
+---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes/second_swap_processed.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes/second_swap_processed.snap index d820e04e6..acfbc4c77 100644 --- a/index-scheduler/src/snapshots/lib.rs/swap_indexes/second_swap_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes/second_swap_processed.snap @@ -29,6 +29,9 @@ d [2,4,] ---------------------------------------------------------------------- ### Index Mapper: ["a", "b", "c", "d"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes/third_empty_swap_processed.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes/third_empty_swap_processed.snap index 26bd1b0d3..c7c6faae6 100644 --- a/index-scheduler/src/snapshots/lib.rs/swap_indexes/third_empty_swap_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes/third_empty_swap_processed.snap @@ -30,6 +30,9 @@ d [2,4,] ---------------------------------------------------------------------- ### Index Mapper: ["a", "b", "c", "d"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes/two_swaps_registered.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes/two_swaps_registered.snap index c1472fdc8..0f8355f25 100644 --- 
a/index-scheduler/src/snapshots/lib.rs/swap_indexes/two_swaps_registered.snap +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes/two_swaps_registered.snap @@ -29,6 +29,9 @@ d [3,4,] ---------------------------------------------------------------------- ### Index Mapper: ["a", "b", "c", "d"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/after_the_index_creation.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/after_the_index_creation.snap new file mode 100644 index 000000000..b20b3b320 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/after_the_index_creation.snap @@ -0,0 +1,54 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "a", primary_key: Some("id") }} +1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }} +2 {uid: 2, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }} +3 {uid: 3, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [0,1,2,3,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,1,2,3,] +---------------------------------------------------------------------- +### Index Tasks: +a [0,] +b [1,] +c [2,] +d [3,] 
+---------------------------------------------------------------------- +### Index Mapper: +["a", "b", "c", "d"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap index 3b98a429f..fd9790835 100644 --- a/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/first_swap_failed.snap @@ -31,6 +31,9 @@ f [4,] ---------------------------------------------------------------------- ### Index Mapper: ["a", "b", "c", "d"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/initial_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/initial_tasks_processed.snap index 073f280f3..b20b3b320 100644 --- a/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/initial_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/swap_indexes_errors/initial_tasks_processed.snap @@ -26,6 +26,9 @@ d [3,] 
---------------------------------------------------------------------- ### Index Mapper: ["a", "b", "c", "d"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/initial_tasks_enqueued.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/initial_tasks_enqueued.snap index 162cffd2b..fc37dcf2d 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/initial_tasks_enqueued.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/initial_tasks_enqueued.snap @@ -21,6 +21,9 @@ doggo [1,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/initial_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/initial_tasks_processed.snap index c33926a04..e4c4d9d7e 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/initial_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/initial_tasks_processed.snap @@ -22,6 +22,9 @@ doggo [1,] ---------------------------------------------------------------------- ### Index Mapper: ["catto"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git 
a/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/task_deletion_processed.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/task_deletion_processed.snap index bbea9ff8b..8874cc9e0 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/task_deletion_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_delete_same_task_twice/task_deletion_processed.snap @@ -7,8 +7,8 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(1), original_query: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }} -3 {uid: 3, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(0), original_query: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }} +2 {uid: 2, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(1), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }} +3 {uid: 3, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(0), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }} ---------------------------------------------------------------------- ### Status: enqueued [1,] @@ -23,6 +23,9 @@ doggo [1,] ---------------------------------------------------------------------- ### Index Mapper: ["catto"] +---------------------------------------------------------------------- +### Canceled By: + 
---------------------------------------------------------------------- ### Enqueued At: [timestamp] [1,] diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/after_registering_the_task_deletion.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/after_registering_the_task_deletion.snap new file mode 100644 index 000000000..3c3bd754e --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/after_registering_the_task_deletion.snap @@ -0,0 +1,46 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { matched_tasks: 1, deleted_tasks: None, original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,] +"taskDeletion" [2,] +---------------------------------------------------------------------- +### Index Tasks: +catto [0,] +doggo [1,] +---------------------------------------------------------------------- +### Index Mapper: +["catto"] 
+---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000001 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/initial_tasks_enqueued.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/initial_tasks_enqueued.snap index 162cffd2b..fc37dcf2d 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/initial_tasks_enqueued.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/initial_tasks_enqueued.snap @@ -21,6 +21,9 @@ doggo [1,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/initial_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/initial_tasks_processed.snap index c33926a04..e4c4d9d7e 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/initial_tasks_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/initial_tasks_processed.snap @@ -22,6 +22,9 @@ doggo [1,] ---------------------------------------------------------------------- ### Index Mapper: ["catto"] 
+---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/task_deletion_processed.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/task_deletion_processed.snap index 3ae98f06f..29c251027 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/task_deletion_processed.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_deleteable/task_deletion_processed.snap @@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs ---------------------------------------------------------------------- ### All Tasks: 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(1), original_query: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }} +2 {uid: 2, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(1), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }} ---------------------------------------------------------------------- ### Status: enqueued [1,] @@ -22,6 +22,9 @@ doggo [1,] ---------------------------------------------------------------------- ### Index Mapper: ["catto"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [1,] diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/initial_tasks_enqueued.snap 
b/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/initial_tasks_enqueued.snap index b22cad0ca..afb8af39c 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/initial_tasks_enqueued.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/initial_tasks_enqueued.snap @@ -23,6 +23,9 @@ doggo [2,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_done.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_done.snap index acf3b752c..6fc0a4f7c 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_done.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_done.snap @@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs 0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }} 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: succeeded, details: { matched_tasks: 2, deleted_tasks: Some(0), original_query: "test_query" }, kind: TaskDeletion { query: "test_query", 
tasks: RoaringBitmap<[0, 1]> }} +3 {uid: 3, status: succeeded, details: { matched_tasks: 2, deleted_tasks: Some(0), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0, 1]> }} ---------------------------------------------------------------------- ### Status: enqueued [0,1,2,] @@ -26,6 +26,9 @@ doggo [2,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_enqueued.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_enqueued.snap index f41fae458..e2ad01246 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_enqueued.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_enqueued.snap @@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs 0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }} 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: enqueued, details: { matched_tasks: 2, deleted_tasks: None, original_query: "test_query" }, kind: TaskDeletion { query: 
"test_query", tasks: RoaringBitmap<[0, 1]> }} +3 {uid: 3, status: enqueued, details: { matched_tasks: 2, deleted_tasks: None, original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0, 1]> }} ---------------------------------------------------------------------- ### Status: enqueued [0,1,2,3,] @@ -25,6 +25,9 @@ doggo [2,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_processing.snap b/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_processing.snap index 15638b4b4..8017f77b9 100644 --- a/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_processing.snap +++ b/index-scheduler/src/snapshots/lib.rs/task_deletion_undeleteable/task_deletion_processing.snap @@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs 0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }} 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: enqueued, details: { matched_tasks: 2, deleted_tasks: None, original_query: "test_query" }, kind: 
TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0, 1]> }} +3 {uid: 3, status: enqueued, details: { matched_tasks: 2, deleted_tasks: None, original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0, 1]> }} ---------------------------------------------------------------------- ### Status: enqueued [0,1,2,3,] @@ -25,6 +25,9 @@ doggo [2,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/1.snap index 5a1d5e749..5a839838d 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/1.snap @@ -1,67 +1,45 @@ --- source: index-scheduler/src/lib.rs --- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} -1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} -2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 
00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} -3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} -5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} -6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} -7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} -8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} -9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: 
DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} -10 {uid: 10, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} ----------------------------------------------------------------------- -### Status: -enqueued [1,2,3,4,5,6,7,8,9,10,] -succeeded [0,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [1,2,3,4,5,6,7,8,9,10,] -"indexCreation" [0,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,2,3,4,5,6,7,8,9,10,] ----------------------------------------------------------------------- -### Index Mapper: -["doggos"] ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,] -[timestamp] [4,] -[timestamp] [5,] -[timestamp] [6,] -[timestamp] [7,] -[timestamp] [8,] -[timestamp] [9,] -[timestamp] [10,] ----------------------------------------------------------------------- -### Started At: -[timestamp] [0,] ----------------------------------------------------------------------- -### Finished At: -[timestamp] [0,] ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 -00000000-0000-0000-0000-000000000001 -00000000-0000-0000-0000-000000000002 -00000000-0000-0000-0000-000000000003 -00000000-0000-0000-0000-000000000004 -00000000-0000-0000-0000-000000000005 -00000000-0000-0000-0000-000000000006 -00000000-0000-0000-0000-000000000007 -00000000-0000-0000-0000-000000000008 -00000000-0000-0000-0000-000000000009 - 
----------------------------------------------------------------------- - +[ + { + "id": 0, + "doggo": "bob 0" + }, + { + "id": 1, + "doggo": "bob 1" + }, + { + "id": 2, + "doggo": "bob 2" + }, + { + "id": 3, + "doggo": "bob 3" + }, + { + "id": 4, + "doggo": "bob 4" + }, + { + "id": 5, + "doggo": "bob 5" + }, + { + "id": 6, + "doggo": "bob 6" + }, + { + "id": 7, + "doggo": "bob 7" + }, + { + "id": 8, + "doggo": "bob 8" + }, + { + "id": 9, + "doggo": "bob 9" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/3.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/3.snap deleted file mode 100644 index 5a839838d..000000000 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/3.snap +++ /dev/null @@ -1,45 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -[ - { - "id": 0, - "doggo": "bob 0" - }, - { - "id": 1, - "doggo": "bob 1" - }, - { - "id": 2, - "doggo": "bob 2" - }, - { - "id": 3, - "doggo": "bob 3" - }, - { - "id": 4, - "doggo": "bob 4" - }, - { - "id": 5, - "doggo": "bob 5" - }, - { - "id": 6, - "doggo": "bob 6" - }, - { - "id": 7, - "doggo": "bob 7" - }, - { - "id": 8, - "doggo": "bob 8" - }, - { - "id": 9, - "doggo": "bob 9" - } -] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/after_processing_the_10_tasks.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/2.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/after_processing_the_10_tasks.snap index 1fac082df..d112c8145 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/2.snap +++ 
b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/after_processing_the_10_tasks.snap @@ -31,6 +31,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,10,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/after_registering_the_10_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/after_registering_the_10_tasks.snap new file mode 100644 index 000000000..7daafcccb --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/after_registering_the_10_tasks.snap @@ -0,0 +1,70 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { 
index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} +7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} +8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} +9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} +10 {uid: 10, status: 
enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,3,4,5,6,7,8,9,10,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [1,2,3,4,5,6,7,8,9,10,] +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,6,7,8,9,10,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +[timestamp] [10,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 +00000000-0000-0000-0000-000000000006 +00000000-0000-0000-0000-000000000007 +00000000-0000-0000-0000-000000000008 +00000000-0000-0000-0000-000000000009 + +---------------------------------------------------------------------- + diff --git 
a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/processed_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/processed_the_first_task.snap new file mode 100644 index 000000000..ed265ac6e --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/processed_the_first_task.snap @@ -0,0 +1,39 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/registered_the_first_task.snap 
b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/registered_the_first_task.snap new file mode 100644 index 000000000..e23cd648f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/1.snap index ae959d293..5a839838d 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/1.snap 
+++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/1.snap @@ -1,67 +1,45 @@ --- source: index-scheduler/src/lib.rs --- -### Autobatching Enabled = false -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} -1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} -2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} -3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} -5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} -6 {uid: 6, status: enqueued, details: { 
received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} -7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} -8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} -9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} -10 {uid: 10, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} ----------------------------------------------------------------------- -### Status: -enqueued [1,2,3,4,5,6,7,8,9,10,] -succeeded [0,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [1,2,3,4,5,6,7,8,9,10,] -"indexCreation" [0,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,2,3,4,5,6,7,8,9,10,] ----------------------------------------------------------------------- -### Index Mapper: -["doggos"] 
----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,] -[timestamp] [4,] -[timestamp] [5,] -[timestamp] [6,] -[timestamp] [7,] -[timestamp] [8,] -[timestamp] [9,] -[timestamp] [10,] ----------------------------------------------------------------------- -### Started At: -[timestamp] [0,] ----------------------------------------------------------------------- -### Finished At: -[timestamp] [0,] ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 -00000000-0000-0000-0000-000000000001 -00000000-0000-0000-0000-000000000002 -00000000-0000-0000-0000-000000000003 -00000000-0000-0000-0000-000000000004 -00000000-0000-0000-0000-000000000005 -00000000-0000-0000-0000-000000000006 -00000000-0000-0000-0000-000000000007 -00000000-0000-0000-0000-000000000008 -00000000-0000-0000-0000-000000000009 - ----------------------------------------------------------------------- - +[ + { + "id": 0, + "doggo": "bob 0" + }, + { + "id": 1, + "doggo": "bob 1" + }, + { + "id": 2, + "doggo": "bob 2" + }, + { + "id": 3, + "doggo": "bob 3" + }, + { + "id": 4, + "doggo": "bob 4" + }, + { + "id": 5, + "doggo": "bob 5" + }, + { + "id": 6, + "doggo": "bob 6" + }, + { + "id": 7, + "doggo": "bob 7" + }, + { + "id": 8, + "doggo": "bob 8" + }, + { + "id": 9, + "doggo": "bob 9" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/4.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/4.snap deleted file mode 100644 index 5a839838d..000000000 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/4.snap +++ /dev/null @@ -1,45 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -[ - { - "id": 0, - "doggo": "bob 0" - }, - { - "id": 
1, - "doggo": "bob 1" - }, - { - "id": 2, - "doggo": "bob 2" - }, - { - "id": 3, - "doggo": "bob 3" - }, - { - "id": 4, - "doggo": "bob 4" - }, - { - "id": 5, - "doggo": "bob 5" - }, - { - "id": 6, - "doggo": "bob 6" - }, - { - "id": 7, - "doggo": "bob 7" - }, - { - "id": 8, - "doggo": "bob 8" - }, - { - "id": 9, - "doggo": "bob 9" - } -] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/after_registering_the_10_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/after_registering_the_10_tasks.snap new file mode 100644 index 000000000..83f17bcef --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/after_registering_the_10_tasks.snap @@ -0,0 +1,70 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, 
content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} +7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} +8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} +9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} +10 {uid: 10, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: 
DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,3,4,5,6,7,8,9,10,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [1,2,3,4,5,6,7,8,9,10,] +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,6,7,8,9,10,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +[timestamp] [10,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 +00000000-0000-0000-0000-000000000006 +00000000-0000-0000-0000-000000000007 +00000000-0000-0000-0000-000000000008 +00000000-0000-0000-0000-000000000009 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/3.snap 
b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/all_tasks_processed.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/3.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/all_tasks_processed.snap index f27170870..fc2fdc5f1 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/3.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/all_tasks_processed.snap @@ -31,6 +31,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,10,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/five_tasks_processed.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/2.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/five_tasks_processed.snap index 6261c5f78..48f972785 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/five_tasks_processed.snap @@ -31,6 +31,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,10,] 
---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/processed_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/processed_the_first_task.snap new file mode 100644 index 000000000..6214f3139 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/processed_the_first_task.snap @@ -0,0 +1,39 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] 
+---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/registered_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/registered_the_first_task.snap new file mode 100644 index 000000000..52866bed6 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_with_index_without_autobatching/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git 
a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_processing_the_10_tasks.snap similarity index 60% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/2.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_processing_the_10_tasks.snap index 983bde528..ed28c121b 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_processing_the_10_tasks.snap @@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} -2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` 
not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} -4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} -5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} -6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` 
not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} -7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} -8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} -9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` 
not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} +2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} +4 {uid: 4, status: failed, error: ResponseError { code: 200, message: 
"Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} +6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} +7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} +8 {uid: 8, status: failed, error: ResponseError { code: 
200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} +9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} ---------------------------------------------------------------------- ### Status: enqueued [] @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_registering_the_10_tasks.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/1.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_registering_the_10_tasks.snap index a6e6954fa..828d4dafc 100644 --- 
a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index/after_registering_the_10_tasks.snap @@ -28,6 +28,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/after_registering_the_10_tasks.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/1.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/after_registering_the_10_tasks.snap index 2f21c9ef2..671713c8e 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/after_registering_the_10_tasks.snap @@ -28,6 +28,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/3.snap 
b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/all_tasks_processed.snap similarity index 61% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/3.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/all_tasks_processed.snap index 9540e40bc..d995cab9e 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/3.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/all_tasks_processed.snap @@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} -2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: 
"index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} -4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} -5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} -6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: 
"index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} -7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} -8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} -9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: 
"index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} +2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} +4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", 
error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} +6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} +7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} +8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` 
not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} +9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }} ---------------------------------------------------------------------- ### Status: enqueued [] @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/five_tasks_processed.snap similarity index 76% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/2.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/five_tasks_processed.snap index 40dfd4fd1..3ae875bff 100644 --- 
a/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_cant_create_index_without_index_without_autobatching/five_tasks_processed.snap @@ -6,11 +6,11 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} -1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} -2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not 
found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} -4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }} +2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index 
`doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }} +4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} 5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }} 6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, 
allow_index_creation: false }} 7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }} @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/1.snap index 1ea5cf1e6..cbd8d175a 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/1.snap @@ -1,61 +1,41 @@ --- source: index-scheduler/src/lib.rs --- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} -1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true 
}} -2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} -4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} -5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} -6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} -7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} -8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, 
content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} -9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Index Mapper: -[] ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,] -[timestamp] [4,] -[timestamp] [5,] -[timestamp] [6,] -[timestamp] [7,] -[timestamp] [8,] -[timestamp] [9,] ----------------------------------------------------------------------- -### Started At: ----------------------------------------------------------------------- -### Finished At: ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 -00000000-0000-0000-0000-000000000001 -00000000-0000-0000-0000-000000000002 -00000000-0000-0000-0000-000000000003 -00000000-0000-0000-0000-000000000004 -00000000-0000-0000-0000-000000000005 -00000000-0000-0000-0000-000000000006 -00000000-0000-0000-0000-000000000007 -00000000-0000-0000-0000-000000000008 -00000000-0000-0000-0000-000000000009 - ----------------------------------------------------------------------- - +[ + { + "id": 1, + "doggo": "bob 1" + }, + { + "id": 2, + "doggo": "bob 2" + }, + { + "id": 3, + "doggo": "bob 3" + }, + { + "id": 4, + 
"doggo": "bob 4" + }, + { + "id": 5, + "doggo": "bob 5" + }, + { + "id": 6, + "doggo": "bob 6" + }, + { + "id": 7, + "doggo": "bob 7" + }, + { + "id": 8, + "doggo": "bob 8" + }, + { + "id": 9, + "doggo": "bob 9" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/4.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/4.snap deleted file mode 100644 index cbd8d175a..000000000 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/4.snap +++ /dev/null @@ -1,41 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -[ - { - "id": 1, - "doggo": "bob 1" - }, - { - "id": 2, - "doggo": "bob 2" - }, - { - "id": 3, - "doggo": "bob 3" - }, - { - "id": 4, - "doggo": "bob 4" - }, - { - "id": 5, - "doggo": "bob 5" - }, - { - "id": 6, - "doggo": "bob 6" - }, - { - "id": 7, - "doggo": "bob 7" - }, - { - "id": 8, - "doggo": "bob 8" - }, - { - "id": 9, - "doggo": "bob 9" - } -] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/after_registering_the_10_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/after_registering_the_10_tasks.snap new file mode 100644 index 000000000..ad5968b58 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/after_registering_the_10_tasks.snap @@ -0,0 +1,64 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: 
ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} +7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None 
}, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} +8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} +9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 
+00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 +00000000-0000-0000-0000-000000000006 +00000000-0000-0000-0000-000000000007 +00000000-0000-0000-0000-000000000008 +00000000-0000-0000-0000-000000000009 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/3.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/all_tasks_processed.snap similarity index 92% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/3.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/all_tasks_processed.snap index 88a3866a7..19ee47359 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/3.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/all_tasks_processed.snap @@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: 
"index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} 1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} 2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} 3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} @@ -30,6 +30,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/only_first_task_failed.snap similarity index 93% rename from 
index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/2.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/only_first_task_failed.snap index 8a6eb23e9..ed57bc4e3 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_right_without_index_starts_with_cant_create/only_first_task_failed.snap @@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs [] ---------------------------------------------------------------------- ### All Tasks: -0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: 
true }} 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} 3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/1.snap index 83f67d737..5a839838d 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/1.snap @@ -1,67 +1,45 @@ --- source: index-scheduler/src/lib.rs --- -### Autobatching Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} -1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} 
-2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} -4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} -5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} -6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} -7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} -8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, 
content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} -9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} -10 {uid: 10, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [1,2,3,4,5,6,7,8,9,10,] -succeeded [0,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [1,2,3,4,5,6,7,8,9,10,] -"indexCreation" [0,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,2,3,4,5,6,7,8,9,10,] ----------------------------------------------------------------------- -### Index Mapper: -["doggos"] ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,] -[timestamp] [4,] -[timestamp] [5,] -[timestamp] [6,] -[timestamp] [7,] -[timestamp] [8,] -[timestamp] [9,] -[timestamp] [10,] ----------------------------------------------------------------------- -### Started At: -[timestamp] [0,] ----------------------------------------------------------------------- -### Finished At: -[timestamp] [0,] ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 -00000000-0000-0000-0000-000000000001 -00000000-0000-0000-0000-000000000002 -00000000-0000-0000-0000-000000000003 
-00000000-0000-0000-0000-000000000004 -00000000-0000-0000-0000-000000000005 -00000000-0000-0000-0000-000000000006 -00000000-0000-0000-0000-000000000007 -00000000-0000-0000-0000-000000000008 -00000000-0000-0000-0000-000000000009 - ----------------------------------------------------------------------- - +[ + { + "id": 0, + "doggo": "bob 0" + }, + { + "id": 1, + "doggo": "bob 1" + }, + { + "id": 2, + "doggo": "bob 2" + }, + { + "id": 3, + "doggo": "bob 3" + }, + { + "id": 4, + "doggo": "bob 4" + }, + { + "id": 5, + "doggo": "bob 5" + }, + { + "id": 6, + "doggo": "bob 6" + }, + { + "id": 7, + "doggo": "bob 7" + }, + { + "id": 8, + "doggo": "bob 8" + }, + { + "id": 9, + "doggo": "bob 9" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/3.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/3.snap deleted file mode 100644 index 5a839838d..000000000 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/3.snap +++ /dev/null @@ -1,45 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -[ - { - "id": 0, - "doggo": "bob 0" - }, - { - "id": 1, - "doggo": "bob 1" - }, - { - "id": 2, - "doggo": "bob 2" - }, - { - "id": 3, - "doggo": "bob 3" - }, - { - "id": 4, - "doggo": "bob 4" - }, - { - "id": 5, - "doggo": "bob 5" - }, - { - "id": 6, - "doggo": "bob 6" - }, - { - "id": 7, - "doggo": "bob 7" - }, - { - "id": 8, - "doggo": "bob 8" - }, - { - "id": 9, - "doggo": "bob 9" - } -] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/after_registering_the_10_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/after_registering_the_10_tasks.snap new file mode 100644 index 000000000..61b7f3016 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/after_registering_the_10_tasks.snap @@ -0,0 +1,70 @@ +--- +source: 
index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }} +6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, 
content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }} +8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} +9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }} +10 {uid: 10, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [1,2,3,4,5,6,7,8,9,10,] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [1,2,3,4,5,6,7,8,9,10,] +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,6,7,8,9,10,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: 
+[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +[timestamp] [10,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 +00000000-0000-0000-0000-000000000006 +00000000-0000-0000-0000-000000000007 +00000000-0000-0000-0000-000000000008 +00000000-0000-0000-0000-000000000009 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/all_tasks_processed.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/2.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/all_tasks_processed.snap index 09e43e490..0962dcdf5 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/all_tasks_processed.snap @@ -31,6 +31,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,10,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: 
[timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/processed_the_first_task.snap b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/processed_the_first_task.snap new file mode 100644 index 000000000..ed265ac6e --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/processed_the_first_task.snap @@ -0,0 +1,39 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/registered_the_first_task.snap 
b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/registered_the_first_task.snap new file mode 100644 index 000000000..e23cd648f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_addition_mixed_rights_with_index/registered_the_first_task.snap @@ -0,0 +1,36 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }} +---------------------------------------------------------------------- +### Status: +enqueued [0,] +---------------------------------------------------------------------- +### Kind: +"indexCreation" [0,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_replace/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_replace/1.snap index 3ef17fe8a..a47ef319f 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_replace/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_replace/1.snap @@ -28,6 +28,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] 
---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_replace/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_replace/2.snap index 06c8fb066..f6423719c 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_replace/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_replace/2.snap @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/1.snap index f37b613e8..5a839838d 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/1.snap @@ -1,61 +1,45 @@ --- source: index-scheduler/src/lib.rs --- -### Autobatching Enabled = false -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { 
index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} -4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} -5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} -6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: true }} -7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} -8 {uid: 8, status: enqueued, 
details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: true }} -9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Index Mapper: -[] ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,] -[timestamp] [4,] -[timestamp] [5,] -[timestamp] [6,] -[timestamp] [7,] -[timestamp] [8,] -[timestamp] [9,] ----------------------------------------------------------------------- -### Started At: ----------------------------------------------------------------------- -### Finished At: ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 -00000000-0000-0000-0000-000000000001 -00000000-0000-0000-0000-000000000002 -00000000-0000-0000-0000-000000000003 -00000000-0000-0000-0000-000000000004 -00000000-0000-0000-0000-000000000005 -00000000-0000-0000-0000-000000000006 -00000000-0000-0000-0000-000000000007 -00000000-0000-0000-0000-000000000008 -00000000-0000-0000-0000-000000000009 - 
----------------------------------------------------------------------- - +[ + { + "id": 0, + "doggo": "bob 0" + }, + { + "id": 1, + "doggo": "bob 1" + }, + { + "id": 2, + "doggo": "bob 2" + }, + { + "id": 3, + "doggo": "bob 3" + }, + { + "id": 4, + "doggo": "bob 4" + }, + { + "id": 5, + "doggo": "bob 5" + }, + { + "id": 6, + "doggo": "bob 6" + }, + { + "id": 7, + "doggo": "bob 7" + }, + { + "id": 8, + "doggo": "bob 8" + }, + { + "id": 9, + "doggo": "bob 9" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/4.snap b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/4.snap deleted file mode 100644 index 5a839838d..000000000 --- a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/4.snap +++ /dev/null @@ -1,45 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -[ - { - "id": 0, - "doggo": "bob 0" - }, - { - "id": 1, - "doggo": "bob 1" - }, - { - "id": 2, - "doggo": "bob 2" - }, - { - "id": 3, - "doggo": "bob 3" - }, - { - "id": 4, - "doggo": "bob 4" - }, - { - "id": 5, - "doggo": "bob 5" - }, - { - "id": 6, - "doggo": "bob 6" - }, - { - "id": 7, - "doggo": "bob 7" - }, - { - "id": 8, - "doggo": "bob 8" - }, - { - "id": 9, - "doggo": "bob 9" - } -] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/after_registering_the_10_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/after_registering_the_10_tasks.snap new file mode 100644 index 000000000..0f52c9664 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/after_registering_the_10_tasks.snap @@ -0,0 +1,64 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, 
indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, 
allow_index_creation: true }} +7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} +8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: true }} +9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: 
+---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 +00000000-0000-0000-0000-000000000006 +00000000-0000-0000-0000-000000000007 +00000000-0000-0000-0000-000000000008 +00000000-0000-0000-0000-000000000009 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/3.snap b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/all_tasks_processed.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/3.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/all_tasks_processed.snap index 028ec3e0b..b80b8bb40 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/3.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/all_tasks_processed.snap @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/five_tasks_processed.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/2.snap rename to 
index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/five_tasks_processed.snap index 37aedde10..b1528c103 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_replace_without_autobatching/five_tasks_processed.snap @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_update/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_update/1.snap index cfaccc46f..6157fb454 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_update/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_update/1.snap @@ -28,6 +28,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: [] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_update/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_update/2.snap index 68d640fea..736f998d0 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_update/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_update/2.snap @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: 
[timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/1.snap b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/1.snap index ceee17298..5a839838d 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/1.snap @@ -1,61 +1,45 @@ --- source: index-scheduler/src/lib.rs --- -### Autobatching Enabled = false -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} -4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: 
UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} -5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} -6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: true }} -7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} -8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: true }} -9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,2,3,4,5,6,7,8,9,] 
----------------------------------------------------------------------- -### Index Mapper: -[] ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,] -[timestamp] [4,] -[timestamp] [5,] -[timestamp] [6,] -[timestamp] [7,] -[timestamp] [8,] -[timestamp] [9,] ----------------------------------------------------------------------- -### Started At: ----------------------------------------------------------------------- -### Finished At: ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 -00000000-0000-0000-0000-000000000001 -00000000-0000-0000-0000-000000000002 -00000000-0000-0000-0000-000000000003 -00000000-0000-0000-0000-000000000004 -00000000-0000-0000-0000-000000000005 -00000000-0000-0000-0000-000000000006 -00000000-0000-0000-0000-000000000007 -00000000-0000-0000-0000-000000000008 -00000000-0000-0000-0000-000000000009 - ----------------------------------------------------------------------- - +[ + { + "id": 0, + "doggo": "bob 0" + }, + { + "id": 1, + "doggo": "bob 1" + }, + { + "id": 2, + "doggo": "bob 2" + }, + { + "id": 3, + "doggo": "bob 3" + }, + { + "id": 4, + "doggo": "bob 4" + }, + { + "id": 5, + "doggo": "bob 5" + }, + { + "id": 6, + "doggo": "bob 6" + }, + { + "id": 7, + "doggo": "bob 7" + }, + { + "id": 8, + "doggo": "bob 8" + }, + { + "id": 9, + "doggo": "bob 9" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/4.snap b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/4.snap deleted file mode 100644 index 5a839838d..000000000 --- a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/4.snap +++ /dev/null @@ -1,45 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -[ - { - "id": 0, - "doggo": "bob 0" - }, - { - "id": 1, - "doggo": "bob 1" - }, - { - "id": 2, - "doggo": 
"bob 2" - }, - { - "id": 3, - "doggo": "bob 3" - }, - { - "id": 4, - "doggo": "bob 4" - }, - { - "id": 5, - "doggo": "bob 5" - }, - { - "id": 6, - "doggo": "bob 6" - }, - { - "id": 7, - "doggo": "bob 7" - }, - { - "id": 8, - "doggo": "bob 8" - }, - { - "id": 9, - "doggo": "bob 9" - } -] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/after_registering_the_10_tasks.snap b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/after_registering_the_10_tasks.snap new file mode 100644 index 000000000..85fda1a43 --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/after_registering_the_10_tasks.snap @@ -0,0 +1,64 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = false +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: 
UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: true }} +7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} +8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: true }} +9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued 
[0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Mapper: +[] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 +00000000-0000-0000-0000-000000000006 +00000000-0000-0000-0000-000000000007 +00000000-0000-0000-0000-000000000008 +00000000-0000-0000-0000-000000000009 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/3.snap b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/all_tasks_processed.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/3.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/all_tasks_processed.snap index ed9f09f93..a1fc55210 100644 --- 
a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/3.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/all_tasks_processed.snap @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/2.snap b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/five_tasks_processed.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/2.snap rename to index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/five_tasks_processed.snap index 62cb62c71..fb0b629ec 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_document_update_without_autobatching/five_tasks_processed.snap @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/1.snap b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/1.snap index 2875c299c..5a839838d 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/1.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/1.snap @@ -1,61 +1,45 @@ --- source: index-scheduler/src/lib.rs --- -### Autobatching 
Enabled = true -### Processing Tasks: -[] ----------------------------------------------------------------------- -### All Tasks: -0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} -1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} -2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} -3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} -4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} -5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} -6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: 
None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: true }} -7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} -8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: true }} -9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} ----------------------------------------------------------------------- -### Status: -enqueued [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Kind: -"documentAdditionOrUpdate" [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Index Tasks: -doggos [0,1,2,3,4,5,6,7,8,9,] ----------------------------------------------------------------------- -### Index Mapper: -[] ----------------------------------------------------------------------- -### Enqueued At: -[timestamp] [0,] -[timestamp] [1,] -[timestamp] [2,] -[timestamp] [3,] -[timestamp] [4,] -[timestamp] [5,] -[timestamp] [6,] -[timestamp] [7,] -[timestamp] [8,] -[timestamp] [9,] ----------------------------------------------------------------------- -### Started At: 
----------------------------------------------------------------------- -### Finished At: ----------------------------------------------------------------------- -### File Store: -00000000-0000-0000-0000-000000000000 -00000000-0000-0000-0000-000000000001 -00000000-0000-0000-0000-000000000002 -00000000-0000-0000-0000-000000000003 -00000000-0000-0000-0000-000000000004 -00000000-0000-0000-0000-000000000005 -00000000-0000-0000-0000-000000000006 -00000000-0000-0000-0000-000000000007 -00000000-0000-0000-0000-000000000008 -00000000-0000-0000-0000-000000000009 - ----------------------------------------------------------------------- - +[ + { + "id": 0, + "doggo": "bob 0" + }, + { + "id": 1, + "doggo": "bob 1" + }, + { + "id": 2, + "doggo": "bob 2" + }, + { + "id": 3, + "doggo": "bob 3" + }, + { + "id": 4, + "doggo": "bob 4" + }, + { + "id": 5, + "doggo": "bob 5" + }, + { + "id": 6, + "doggo": "bob 6" + }, + { + "id": 7, + "doggo": "bob 7" + }, + { + "id": 8, + "doggo": "bob 8" + }, + { + "id": 9, + "doggo": "bob 9" + } +] diff --git a/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/3.snap b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/3.snap deleted file mode 100644 index 5a839838d..000000000 --- a/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/3.snap +++ /dev/null @@ -1,45 +0,0 @@ ---- -source: index-scheduler/src/lib.rs ---- -[ - { - "id": 0, - "doggo": "bob 0" - }, - { - "id": 1, - "doggo": "bob 1" - }, - { - "id": 2, - "doggo": "bob 2" - }, - { - "id": 3, - "doggo": "bob 3" - }, - { - "id": 4, - "doggo": "bob 4" - }, - { - "id": 5, - "doggo": "bob 5" - }, - { - "id": 6, - "doggo": "bob 6" - }, - { - "id": 7, - "doggo": "bob 7" - }, - { - "id": 8, - "doggo": "bob 8" - }, - { - "id": 9, - "doggo": "bob 9" - } -] diff --git a/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/after_registering_the_10_tasks.snap 
b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/after_registering_the_10_tasks.snap new file mode 100644 index 000000000..330a3318e --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/after_registering_the_10_tasks.snap @@ -0,0 +1,64 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: 
enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: true }} +7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} +8 {uid: 8, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: true }} +9 {uid: 9, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Mapper: +[] 
+---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +---------------------------------------------------------------------- +### Started At: +---------------------------------------------------------------------- +### Finished At: +---------------------------------------------------------------------- +### File Store: +00000000-0000-0000-0000-000000000000 +00000000-0000-0000-0000-000000000001 +00000000-0000-0000-0000-000000000002 +00000000-0000-0000-0000-000000000003 +00000000-0000-0000-0000-000000000004 +00000000-0000-0000-0000-000000000005 +00000000-0000-0000-0000-000000000006 +00000000-0000-0000-0000-000000000007 +00000000-0000-0000-0000-000000000008 +00000000-0000-0000-0000-000000000009 + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/all_tasks_processed.snap b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/all_tasks_processed.snap new file mode 100644 index 000000000..20fda049f --- /dev/null +++ b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/all_tasks_processed.snap @@ -0,0 +1,75 @@ +--- +source: index-scheduler/src/lib.rs +--- +### Autobatching Enabled = true +### Processing Tasks: +[] +---------------------------------------------------------------------- +### All Tasks: +0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }} +1 {uid: 1, status: succeeded, details: { 
received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }} +2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }} +3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }} +4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }} +5 {uid: 5, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }} +6 {uid: 6, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: true }} +7 {uid: 7, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 
00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: true }} +8 {uid: 8, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: true }} +9 {uid: 9, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: true }} +---------------------------------------------------------------------- +### Status: +enqueued [] +succeeded [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Kind: +"documentAdditionOrUpdate" [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Tasks: +doggos [0,1,2,3,4,5,6,7,8,9,] +---------------------------------------------------------------------- +### Index Mapper: +["doggos"] +---------------------------------------------------------------------- +### Canceled By: + +---------------------------------------------------------------------- +### Enqueued At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +---------------------------------------------------------------------- +### Started At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] +[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +---------------------------------------------------------------------- +### Finished At: +[timestamp] [0,] +[timestamp] [1,] +[timestamp] [2,] +[timestamp] [3,] +[timestamp] [4,] 
+[timestamp] [5,] +[timestamp] [6,] +[timestamp] [7,] +[timestamp] [8,] +[timestamp] [9,] +---------------------------------------------------------------------- +### File Store: + +---------------------------------------------------------------------- + diff --git a/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/2.snap b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/five_tasks_processed.snap similarity index 98% rename from index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/2.snap rename to index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/five_tasks_processed.snap index 0f9af60e7..9fd990aa9 100644 --- a/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/2.snap +++ b/index-scheduler/src/snapshots/lib.rs/test_mixed_document_addition/five_tasks_processed.snap @@ -29,6 +29,9 @@ doggos [0,1,2,3,4,5,6,7,8,9,] ---------------------------------------------------------------------- ### Index Mapper: ["doggos"] +---------------------------------------------------------------------- +### Canceled By: + ---------------------------------------------------------------------- ### Enqueued At: [timestamp] [0,] diff --git a/index-scheduler/src/utils.rs b/index-scheduler/src/utils.rs index 71869d1fd..a193c2bec 100644 --- a/index-scheduler/src/utils.rs +++ b/index-scheduler/src/utils.rs @@ -398,11 +398,17 @@ impl IndexScheduler { } Details::DocumentAdditionOrUpdate { received_documents, indexed_documents } => { assert_eq!(kind.as_kind(), Kind::DocumentAdditionOrUpdate); - if let Some(indexed_documents) = indexed_documents { - assert_eq!(status, Status::Succeeded); - assert!(indexed_documents <= received_documents); - } else { - assert_ne!(status, Status::Succeeded); + match indexed_documents { + Some(0) => assert_ne!(status, Status::Enqueued), + Some(indexed_documents) => { + assert_eq!(status, Status::Succeeded); + assert!(indexed_documents <= received_documents); + } + None => { + 
assert_ne!(status, Status::Succeeded); + assert_ne!(status, Status::Canceled); + assert_ne!(status, Status::Failed); + } } } Details::SettingsUpdate { settings: _ } => { @@ -421,7 +427,7 @@ impl IndexScheduler { _ => panic!(), }, Details::DocumentDeletion { - matched_documents: received_document_ids, + provided_ids: received_document_ids, deleted_documents, } => { if let Some(deleted_documents) = deleted_documents { @@ -451,13 +457,13 @@ impl IndexScheduler { assert_ne!(status, Status::Succeeded); } } - Details::TaskCancelation { matched_tasks, canceled_tasks, original_query } => { + Details::TaskCancelation { matched_tasks, canceled_tasks, original_filter } => { if let Some(canceled_tasks) = canceled_tasks { assert_eq!(status, Status::Succeeded); assert!(canceled_tasks <= matched_tasks); match &kind { KindWithContent::TaskCancelation { query, tasks } => { - assert_eq!(query, &original_query); + assert_eq!(query, &original_filter); assert_eq!(tasks.len(), matched_tasks); } _ => panic!(), @@ -466,13 +472,13 @@ impl IndexScheduler { assert_ne!(status, Status::Succeeded); } } - Details::TaskDeletion { matched_tasks, deleted_tasks, original_query } => { + Details::TaskDeletion { matched_tasks, deleted_tasks, original_filter } => { if let Some(deleted_tasks) = deleted_tasks { assert_eq!(status, Status::Succeeded); assert!(deleted_tasks <= matched_tasks); match &kind { KindWithContent::TaskDeletion { query, tasks } => { - assert_eq!(query, &original_query); + assert_eq!(query, &original_filter); assert_eq!(tasks.len(), matched_tasks); } _ => panic!(), @@ -481,10 +487,8 @@ impl IndexScheduler { assert_ne!(status, Status::Succeeded); } } - Details::Dump { dump_uid: d1 } => { - assert!( - matches!(&kind, KindWithContent::DumpCreation { dump_uid: d2, keys: _, instance_uid: _ } if &d1 == d2 ) - ); + Details::Dump { dump_uid: _ } => { + assert_eq!(kind.as_kind(), Kind::DumpCreation); } } } diff --git a/meilisearch-http/src/analytics/mock_analytics.rs 
b/meilisearch-http/src/analytics/mock_analytics.rs index ab93f5edc..ad45a1ac8 100644 --- a/meilisearch-http/src/analytics/mock_analytics.rs +++ b/meilisearch-http/src/analytics/mock_analytics.rs @@ -5,8 +5,9 @@ use actix_web::HttpRequest; use meilisearch_types::InstanceUid; use serde_json::Value; -use super::{find_user_id, Analytics}; +use super::{find_user_id, Analytics, DocumentDeletionKind}; use crate::routes::indexes::documents::UpdateDocumentsQuery; +use crate::routes::tasks::TasksFilterQueryRaw; use crate::Opt; pub struct MockAnalytics { @@ -49,6 +50,7 @@ impl Analytics for MockAnalytics { _request: &HttpRequest, ) { } + fn delete_documents(&self, _kind: DocumentDeletionKind, _request: &HttpRequest) {} fn update_documents( &self, _documents_query: &UpdateDocumentsQuery, @@ -56,4 +58,6 @@ impl Analytics for MockAnalytics { _request: &HttpRequest, ) { } + fn get_tasks(&self, _query: &TasksFilterQueryRaw, _request: &HttpRequest) {} + fn health_seen(&self, _request: &HttpRequest) {} } diff --git a/meilisearch-http/src/analytics/mod.rs b/meilisearch-http/src/analytics/mod.rs index ffebaea77..46c4b2090 100644 --- a/meilisearch-http/src/analytics/mod.rs +++ b/meilisearch-http/src/analytics/mod.rs @@ -15,6 +15,7 @@ use platform_dirs::AppDirs; use serde_json::Value; use crate::routes::indexes::documents::UpdateDocumentsQuery; +use crate::routes::tasks::TasksFilterQueryRaw; // if we are in debug mode OR the analytics feature is disabled // the `SegmentAnalytics` point to the mock instead of the real analytics @@ -54,6 +55,13 @@ fn find_user_id(db_path: &Path) -> Option { .and_then(|uid| InstanceUid::from_str(&uid).ok()) } +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum DocumentDeletionKind { + PerDocumentId, + ClearAll, + PerBatch, +} + pub trait Analytics: Sync + Send { fn instance_uid(&self) -> Option<&InstanceUid>; @@ -73,6 +81,10 @@ pub trait Analytics: Sync + Send { index_creation: bool, request: &HttpRequest, ); + + // this method should be called to 
aggregate a add documents request + fn delete_documents(&self, kind: DocumentDeletionKind, request: &HttpRequest); + // this method should be called to batch a update documents request fn update_documents( &self, @@ -80,4 +92,10 @@ pub trait Analytics: Sync + Send { index_creation: bool, request: &HttpRequest, ); + + // this method should be called to aggregate the get tasks requests. + fn get_tasks(&self, query: &TasksFilterQueryRaw, request: &HttpRequest); + + // this method should be called to aggregate a add documents request + fn health_seen(&self, request: &HttpRequest); } diff --git a/meilisearch-http/src/analytics/segment_analytics.rs b/meilisearch-http/src/analytics/segment_analytics.rs index 13dba7896..afec4c5cb 100644 --- a/meilisearch-http/src/analytics/segment_analytics.rs +++ b/meilisearch-http/src/analytics/segment_analytics.rs @@ -6,6 +6,7 @@ use std::time::{Duration, Instant}; use actix_web::http::header::USER_AGENT; use actix_web::HttpRequest; +use byte_unit::Byte; use http::header::CONTENT_TYPE; use index_scheduler::IndexScheduler; use meilisearch_auth::SearchRules; @@ -14,6 +15,7 @@ use once_cell::sync::Lazy; use regex::Regex; use segment::message::{Identify, Track, User}; use segment::{AutoBatcher, Batcher, HttpClient}; +use serde::Serialize; use serde_json::{json, Value}; use sysinfo::{DiskExt, System, SystemExt}; use time::OffsetDateTime; @@ -21,10 +23,11 @@ use tokio::select; use tokio::sync::mpsc::{self, Receiver, Sender}; use uuid::Uuid; -use super::{config_user_id_path, MEILISEARCH_CONFIG_PATH}; +use super::{config_user_id_path, DocumentDeletionKind, MEILISEARCH_CONFIG_PATH}; use crate::analytics::Analytics; -use crate::option::default_http_addr; +use crate::option::{default_http_addr, IndexerOpts, MaxMemory, MaxThreads, SchedulerConfig}; use crate::routes::indexes::documents::UpdateDocumentsQuery; +use crate::routes::tasks::TasksFilterQueryRaw; use crate::routes::{create_all_stats, Stats}; use crate::search::{ SearchQuery, SearchResult, 
DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, @@ -66,7 +69,10 @@ pub enum AnalyticsMsg { AggregateGetSearch(SearchAggregator), AggregatePostSearch(SearchAggregator), AggregateAddDocuments(DocumentsAggregator), + AggregateDeleteDocuments(DocumentsDeletionAggregator), AggregateUpdateDocuments(DocumentsAggregator), + AggregateTasks(TasksAggregator), + AggregateHealth(HealthAggregator), } pub struct SegmentAnalytics { @@ -125,7 +131,10 @@ impl SegmentAnalytics { post_search_aggregator: SearchAggregator::default(), get_search_aggregator: SearchAggregator::default(), add_documents_aggregator: DocumentsAggregator::default(), + delete_documents_aggregator: DocumentsDeletionAggregator::default(), update_documents_aggregator: DocumentsAggregator::default(), + get_tasks_aggregator: TasksAggregator::default(), + health_aggregator: HealthAggregator::default(), }); tokio::spawn(segment.run(index_scheduler.clone())); @@ -171,6 +180,11 @@ impl super::Analytics for SegmentAnalytics { let _ = self.sender.try_send(AnalyticsMsg::AggregateAddDocuments(aggregate)); } + fn delete_documents(&self, kind: DocumentDeletionKind, request: &HttpRequest) { + let aggregate = DocumentsDeletionAggregator::from_query(kind, request); + let _ = self.sender.try_send(AnalyticsMsg::AggregateDeleteDocuments(aggregate)); + } + fn update_documents( &self, documents_query: &UpdateDocumentsQuery, @@ -180,6 +194,134 @@ impl super::Analytics for SegmentAnalytics { let aggregate = DocumentsAggregator::from_query(documents_query, index_creation, request); let _ = self.sender.try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate)); } + + fn get_tasks(&self, query: &TasksFilterQueryRaw, request: &HttpRequest) { + let aggregate = TasksAggregator::from_query(query, request); + let _ = self.sender.try_send(AnalyticsMsg::AggregateTasks(aggregate)); + } + + fn health_seen(&self, request: &HttpRequest) { + let aggregate = HealthAggregator::from_query(request); + let _ = 
self.sender.try_send(AnalyticsMsg::AggregateHealth(aggregate)); + } +} + +/// This structure represent the `infos` field we send in the analytics. +/// It's quite close to the `Opt` structure except all sensitive informations +/// have been simplified to a boolean. +/// It's send as-is in amplitude thus you should never update a name of the +/// struct without the approval of the PM. +#[derive(Debug, Clone, Serialize)] +struct Infos { + env: String, + db_path: bool, + import_dump: bool, + dumps_dir: bool, + ignore_missing_dump: bool, + ignore_dump_if_db_exists: bool, + import_snapshot: bool, + schedule_snapshot: bool, + snapshot_dir: bool, + snapshot_interval_sec: u64, + ignore_missing_snapshot: bool, + ignore_snapshot_if_db_exists: bool, + http_addr: bool, + max_index_size: Byte, + max_task_db_size: Byte, + http_payload_size_limit: Byte, + disable_auto_batching: bool, + log_level: String, + max_indexing_memory: MaxMemory, + max_indexing_threads: MaxThreads, + with_configuration_file: bool, + ssl_auth_path: bool, + ssl_cert_path: bool, + ssl_key_path: bool, + ssl_ocsp_path: bool, + ssl_require_auth: bool, + ssl_resumption: bool, + ssl_tickets: bool, +} + +impl From for Infos { + fn from(options: Opt) -> Self { + // We wants to decompose this whole struct by hand to be sure we don't forget + // to add analytics when we add a field in the Opt. + // Thus we must not insert `..` at the end. 
+ let Opt { + db_path, + http_addr, + master_key: _, + env, + max_index_size, + max_task_db_size, + http_payload_size_limit, + ssl_cert_path, + ssl_key_path, + ssl_auth_path, + ssl_ocsp_path, + ssl_require_auth, + ssl_resumption, + ssl_tickets, + import_snapshot, + ignore_missing_snapshot, + ignore_snapshot_if_db_exists, + snapshot_dir, + schedule_snapshot, + snapshot_interval_sec, + import_dump, + ignore_missing_dump, + ignore_dump_if_db_exists, + dumps_dir, + log_level, + indexer_options, + scheduler_options, + config_file_path, + #[cfg(all(not(debug_assertions), feature = "analytics"))] + no_analytics: _, + } = options; + + let SchedulerConfig { disable_auto_batching } = scheduler_options; + let IndexerOpts { + log_every_n: _, + max_nb_chunks: _, + max_indexing_memory, + max_indexing_threads, + } = indexer_options; + + // We're going to override every sensible information. + // We consider information sensible if it contains a path, an address, or a key. + Self { + env, + db_path: db_path != PathBuf::from("./data.ms"), + import_dump: import_dump.is_some(), + dumps_dir: dumps_dir != PathBuf::from("dumps/"), + ignore_missing_dump, + ignore_dump_if_db_exists, + import_snapshot: import_snapshot.is_some(), + schedule_snapshot, + snapshot_dir: snapshot_dir != PathBuf::from("snapshots/"), + snapshot_interval_sec, + ignore_missing_snapshot, + ignore_snapshot_if_db_exists, + http_addr: http_addr != default_http_addr(), + max_index_size, + max_task_db_size, + http_payload_size_limit, + disable_auto_batching, + log_level, + max_indexing_memory, + max_indexing_threads, + with_configuration_file: config_file_path.is_some(), + ssl_auth_path: ssl_auth_path.is_some(), + ssl_cert_path: ssl_cert_path.is_some(), + ssl_key_path: ssl_key_path.is_some(), + ssl_ocsp_path: ssl_ocsp_path.is_some(), + ssl_require_auth, + ssl_resumption, + ssl_tickets, + } + } } pub struct Segment { @@ -190,7 +332,10 @@ pub struct Segment { get_search_aggregator: SearchAggregator, post_search_aggregator: 
SearchAggregator, add_documents_aggregator: DocumentsAggregator, + delete_documents_aggregator: DocumentsDeletionAggregator, update_documents_aggregator: DocumentsAggregator, + get_tasks_aggregator: TasksAggregator, + health_aggregator: HealthAggregator, } impl Segment { @@ -212,31 +357,6 @@ impl Segment { "server_provider": std::env::var("MEILI_SERVER_PROVIDER").ok(), }) }); - // The infos are all cli option except every option containing sensitive information. - // We consider an information as sensible if it contains a path, an address or a key. - let infos = { - // First we see if any sensitive fields were used. - let db_path = opt.db_path != PathBuf::from("./data.ms"); - let import_dump = opt.import_dump.is_some(); - let dumps_dir = opt.dumps_dir != PathBuf::from("dumps/"); - let import_snapshot = opt.import_snapshot.is_some(); - let snapshots_dir = opt.snapshot_dir != PathBuf::from("snapshots/"); - let http_addr = opt.http_addr != default_http_addr(); - - let mut infos = serde_json::to_value(opt).unwrap(); - - // Then we overwrite all sensitive field with a boolean representing if - // the feature was used or not. 
- infos["db_path"] = json!(db_path); - infos["import_dump"] = json!(import_dump); - infos["dumps_dir"] = json!(dumps_dir); - infos["import_snapshot"] = json!(import_snapshot); - infos["snapshot_dir"] = json!(snapshots_dir); - infos["http_addr"] = json!(http_addr); - - infos - }; - let number_of_documents = stats.indexes.values().map(|index| index.number_of_documents).collect::>(); @@ -248,7 +368,7 @@ impl Segment { "indexes_number": stats.indexes.len(), "documents_number": number_of_documents, }, - "infos": infos, + "infos": Infos::from(opt.clone()), }) } @@ -269,7 +389,10 @@ impl Segment { Some(AnalyticsMsg::AggregateGetSearch(agreg)) => self.get_search_aggregator.aggregate(agreg), Some(AnalyticsMsg::AggregatePostSearch(agreg)) => self.post_search_aggregator.aggregate(agreg), Some(AnalyticsMsg::AggregateAddDocuments(agreg)) => self.add_documents_aggregator.aggregate(agreg), + Some(AnalyticsMsg::AggregateDeleteDocuments(agreg)) => self.delete_documents_aggregator.aggregate(agreg), Some(AnalyticsMsg::AggregateUpdateDocuments(agreg)) => self.update_documents_aggregator.aggregate(agreg), + Some(AnalyticsMsg::AggregateTasks(agreg)) => self.get_tasks_aggregator.aggregate(agreg), + Some(AnalyticsMsg::AggregateHealth(agreg)) => self.health_aggregator.aggregate(agreg), None => (), } } @@ -299,8 +422,14 @@ impl Segment { .into_event(&self.user, "Documents Searched POST"); let add_documents = std::mem::take(&mut self.add_documents_aggregator) .into_event(&self.user, "Documents Added"); + let delete_documents = std::mem::take(&mut self.delete_documents_aggregator) + .into_event(&self.user, "Documents Deleted"); let update_documents = std::mem::take(&mut self.update_documents_aggregator) .into_event(&self.user, "Documents Updated"); + let get_tasks = + std::mem::take(&mut self.get_tasks_aggregator).into_event(&self.user, "Tasks Seen"); + let health = + std::mem::take(&mut self.health_aggregator).into_event(&self.user, "Health Seen"); if let Some(get_search) = get_search { let 
_ = self.batcher.push(get_search).await; @@ -311,9 +440,18 @@ impl Segment { if let Some(add_documents) = add_documents { let _ = self.batcher.push(add_documents).await; } + if let Some(delete_documents) = delete_documents { + let _ = self.batcher.push(delete_documents).await; + } if let Some(update_documents) = update_documents { let _ = self.batcher.push(update_documents).await; } + if let Some(get_tasks) = get_tasks { + let _ = self.batcher.push(get_tasks).await; + } + if let Some(health) = health { + let _ = self.batcher.push(health).await; + } let _ = self.batcher.flush().await; } } @@ -358,11 +496,18 @@ pub struct SearchAggregator { finite_pagination: usize, // formatting + max_attributes_to_retrieve: usize, + max_attributes_to_highlight: usize, highlight_pre_tag: bool, highlight_post_tag: bool, + max_attributes_to_crop: usize, crop_marker: bool, show_matches_position: bool, crop_length: bool, + + // facets + facets_sum_of_terms: usize, + facets_total_number_of_facets: usize, } impl SearchAggregator { @@ -443,16 +588,19 @@ impl SearchAggregator { for user_agent in other.user_agents.into_iter() { self.user_agents.insert(user_agent); } + // request self.total_received = self.total_received.saturating_add(other.total_received); self.total_succeeded = self.total_succeeded.saturating_add(other.total_succeeded); self.time_spent.append(&mut other.time_spent); + // sort self.sort_with_geo_point |= other.sort_with_geo_point; self.sort_sum_of_criteria_terms = self.sort_sum_of_criteria_terms.saturating_add(other.sort_sum_of_criteria_terms); self.sort_total_number_of_criteria = self.sort_total_number_of_criteria.saturating_add(other.sort_total_number_of_criteria); + // filter self.filter_with_geo_radius |= other.filter_with_geo_radius; self.filter_sum_of_criteria_terms = @@ -467,20 +615,34 @@ impl SearchAggregator { // q self.max_terms_number = self.max_terms_number.max(other.max_terms_number); - for (key, value) in other.matching_strategy.into_iter() { - let 
matching_strategy = self.matching_strategy.entry(key).or_insert(0); - *matching_strategy = matching_strategy.saturating_add(value); - } // pagination self.max_limit = self.max_limit.max(other.max_limit); self.max_offset = self.max_offset.max(other.max_offset); self.finite_pagination += other.finite_pagination; + // formatting + self.max_attributes_to_retrieve = + self.max_attributes_to_retrieve.max(other.max_attributes_to_retrieve); + self.max_attributes_to_highlight = + self.max_attributes_to_highlight.max(other.max_attributes_to_highlight); self.highlight_pre_tag |= other.highlight_pre_tag; self.highlight_post_tag |= other.highlight_post_tag; + self.max_attributes_to_crop = self.max_attributes_to_crop.max(other.max_attributes_to_crop); self.crop_marker |= other.crop_marker; self.show_matches_position |= other.show_matches_position; self.crop_length |= other.crop_length; + + // facets + self.facets_sum_of_terms = + self.facets_sum_of_terms.saturating_add(other.facets_sum_of_terms); + self.facets_total_number_of_facets = + self.facets_total_number_of_facets.saturating_add(other.facets_total_number_of_facets); + + // matching strategy + for (key, value) in other.matching_strategy.into_iter() { + let matching_strategy = self.matching_strategy.entry(key).or_insert(0); + *matching_strategy = matching_strategy.saturating_add(value); + } } pub fn into_event(self, user: &User, event_name: &str) -> Option { @@ -513,20 +675,28 @@ impl SearchAggregator { }, "q": { "max_terms_number": self.max_terms_number, - "most_used_matching_strategy": self.matching_strategy.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)), }, "pagination": { "max_limit": self.max_limit, "max_offset": self.max_offset, - "finite_pagination": self.finite_pagination > self.total_received / 2, + "most_used_navigation": if self.finite_pagination > (self.total_received / 2) { "exhaustive" } else { "estimated" }, }, "formatting": { + "max_attributes_to_retrieve": 
self.max_attributes_to_retrieve, + "max_attributes_to_highlight": self.max_attributes_to_highlight, "highlight_pre_tag": self.highlight_pre_tag, "highlight_post_tag": self.highlight_post_tag, + "max_attributes_to_crop": self.max_attributes_to_crop, "crop_marker": self.crop_marker, "show_matches_position": self.show_matches_position, "crop_length": self.crop_length, }, + "facets": { + "avg_facets_number": format!("{:.2}", self.facets_sum_of_terms as f64 / self.facets_total_number_of_facets as f64), + }, + "matching_strategy": { + "most_used_strategy": self.matching_strategy.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)), + } }); Some(Track { @@ -622,3 +792,200 @@ impl DocumentsAggregator { } } } + +#[derive(Default, Serialize)] +pub struct DocumentsDeletionAggregator { + #[serde(skip)] + timestamp: Option, + + // context + #[serde(rename = "user-agent")] + user_agents: HashSet, + + total_received: usize, + per_document_id: bool, + clear_all: bool, + per_batch: bool, +} + +impl DocumentsDeletionAggregator { + pub fn from_query(kind: DocumentDeletionKind, request: &HttpRequest) -> Self { + let mut ret = Self::default(); + ret.timestamp = Some(OffsetDateTime::now_utc()); + + ret.user_agents = extract_user_agents(request).into_iter().collect(); + ret.total_received = 1; + match kind { + DocumentDeletionKind::PerDocumentId => ret.per_document_id = true, + DocumentDeletionKind::ClearAll => ret.clear_all = true, + DocumentDeletionKind::PerBatch => ret.per_batch = true, + } + + ret + } + + /// Aggregate one [DocumentsAggregator] into another. 
+ pub fn aggregate(&mut self, other: Self) { + if self.timestamp.is_none() { + self.timestamp = other.timestamp; + } + + // we can't create a union because there is no `into_union` method + for user_agent in other.user_agents { + self.user_agents.insert(user_agent); + } + self.total_received = self.total_received.saturating_add(other.total_received); + self.per_document_id |= other.per_document_id; + self.clear_all |= other.clear_all; + self.per_batch |= other.per_batch; + } + + pub fn into_event(self, user: &User, event_name: &str) -> Option { + // if we had no timestamp it means we never encountered any events and + // thus we don't need to send this event. + let timestamp = self.timestamp?; + + Some(Track { + timestamp: Some(timestamp), + user: user.clone(), + event: event_name.to_string(), + properties: serde_json::to_value(self).ok()?, + ..Default::default() + }) + } +} + +#[derive(Default, Serialize)] +pub struct TasksAggregator { + #[serde(skip)] + timestamp: Option, + + // context + #[serde(rename = "user-agent")] + user_agents: HashSet, + + filtered_by_uid: bool, + filtered_by_index_uid: bool, + filtered_by_type: bool, + filtered_by_status: bool, + filtered_by_canceled_by: bool, + filtered_by_before_enqueued_at: bool, + filtered_by_after_enqueued_at: bool, + filtered_by_before_started_at: bool, + filtered_by_after_started_at: bool, + filtered_by_before_finished_at: bool, + filtered_by_after_finished_at: bool, + total_received: usize, +} + +impl TasksAggregator { + pub fn from_query(query: &TasksFilterQueryRaw, request: &HttpRequest) -> Self { + Self { + timestamp: Some(OffsetDateTime::now_utc()), + user_agents: extract_user_agents(request).into_iter().collect(), + filtered_by_uid: query.common.uids.is_some(), + filtered_by_index_uid: query.common.index_uids.is_some(), + filtered_by_type: query.common.types.is_some(), + filtered_by_status: query.common.statuses.is_some(), + filtered_by_canceled_by: query.common.canceled_by.is_some(), + 
filtered_by_before_enqueued_at: query.dates.before_enqueued_at.is_some(), + filtered_by_after_enqueued_at: query.dates.after_enqueued_at.is_some(), + filtered_by_before_started_at: query.dates.before_started_at.is_some(), + filtered_by_after_started_at: query.dates.after_started_at.is_some(), + filtered_by_before_finished_at: query.dates.before_finished_at.is_some(), + filtered_by_after_finished_at: query.dates.after_finished_at.is_some(), + total_received: 1, + } + } + + /// Aggregate one [DocumentsAggregator] into another. + pub fn aggregate(&mut self, other: Self) { + if self.timestamp.is_none() { + self.timestamp = other.timestamp; + } + + // we can't create a union because there is no `into_union` method + for user_agent in other.user_agents { + self.user_agents.insert(user_agent); + } + + self.filtered_by_uid |= other.filtered_by_uid; + self.filtered_by_index_uid |= other.filtered_by_index_uid; + self.filtered_by_type |= other.filtered_by_type; + self.filtered_by_status |= other.filtered_by_status; + self.filtered_by_canceled_by |= other.filtered_by_canceled_by; + self.filtered_by_before_enqueued_at |= other.filtered_by_before_enqueued_at; + self.filtered_by_after_enqueued_at |= other.filtered_by_after_enqueued_at; + self.filtered_by_before_started_at |= other.filtered_by_before_started_at; + self.filtered_by_after_started_at |= other.filtered_by_after_started_at; + self.filtered_by_before_finished_at |= other.filtered_by_before_finished_at; + self.filtered_by_after_finished_at |= other.filtered_by_after_finished_at; + self.filtered_by_after_finished_at |= other.filtered_by_after_finished_at; + + self.total_received = self.total_received.saturating_add(other.total_received); + } + + pub fn into_event(self, user: &User, event_name: &str) -> Option { + // if we had no timestamp it means we never encountered any events and + // thus we don't need to send this event. 
+ let timestamp = self.timestamp?; + + Some(Track { + timestamp: Some(timestamp), + user: user.clone(), + event: event_name.to_string(), + properties: serde_json::to_value(self).ok()?, + ..Default::default() + }) + } +} + +#[derive(Default, Serialize)] +pub struct HealthAggregator { + #[serde(skip)] + timestamp: Option, + + // context + #[serde(rename = "user-agent")] + user_agents: HashSet, + + total_received: usize, +} + +impl HealthAggregator { + pub fn from_query(request: &HttpRequest) -> Self { + let mut ret = Self::default(); + ret.timestamp = Some(OffsetDateTime::now_utc()); + + ret.user_agents = extract_user_agents(request).into_iter().collect(); + ret.total_received = 1; + ret + } + + /// Aggregate one [DocumentsAggregator] into another. + pub fn aggregate(&mut self, other: Self) { + if self.timestamp.is_none() { + self.timestamp = other.timestamp; + } + + // we can't create a union because there is no `into_union` method + for user_agent in other.user_agents { + self.user_agents.insert(user_agent); + } + self.total_received = self.total_received.saturating_add(other.total_received); + } + + pub fn into_event(self, user: &User, event_name: &str) -> Option { + // if we had no timestamp it means we never encountered any events and + // thus we don't need to send this event. + let timestamp = self.timestamp?; + + Some(Track { + timestamp: Some(timestamp), + user: user.clone(), + event: event_name.to_string(), + properties: serde_json::to_value(self).ok()?, + ..Default::default() + }) + } +} diff --git a/meilisearch-http/src/extractors/authentication/mod.rs b/meilisearch-http/src/extractors/authentication/mod.rs index cd7a43114..8944b60d3 100644 --- a/meilisearch-http/src/extractors/authentication/mod.rs +++ b/meilisearch-http/src/extractors/authentication/mod.rs @@ -31,11 +31,14 @@ impl GuardedData { where P: Policy + 'static, { + let missing_master_key = auth.get_master_key().is_none(); + match Self::authenticate(auth, token, index).await? 
{ Some(filters) => match data { Some(data) => Ok(Self { data, filters, _marker: PhantomData }), None => Err(AuthenticationError::IrretrievableState.into()), }, + None if missing_master_key => Err(AuthenticationError::MissingMasterKey.into()), None => Err(AuthenticationError::InvalidToken.into()), } } diff --git a/meilisearch-http/src/lib.rs b/meilisearch-http/src/lib.rs index 9a3ce857e..6fa6b77d8 100644 --- a/meilisearch-http/src/lib.rs +++ b/meilisearch-http/src/lib.rs @@ -128,7 +128,13 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc, Auth autobatching_enabled: !opt.scheduler_options.disable_auto_batching, }) }; - let meilisearch_builder = || -> anyhow::Result<_> { + + enum OnFailure { + RemoveDb, + KeepDb, + } + + let meilisearch_builder = |on_failure: OnFailure| -> anyhow::Result<_> { // if anything wrong happens we delete the `data.ms` entirely. match ( index_scheduler_builder().map_err(anyhow::Error::from), @@ -137,7 +143,9 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc, Auth ) { (Ok(i), Ok(a), Ok(())) => Ok((i, a)), (Err(e), _, _) | (_, Err(e), _) | (_, _, Err(e)) => { - std::fs::remove_dir_all(&opt.db_path)?; + if matches!(on_failure, OnFailure::RemoveDb) { + std::fs::remove_dir_all(&opt.db_path)?; + } Err(e) } } @@ -148,7 +156,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc, Auth let snapshot_path_exists = snapshot_path.exists(); if empty_db && snapshot_path_exists { match compression::from_tar_gz(snapshot_path, &opt.db_path) { - Ok(()) => meilisearch_builder()?, + Ok(()) => meilisearch_builder(OnFailure::RemoveDb)?, Err(e) => { std::fs::remove_dir_all(&opt.db_path)?; return Err(e); @@ -162,12 +170,13 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc, Auth } else if !snapshot_path_exists && !opt.ignore_missing_snapshot { bail!("snapshot doesn't exist at {}", snapshot_path.display()) } else { - meilisearch_builder()? + meilisearch_builder(OnFailure::RemoveDb)? 
} } else if let Some(ref path) = opt.import_dump { let src_path_exists = path.exists(); if empty_db && src_path_exists { - let (mut index_scheduler, mut auth_controller) = meilisearch_builder()?; + let (mut index_scheduler, mut auth_controller) = + meilisearch_builder(OnFailure::RemoveDb)?; match import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller) { Ok(()) => (index_scheduler, auth_controller), Err(e) => { @@ -183,7 +192,8 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc, Auth } else if !src_path_exists && !opt.ignore_missing_dump { bail!("dump doesn't exist at {:?}", path) } else { - let (mut index_scheduler, mut auth_controller) = meilisearch_builder()?; + let (mut index_scheduler, mut auth_controller) = + meilisearch_builder(OnFailure::RemoveDb)?; match import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller) { Ok(()) => (index_scheduler, auth_controller), Err(e) => { @@ -196,7 +206,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc, Auth if !empty_db { check_version_file(&opt.db_path)?; } - meilisearch_builder()? + meilisearch_builder(OnFailure::KeepDb)? 
}; // We create a loop in a thread that registers snapshotCreation tasks @@ -204,12 +214,15 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc, Auth if opt.schedule_snapshot { let snapshot_delay = Duration::from_secs(opt.snapshot_interval_sec); let index_scheduler = index_scheduler.clone(); - thread::spawn(move || loop { - thread::sleep(snapshot_delay); - if let Err(e) = index_scheduler.register(KindWithContent::SnapshotCreation) { - error!("Error while registering snapshot: {}", e); - } - }); + thread::Builder::new() + .name(String::from("register-snapshot-tasks")) + .spawn(move || loop { + thread::sleep(snapshot_delay); + if let Err(e) = index_scheduler.register(KindWithContent::SnapshotCreation) { + error!("Error while registering snapshot: {}", e); + } + }) + .unwrap(); } Ok((index_scheduler, auth_controller)) diff --git a/meilisearch-http/src/option.rs b/meilisearch-http/src/option.rs index 02ce67bbc..82d67d5a0 100644 --- a/meilisearch-http/src/option.rs +++ b/meilisearch-http/src/option.rs @@ -69,7 +69,7 @@ const MEILI_MAX_INDEXING_THREADS: &str = "MEILI_MAX_INDEXING_THREADS"; const DISABLE_AUTO_BATCHING: &str = "DISABLE_AUTO_BATCHING"; const DEFAULT_LOG_EVERY_N: usize = 100000; -#[derive(Debug, Clone, Parser, Serialize, Deserialize)] +#[derive(Debug, Clone, Parser, Deserialize)] #[clap(version, next_display_order = None)] #[serde(rename_all = "snake_case", deny_unknown_fields)] pub struct Opt { @@ -84,7 +84,6 @@ pub struct Opt { pub http_addr: String, /// Sets the instance's master key, automatically protecting all routes except `GET /health`. - #[serde(skip_serializing)] #[clap(long, env = MEILI_MASTER_KEY)] pub master_key: Option, @@ -99,7 +98,7 @@ pub struct Opt { /// All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted /// at any time. 
#[cfg(all(not(debug_assertions), feature = "analytics"))] - #[serde(skip_serializing, default)] // we can't send true + #[serde(default)] // we can't send true #[clap(long, env = MEILI_NO_ANALYTICS)] pub no_analytics: bool, @@ -121,39 +120,35 @@ pub struct Opt { pub http_payload_size_limit: Byte, /// Sets the server's SSL certificates. - #[serde(skip_serializing)] #[clap(long, env = MEILI_SSL_CERT_PATH, value_parser)] pub ssl_cert_path: Option, /// Sets the server's SSL key files. - #[serde(skip_serializing)] #[clap(long, env = MEILI_SSL_KEY_PATH, value_parser)] pub ssl_key_path: Option, /// Enables client authentication in the specified path. - #[serde(skip_serializing)] #[clap(long, env = MEILI_SSL_AUTH_PATH, value_parser)] pub ssl_auth_path: Option, /// Sets the server's OCSP file. *Optional* /// /// Reads DER-encoded OCSP response from OCSPFILE and staple to certificate. - #[serde(skip_serializing)] #[clap(long, env = MEILI_SSL_OCSP_PATH, value_parser)] pub ssl_ocsp_path: Option, /// Makes SSL authentication mandatory. - #[serde(skip_serializing, default)] + #[serde(default)] #[clap(long, env = MEILI_SSL_REQUIRE_AUTH)] pub ssl_require_auth: bool, /// Activates SSL session resumption. - #[serde(skip_serializing, default)] + #[serde(default)] #[clap(long, env = MEILI_SSL_RESUMPTION)] pub ssl_resumption: bool, /// Activates SSL tickets. - #[serde(skip_serializing, default)] + #[serde(default)] #[clap(long, env = MEILI_SSL_TICKETS)] pub ssl_tickets: bool, @@ -251,7 +246,6 @@ pub struct Opt { /// Set the path to a configuration file that should be used to setup the engine. /// Format must be TOML. - #[serde(skip_serializing)] #[clap(long)] pub config_file_path: Option, } @@ -439,16 +433,15 @@ impl Opt { } } -#[derive(Debug, Clone, Parser, Deserialize, Serialize)] +#[derive(Debug, Clone, Parser, Deserialize)] pub struct IndexerOpts { /// Sets the amount of documents to skip before printing /// a log regarding the indexing advancement. 
- #[serde(skip_serializing, default = "default_log_every_n")] + #[serde(default = "default_log_every_n")] #[clap(long, default_value_t = default_log_every_n(), hide = true)] // 100k pub log_every_n: usize, /// Grenad max number of chunks in bytes. - #[serde(skip_serializing)] #[clap(long, hide = true)] pub max_nb_chunks: Option, @@ -488,7 +481,7 @@ impl IndexerOpts { } } -#[derive(Debug, Clone, Parser, Default, Deserialize, Serialize)] +#[derive(Debug, Clone, Parser, Default, Deserialize)] #[serde(rename_all = "snake_case", deny_unknown_fields)] pub struct SchedulerConfig { /// Deactivates auto-batching when provided. @@ -508,8 +501,10 @@ impl TryFrom<&IndexerOpts> for IndexerConfig { type Error = anyhow::Error; fn try_from(other: &IndexerOpts) -> Result { - let thread_pool = - rayon::ThreadPoolBuilder::new().num_threads(*other.max_indexing_threads).build()?; + let thread_pool = rayon::ThreadPoolBuilder::new() + .thread_name(|index| format!("indexing-thread:{index}")) + .num_threads(*other.max_indexing_threads) + .build()?; Ok(Self { log_every_n: Some(other.log_every_n), @@ -580,7 +575,7 @@ fn total_memory_bytes() -> Option { let memory_kind = RefreshKind::new().with_memory(); let mut system = System::new_with_specifics(memory_kind); system.refresh_memory(); - Some(system.total_memory() * 1024) // KiB into bytes + Some(system.total_memory()) } else { None } diff --git a/meilisearch-http/src/routes/dump.rs b/meilisearch-http/src/routes/dump.rs index 1148cdcb6..8e0e63776 100644 --- a/meilisearch-http/src/routes/dump.rs +++ b/meilisearch-http/src/routes/dump.rs @@ -6,8 +6,6 @@ use meilisearch_auth::AuthController; use meilisearch_types::error::ResponseError; use meilisearch_types::tasks::KindWithContent; use serde_json::json; -use time::macros::format_description; -use time::OffsetDateTime; use crate::analytics::Analytics; use crate::extractors::authentication::policies::*; @@ -27,16 +25,9 @@ pub async fn create_dump( ) -> Result { analytics.publish("Dump 
Created".to_string(), json!({}), Some(&req)); - let dump_uid = OffsetDateTime::now_utc() - .format(format_description!( - "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]" - )) - .unwrap(); - let task = KindWithContent::DumpCreation { keys: auth_controller.list_keys()?, instance_uid: analytics.instance_uid().cloned(), - dump_uid, }; let task: SummarizedTaskView = tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into(); diff --git a/meilisearch-http/src/routes/indexes/documents.rs b/meilisearch-http/src/routes/indexes/documents.rs index 0cdb11e8a..0fe3cf102 100644 --- a/meilisearch-http/src/routes/indexes/documents.rs +++ b/meilisearch-http/src/routes/indexes/documents.rs @@ -21,7 +21,7 @@ use serde::Deserialize; use serde_cs::vec::CS; use serde_json::Value; -use crate::analytics::Analytics; +use crate::analytics::{Analytics, DocumentDeletionKind}; use crate::error::MeilisearchHttpError; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::GuardedData; @@ -95,7 +95,11 @@ pub async fn get_document( pub async fn delete_document( index_scheduler: GuardedData, Data>, path: web::Path, + req: HttpRequest, + analytics: web::Data, ) -> Result { + analytics.delete_documents(DocumentDeletionKind::PerDocumentId, &req); + let DocumentParam { document_id, index_uid } = path.into_inner(); let task = KindWithContent::DocumentDeletion { index_uid, documents_ids: vec![document_id] }; let task: SummarizedTaskView = @@ -296,8 +300,13 @@ pub async fn delete_documents( index_scheduler: GuardedData, Data>, path: web::Path, body: web::Json>, + req: HttpRequest, + analytics: web::Data, ) -> Result { debug!("called with params: {:?}", body); + + analytics.delete_documents(DocumentDeletionKind::PerBatch, &req); + let ids = body .iter() .map(|v| v.as_str().map(String::from).unwrap_or_else(|| v.to_string())) @@ -315,7 +324,11 @@ pub async 
fn delete_documents( pub async fn clear_all_documents( index_scheduler: GuardedData, Data>, path: web::Path, + req: HttpRequest, + analytics: web::Data, ) -> Result { + analytics.delete_documents(DocumentDeletionKind::ClearAll, &req); + let task = KindWithContent::DocumentClear { index_uid: path.into_inner() }; let task: SummarizedTaskView = tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into(); diff --git a/meilisearch-http/src/routes/indexes/settings.rs b/meilisearch-http/src/routes/indexes/settings.rs index 4e33cb0b4..d2508a3c8 100644 --- a/meilisearch-http/src/routes/indexes/settings.rs +++ b/meilisearch-http/src/routes/indexes/settings.rs @@ -123,17 +123,6 @@ macro_rules! make_setting_route { } } }; - ($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal) => { - make_setting_route!( - $route, - $update_verb, - $type, - $attr, - $camelcase_attr, - _analytics, - |_, _| {} - ); - }; } make_setting_route!( @@ -187,7 +176,22 @@ make_setting_route!( put, Vec, displayed_attributes, - "displayedAttributes" + "displayedAttributes", + analytics, + |displayed: &Option>, req: &HttpRequest| { + use serde_json::json; + + analytics.publish( + "DisplayedAttributes Updated".to_string(), + json!({ + "displayed_attributes": { + "total": displayed.as_ref().map(|displayed| displayed.len()), + "with_wildcard": displayed.as_ref().map(|displayed| displayed.iter().any(|displayed| displayed == "*")), + }, + }), + Some(req), + ); + } ); make_setting_route!( @@ -247,6 +251,7 @@ make_setting_route!( json!({ "searchable_attributes": { "total": setting.as_ref().map(|searchable| searchable.len()), + "with_wildcard": setting.as_ref().map(|searchable| searchable.iter().any(|searchable| searchable == "*")), }, }), Some(req), @@ -259,7 +264,21 @@ make_setting_route!( put, std::collections::BTreeSet, stop_words, - "stopWords" + "stopWords", + analytics, + |stop_words: &Option>, req: &HttpRequest| { + use serde_json::json; + + 
analytics.publish( + "StopWords Updated".to_string(), + json!({ + "stop_words": { + "total": stop_words.as_ref().map(|stop_words| stop_words.len()), + }, + }), + Some(req), + ); + } ); make_setting_route!( @@ -267,10 +286,43 @@ make_setting_route!( put, std::collections::BTreeMap>, synonyms, - "synonyms" + "synonyms", + analytics, + |synonyms: &Option>>, req: &HttpRequest| { + use serde_json::json; + + analytics.publish( + "Synonyms Updated".to_string(), + json!({ + "synonyms": { + "total": synonyms.as_ref().map(|synonyms| synonyms.len()), + }, + }), + Some(req), + ); + } ); -make_setting_route!("/distinct-attribute", put, String, distinct_attribute, "distinctAttribute"); +make_setting_route!( + "/distinct-attribute", + put, + String, + distinct_attribute, + "distinctAttribute", + analytics, + |distinct: &Option, req: &HttpRequest| { + use serde_json::json; + analytics.publish( + "DistinctAttribute Updated".to_string(), + json!({ + "distinct_attribute": { + "set": distinct.is_some(), + } + }), + Some(req), + ); + } +); make_setting_route!( "/ranking-rules", @@ -286,7 +338,13 @@ make_setting_route!( "RankingRules Updated".to_string(), json!({ "ranking_rules": { - "sort_position": setting.as_ref().map(|sort| sort.iter().position(|s| s == "sort")), + "words_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "words")), + "typo_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "typo")), + "proximity_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "proximity")), + "attribute_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "attribute")), + "sort_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "sort")), + "exactness_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "exactness")), + "values": setting.as_ref().map(|rr| rr.iter().filter(|s| !s.contains(':')).cloned().collect::>().join(", ")), } }), Some(req), @@ -379,10 +437,21 @@ pub async fn update_all( "Settings 
Updated".to_string(), json!({ "ranking_rules": { - "sort_position": new_settings.ranking_rules.as_ref().set().map(|sort| sort.iter().position(|s| s == "sort")), + "words_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "words")), + "typo_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "typo")), + "proximity_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "proximity")), + "attribute_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "attribute")), + "sort_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "sort")), + "exactness_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "exactness")), + "values": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().filter(|s| !s.contains(':')).cloned().collect::>().join(", ")), }, "searchable_attributes": { "total": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()), + "with_wildcard": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.iter().any(|searchable| searchable == "*")), + }, + "displayed_attributes": { + "total": new_settings.displayed_attributes.as_ref().set().map(|displayed| displayed.len()), + "with_wildcard": new_settings.displayed_attributes.as_ref().set().map(|displayed| displayed.iter().any(|displayed| displayed == "*")), }, "sortable_attributes": { "total": new_settings.sortable_attributes.as_ref().set().map(|sort| sort.len()), @@ -392,6 +461,9 @@ pub async fn update_all( "total": new_settings.filterable_attributes.as_ref().set().map(|filter| filter.len()), "has_geo": new_settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")), }, + "distinct_attribute": { + "set": new_settings.distinct_attribute.as_ref().set().is_some() + }, "typo_tolerance": { "enabled": 
new_settings.typo_tolerance .as_ref() @@ -435,6 +507,12 @@ pub async fn update_all( .set() .and_then(|s| s.max_total_hits.as_ref().set()), }, + "stop_words": { + "total": new_settings.stop_words.as_ref().set().map(|stop_words| stop_words.len()), + }, + "synonyms": { + "total": new_settings.synonyms.as_ref().set().map(|synonyms| synonyms.len()), + }, }), Some(&req), ); diff --git a/meilisearch-http/src/routes/mod.rs b/meilisearch-http/src/routes/mod.rs index 81e100214..9fcb1c4b7 100644 --- a/meilisearch-http/src/routes/mod.rs +++ b/meilisearch-http/src/routes/mod.rs @@ -21,7 +21,7 @@ mod api_key; mod dump; pub mod indexes; mod swap_indexes; -mod tasks; +pub mod tasks; pub fn configure(cfg: &mut web::ServiceConfig) { cfg.service(web::scope("/tasks").configure(tasks::configure)) @@ -271,7 +271,7 @@ pub fn create_all_stats( let mut indexes = BTreeMap::new(); let mut database_size = 0; let processing_task = index_scheduler.get_tasks_from_authorized_indexes( - Query { status: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() }, + Query { statuses: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() }, search_rules.authorized_indexes(), )?; let processing_index = processing_task.first().and_then(|task| task.index_uid()); @@ -308,7 +308,11 @@ struct VersionResponse { async fn get_version( _index_scheduler: GuardedData, Data>, + req: HttpRequest, + analytics: web::Data, ) -> HttpResponse { + analytics.publish("Version Seen".to_string(), json!(null), Some(&req)); + let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown"); let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown"); @@ -325,6 +329,11 @@ struct KeysResponse { public: Option, } -pub async fn get_health() -> Result { +pub async fn get_health( + req: HttpRequest, + analytics: web::Data, +) -> Result { + analytics.health_seen(&req); + Ok(HttpResponse::Ok().json(serde_json::json!({ "status": "available" }))) } diff --git 
a/meilisearch-http/src/routes/swap_indexes.rs b/meilisearch-http/src/routes/swap_indexes.rs index fa28fe27c..bc0c1705a 100644 --- a/meilisearch-http/src/routes/swap_indexes.rs +++ b/meilisearch-http/src/routes/swap_indexes.rs @@ -1,11 +1,13 @@ use actix_web::web::Data; -use actix_web::{web, HttpResponse}; +use actix_web::{web, HttpRequest, HttpResponse}; use index_scheduler::IndexScheduler; use meilisearch_types::error::ResponseError; use meilisearch_types::tasks::{IndexSwap, KindWithContent}; use serde::Deserialize; +use serde_json::json; use super::SummarizedTaskView; +use crate::analytics::Analytics; use crate::error::MeilisearchHttpError; use crate::extractors::authentication::policies::*; use crate::extractors::authentication::{AuthenticationError, GuardedData}; @@ -23,7 +25,16 @@ pub struct SwapIndexesPayload { pub async fn swap_indexes( index_scheduler: GuardedData, Data>, params: web::Json>, + req: HttpRequest, + analytics: web::Data, ) -> Result { + analytics.publish( + "Indexes Swapped".to_string(), + json!({ + "swap_operation_number": params.len(), + }), + Some(&req), + ); let search_rules = &index_scheduler.filters().search_rules; let mut swaps = vec![]; diff --git a/meilisearch-http/src/routes/tasks.rs b/meilisearch-http/src/routes/tasks.rs index 0c9a49a3c..914315711 100644 --- a/meilisearch-http/src/routes/tasks.rs +++ b/meilisearch-http/src/routes/tasks.rs @@ -1,3 +1,5 @@ +use std::str::FromStr; + use actix_web::web::Data; use actix_web::{web, HttpRequest, HttpResponse}; use index_scheduler::{IndexScheduler, Query, TaskId}; @@ -14,6 +16,7 @@ use serde_json::json; use time::{Duration, OffsetDateTime}; use tokio::task; +use self::date_deserializer::{deserialize_date, DeserializeDateOption}; use super::{fold_star_or, SummarizedTaskView}; use crate::analytics::Analytics; use crate::extractors::authentication::policies::*; @@ -41,15 +44,10 @@ pub struct TaskView { pub status: Status, #[serde(rename = "type")] pub kind: Kind, - - 
#[serde(skip_serializing_if = "Option::is_none")] pub canceled_by: Option, - #[serde(skip_serializing_if = "Option::is_none")] pub details: Option, - #[serde(skip_serializing_if = "Option::is_none")] pub error: Option, - #[serde(serialize_with = "serialize_duration", default)] pub duration: Option, #[serde(with = "time::serde::rfc3339")] @@ -84,11 +82,11 @@ pub struct DetailsView { #[serde(skip_serializing_if = "Option::is_none")] pub received_documents: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub indexed_documents: Option, + pub indexed_documents: Option>, #[serde(skip_serializing_if = "Option::is_none")] pub primary_key: Option>, #[serde(skip_serializing_if = "Option::is_none")] - pub matched_documents: Option, + pub provided_ids: Option, #[serde(skip_serializing_if = "Option::is_none")] pub deleted_documents: Option>, #[serde(skip_serializing_if = "Option::is_none")] @@ -98,9 +96,9 @@ pub struct DetailsView { #[serde(skip_serializing_if = "Option::is_none")] pub deleted_tasks: Option>, #[serde(skip_serializing_if = "Option::is_none")] - pub original_query: Option, + pub original_filter: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub dump_uid: Option, + pub dump_uid: Option>, #[serde(skip_serializing_if = "Option::is_none")] #[serde(flatten)] pub settings: Option>>, @@ -114,7 +112,7 @@ impl From
for DetailsView { Details::DocumentAdditionOrUpdate { received_documents, indexed_documents } => { DetailsView { received_documents: Some(received_documents), - indexed_documents, + indexed_documents: Some(indexed_documents), ..DetailsView::default() } } @@ -125,30 +123,32 @@ impl From
for DetailsView { DetailsView { primary_key: Some(primary_key), ..DetailsView::default() } } Details::DocumentDeletion { - matched_documents: received_document_ids, + provided_ids: received_document_ids, deleted_documents, } => DetailsView { - matched_documents: Some(received_document_ids), + provided_ids: Some(received_document_ids), deleted_documents: Some(deleted_documents), ..DetailsView::default() }, Details::ClearAll { deleted_documents } => { DetailsView { deleted_documents: Some(deleted_documents), ..DetailsView::default() } } - Details::TaskCancelation { matched_tasks, canceled_tasks, original_query } => { + Details::TaskCancelation { matched_tasks, canceled_tasks, original_filter } => { DetailsView { matched_tasks: Some(matched_tasks), canceled_tasks: Some(canceled_tasks), - original_query: Some(original_query), + original_filter: Some(original_filter), + ..DetailsView::default() + } + } + Details::TaskDeletion { matched_tasks, deleted_tasks, original_filter } => { + DetailsView { + matched_tasks: Some(matched_tasks), + deleted_tasks: Some(deleted_tasks), + original_filter: Some(original_filter), ..DetailsView::default() } } - Details::TaskDeletion { matched_tasks, deleted_tasks, original_query } => DetailsView { - matched_tasks: Some(matched_tasks), - deleted_tasks: Some(deleted_tasks), - original_query: Some(original_query), - ..DetailsView::default() - }, Details::Dump { dump_uid } => { DetailsView { dump_uid: Some(dump_uid), ..DetailsView::default() } } @@ -159,102 +159,296 @@ impl From
for DetailsView { } } -#[derive(Serialize, Deserialize, Debug)] +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct TaskCommonQueryRaw { + pub uids: Option>, + pub canceled_by: Option>, + pub types: Option>>, + pub statuses: Option>>, + pub index_uids: Option>>, +} +impl TaskCommonQueryRaw { + fn validate(self) -> Result { + let Self { uids, canceled_by, types, statuses, index_uids } = self; + let uids = if let Some(uids) = uids { + Some( + uids.into_iter() + .map(|uid_string| { + uid_string.parse::().map_err(|_e| { + index_scheduler::Error::InvalidTaskUids { task_uid: uid_string }.into() + }) + }) + .collect::, ResponseError>>()?, + ) + } else { + None + }; + let canceled_by = if let Some(canceled_by) = canceled_by { + Some( + canceled_by + .into_iter() + .map(|canceled_by_string| { + canceled_by_string.parse::().map_err(|_e| { + index_scheduler::Error::InvalidTaskCanceledBy { + canceled_by: canceled_by_string, + } + .into() + }) + }) + .collect::, ResponseError>>()?, + ) + } else { + None + }; + + let types = if let Some(types) = types.and_then(fold_star_or) as Option> { + Some( + types + .into_iter() + .map(|type_string| { + Kind::from_str(&type_string).map_err(|_e| { + index_scheduler::Error::InvalidTaskTypes { type_: type_string }.into() + }) + }) + .collect::, ResponseError>>()?, + ) + } else { + None + }; + let statuses = if let Some(statuses) = + statuses.and_then(fold_star_or) as Option> + { + Some( + statuses + .into_iter() + .map(|status_string| { + Status::from_str(&status_string).map_err(|_e| { + index_scheduler::Error::InvalidTaskStatuses { status: status_string } + .into() + }) + }) + .collect::, ResponseError>>()?, + ) + } else { + None + }; + + let index_uids = + if let Some(index_uids) = index_uids.and_then(fold_star_or) as Option> { + Some( + index_uids + .into_iter() + .map(|index_uid_string| { + IndexUid::from_str(&index_uid_string) + .map(|index_uid| index_uid.to_string()) + .map_err(|_e| { + 
index_scheduler::Error::InvalidIndexUid { + index_uid: index_uid_string, + } + .into() + }) + }) + .collect::, ResponseError>>()?, + ) + } else { + None + }; + Ok(TaskCommonQuery { types, uids, canceled_by, statuses, index_uids }) + } +} + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct TaskDateQueryRaw { + pub after_enqueued_at: Option, + pub before_enqueued_at: Option, + pub after_started_at: Option, + pub before_started_at: Option, + pub after_finished_at: Option, + pub before_finished_at: Option, +} +impl TaskDateQueryRaw { + fn validate(self) -> Result { + let Self { + after_enqueued_at, + before_enqueued_at, + after_started_at, + before_started_at, + after_finished_at, + before_finished_at, + } = self; + + let mut query = TaskDateQuery { + after_enqueued_at: None, + before_enqueued_at: None, + after_started_at: None, + before_started_at: None, + after_finished_at: None, + before_finished_at: None, + }; + + for (field_name, string_value, before_or_after, dest) in [ + ( + "afterEnqueuedAt", + after_enqueued_at, + DeserializeDateOption::After, + &mut query.after_enqueued_at, + ), + ( + "beforeEnqueuedAt", + before_enqueued_at, + DeserializeDateOption::Before, + &mut query.before_enqueued_at, + ), + ( + "afterStartedAt", + after_started_at, + DeserializeDateOption::After, + &mut query.after_started_at, + ), + ( + "beforeStartedAt", + before_started_at, + DeserializeDateOption::Before, + &mut query.before_started_at, + ), + ( + "afterFinishedAt", + after_finished_at, + DeserializeDateOption::After, + &mut query.after_finished_at, + ), + ( + "beforeFinishedAt", + before_finished_at, + DeserializeDateOption::Before, + &mut query.before_finished_at, + ), + ] { + if let Some(string_value) = string_value { + *dest = Some(deserialize_date(field_name, &string_value, before_or_after)?); + } + } + + Ok(query) + } +} + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct 
TasksFilterQueryRaw { + #[serde(flatten)] + pub common: TaskCommonQueryRaw, + #[serde(default = "DEFAULT_LIMIT")] + pub limit: u32, + pub from: Option, + #[serde(flatten)] + pub dates: TaskDateQueryRaw, +} + +#[derive(Deserialize, Debug)] +#[serde(rename_all = "camelCase", deny_unknown_fields)] +pub struct TaskDeletionOrCancelationQueryRaw { + #[serde(flatten)] + pub common: TaskCommonQueryRaw, + #[serde(flatten)] + pub dates: TaskDateQueryRaw, +} + +impl TasksFilterQueryRaw { + fn validate(self) -> Result { + let Self { common, limit, from, dates } = self; + let common = common.validate()?; + let dates = dates.validate()?; + + Ok(TasksFilterQuery { common, limit, from, dates }) + } +} + +impl TaskDeletionOrCancelationQueryRaw { + fn validate(self) -> Result { + let Self { common, dates } = self; + let common = common.validate()?; + let dates = dates.validate()?; + + Ok(TaskDeletionOrCancelationQuery { common, dates }) + } +} + +#[derive(Serialize, Debug)] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct TaskDateQuery { #[serde( default, skip_serializing_if = "Option::is_none", - serialize_with = "time::serde::rfc3339::option::serialize", - deserialize_with = "date_deserializer::after::deserialize" + serialize_with = "time::serde::rfc3339::option::serialize" )] after_enqueued_at: Option, #[serde( default, skip_serializing_if = "Option::is_none", - serialize_with = "time::serde::rfc3339::option::serialize", - deserialize_with = "date_deserializer::before::deserialize" + serialize_with = "time::serde::rfc3339::option::serialize" )] before_enqueued_at: Option, #[serde( default, skip_serializing_if = "Option::is_none", - serialize_with = "time::serde::rfc3339::option::serialize", - deserialize_with = "date_deserializer::after::deserialize" + serialize_with = "time::serde::rfc3339::option::serialize" )] after_started_at: Option, #[serde( default, skip_serializing_if = "Option::is_none", - serialize_with = "time::serde::rfc3339::option::serialize", - 
deserialize_with = "date_deserializer::before::deserialize" + serialize_with = "time::serde::rfc3339::option::serialize" )] before_started_at: Option, #[serde( default, skip_serializing_if = "Option::is_none", - serialize_with = "time::serde::rfc3339::option::serialize", - deserialize_with = "date_deserializer::after::deserialize" + serialize_with = "time::serde::rfc3339::option::serialize" )] after_finished_at: Option, #[serde( default, skip_serializing_if = "Option::is_none", - serialize_with = "time::serde::rfc3339::option::serialize", - deserialize_with = "date_deserializer::before::deserialize" + serialize_with = "time::serde::rfc3339::option::serialize" )] before_finished_at: Option, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] +#[derive(Debug)] +pub struct TaskCommonQuery { + types: Option>, + uids: Option>, + canceled_by: Option>, + statuses: Option>, + index_uids: Option>, +} + +#[derive(Debug)] pub struct TasksFilterQuery { - #[serde(rename = "type")] - kind: Option>>, - uid: Option>, - status: Option>>, - index_uid: Option>>, - #[serde(default = "DEFAULT_LIMIT")] limit: u32, from: Option, - #[serde(flatten)] + common: TaskCommonQuery, dates: TaskDateQuery, } -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct TaskDeletionQuery { - #[serde(rename = "type")] - kind: Option>, - uid: Option>, - status: Option>, - index_uid: Option>, - #[serde(flatten)] - dates: TaskDateQuery, -} - -#[derive(Deserialize, Debug)] -#[serde(rename_all = "camelCase", deny_unknown_fields)] -pub struct TaskCancelationQuery { - #[serde(rename = "type")] - type_: Option>, - uid: Option>, - status: Option>, - index_uid: Option>, - #[serde(flatten)] +#[derive(Debug)] +pub struct TaskDeletionOrCancelationQuery { + common: TaskCommonQuery, dates: TaskDateQuery, } async fn cancel_tasks( index_scheduler: GuardedData, Data>, + params: web::Query, req: HttpRequest, - params: web::Query, + analytics: 
web::Data, ) -> Result { - let TaskCancelationQuery { - type_, - uid, - status, - index_uid, + let query = params.into_inner().validate()?; + let TaskDeletionOrCancelationQuery { + common: TaskCommonQuery { types, uids, canceled_by, statuses, index_uids }, dates: TaskDateQuery { after_enqueued_at, @@ -264,21 +458,34 @@ async fn cancel_tasks( after_finished_at, before_finished_at, }, - } = params.into_inner(); + } = query; - let kind: Option> = type_.map(|x| x.into_iter().collect()); - let uid: Option> = uid.map(|x| x.into_iter().collect()); - let status: Option> = status.map(|x| x.into_iter().collect()); - let index_uid: Option> = - index_uid.map(|x| x.into_iter().map(|x| x.to_string()).collect()); + analytics.publish( + "Tasks Canceled".to_string(), + json!({ + "filtered_by_uid": uids.is_some(), + "filtered_by_index_uid": index_uids.is_some(), + "filtered_by_type": types.is_some(), + "filtered_by_status": statuses.is_some(), + "filtered_by_canceled_by": canceled_by.is_some(), + "filtered_by_before_enqueued_at": before_enqueued_at.is_some(), + "filtered_by_after_enqueued_at": after_enqueued_at.is_some(), + "filtered_by_before_started_at": before_started_at.is_some(), + "filtered_by_after_started_at": after_started_at.is_some(), + "filtered_by_before_finished_at": before_finished_at.is_some(), + "filtered_by_after_finished_at": after_finished_at.is_some(), + }), + Some(&req), + ); let query = Query { limit: None, from: None, - status, - kind, - index_uid, - uid, + statuses, + types, + index_uids, + uids, + canceled_by, before_enqueued_at, after_enqueued_at, before_started_at, @@ -297,7 +504,7 @@ async fn cancel_tasks( &index_scheduler.filters().search_rules.authorized_indexes(), )?; let task_cancelation = - KindWithContent::TaskCancelation { query: req.query_string().to_string(), tasks }; + KindWithContent::TaskCancelation { query: format!("?{}", req.query_string()), tasks }; let task = task::spawn_blocking(move || 
index_scheduler.register(task_cancelation)).await??; let task: SummarizedTaskView = task.into(); @@ -307,14 +514,12 @@ async fn cancel_tasks( async fn delete_tasks( index_scheduler: GuardedData, Data>, + params: web::Query, req: HttpRequest, - params: web::Query, + analytics: web::Data, ) -> Result { - let TaskDeletionQuery { - kind: type_, - uid, - status, - index_uid, + let TaskDeletionOrCancelationQuery { + common: TaskCommonQuery { types, uids, canceled_by, statuses, index_uids }, dates: TaskDateQuery { after_enqueued_at, @@ -324,21 +529,34 @@ async fn delete_tasks( after_finished_at, before_finished_at, }, - } = params.into_inner(); + } = params.into_inner().validate()?; - let kind: Option> = type_.map(|x| x.into_iter().collect()); - let uid: Option> = uid.map(|x| x.into_iter().collect()); - let status: Option> = status.map(|x| x.into_iter().collect()); - let index_uid: Option> = - index_uid.map(|x| x.into_iter().map(|x| x.to_string()).collect()); + analytics.publish( + "Tasks Deleted".to_string(), + json!({ + "filtered_by_uid": uids.is_some(), + "filtered_by_index_uid": index_uids.is_some(), + "filtered_by_type": types.is_some(), + "filtered_by_status": statuses.is_some(), + "filtered_by_canceled_by": canceled_by.is_some(), + "filtered_by_before_enqueued_at": before_enqueued_at.is_some(), + "filtered_by_after_enqueued_at": after_enqueued_at.is_some(), + "filtered_by_before_started_at": before_started_at.is_some(), + "filtered_by_after_started_at": after_started_at.is_some(), + "filtered_by_before_finished_at": before_finished_at.is_some(), + "filtered_by_after_finished_at": after_finished_at.is_some(), + }), + Some(&req), + ); let query = Query { limit: None, from: None, - status, - kind, - index_uid, - uid, + statuses, + types, + index_uids, + uids, + canceled_by, after_enqueued_at, before_enqueued_at, after_started_at, @@ -357,7 +575,7 @@ async fn delete_tasks( &index_scheduler.filters().search_rules.authorized_indexes(), )?; let task_deletion = - 
KindWithContent::TaskDeletion { query: req.query_string().to_string(), tasks }; + KindWithContent::TaskDeletion { query: format!("?{}", req.query_string()), tasks }; let task = task::spawn_blocking(move || index_scheduler.register(task_deletion)).await??; let task: SummarizedTaskView = task.into(); @@ -375,15 +593,14 @@ pub struct AllTasks { async fn get_tasks( index_scheduler: GuardedData, Data>, - params: web::Query, + params: web::Query, req: HttpRequest, analytics: web::Data, ) -> Result { + analytics.get_tasks(¶ms, &req); + let TasksFilterQuery { - kind, - uid, - status, - index_uid, + common: TaskCommonQuery { types, uids, canceled_by, statuses, index_uids }, limit, from, dates: @@ -395,24 +612,7 @@ async fn get_tasks( after_finished_at, before_finished_at, }, - } = params.into_inner(); - - // We first transform a potential indexUid=* into a "not specified indexUid filter" - // for every one of the filters: type, status, and indexUid. - let kind: Option> = kind.and_then(fold_star_or); - let uid: Option> = uid.map(|x| x.into_iter().collect()); - let status: Option> = status.and_then(fold_star_or); - let index_uid: Option> = index_uid.and_then(fold_star_or); - - analytics.publish( - "Tasks Seen".to_string(), - json!({ - "filtered_by_index_uid": index_uid.as_ref().map_or(false, |v| !v.is_empty()), - "filtered_by_type": kind.as_ref().map_or(false, |v| !v.is_empty()), - "filtered_by_status": status.as_ref().map_or(false, |v| !v.is_empty()), - }), - Some(&req), - ); + } = params.into_inner().validate()?; // We +1 just to know if there is more after this "page" or not. 
let limit = limit.saturating_add(1); @@ -420,10 +620,11 @@ async fn get_tasks( let query = index_scheduler::Query { limit: Some(limit), from, - status, - kind, - index_uid, - uid, + statuses, + types, + index_uids, + uids, + canceled_by, before_enqueued_at, after_enqueued_at, before_started_at, @@ -457,15 +658,22 @@ async fn get_tasks( async fn get_task( index_scheduler: GuardedData, Data>, - task_id: web::Path, + task_uid: web::Path, req: HttpRequest, analytics: web::Data, ) -> Result { - let task_id = task_id.into_inner(); + let task_uid_string = task_uid.into_inner(); + + let task_uid: TaskId = match task_uid_string.parse() { + Ok(id) => id, + Err(_e) => { + return Err(index_scheduler::Error::InvalidTaskUids { task_uid: task_uid_string }.into()) + } + }; analytics.publish("Tasks Seen".to_string(), json!({ "per_task_uid": true }), Some(&req)); - let query = index_scheduler::Query { uid: Some(vec![task_id]), ..Query::default() }; + let query = index_scheduler::Query { uids: Some(vec![task_uid]), ..Query::default() }; if let Some(task) = index_scheduler .get_tasks_from_authorized_indexes( @@ -477,24 +685,26 @@ async fn get_task( let task_view = TaskView::from_task(task); Ok(HttpResponse::Ok().json(task_view)) } else { - Err(index_scheduler::Error::TaskNotFound(task_id).into()) + Err(index_scheduler::Error::TaskNotFound(task_uid).into()) } } pub(crate) mod date_deserializer { + use meilisearch_types::error::ResponseError; use time::format_description::well_known::Rfc3339; use time::macros::format_description; use time::{Date, Duration, OffsetDateTime, Time}; - enum DeserializeDateOption { + pub enum DeserializeDateOption { Before, After, } - fn deserialize_date( + pub fn deserialize_date( + field_name: &str, value: &str, option: DeserializeDateOption, - ) -> std::result::Result { + ) -> std::result::Result { // We can't parse using time's rfc3339 format, since then we won't know what part of the // datetime was not explicitly specified, and thus we won't be able to 
increment it to the // next step. @@ -511,120 +721,16 @@ pub(crate) mod date_deserializer { match option { DeserializeDateOption::Before => Ok(datetime), DeserializeDateOption::After => { - let datetime = datetime - .checked_add(Duration::days(1)) - .ok_or_else(|| serde::de::Error::custom("date overflow"))?; + let datetime = datetime.checked_add(Duration::days(1)).unwrap_or(datetime); Ok(datetime) } } } else { - Err(serde::de::Error::custom( - "could not parse a date with the RFC3339 or YYYY-MM-DD format", - )) - } - } - - /// Deserialize an upper bound datetime with RFC3339 or YYYY-MM-DD. - pub(crate) mod before { - use serde::Deserializer; - use time::OffsetDateTime; - - use super::{deserialize_date, DeserializeDateOption}; - - /// Deserialize an [`Option`] from its ISO 8601 representation. - pub fn deserialize<'a, D: Deserializer<'a>>( - deserializer: D, - ) -> Result, D::Error> { - deserializer.deserialize_option(Visitor) - } - - struct Visitor; - - #[derive(Debug)] - struct DeserializeError; - - impl<'a> serde::de::Visitor<'a> for Visitor { - type Value = Option; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str( - "an optional date written as a string with the RFC3339 or YYYY-MM-DD format", - ) - } - - fn visit_str( - self, - value: &str, - ) -> Result, E> { - deserialize_date(value, DeserializeDateOption::Before).map(Some) - } - - fn visit_some>( - self, - deserializer: D, - ) -> Result, D::Error> { - deserializer.deserialize_str(Visitor) - } - - fn visit_none(self) -> Result, E> { - Ok(None) - } - - fn visit_unit(self) -> Result { - Ok(None) - } - } - } - /// Deserialize a lower bound datetime with RFC3339 or YYYY-MM-DD. - /// - /// If YYYY-MM-DD is used, the day is incremented by one. - pub(crate) mod after { - use serde::Deserializer; - use time::OffsetDateTime; - - use super::{deserialize_date, DeserializeDateOption}; - - /// Deserialize an [`Option`] from its ISO 8601 representation. 
- pub fn deserialize<'a, D: Deserializer<'a>>( - deserializer: D, - ) -> Result, D::Error> { - deserializer.deserialize_option(Visitor) - } - - struct Visitor; - - #[derive(Debug)] - struct DeserializeError; - - impl<'a> serde::de::Visitor<'a> for Visitor { - type Value = Option; - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str( - "an optional date written as a string with the RFC3339 or YYYY-MM-DD format", - ) - } - - fn visit_str( - self, - value: &str, - ) -> Result, E> { - deserialize_date(value, DeserializeDateOption::After).map(Some) - } - - fn visit_some>( - self, - deserializer: D, - ) -> Result, D::Error> { - deserializer.deserialize_str(Visitor) - } - - fn visit_none(self) -> Result, E> { - Ok(None) - } - - fn visit_unit(self) -> Result { - Ok(None) + Err(index_scheduler::Error::InvalidTaskDate { + field: field_name.to_string(), + date: value.to_string(), } + .into()) } } } @@ -633,10 +739,10 @@ pub(crate) mod date_deserializer { mod tests { use meili_snap::snapshot; - use crate::routes::tasks::TaskDeletionQuery; + use crate::routes::tasks::{TaskDeletionOrCancelationQueryRaw, TasksFilterQueryRaw}; #[test] - fn deserialize_task_deletion_query_datetime() { + fn deserialize_task_filter_dates() { { let json = r#" { "afterEnqueuedAt": "2021-12-03", @@ -646,7 +752,10 @@ mod tests { "afterFinishedAt": "2021-12-03", "beforeFinishedAt": "2021-12-03" } "#; - let query = serde_json::from_str::(json).unwrap(); + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00"); snapshot!(format!("{:?}", query.dates.before_enqueued_at.unwrap()), @"2021-12-03 0:00:00.0 +00:00:00"); snapshot!(format!("{:?}", query.dates.after_started_at.unwrap()), @"2021-12-04 0:00:00.0 +00:00:00"); @@ -656,45 +765,256 @@ mod tests { } { let json = r#" { "afterEnqueuedAt": "2021-12-03T23:45:23Z", "beforeEnqueuedAt": 
"2021-12-03T23:45:23Z" } "#; - let query = serde_json::from_str::(json).unwrap(); + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00"); snapshot!(format!("{:?}", query.dates.before_enqueued_at.unwrap()), @"2021-12-03 23:45:23.0 +00:00:00"); } { let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06-06:20" } "#; - let query = serde_json::from_str::(json).unwrap(); + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 -06:20:00"); } { let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06+00:00" } "#; - let query = serde_json::from_str::(json).unwrap(); + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.0 +00:00:00"); } { let json = r#" { "afterEnqueuedAt": "1997-11-12T09:55:06.200000300Z" } "#; - let query = serde_json::from_str::(json).unwrap(); + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); snapshot!(format!("{:?}", query.dates.after_enqueued_at.unwrap()), @"1997-11-12 9:55:06.2000003 +00:00:00"); } { - let json = r#" { "afterEnqueuedAt": "2021" } "#; - let err = serde_json::from_str::(json).unwrap_err(); - snapshot!(format!("{err}"), @"could not parse a date with the RFC3339 or YYYY-MM-DD format at line 1 column 30"); + let json = r#" { "afterFinishedAt": "2021" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task `afterFinishedAt` `2021` is invalid. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format."); + } + { + let json = r#" { "beforeFinishedAt": "2021" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task `beforeFinishedAt` `2021` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); } { let json = r#" { "afterEnqueuedAt": "2021-12" } "#; - let err = serde_json::from_str::(json).unwrap_err(); - snapshot!(format!("{err}"), @"could not parse a date with the RFC3339 or YYYY-MM-DD format at line 1 column 33"); + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task `afterEnqueuedAt` `2021-12` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); } { - let json = r#" { "afterEnqueuedAt": "2021-12-03T23" } "#; - let err = serde_json::from_str::(json).unwrap_err(); - snapshot!(format!("{err}"), @"could not parse a date with the RFC3339 or YYYY-MM-DD format at line 1 column 39"); + let json = r#" { "beforeEnqueuedAt": "2021-12-03T23" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task `beforeEnqueuedAt` `2021-12-03T23` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); } { - let json = r#" { "afterEnqueuedAt": "2021-12-03T23:45" } "#; - let err = serde_json::from_str::(json).unwrap_err(); - snapshot!(format!("{err}"), @"could not parse a date with the RFC3339 or YYYY-MM-DD format at line 1 column 42"); + let json = r#" { "afterStartedAt": "2021-12-03T23:45" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task `afterStartedAt` `2021-12-03T23:45` is invalid. 
It should follow the YYYY-MM-DD or RFC 3339 date-time format."); + + let json = r#" { "beforeStartedAt": "2021-12-03T23:45" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task `beforeStartedAt` `2021-12-03T23:45` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format."); + } + } + + #[test] + fn deserialize_task_filter_uids() { + { + let json = r#" { "uids": "78,1,12,73" } "#; + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); + snapshot!(format!("{:?}", query.common.uids.unwrap()), @"[78, 1, 12, 73]"); + } + { + let json = r#" { "uids": "1" } "#; + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); + snapshot!(format!("{:?}", query.common.uids.unwrap()), @"[1]"); + } + { + let json = r#" { "uids": "78,hello,world" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task uid `hello` is invalid. It should only contain numeric characters."); + } + { + let json = r#" { "uids": "cat" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task uid `cat` is invalid. 
It should only contain numeric characters."); + } + } + + #[test] + fn deserialize_task_filter_status() { + { + let json = r#" { "statuses": "succeeded,failed,enqueued,processing,canceled" } "#; + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); + snapshot!(format!("{:?}", query.common.statuses.unwrap()), @"[Succeeded, Failed, Enqueued, Processing, Canceled]"); + } + { + let json = r#" { "statuses": "enqueued" } "#; + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); + snapshot!(format!("{:?}", query.common.statuses.unwrap()), @"[Enqueued]"); + } + { + let json = r#" { "statuses": "finished" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task status `finished` is invalid. Available task statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`."); + } + } + #[test] + fn deserialize_task_filter_types() { + { + let json = r#" { "types": "documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation" }"#; + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); + snapshot!(format!("{:?}", query.common.types.unwrap()), @"[DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation]"); + } + { + let json = r#" { "types": "settingsUpdate" } "#; + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); + snapshot!(format!("{:?}", query.common.types.unwrap()), @"[SettingsUpdate]"); + } + { + let json = r#" { "types": "createIndex" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"Task type `createIndex` is invalid. 
Available task types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`"); + } + } + #[test] + fn deserialize_task_filter_index_uids() { + { + let json = r#" { "indexUids": "toto,tata-78" }"#; + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); + snapshot!(format!("{:?}", query.common.index_uids.unwrap()), @r###"["toto", "tata-78"]"###); + } + { + let json = r#" { "indexUids": "index_a" } "#; + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); + snapshot!(format!("{:?}", query.common.index_uids.unwrap()), @r###"["index_a"]"###); + } + { + let json = r#" { "indexUids": "1,hé" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"hé is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_)."); + } + { + let json = r#" { "indexUids": "hé" } "#; + let err = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap_err(); + snapshot!(format!("{err}"), @"hé is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_)."); + } + } + + #[test] + fn deserialize_task_filter_general() { + { + let json = r#" { "from": 12, "limit": 15, "indexUids": "toto,tata-78", "statuses": "succeeded,enqueued", "afterEnqueuedAt": "2012-04-23", "uids": "1,2,3" }"#; + let query = + serde_json::from_str::(json).unwrap().validate().unwrap(); + snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: 15, from: Some(12), common: TaskCommonQuery { types: None, uids: Some([1, 2, 3]), canceled_by: None, statuses: Some([Succeeded, Enqueued]), index_uids: Some(["toto", "tata-78"]) }, dates: TaskDateQuery { after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None } }"###); + } + { + // Stars should translate to `None` in the query + // Verify value of the default limit + let json = r#" { "indexUids": "*", "statuses": "succeeded,*", "afterEnqueuedAt": "2012-04-23", "uids": "1,2,3" }"#; + let query = + serde_json::from_str::(json).unwrap().validate().unwrap(); + snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: 20, from: None, common: TaskCommonQuery { types: None, uids: Some([1, 2, 3]), canceled_by: None, statuses: None, index_uids: None }, dates: TaskDateQuery { after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None } }"); + } + { + // Stars should also translate to `None` in task deletion/cancelation queries + let json = r#" { "indexUids": "*", "statuses": "succeeded,*", "afterEnqueuedAt": "2012-04-23", "uids": "1,2,3" }"#; + let query = serde_json::from_str::(json) + .unwrap() + .validate() + .unwrap(); + snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { common: TaskCommonQuery { types: None, uids: Some([1, 2, 
3]), canceled_by: None, statuses: None, index_uids: None }, dates: TaskDateQuery { after_enqueued_at: Some(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None } }"); + } + { + // Stars in uids not allowed + let json = r#" { "uids": "*" }"#; + let err = + serde_json::from_str::(json).unwrap().validate().unwrap_err(); + snapshot!(format!("{err}"), @"Task uid `*` is invalid. It should only contain numeric characters."); + } + { + // From not allowed in task deletion/cancelation queries + let json = r#" { "from": 12 }"#; + let err = serde_json::from_str::(json).unwrap_err(); + snapshot!(format!("{err}"), @"unknown field `from` at line 1 column 15"); + } + { + // Limit not allowed in task deletion/cancelation queries + let json = r#" { "limit": 12 }"#; + let err = serde_json::from_str::(json).unwrap_err(); + snapshot!(format!("{err}"), @"unknown field `limit` at line 1 column 16"); } } } diff --git a/meilisearch-http/tests/auth/api_keys.rs b/meilisearch-http/tests/auth/api_keys.rs index bcea51d3f..052eb7509 100644 --- a/meilisearch-http/tests/auth/api_keys.rs +++ b/meilisearch-http/tests/auth/api_keys.rs @@ -352,7 +352,7 @@ async fn error_add_api_key_invalid_parameters_indexes() { assert_eq!(400, code, "{:?}", &response); let expected_response = json!({ - "message": r#"`{"name":"products"}` is not a valid index uid. It should be an array of string representing index names."#, + "message": r#"`indexes` field value `{"name":"products"}` is invalid. 
It should be an array of string representing index names."#, "code": "invalid_api_key_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" @@ -377,7 +377,7 @@ async fn error_add_api_key_invalid_index_uids() { let (response, code) = server.add_api_key(content).await; let expected_response = json!({ - "message": r#"`["invalid index # / \\name with spaces"]` is not a valid index uid. It should be an array of string representing index names."#, + "message": r#"`invalid index # / \name with spaces` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_)."#, "code": "invalid_api_key_indexes", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" @@ -1434,12 +1434,13 @@ async fn error_access_api_key_routes_no_master_key_set() { server.use_api_key("MASTER_KEY"); - let expected_response = json!({"message": "The provided API key is invalid.", - "code": "invalid_api_key", + let expected_response = json!({ + "message": "Meilisearch is running without a master key. 
To access this API endpoint, you must have set a master key at launch.", + "code": "missing_master_key", "type": "auth", - "link": "https://docs.meilisearch.com/errors#invalid_api_key" + "link": "https://docs.meilisearch.com/errors#missing_master_key" }); - let expected_code = 403; + let expected_code = 401; let (response, code) = server.add_api_key(json!({})).await; diff --git a/meilisearch-http/tests/common/index.rs b/meilisearch-http/tests/common/index.rs index 1ac56d9ad..dac3653f7 100644 --- a/meilisearch-http/tests/common/index.rs +++ b/meilisearch-http/tests/common/index.rs @@ -106,17 +106,17 @@ impl Index<'_> { } pub async fn list_tasks(&self) -> (Value, StatusCode) { - let url = format!("/tasks?indexUid={}", self.uid); + let url = format!("/tasks?indexUids={}", self.uid); self.service.get(url).await } - pub async fn filtered_tasks(&self, type_: &[&str], status: &[&str]) -> (Value, StatusCode) { - let mut url = format!("/tasks?indexUid={}", self.uid); - if !type_.is_empty() { - let _ = write!(url, "&type={}", type_.join(",")); + pub async fn filtered_tasks(&self, types: &[&str], statuses: &[&str]) -> (Value, StatusCode) { + let mut url = format!("/tasks?indexUids={}", self.uid); + if !types.is_empty() { + let _ = write!(url, "&types={}", types.join(",")); } - if !status.is_empty() { - let _ = write!(url, "&status={}", status.join(",")); + if !statuses.is_empty() { + let _ = write!(url, "&statuses={}", statuses.join(",")); } self.service.get(url).await } diff --git a/meilisearch-http/tests/common/server.rs b/meilisearch-http/tests/common/server.rs index b7ddc772c..3f72248c5 100644 --- a/meilisearch-http/tests/common/server.rs +++ b/meilisearch-http/tests/common/server.rs @@ -132,6 +132,10 @@ impl Server { self.service.get("/tasks").await } + pub async fn tasks_filter(&self, filter: Value) -> (Value, StatusCode) { + self.service.get(format!("/tasks?{}", yaup::to_string(&filter).unwrap())).await + } + pub async fn get_dump_status(&self, uid: &str) -> (Value, 
StatusCode) { self.service.get(format!("/dumps/{}/status", uid)).await } @@ -144,13 +148,13 @@ impl Server { self.service.post("/swap-indexes", value).await } - pub async fn cancel_task(&self, value: Value) -> (Value, StatusCode) { + pub async fn cancel_tasks(&self, value: Value) -> (Value, StatusCode) { self.service .post(format!("/tasks/cancel?{}", yaup::to_string(&value).unwrap()), json!(null)) .await } - pub async fn delete_task(&self, value: Value) -> (Value, StatusCode) { + pub async fn delete_tasks(&self, value: Value) -> (Value, StatusCode) { self.service.delete(format!("/tasks?{}", yaup::to_string(&value).unwrap())).await } diff --git a/meilisearch-http/tests/documents/add_documents.rs b/meilisearch-http/tests/documents/add_documents.rs index 8dd3ba39a..6f1fabeae 100644 --- a/meilisearch-http/tests/documents/add_documents.rs +++ b/meilisearch-http/tests/documents/add_documents.rs @@ -636,7 +636,7 @@ async fn error_document_add_create_index_bad_uid() { let (response, code) = index.add_documents(json!([{"id": 1}]), None).await; let expected_response = json!({ - "message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.", + "message": "`883 fj!` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_index_uid" diff --git a/meilisearch-http/tests/documents/update_documents.rs b/meilisearch-http/tests/documents/update_documents.rs index 1cc66a0c2..688605861 100644 --- a/meilisearch-http/tests/documents/update_documents.rs +++ b/meilisearch-http/tests/documents/update_documents.rs @@ -10,7 +10,7 @@ async fn error_document_update_create_index_bad_uid() { let (response, code) = index.update_documents(json!([{"id": 1}]), None).await; let expected_response = json!({ - "message": "invalid index uid `883 fj!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.", + "message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_index_uid" diff --git a/meilisearch-http/tests/dumps/mod.rs b/meilisearch-http/tests/dumps/mod.rs index 3f783a1e3..cd9ba3828 100644 --- a/meilisearch-http/tests/dumps/mod.rs +++ b/meilisearch-http/tests/dumps/mod.rs @@ -59,7 +59,7 @@ async fn import_dump_v2_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null }) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { 
"receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -122,7 +122,7 @@ async fn import_dump_v2_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null }) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 
31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -185,7 +185,7 @@ async fn import_dump_v2_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( tasks["results"][0], - json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) + json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) ); // finally we're just going to check that we can still get a few documents by id @@ -246,7 +246,7 @@ async fn import_dump_v3_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null }) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", 
"startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit": 20, "from": 0, "next": null }) ); // finally we're just going to check that we can still get a few documents by id @@ -309,7 +309,7 @@ async fn import_dump_v3_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null }) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", 
"finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null }) ); // finally we're just going to check that we can["results"] still get a few documents by id @@ -372,7 +372,7 @@ async fn import_dump_v3_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( tasks["results"][0], - json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) + json!({"uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) ); // finally we're just going to check that we can still get a few documents by id @@ -433,7 +433,7 @@ async fn import_dump_v4_movie_raw() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit" : 20, "from": 0, "next": null }) + json!({ "results": [{"uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT41.751156S", "enqueuedAt": "2021-09-08T08:30:30.550282Z", "startedAt": "2021-09-08T08:30:30.553012Z", "finishedAt": "2021-09-08T08:31:12.304168Z" }], "limit" : 20, "from": 0, "next": null }) 
); // finally we're just going to check that we can still get a few documents by id @@ -496,7 +496,7 @@ async fn import_dump_v4_movie_with_settings() { assert_eq!(code, 200); assert_eq!( tasks, - json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null }) + json!({ "results": [{ "uid": 1, "indexUid": "indexUID", "status": "succeeded", "type": "settingsUpdate", "canceledBy": null, "details": { "displayedAttributes": ["title", "genres", "overview", "poster", "release_date"], "searchableAttributes": ["title", "overview"], "filterableAttributes": ["genres"], "stopWords": ["of", "the"] }, "error": null, "duration": "PT37.488777S", "enqueuedAt": "2021-09-08T08:24:02.323444Z", "startedAt": "2021-09-08T08:24:02.324145Z", "finishedAt": "2021-09-08T08:24:39.812922Z" }, { "uid": 0, "indexUid": "indexUID", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": { "receivedDocuments": 0, "indexedDocuments": 31944 }, "error": null, "duration": "PT39.941318S", "enqueuedAt": "2021-09-08T08:21:14.742672Z", "startedAt": "2021-09-08T08:21:14.750166Z", "finishedAt": "2021-09-08T08:21:54.691484Z" }], "limit": 20, "from": 1, "next": null }) ); // finally we're just going to check that 
we can still get a few documents by id @@ -559,7 +559,7 @@ async fn import_dump_v4_rubygems_with_settings() { assert_eq!(code, 200); assert_eq!( tasks["results"][0], - json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) + json!({ "uid": 92, "indexUid": "rubygems", "status": "succeeded", "type": "documentAdditionOrUpdate", "canceledBy": null, "details": {"receivedDocuments": 0, "indexedDocuments": 1042}, "error": null, "duration": "PT14.034672S", "enqueuedAt": "2021-09-08T08:40:31.390775Z", "startedAt": "2021-09-08T08:51:39.060642Z", "finishedAt": "2021-09-08T08:51:53.095314Z"}) ); // finally we're just going to check that we can still get a few documents by id diff --git a/meilisearch-http/tests/index/create_index.rs b/meilisearch-http/tests/index/create_index.rs index 0d4b01278..8e01a8113 100644 --- a/meilisearch-http/tests/index/create_index.rs +++ b/meilisearch-http/tests/index/create_index.rs @@ -189,7 +189,7 @@ async fn error_create_with_invalid_index_uid() { let (response, code) = index.create(None).await; let expected_response = json!({ - "message": "invalid index uid `test test#!`, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.", + "message": "`test test#!` is not a valid index uid. 
Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_index_uid" diff --git a/meilisearch-http/tests/search/errors.rs b/meilisearch-http/tests/search/errors.rs index 76e63eeb7..add305083 100644 --- a/meilisearch-http/tests/search/errors.rs +++ b/meilisearch-http/tests/search/errors.rs @@ -70,7 +70,7 @@ async fn filter_invalid_syntax_object() { index.wait_task(1).await; let expected_response = json!({ - "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass", + "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_filter" @@ -95,7 +95,7 @@ async fn filter_invalid_syntax_array() { index.wait_task(1).await; let expected_response = json!({ - "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass", + "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass", "code": "invalid_filter", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_filter" diff --git a/meilisearch-http/tests/settings/get_settings.rs b/meilisearch-http/tests/settings/get_settings.rs index fa45ad55e..a3c667047 100644 --- a/meilisearch-http/tests/settings/get_settings.rs +++ b/meilisearch-http/tests/settings/get_settings.rs @@ -182,7 +182,7 @@ async fn error_update_setting_unexisting_index_invalid_uid() { assert_eq!(code, 400); let expected = json!({ - "message": 
"invalid index uid `test##! `, the uid must be an integer or a string containing only alphanumeric characters a-z A-Z 0-9, hyphens - and underscores _.", + "message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).", "code": "invalid_index_uid", "type": "invalid_request", "link": "https://docs.meilisearch.com/errors#invalid_index_uid"}); diff --git a/meilisearch-http/tests/tasks/mod.rs b/meilisearch-http/tests/tasks/mod.rs index 6a642617c..548fa90be 100644 --- a/meilisearch-http/tests/tasks/mod.rs +++ b/meilisearch-http/tests/tasks/mod.rs @@ -1,4 +1,4 @@ -use meili_snap::insta::assert_json_snapshot; +use meili_snap::insta::{self, assert_json_snapshot}; use serde_json::json; use time::format_description::well_known::Rfc3339; use time::OffsetDateTime; @@ -67,37 +67,37 @@ async fn list_tasks_with_star_filters() { index .add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None) .await; - let (response, code) = index.service.get("/tasks?indexUid=test").await; + let (response, code) = index.service.get("/tasks?indexUids=test").await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 2); - let (response, code) = index.service.get("/tasks?indexUid=*").await; + let (response, code) = index.service.get("/tasks?indexUids=*").await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 2); - let (response, code) = index.service.get("/tasks?indexUid=*,pasteque").await; + let (response, code) = index.service.get("/tasks?indexUids=*,pasteque").await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 2); - let (response, code) = index.service.get("/tasks?type=*").await; + let (response, code) = index.service.get("/tasks?types=*").await; assert_eq!(code, 200); assert_eq!(response["results"].as_array().unwrap().len(), 2); let (response, code) = - 
index.service.get("/tasks?type=*,documentAdditionOrUpdate&status=*").await; + index.service.get("/tasks?types=*,documentAdditionOrUpdate&statuses=*").await; assert_eq!(code, 200, "{:?}", response); assert_eq!(response["results"].as_array().unwrap().len(), 2); let (response, code) = index .service - .get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test") + .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test") .await; assert_eq!(code, 200, "{:?}", response); assert_eq!(response["results"].as_array().unwrap().len(), 2); let (response, code) = index .service - .get("/tasks?type=*,documentAdditionOrUpdate&status=*,failed&indexUid=test,*") + .get("/tasks?types=*,documentAdditionOrUpdate&statuses=*,failed&indexUids=test,*") .await; assert_eq!(code, 200, "{:?}", response); assert_eq!(response["results"].as_array().unwrap().len(), 2); @@ -173,6 +173,131 @@ async fn list_tasks_status_and_type_filtered() { assert_eq!(response["results"].as_array().unwrap().len(), 2); } +#[actix_rt::test] +async fn get_task_filter_error() { + let server = Server::new().await; + + let (response, code) = server.tasks_filter(json!( { "lol": "pied" })).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Query deserialize error: unknown field `lol`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); + + let (response, code) = server.tasks_filter(json!( { "uids": "pied" })).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Task uid `pied` is invalid. 
It should only contain numeric characters.", + "code": "invalid_task_uids_filter", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_uids_filter" + } + "###); + + let (response, code) = server.tasks_filter(json!( { "from": "pied" })).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Query deserialize error: invalid digit found in string", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); + + let (response, code) = server.tasks_filter(json!( { "beforeStartedAt": "pied" })).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Task `beforeStartedAt` `pied` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", + "code": "invalid_task_date_filter", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_date_filter" + } + "###); +} + +#[actix_rt::test] +async fn delete_task_filter_error() { + let server = Server::new().await; + + let (response, code) = server.delete_tasks(json!(null)).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Query parameters to filter the tasks to delete are missing. 
Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.", + "code": "missing_task_filters", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_task_filters" + } + "###); + + let (response, code) = server.delete_tasks(json!({ "lol": "pied" })).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Query deserialize error: unknown field `lol`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); + + let (response, code) = server.delete_tasks(json!({ "uids": "pied" })).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Task uid `pied` is invalid. It should only contain numeric characters.", + "code": "invalid_task_uids_filter", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_uids_filter" + } + "###); +} + +#[actix_rt::test] +async fn cancel_task_filter_error() { + let server = Server::new().await; + + let (response, code) = server.cancel_tasks(json!(null)).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Query parameters to filter the tasks to cancel are missing. 
Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.", + "code": "missing_task_filters", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#missing_task_filters" + } + "###); + + let (response, code) = server.cancel_tasks(json!({ "lol": "pied" })).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Query deserialize error: unknown field `lol`", + "code": "bad_request", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#bad_request" + } + "###); + + let (response, code) = server.cancel_tasks(json!({ "uids": "pied" })).await; + assert_eq!(code, 400, "{}", response); + insta::assert_json_snapshot!(response, @r###" + { + "message": "Task uid `pied` is invalid. It should only contain numeric characters.", + "code": "invalid_task_uids_filter", + "type": "invalid_request", + "link": "https://docs.meilisearch.com/errors#invalid_task_uids_filter" + } + "###); +} + macro_rules! 
assert_valid_summarized_task { ($response:expr, $task_type:literal, $index:literal) => {{ assert_eq!($response.as_object().unwrap().len(), 5); @@ -231,10 +356,12 @@ async fn test_summarized_document_addition_or_update() { "indexUid": "test", "status": "succeeded", "type": "documentAdditionOrUpdate", + "canceledBy": null, "details": { "receivedDocuments": 1, "indexedDocuments": 1 }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -253,10 +380,12 @@ async fn test_summarized_document_addition_or_update() { "indexUid": "test", "status": "succeeded", "type": "documentAdditionOrUpdate", + "canceledBy": null, "details": { "receivedDocuments": 1, "indexedDocuments": 1 }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -280,9 +409,10 @@ async fn test_summarized_delete_batch() { "indexUid": "test", "status": "failed", "type": "documentDeletion", + "canceledBy": null, "details": { - "matchedDocuments": 3, - "deletedDocuments": null + "providedIds": 3, + "deletedDocuments": 0 }, "error": { "message": "Index `test` not found.", @@ -309,10 +439,12 @@ async fn test_summarized_delete_batch() { "indexUid": "test", "status": "succeeded", "type": "documentDeletion", + "canceledBy": null, "details": { - "matchedDocuments": 1, + "providedIds": 1, "deletedDocuments": 0 }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -336,9 +468,10 @@ async fn test_summarized_delete_document() { "indexUid": "test", "status": "failed", "type": "documentDeletion", + "canceledBy": null, "details": { - "matchedDocuments": 1, - "deletedDocuments": null + "providedIds": 1, + "deletedDocuments": 0 }, "error": { "message": "Index `test` not found.", @@ -365,10 +498,12 @@ async fn test_summarized_delete_document() { "indexUid": "test", "status": "succeeded", "type": "documentDeletion", + "canceledBy": null, "details": { - "matchedDocuments": 1, + "providedIds": 1, 
"deletedDocuments": 0 }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -394,6 +529,7 @@ async fn test_summarized_settings_update() { "indexUid": "test", "status": "failed", "type": "settingsUpdate", + "canceledBy": null, "details": { "rankingRules": [ "custom" @@ -423,6 +559,7 @@ async fn test_summarized_settings_update() { "indexUid": "test", "status": "succeeded", "type": "settingsUpdate", + "canceledBy": null, "details": { "displayedAttributes": [ "doggos", @@ -436,6 +573,7 @@ async fn test_summarized_settings_update() { "iq" ] }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -459,9 +597,11 @@ async fn test_summarized_index_creation() { "indexUid": "test", "status": "succeeded", "type": "indexCreation", + "canceledBy": null, "details": { "primaryKey": null }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -480,6 +620,7 @@ async fn test_summarized_index_creation() { "indexUid": "test", "status": "failed", "type": "indexCreation", + "canceledBy": null, "details": { "primaryKey": "doggos" }, @@ -512,6 +653,10 @@ async fn test_summarized_index_deletion() { "indexUid": "test", "status": "failed", "type": "indexDeletion", + "canceledBy": null, + "details": { + "deletedDocuments": 0 + }, "error": { "message": "Index `test` not found.", "code": "index_not_found", @@ -538,9 +683,11 @@ async fn test_summarized_index_deletion() { "indexUid": "test", "status": "succeeded", "type": "indexDeletion", + "canceledBy": null, "details": { "deletedDocuments": 1 }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -560,9 +707,11 @@ async fn test_summarized_index_deletion() { "indexUid": "test", "status": "succeeded", "type": "indexDeletion", + "canceledBy": null, "details": { "deletedDocuments": 1 }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -587,6 
+736,7 @@ async fn test_summarized_index_update() { "indexUid": "test", "status": "failed", "type": "indexUpdate", + "canceledBy": null, "details": { "primaryKey": null }, @@ -614,6 +764,7 @@ async fn test_summarized_index_update() { "indexUid": "test", "status": "failed", "type": "indexUpdate", + "canceledBy": null, "details": { "primaryKey": "bones" }, @@ -644,9 +795,11 @@ async fn test_summarized_index_update() { "indexUid": "test", "status": "succeeded", "type": "indexUpdate", + "canceledBy": null, "details": { "primaryKey": null }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -665,9 +818,11 @@ async fn test_summarized_index_update() { "indexUid": "test", "status": "succeeded", "type": "indexUpdate", + "canceledBy": null, "details": { "primaryKey": "bones" }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -694,6 +849,7 @@ async fn test_summarized_index_swap() { "indexUid": null, "status": "failed", "type": "indexSwap", + "canceledBy": null, "details": { "swaps": [ { @@ -734,6 +890,7 @@ async fn test_summarized_index_swap() { "indexUid": null, "status": "succeeded", "type": "indexSwap", + "canceledBy": null, "details": { "swaps": [ { @@ -744,6 +901,7 @@ async fn test_summarized_index_swap() { } ] }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -759,7 +917,7 @@ async fn test_summarized_task_cancelation() { // to avoid being flaky we're only going to cancel an already finished task :( index.create(None).await; index.wait_task(0).await; - server.cancel_task(json!({ "uid": [0] })).await; + server.cancel_tasks(json!({ "uids": [0] })).await; index.wait_task(1).await; let (task, _) = index.get_task(1).await; assert_json_snapshot!(task, @@ -770,11 +928,13 @@ async fn test_summarized_task_cancelation() { "indexUid": null, "status": "succeeded", "type": "taskCancelation", + "canceledBy": null, "details": { "matchedTasks": 1, 
"canceledTasks": 0, - "originalQuery": "uid=0" + "originalFilter": "?uids=0" }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -790,7 +950,7 @@ async fn test_summarized_task_deletion() { // to avoid being flaky we're only going to delete an already finished task :( index.create(None).await; index.wait_task(0).await; - server.delete_task(json!({ "uid": [0] })).await; + server.delete_tasks(json!({ "uids": [0] })).await; index.wait_task(1).await; let (task, _) = index.get_task(1).await; assert_json_snapshot!(task, @@ -801,11 +961,13 @@ async fn test_summarized_task_deletion() { "indexUid": null, "status": "succeeded", "type": "taskDeletion", + "canceledBy": null, "details": { "matchedTasks": 1, "deletedTasks": 1, - "originalQuery": "uid=0" + "originalFilter": "?uids=0" }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", @@ -821,13 +983,18 @@ async fn test_summarized_dump_creation() { server.wait_task(0).await; let (task, _) = server.get_task(0).await; assert_json_snapshot!(task, - { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, + { ".details.dumpUid" => "[dumpUid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }, @r###" { "uid": 0, "indexUid": null, "status": "succeeded", "type": "dumpCreation", + "canceledBy": null, + "details": { + "dumpUid": "[dumpUid]" + }, + "error": null, "duration": "[duration]", "enqueuedAt": "[date]", "startedAt": "[date]", diff --git a/meilisearch-types/Cargo.toml b/meilisearch-types/Cargo.toml index 3b5346438..81aeaaa69 100644 --- a/meilisearch-types/Cargo.toml +++ b/meilisearch-types/Cargo.toml @@ -12,7 +12,7 @@ either = { version = "1.6.1", features = ["serde"] } enum-iterator = "1.1.3" flate2 = "1.0.24" fst = "0.4.7" -milli = { git = "https://github.com/meilisearch/milli.git", version = "0.35.0", default-features = false } 
+milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.37.0", default-features = false } proptest = { version = "1.0.0", optional = true } proptest-derive = { version = "0.3.0", optional = true } roaring = { version = "0.10.0", features = ["serde"] } diff --git a/meilisearch-types/src/error.rs b/meilisearch-types/src/error.rs index 330a6f082..5c0e1d9b8 100644 --- a/meilisearch-types/src/error.rs +++ b/meilisearch-types/src/error.rs @@ -147,6 +147,11 @@ pub enum Code { MissingMasterKey, NoSpaceLeftOnDevice, DumpNotFound, + InvalidTaskDateFilter, + InvalidTaskStatusesFilter, + InvalidTaskTypesFilter, + InvalidTaskCanceledByFilter, + InvalidTaskUidsFilter, TaskNotFound, TaskDeletionWithEmptyQuery, TaskCancelationWithEmptyQuery, @@ -238,12 +243,27 @@ impl Code { MissingMasterKey => { ErrCode::authentication("missing_master_key", StatusCode::UNAUTHORIZED) } + InvalidTaskDateFilter => { + ErrCode::invalid("invalid_task_date_filter", StatusCode::BAD_REQUEST) + } + InvalidTaskUidsFilter => { + ErrCode::invalid("invalid_task_uids_filter", StatusCode::BAD_REQUEST) + } + InvalidTaskStatusesFilter => { + ErrCode::invalid("invalid_task_statuses_filter", StatusCode::BAD_REQUEST) + } + InvalidTaskTypesFilter => { + ErrCode::invalid("invalid_task_types_filter", StatusCode::BAD_REQUEST) + } + InvalidTaskCanceledByFilter => { + ErrCode::invalid("invalid_task_canceled_by_filter", StatusCode::BAD_REQUEST) + } TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND), TaskDeletionWithEmptyQuery => { - ErrCode::invalid("missing_filters", StatusCode::BAD_REQUEST) + ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST) } TaskCancelationWithEmptyQuery => { - ErrCode::invalid("missing_filters", StatusCode::BAD_REQUEST) + ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST) } DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND), NoSpaceLeftOnDevice => { diff --git a/meilisearch-types/src/index_uid.rs 
b/meilisearch-types/src/index_uid.rs index 00e94c5b9..945a57e9e 100644 --- a/meilisearch-types/src/index_uid.rs +++ b/meilisearch-types/src/index_uid.rs @@ -75,9 +75,9 @@ impl fmt::Display for IndexUidFormatError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, - "invalid index uid `{}`, the uid must be an integer \ - or a string containing only alphanumeric characters \ - a-z A-Z 0-9, hyphens - and underscores _.", + "`{}` is not a valid index uid. Index uid can be an \ + integer or a string containing only alphanumeric \ + characters, hyphens (-) and underscores (_).", self.invalid_uid, ) } diff --git a/meilisearch-types/src/keys.rs b/meilisearch-types/src/keys.rs index cb0ec807e..2ec624809 100644 --- a/meilisearch-types/src/keys.rs +++ b/meilisearch-types/src/keys.rs @@ -1,4 +1,5 @@ use std::hash::Hash; +use std::str::FromStr; use enum_iterator::Sequence; use serde::{Deserialize, Serialize}; @@ -9,7 +10,7 @@ use time::{Date, OffsetDateTime, PrimitiveDateTime}; use uuid::Uuid; use crate::error::{Code, ErrorCode}; -use crate::index_uid::IndexUid; +use crate::index_uid::{IndexUid, IndexUidFormatError}; use crate::star_or::StarOr; type Result = std::result::Result; @@ -64,7 +65,15 @@ impl Key { let indexes = value .get("indexes") .map(|ind| { - from_value(ind.clone()).map_err(|_| Error::InvalidApiKeyIndexes(ind.clone())) + from_value::>(ind.clone()) + // If it's not a vec of string, return an API key parsing error. + .map_err(|_| Error::InvalidApiKeyIndexes(ind.clone())) + .and_then(|ind| { + ind.into_iter() + // If it's not a valid Index uid, return an Index Uid parsing error. + .map(|i| StarOr::::from_str(&i).map_err(Error::from)) + .collect() + }) }) .ok_or(Error::MissingParameter("indexes"))??; @@ -339,10 +348,10 @@ pub enum Error { MissingParameter(&'static str), #[error("`actions` field value `{0}` is invalid. 
It should be an array of string representing action names.")] InvalidApiKeyActions(Value), - #[error( - "`{0}` is not a valid index uid. It should be an array of string representing index names." - )] + #[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")] InvalidApiKeyIndexes(Value), + #[error("{0}")] + InvalidApiKeyIndexUid(IndexUidFormatError), #[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")] InvalidApiKeyExpiresAt(Value), #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")] @@ -357,12 +366,20 @@ pub enum Error { ImmutableField(String), } +impl From for Error { + fn from(e: IndexUidFormatError) -> Self { + Self::InvalidApiKeyIndexUid(e) + } +} + impl ErrorCode for Error { fn error_code(&self) -> Code { match self { Self::MissingParameter(_) => Code::MissingParameter, Self::InvalidApiKeyActions(_) => Code::InvalidApiKeyActions, - Self::InvalidApiKeyIndexes(_) => Code::InvalidApiKeyIndexes, + Self::InvalidApiKeyIndexes(_) | Self::InvalidApiKeyIndexUid(_) => { + Code::InvalidApiKeyIndexes + } Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt, Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription, Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName, diff --git a/meilisearch-types/src/tasks.rs b/meilisearch-types/src/tasks.rs index aafa3008e..ceddbd51c 100644 --- a/meilisearch-types/src/tasks.rs +++ b/meilisearch-types/src/tasks.rs @@ -127,7 +127,6 @@ pub enum KindWithContent { tasks: RoaringBitmap, }, DumpCreation { - dump_uid: String, keys: Vec, instance_uid: Option, }, @@ -196,17 +195,16 @@ impl KindWithContent { } KindWithContent::DocumentDeletion { index_uid: _, documents_ids } => { Some(Details::DocumentDeletion { - matched_documents: documents_ids.len(), + 
provided_ids: documents_ids.len(), deleted_documents: None, }) } - KindWithContent::DocumentClear { .. } => { + KindWithContent::DocumentClear { .. } | KindWithContent::IndexDeletion { .. } => { Some(Details::ClearAll { deleted_documents: None }) } KindWithContent::SettingsUpdate { new_settings, .. } => { Some(Details::SettingsUpdate { settings: new_settings.clone() }) } - KindWithContent::IndexDeletion { .. } => None, KindWithContent::IndexCreation { primary_key, .. } | KindWithContent::IndexUpdate { primary_key, .. } => { Some(Details::IndexInfo { primary_key: primary_key.clone() }) @@ -217,14 +215,14 @@ impl KindWithContent { KindWithContent::TaskCancelation { query, tasks } => Some(Details::TaskCancelation { matched_tasks: tasks.len(), canceled_tasks: None, - original_query: query.clone(), + original_filter: query.clone(), }), KindWithContent::TaskDeletion { query, tasks } => Some(Details::TaskDeletion { matched_tasks: tasks.len(), deleted_tasks: None, - original_query: query.clone(), + original_filter: query.clone(), }), - KindWithContent::DumpCreation { .. } => None, + KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }), KindWithContent::SnapshotCreation => None, } } @@ -239,7 +237,7 @@ impl KindWithContent { } KindWithContent::DocumentDeletion { index_uid: _, documents_ids } => { Some(Details::DocumentDeletion { - matched_documents: documents_ids.len(), + provided_ids: documents_ids.len(), deleted_documents: Some(0), }) } @@ -260,14 +258,14 @@ impl KindWithContent { KindWithContent::TaskCancelation { query, tasks } => Some(Details::TaskCancelation { matched_tasks: tasks.len(), canceled_tasks: Some(0), - original_query: query.clone(), + original_filter: query.clone(), }), KindWithContent::TaskDeletion { query, tasks } => Some(Details::TaskDeletion { matched_tasks: tasks.len(), deleted_tasks: Some(0), - original_query: query.clone(), + original_filter: query.clone(), }), - KindWithContent::DumpCreation { .. 
} => None, + KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }), KindWithContent::SnapshotCreation => None, } } @@ -298,16 +296,14 @@ impl From<&KindWithContent> for Option
{ KindWithContent::TaskCancelation { query, tasks } => Some(Details::TaskCancelation { matched_tasks: tasks.len(), canceled_tasks: None, - original_query: query.clone(), + original_filter: query.clone(), }), KindWithContent::TaskDeletion { query, tasks } => Some(Details::TaskDeletion { matched_tasks: tasks.len(), deleted_tasks: None, - original_query: query.clone(), + original_filter: query.clone(), }), - KindWithContent::DumpCreation { dump_uid, .. } => { - Some(Details::Dump { dump_uid: dump_uid.clone() }) - } + KindWithContent::DumpCreation { .. } => Some(Details::Dump { dump_uid: None }), KindWithContent::SnapshotCreation => None, } } @@ -398,7 +394,23 @@ impl Kind { } } } - +impl Display for Kind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Kind::DocumentAdditionOrUpdate => write!(f, "documentAdditionOrUpdate"), + Kind::DocumentDeletion => write!(f, "documentDeletion"), + Kind::SettingsUpdate => write!(f, "settingsUpdate"), + Kind::IndexCreation => write!(f, "indexCreation"), + Kind::IndexDeletion => write!(f, "indexDeletion"), + Kind::IndexUpdate => write!(f, "indexUpdate"), + Kind::IndexSwap => write!(f, "indexSwap"), + Kind::TaskCancelation => write!(f, "taskCancelation"), + Kind::TaskDeletion => write!(f, "taskDeletion"), + Kind::DumpCreation => write!(f, "dumpCreation"), + Kind::SnapshotCreation => write!(f, "snapshotCreation"), + } + } +} impl FromStr for Kind { type Err = ResponseError; @@ -450,14 +462,35 @@ pub enum Details { DocumentAdditionOrUpdate { received_documents: u64, indexed_documents: Option }, SettingsUpdate { settings: Box> }, IndexInfo { primary_key: Option }, - DocumentDeletion { matched_documents: usize, deleted_documents: Option }, + DocumentDeletion { provided_ids: usize, deleted_documents: Option }, ClearAll { deleted_documents: Option }, - TaskCancelation { matched_tasks: u64, canceled_tasks: Option, original_query: String }, - TaskDeletion { matched_tasks: u64, deleted_tasks: Option, 
original_query: String }, - Dump { dump_uid: String }, + TaskCancelation { matched_tasks: u64, canceled_tasks: Option, original_filter: String }, + TaskDeletion { matched_tasks: u64, deleted_tasks: Option, original_filter: String }, + Dump { dump_uid: Option }, IndexSwap { swaps: Vec }, } +impl Details { + pub fn to_failed(&self) -> Self { + let mut details = self.clone(); + match &mut details { + Self::DocumentAdditionOrUpdate { indexed_documents, .. } => { + *indexed_documents = Some(0) + } + Self::DocumentDeletion { deleted_documents, .. } => *deleted_documents = Some(0), + Self::ClearAll { deleted_documents } => *deleted_documents = Some(0), + Self::TaskCancelation { canceled_tasks, .. } => *canceled_tasks = Some(0), + Self::TaskDeletion { deleted_tasks, .. } => *deleted_tasks = Some(0), + Self::SettingsUpdate { .. } + | Self::IndexInfo { .. } + | Self::Dump { .. } + | Self::IndexSwap { .. } => (), + } + + details + } +} + /// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for /// https://github.com/time-rs/time/issues/378. /// This code is a port of the old code of time that was removed in 0.2. @@ -520,11 +553,11 @@ mod tests { let details = Details::TaskDeletion { matched_tasks: 1, deleted_tasks: None, - original_query: "hello".to_owned(), + original_filter: "hello".to_owned(), }; let serialised = SerdeJson::
::bytes_encode(&details).unwrap(); let deserialised = SerdeJson::
::bytes_decode(&serialised).unwrap(); - meili_snap::snapshot!(format!("{:?}", details), @r###"TaskDeletion { matched_tasks: 1, deleted_tasks: None, original_query: "hello" }"###); - meili_snap::snapshot!(format!("{:?}", deserialised), @r###"TaskDeletion { matched_tasks: 1, deleted_tasks: None, original_query: "hello" }"###); + meili_snap::snapshot!(format!("{:?}", details), @r###"TaskDeletion { matched_tasks: 1, deleted_tasks: None, original_filter: "hello" }"###); + meili_snap::snapshot!(format!("{:?}", deserialised), @r###"TaskDeletion { matched_tasks: 1, deleted_tasks: None, original_filter: "hello" }"###); } } From 41eb986f65134f97b0b99bac29af10715203ac70 Mon Sep 17 00:00:00 2001 From: Tamo Date: Mon, 28 Nov 2022 16:39:22 +0100 Subject: [PATCH 2/2] Fix the dump tests --- dump/src/reader/compat/v5_to_v6.rs | 2 +- dump/src/reader/mod.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dump/src/reader/compat/v5_to_v6.rs b/dump/src/reader/compat/v5_to_v6.rs index 4d81e0552..2fe99e2af 100644 --- a/dump/src/reader/compat/v5_to_v6.rs +++ b/dump/src/reader/compat/v5_to_v6.rs @@ -419,7 +419,7 @@ pub(crate) mod test { // tasks let tasks = dump.tasks().unwrap().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"42d4200cf6d92a6449989ca48cd8e28a"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5"); assert_eq!(update_files.len(), 22); assert!(update_files[0].is_none()); // the dump creation assert!(update_files[1].is_some()); // the enqueued document addition diff --git a/dump/src/reader/mod.rs b/dump/src/reader/mod.rs index 54481ab9e..d1ca9ec42 100644 --- a/dump/src/reader/mod.rs +++ b/dump/src/reader/mod.rs @@ -200,7 +200,7 @@ pub(crate) mod test { // tasks let tasks = dump.tasks().unwrap().collect::>>().unwrap(); let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip(); - 
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"42d4200cf6d92a6449989ca48cd8e28a"); + meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5"); assert_eq!(update_files.len(), 22); assert!(update_files[0].is_none()); // the dump creation assert!(update_files[1].is_some()); // the enqueued document addition