Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-11-24 13:37:19 +00:00.

# Compare commits: `paralleliz` … `v1.22.3` (230 commits)
Commits in this range (SHA1; the author, message, and date columns were empty in this capture):

c36a3239ca da0ec8a270 ac7a69d6db 39f69966ee 9433b84dd3 a4d7e0d1bb dcc6195816 67b8b7cc71
de311b5fde 077ec2ab11 c50a337c29 efeae09ce1 ad55b48664 94eabd34e6 6935589f74 4beb452027
b722da303a ad39263b94 0ffb08b112 ff80b4d0ff 7fb4404928 8405f0bf9c 3a7f9b56fe 61034e2e2e
108d6d3344 35bd00f6a1 69059d67ef e13783103f f719665c4e 638f284614 32ac98ed95 46aee695ca
716c67f858 fec10bb2d6 3dac2cf73e 03eca800e6 28fa2e960e a3b9220f84 c09d48edf2 ae4ab0ebbb
900a9a6d59 fc560e6730 e2a06470b7 ada27323f2 607a1c2395 b56956ea0c 3d21290f7f 4edd4c06bc
566baddc6b febe3186ce 5dd42c1871 8670793e6e 41a04aa3ab 88f841bc05 d19892d2ea c0905d6650
576d7d94b1 f4f1334b62 aaff6c3685 42d2af4c84 6be91c824c 6ee0537db8 3fbeff4308 375546b61a
25a1d50763 4fe073cc1a 5cd3d36d20 d7ad76ea1e e82bb93221 000cb93aad ad4f5514b9 8d29a29867
d7de819d11 7a6cf30cb2 e43d67591c 134237d1eb 26d9070aa7 f9ffb8ada5 a47888f02c 5bef2f4d86
06b3ca9eb5 7dc1c03a36 0b74722a73 0f80249b70 a9b8a60320 fd795c513b ce136ec0c1 4d4f6d2c20
4cc8fb2c5c 5d47590f3e 16461a9145 17810394b8 15690b9e22 a8cd81c7f4 6376571df0 cfb040e647
f54773781a 0fccd0ca1f 226c102bab 2940bbb75c 13df964564 35b24a28aa 0faf495173 c32c74671d
b05bcf2c13 90cc5263f6 424d0e277e 34eba61c0d 687260bc13 0a3ab8e171 6b6e69b07a a25111f32e
b144d9ab2b c3cefbc170 8e2aeb6739 9c06545ae3 e1c859c0f7 c4848e6cc0 454581dbc9 bc5100dddd
118c6da64d a989f52657 a8cc66899c c9cc748f42 4ccce18d7b 00d1006cd9 5cad65cca5 7fe9d07247
8933d87031 231f86decf 381de52fc5 026b95afbb 210da70faf 5fc7872ab3 1f0a6e8a44 b2f2807a94
952394710c da6fffdf6d b5f0c19406 0fd66a5317 cb4dd3b88c 0ade376b00 32785cb2d0 fb7ccc0db3
69a84fbfe6 31cb960992 6d9e0c4bce a8e9597f49 f4147a60a3 5139dd273e 72c63d3929 97ea9e9937
4645813ea8 fb68f1241c f5f2f7c6f2 6340412219 6e4dfa0168 5cf66856ae 0d4b78a217 aef07f4bfa
0b3f983d27 52d55ccd8e 6d92c94bb3 30110a0488 47cee7e1ea 493d67ffd4 2b2559016a 6176b143bb
f9d0d1ddd6 e50f970ab8 27550dafad a7cd6853db f51f7832a7 a38a57acb6 affcaef556 7acac2f560
b68431367f 79d3d1606c 580bfb06b4 062c9c6971 07ed5c57e4 a94a13c9b0 938ef77ee5 9dcdde592c
7de44ad2b7 820854ba5c 496de5563a 0a86b1e11e 795045c03a b541b7bed3 6fb3cf95e4 cbd2bdf0fa
601785692f 65c212d1fd 85feb3a26c d550b90c60 385acbbcd2 484dbf8c06 9c6c0af076 e33fbcf7b2
d352f33d16 3682b92ee8 ef10c1fb23 bd97a7cc19 56c7f54804 15d34c33e8 42ac869c5c 6e0152921f
069d25dce6 9929f798d3 80ff438402 e62a807b60 907055ed08 8b18adee95 53223ace47 a579ea2596
e13541818a c974f0ab0a 36cac8acf7 5507a73b23 b0479eb996 2bab375001 81020c7d6d 4068c58417
a904ce109a ecea247e5d ae5bd9d0e3 ae2d0a67a4 0f1c78b185 45da2257ec
## `.github/workflows/bench-manual.yml` (4 changes)

```diff
@@ -17,8 +17,8 @@ jobs:
     runs-on: benchmarks
     timeout-minutes: 180 # 3h
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
```
## `.github/workflows/bench-pr.yml` (4 changes)

```diff
@@ -60,13 +60,13 @@ jobs:
         with:
           repo_token: ${{ env.GH_TOKEN }}

-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
         if: success()
         with:
           fetch-depth: 0 # fetch full history to be able to get main commit sha
           ref: ${{ steps.comment-branch.outputs.head_ref }}

-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
```
## `.github/workflows/bench-push-indexing.yml` (4 changes)

```diff
@@ -11,8 +11,8 @@ jobs:
     runs-on: benchmarks
     timeout-minutes: 180 # 3h
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
```
## `.github/workflows/benchmarks-manual.yml` (4 changes)

```diff
@@ -17,8 +17,8 @@ jobs:
     runs-on: benchmarks
     timeout-minutes: 4320 # 72h
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
```
## `.github/workflows/benchmarks-pr.yml` (4 changes)

```diff
@@ -44,7 +44,7 @@ jobs:
           exit 1
         fi

-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal

@@ -61,7 +61,7 @@ jobs:
         with:
           repo_token: ${{ env.GH_TOKEN }}

-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
         if: success()
         with:
           fetch-depth: 0 # fetch full history to be able to get main commit sha
```
Four further benchmark workflows receive the same two version bumps; their file headers were lost in this capture. The first:

```diff
@@ -15,8 +15,8 @@ jobs:
     runs-on: benchmarks
     timeout-minutes: 4320 # 72h
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
```

The following hunk then appears identically in the remaining three workflows:

```diff
@@ -14,8 +14,8 @@ jobs:
     name: Run and upload benchmarks
     runs-on: benchmarks
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
```
## `.github/workflows/db-change-missing.yml` (2 changes)

```diff
@@ -9,7 +9,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
       - name: Check db change labels
         id: check_labels
         env:
```
## `.github/workflows/dependency-issue.yml` (2 changes)

```diff
@@ -13,7 +13,7 @@ jobs:
       ISSUE_TEMPLATE: issue-template.md
       GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Download the issue template
         run: curl -s https://raw.githubusercontent.com/meilisearch/meilisearch/main/.github/templates/dependency-issue.md > $ISSUE_TEMPLATE
       - name: Create issue
```
## `.github/workflows/flaky-tests.yml` (4 changes)

```diff
@@ -12,12 +12,12 @@ jobs:
     # Use ubuntu-22.04 to compile with glibc 2.35
     image: ubuntu:22.04
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Install needed dependencies
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Install cargo-flaky
         run: cargo install cargo-flaky
       - name: Run cargo flaky in the dumps
```
## `.github/workflows/fuzzer-indexing.yml` (4 changes)

```diff
@@ -11,8 +11,8 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 4320 # 72h
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
```
## `.github/workflows/latest-git-tag.yml` (4 changes)

```diff
@@ -10,7 +10,7 @@ jobs:
     name: Check the version validity
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Check release validity
         if: github.event_name == 'release'
         run: bash .github/scripts/check-release.sh

@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     needs: check-version
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - uses: rickstaa/action-create-tag@v1
         with:
           tag: "latest"
```
## `.github/workflows/publish-apt-brew-pkg.yml` (6 changes)

```diff
@@ -9,7 +9,7 @@ jobs:
     name: Check the version validity
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Check release validity
         run: bash .github/scripts/check-release.sh

@@ -25,10 +25,10 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Install cargo-deb
         run: cargo install cargo-deb
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Build deb package
         run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
```
## `.github/workflows/publish-docker-images.yml` (2 changes)

```diff
@@ -19,7 +19,7 @@ jobs:
     permissions:
       id-token: write # This is needed to use Cosign in keyless mode
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5

       # If we are running a cron or manual job ('schedule' or 'workflow_dispatch' event), it means we are publishing the `nightly` tag, so not considered stable.
       # If we have pushed a tag, and the tag has the v<number>.<number>.<number> format, it means we are publishing an official release, so considered stable.
```
## `.github/workflows/publish-release-assets.yml` (20 changes)

```diff
@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     # No need to check the version for dry run (cron)
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       # Check if the tag has the v<number>.<number>.<number> format.
       # If yes, it means we are publishing an official release.
       # If no, we are releasing a RC, so no need to check the version.

@@ -40,12 +40,12 @@ jobs:
     # Use ubuntu-22.04 to compile with glibc 2.35
     image: ubuntu:22.04
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Install needed dependencies
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Build
         run: cargo build --release --locked
       # No need to upload binaries for dry run (cron)

@@ -74,8 +74,8 @@ jobs:
           artifact_name: meilisearch.exe
           asset_name: meilisearch-windows-amd64.exe
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Build
         run: cargo build --release --locked
       # No need to upload binaries for dry run (cron)

@@ -99,9 +99,9 @@ jobs:
           asset_name: meilisearch-macos-apple-silicon
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5
       - name: Installing Rust toolchain
-        uses: dtolnay/rust-toolchain@1.85
+        uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
           target: ${{ matrix.target }}

@@ -136,7 +136,7 @@ jobs:
           asset_name: meilisearch-linux-aarch64
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5
       - name: Install needed dependencies
         run: |
           apt-get update -y && apt upgrade -y

@@ -148,7 +148,7 @@ jobs:
           add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
           apt-get update -y && apt-get install -y docker-ce
       - name: Installing Rust toolchain
-        uses: dtolnay/rust-toolchain@1.85
+        uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
           target: ${{ matrix.target }}

@@ -190,7 +190,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
       - name: Setup Rust
         uses: actions-rs/toolchain@v1
         with:
```
## `.github/workflows/sdks-tests.yml` (30 changes)

```diff
@@ -22,7 +22,7 @@ jobs:
     outputs:
       docker-image: ${{ steps.define-image.outputs.docker-image }}
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
      - name: Define the Docker image we need to use
        id: define-image
        run: |

@@ -46,7 +46,7 @@ jobs:
       MEILISEARCH_VERSION: ${{ needs.define-docker-image.outputs.docker-image }}

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-dotnet
      - name: Setup .NET Core

@@ -75,7 +75,7 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-dart
      - uses: dart-lang/setup-dart@v1

@@ -103,7 +103,7 @@ jobs:
        uses: actions/setup-go@v5
        with:
          go-version: stable
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-go
      - name: Get dependencies

@@ -129,11 +129,11 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-java
      - name: Set up Java
-        uses: actions/setup-java@v4
+        uses: actions/setup-java@v5
        with:
          java-version: 8
          distribution: 'zulu'

@@ -156,7 +156,7 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-js
      - name: Setup node

@@ -191,7 +191,7 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-php
      - name: Install PHP

@@ -220,7 +220,7 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-python
      - name: Set up Python

@@ -245,7 +245,7 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-ruby
      - name: Set up Ruby 3

@@ -270,7 +270,7 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-rust
      - name: Build

@@ -291,7 +291,7 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-swift
      - name: Run tests

@@ -314,7 +314,7 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-js-plugins
      - name: Setup node

@@ -347,7 +347,7 @@ jobs:
    env:
      RAILS_VERSION: '7.0'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-rails
      - name: Install SQLite dependencies

@@ -377,7 +377,7 @@ jobs:
      ports:
        - '7700:7700'
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
        with:
          repository: meilisearch/meilisearch-symfony
      - name: Install PHP
```
## `.github/workflows/test-suite.yml` (30 changes)

```diff
@@ -21,13 +21,13 @@ jobs:
     # Use ubuntu-22.04 to compile with glibc 2.35
     image: ubuntu:22.04
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Install needed dependencies
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
       - name: Setup test with Rust stable
-        uses: dtolnay/rust-toolchain@1.85
+        uses: dtolnay/rust-toolchain@1.89
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
       - name: Run cargo check without any default features

@@ -49,10 +49,10 @@ jobs:
       matrix:
         os: [macos-13, windows-2022]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:

@@ -72,12 +72,12 @@ jobs:
     image: ubuntu:22.04
     if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Install needed dependencies
         run: |
           apt-get update
           apt-get install --assume-yes build-essential curl
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Run cargo build with almost all features
         run: |
           cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"

@@ -91,7 +91,7 @@ jobs:
     env:
       MEILI_TEST_OLLAMA_SERVER: "http://localhost:11434"
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Install Ollama
         run: |
           curl -fsSL https://ollama.com/install.sh | sudo -E sh

@@ -124,12 +124,12 @@ jobs:
     image: ubuntu:22.04
     if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Install needed dependencies
         run: |
           apt-get update
           apt-get install --assume-yes build-essential curl
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Run cargo tree without default features and check lindera is not present
         run: |
           if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then

@@ -148,12 +148,12 @@ jobs:
     # Use ubuntu-22.04 to compile with glibc 2.35
     image: ubuntu:22.04
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
       - name: Install needed dependencies
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.8.0
       - name: Run tests in debug

@@ -166,8 +166,8 @@ jobs:
     name: Run Clippy
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
           components: clippy

@@ -183,8 +183,8 @@ jobs:
     name: Run Rustfmt
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
           toolchain: nightly-2024-07-09
```
One more workflow hunk, from the job that updates the version in `Cargo.toml` (file header missing from this capture):

```diff
@@ -17,8 +17,8 @@ jobs:
     name: Update version in Cargo.toml
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v5
-      - uses: dtolnay/rust-toolchain@1.85
+      - uses: dtolnay/rust-toolchain@1.89
         with:
           profile: minimal
       - name: Install sd
```
## `Cargo.lock` (generated, 1687 changes)

File diff suppressed because it is too large.
## `Cargo.toml`

```diff
@@ -23,7 +23,7 @@ members = [
 ]

 [workspace.package]
-version = "1.17.1"
+version = "1.22.3"
 authors = [
     "Quentin de Quelen <quentin@dequelen.me>",
     "Clément Renault <clement@meilisearch.com>",
```
## `Dockerfile`

```diff
@@ -1,5 +1,5 @@
 # Compile
-FROM rust:1.85-alpine3.20 AS compiler
+FROM rust:1.89-alpine3.20 AS compiler

 RUN apk add -q --no-cache build-base openssl-dev
```
## `LICENSE` (8 changes)

```diff
@@ -19,3 +19,11 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
+
+---
+
+🔒 Meilisearch Enterprise Edition (EE)
+
+Certain parts of this codebase are not licensed under the MIT license and governed by the Business Source License 1.1.
+
+See the LICENSE-EE file for details.
```
## `LICENSE-EE` (new file, 67 lines)

```text
Business Source License 1.1 – Adapted for Meili SAS
This license is based on the Business Source License version 1.1, as published by MariaDB Corporation Ab.

Parameters

Licensor: Meili SAS

Licensed Work: Any file explicitly marked as “Enterprise Edition (EE)” or “governed by the Business Source License” residing in enterprise_editions modules/folders.

Additional Use Grant:
You may use, modify, and distribute the Licensed Work for non-production purposes only, such as testing, development, or evaluation.

Production use of the Licensed Work requires a commercial license agreement with Meilisearch. Contact bonjour@meilisearch.com for licensing.

Change License: MIT

Change Date: Four years from the date the Licensed Work is published.

This License does not apply to any code outside of the Licensed Work, which remains under the MIT license.

For information about alternative licensing arrangements for the Licensed Work,
please contact bonjour@meilisearch.com or sales@meilisearch.com.

Notice

Business Source License 1.1

Terms

The Licensor hereby grants you the right to copy, modify, create derivative
works, redistribute, and make non-production use of the Licensed Work. The
Licensor may make an Additional Use Grant, above, permitting limited production use.

Effective on the Change Date, or the fourth anniversary of the first publicly
available distribution of a specific version of the Licensed Work under this
License, whichever comes first, the Licensor hereby grants you rights under
the terms of the Change License, and the rights granted in the paragraph
above terminate.

If your use of the Licensed Work does not comply with the requirements
currently in effect as described in this License, you must purchase a
commercial license from the Licensor, its affiliated entities, or authorized
resellers, or you must refrain from using the Licensed Work.

All copies of the original and modified Licensed Work, and derivative works
of the Licensed Work, are subject to this License. This License applies
separately for each version of the Licensed Work and the Change Date may vary
for each version of the Licensed Work released by Licensor.

You must conspicuously display this License on each original or modified copy
of the Licensed Work. If you receive the Licensed Work in original or
modified form from a third party, the terms and conditions set forth in this
License apply to your use of that work.

Any use of the Licensed Work in violation of this License will automatically
terminate your rights under this License for the current and all other
versions of the Licensed Work.

This License does not grant you any right in any trademark or logo of
Licensor or its affiliates (provided that you may use a trademark or logo of
Licensor as expressly required by this License).

TO THE EXTENT PERMITTED BY APPLICABLE LAW, THE LICENSED WORK IS PROVIDED ON
AN "AS IS" BASIS. LICENSOR HEREBY DISCLAIMS ALL WARRANTIES AND CONDITIONS,
EXPRESS OR IMPLIED, INCLUDING (WITHOUT LIMITATION) WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT, AND
TITLE.
```
## `README.md` (20 changes)

```diff
@@ -89,6 +89,26 @@ We also offer a wide range of dedicated guides to all Meilisearch features, such

 Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced) and [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced).

+## 🧾 Editions & Licensing
+
+Meilisearch is available in two editions:
+
+### 🧪 Community Edition (CE)
+
+- Fully open source under the [MIT license](./LICENSE)
+- Core search engine with fast and relevant full-text, semantic or hybrid search
+- Free to use for anyone, including commercial usage
+
+### 🏢 Enterprise Edition (EE)
+
+- Includes advanced features such as:
+  - Sharding
+- Governed by a [commercial license](./LICENSE-EE) or the [Business Source License 1.1](https://mariadb.com/bsl11)
+- Not allowed in production without a commercial agreement with Meilisearch.
+- You may use, modify, and distribute the Licensed Work for non-production purposes only, such as testing, development, or evaluation.
+
+Want access to Enterprise features? → Contact us at [sales@meilisearch.com](mailto:sales@meilisearch.com).
+
 ## 📊 Telemetry

 Meilisearch collects **anonymized** user data to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) whenever you want.
```
Thirty hunks from the indexing benchmarks (file header missing from this capture) are all the same one-line change: each indexing call site gains a trailing `None` argument after `Progress::default()`. The representative hunk:

```diff
@@ -154,6 +154,7 @@ fn indexing_songs_default(c: &mut Criterion) {
             &mut new_fields_ids_map,
             &|| false,
             Progress::default(),
+            None,
         )
         .unwrap();
```

The identical addition repeats at: `@@ -221,6 +222,7` and `@@ -266,6 +268,7` (`reindexing_songs_default`), `@@ -335,6 +338,7` (`deleting_songs_in_batches_default`), `@@ -412,6 +416,7`, `@@ -457,6 +462,7`, and `@@ -498,6 +504,7` (`indexing_songs_in_three_batches_default`), `@@ -566,6 +573,7` (`indexing_songs_without_faceted_numbers`), `@@ -633,6 +641,7` (`indexing_songs_without_faceted_fields`), `@@ -700,6 +709,7` (`indexing_wiki`), `@@ -766,6 +776,7` and `@@ -811,6 +822,7` (`reindexing_wiki`), `@@ -879,6 +891,7` (`deleting_wiki_in_batches_default`), `@@ -956,6 +969,7`, `@@ -1002,6 +1016,7`, and `@@ -1044,6 +1059,7` (`indexing_wiki_in_three_batches`), `@@ -1111,6 +1127,7` (`indexing_movies_default`), `@@ -1177,6 +1194,7` and `@@ -1222,6 +1240,7` (`reindexing_movies_default`), `@@ -1290,6 +1309,7` (`deleting_movies_in_batches_default`), `@@ -1404,6 +1424,7`, `@@ -1449,6 +1470,7`, and `@@ -1490,6 +1512,7` (`indexing_movies_in_three_batches`), `@@ -1580,6 +1603,7` (`indexing_nested_movies_default`), `@@ -1671,6 +1695,7` (`deleting_nested_movies_in_batches_default`), `@@ -1754,6 +1779,7` (`indexing_nested_movies_without_faceted_fields`), `@@ -1821,6 +1847,7` (`indexing_geo`), `@@ -1887,6 +1914,7` and `@@ -1932,6 +1960,7` (`reindexing_geo`), and `@@ -2000,6 +2029,7` (`deleting_geo_in_batches_default`).
The shared benchmark setup helper receives the same addition; a sketch of this call-shape change follows the hunk.

```diff
@@ -123,6 +123,7 @@ pub fn base_setup(conf: &Conf) -> Index {
             &mut new_fields_ids_map,
             &|| false,
             Progress::default(),
+            None,
         )
         .unwrap();
```
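A self-contained sketch of the pattern at work here: a function grows one trailing `Option<_>` parameter, and existing callers opt out by passing `None`. Every name below is invented for illustration; the diff shows neither the real entry point's signature nor the new parameter's type or meaning.

```rust
// Hypothetical stand-ins for the real indexing entry point and its arguments.
struct Progress;

fn index_documents(
    must_stop_processing: &dyn Fn() -> bool, // plays the role of `&|| false`
    _progress: Progress,                     // plays the role of `Progress::default()`
    extra: Option<u64>,                      // the new trailing parameter (name and type assumed)
) {
    if must_stop_processing() {
        return;
    }
    // Passing `None` preserves the pre-change behaviour; `Some(_)` would opt in.
    let _ = extra.unwrap_or_default();
}

fn main() {
    // Mirrors the updated call sites: `&|| false, Progress::default(), None`.
    index_documents(&|| false, Progress, None);
}
```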
The remaining hunks come from the dump crate (file headers are missing from this capture). First, the task-dump types:

```diff
@@ -10,7 +10,7 @@ use meilisearch_types::keys::Key;
 use meilisearch_types::milli::update::IndexDocumentsMethod;
 use meilisearch_types::settings::Unchecked;
 use meilisearch_types::tasks::{
-    Details, ExportIndexSettings, IndexSwap, KindWithContent, Status, Task, TaskId,
+    Details, ExportIndexSettings, IndexSwap, KindWithContent, Status, Task, TaskId, TaskNetwork,
 };
 use meilisearch_types::InstanceUid;
 use roaring::RoaringBitmap;

@@ -94,6 +94,8 @@ pub struct TaskDump {
         default
     )]
     pub finished_at: Option<OffsetDateTime>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub network: Option<TaskNetwork>,
 }

 // A `Kind` specific version made for the dump. If modified you may break the dump.
```
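The two serde attributes on the new `network` field are what keep the dump format compatible in both directions. A minimal, self-contained sketch of that property, using `serde_json` and an `Option<String>` stand-in for `Option<TaskNetwork>`:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct TaskDump {
    uid: u32,
    // `default` lets dumps written before this release (no `network` key)
    // still deserialize; `skip_serializing_if` keeps the key out of new dumps
    // when a task has no network metadata, so older readers are not confused.
    #[serde(default, skip_serializing_if = "Option::is_none")]
    network: Option<String>, // stands in for `Option<TaskNetwork>`
}

fn main() -> serde_json::Result<()> {
    // An old dump entry without the field still parses...
    let old: TaskDump = serde_json::from_str(r#"{ "uid": 1 }"#)?;
    assert!(old.network.is_none());
    // ...and a task without network metadata serializes without the key.
    assert_eq!(serde_json::to_string(&old)?, r#"{"uid":1}"#);
    Ok(())
}
```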
The rest of the task-dump changes:

```diff
@@ -129,6 +131,7 @@ pub enum KindDump {
     },
     IndexUpdate {
         primary_key: Option<String>,
+        uid: Option<String>,
     },
     IndexSwap {
         swaps: Vec<IndexSwap>,

@@ -171,6 +174,7 @@ impl From<Task> for TaskDump {
             enqueued_at: task.enqueued_at,
             started_at: task.started_at,
             finished_at: task.finished_at,
+            network: task.network,
         }
     }
 }

@@ -210,8 +214,8 @@ impl From<KindWithContent> for KindDump {
             KindWithContent::IndexCreation { primary_key, .. } => {
                 KindDump::IndexCreation { primary_key }
             }
-            KindWithContent::IndexUpdate { primary_key, .. } => {
-                KindDump::IndexUpdate { primary_key }
+            KindWithContent::IndexUpdate { primary_key, new_index_uid: uid, .. } => {
+                KindDump::IndexUpdate { primary_key, uid }
             }
             KindWithContent::IndexSwap { swaps } => KindDump::IndexSwap { swaps },
             KindWithContent::TaskCancelation { query, tasks } => {

@@ -249,8 +253,9 @@ pub(crate) mod test {
     use big_s::S;
     use maplit::{btreemap, btreeset};
     use meilisearch_types::batches::{Batch, BatchEnqueuedAt, BatchStats};
+    use meilisearch_types::enterprise_edition::network::{Network, Remote};
     use meilisearch_types::facet_values_sort::FacetValuesSort;
-    use meilisearch_types::features::{Network, Remote, RuntimeTogglableFeatures};
+    use meilisearch_types::features::RuntimeTogglableFeatures;
     use meilisearch_types::index_uid_pattern::IndexUidPattern;
     use meilisearch_types::keys::{Action, Key};
     use meilisearch_types::milli::update::Setting;

@@ -326,6 +331,7 @@ pub(crate) mod test {
             facet_search: Setting::NotSet,
             prefix_search: Setting::NotSet,
             chat: Setting::NotSet,
+            vector_store: Setting::NotSet,
             _kind: std::marker::PhantomData,
         };
         settings.check()

@@ -383,6 +389,7 @@ pub(crate) mod test {
                 enqueued_at: datetime!(2022-11-11 0:00 UTC),
                 started_at: Some(datetime!(2022-11-20 0:00 UTC)),
                 finished_at: Some(datetime!(2022-11-21 0:00 UTC)),
+                network: None,
             },
             None,
         ),

@@ -407,6 +414,7 @@ pub(crate) mod test {
                 enqueued_at: datetime!(2022-11-11 0:00 UTC),
                 started_at: None,
                 finished_at: None,
+                network: None,
             },
             Some(vec![
                 json!({ "id": 4, "race": "leonberg" }).as_object().unwrap().clone(),

@@ -426,6 +434,7 @@ pub(crate) mod test {
                 enqueued_at: datetime!(2022-11-15 0:00 UTC),
                 started_at: None,
                 finished_at: None,
+                network: None,
             },
             None,
         ),

@@ -538,7 +547,8 @@ pub(crate) mod test {
     fn create_test_network() -> Network {
         Network {
             local: Some("myself".to_string()),
-            remotes: maplit::btreemap! {"other".to_string() => Remote { url: "http://test".to_string(), search_api_key: Some("apiKey".to_string()) }},
+            remotes: maplit::btreemap! {"other".to_string() => Remote { url: "http://test".to_string(), search_api_key: Some("apiKey".to_string()), write_api_key: Some("docApiKey".to_string()) }},
+            sharding: false,
         }
     }
```
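For reference, the shapes the updated test constructor implies — a reconstruction from this hunk alone, so the field meanings are assumptions rather than the crate's actual definitions:

```rust
use std::collections::BTreeMap;

#[derive(Debug, Clone)]
pub struct Remote {
    pub url: String,
    pub search_api_key: Option<String>,
    // New in this release: a distinct key for write/document operations.
    pub write_api_key: Option<String>,
}

#[derive(Debug, Clone)]
pub struct Network {
    pub local: Option<String>,
    pub remotes: BTreeMap<String, Remote>,
    // New in this release: presumably toggles sharding documents across remotes.
    pub sharding: bool,
}

fn main() {
    // Mirrors `create_test_network()` after the change.
    let mut remotes = BTreeMap::new();
    remotes.insert(
        "other".to_string(),
        Remote {
            url: "http://test".to_string(),
            search_api_key: Some("apiKey".to_string()),
            write_api_key: Some("docApiKey".to_string()),
        },
    );
    let network = Network { local: Some("myself".to_string()), remotes, sharding: false };
    println!("{network:?}");
}
```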
In the v2→v3 compat module:

```diff
@@ -97,6 +97,7 @@ impl CompatV2ToV3 {
     }
 }

+#[allow(clippy::large_enum_variant)]
 pub enum CompatIndexV2ToV3 {
     V2(v2::V2IndexReader),
     Compat(Box<CompatIndexV1ToV2>),
```
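The new `#[allow(clippy::large_enum_variant)]` is needed because an enum occupies the size of its largest variant: `V2(v2::V2IndexReader)` is stored inline while the other variant is already boxed. A self-contained illustration of the lint and the usual `Box` remedy:

```rust
// Clippy warns when one inline variant dwarfs the others, because every value
// of the enum pays for the largest variant's size.
#[allow(clippy::large_enum_variant)]
enum Reader {
    Big([u8; 1024]),        // stored inline: every `Reader` is at least 1 KiB
    Boxed(Box<[u8; 1024]>), // stored behind a pointer: pointer-sized here
}

fn main() {
    // The enum's size is dominated by `Big`, which is what the lint flags.
    println!("size_of::<Reader>() = {}", std::mem::size_of::<Reader>());
}
```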
@@ -85,7 +85,7 @@ impl CompatV5ToV6 {
|
|||||||
v6::Kind::IndexCreation { primary_key }
|
v6::Kind::IndexCreation { primary_key }
|
||||||
}
|
}
|
||||||
v5::tasks::TaskContent::IndexUpdate { primary_key, .. } => {
|
v5::tasks::TaskContent::IndexUpdate { primary_key, .. } => {
|
||||||
v6::Kind::IndexUpdate { primary_key }
|
v6::Kind::IndexUpdate { primary_key, uid: None }
|
||||||
}
|
}
|
||||||
v5::tasks::TaskContent::IndexDeletion { .. } => v6::Kind::IndexDeletion,
|
v5::tasks::TaskContent::IndexDeletion { .. } => v6::Kind::IndexDeletion,
|
||||||
v5::tasks::TaskContent::DocumentAddition {
|
v5::tasks::TaskContent::DocumentAddition {
|
||||||
@@ -140,9 +140,11 @@ impl CompatV5ToV6 {
|
|||||||
v5::Details::Settings { settings } => {
|
v5::Details::Settings { settings } => {
|
||||||
v6::Details::SettingsUpdate { settings: Box::new(settings.into()) }
|
v6::Details::SettingsUpdate { settings: Box::new(settings.into()) }
|
||||||
}
|
}
|
||||||
v5::Details::IndexInfo { primary_key } => {
|
v5::Details::IndexInfo { primary_key } => v6::Details::IndexInfo {
|
||||||
v6::Details::IndexInfo { primary_key }
|
primary_key,
|
||||||
}
|
new_index_uid: None,
|
||||||
|
old_index_uid: None,
|
||||||
|
},
|
||||||
v5::Details::DocumentDeletion {
|
v5::Details::DocumentDeletion {
|
||||||
received_document_ids,
|
received_document_ids,
|
||||||
deleted_documents,
|
deleted_documents,
|
||||||
@@ -161,6 +163,7 @@ impl CompatV5ToV6 {
|
|||||||
enqueued_at: task_view.enqueued_at,
|
enqueued_at: task_view.enqueued_at,
|
||||||
started_at: task_view.started_at,
|
started_at: task_view.started_at,
|
||||||
finished_at: task_view.finished_at,
|
finished_at: task_view.finished_at,
|
||||||
|
network: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
(task, content_file)
|
(task, content_file)
|
||||||
@@ -418,6 +421,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
|
|||||||
facet_search: v6::Setting::NotSet,
|
facet_search: v6::Setting::NotSet,
|
||||||
prefix_search: v6::Setting::NotSet,
|
prefix_search: v6::Setting::NotSet,
|
||||||
chat: v6::Setting::NotSet,
|
chat: v6::Setting::NotSet,
|
||||||
|
vector_store: v6::Setting::NotSet,
|
||||||
_kind: std::marker::PhantomData,
|
_kind: std::marker::PhantomData,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||

@@ -4,7 +4,7 @@ use std::io::{BufRead, BufReader, ErrorKind};
 use std::path::Path;

 pub use meilisearch_types::milli;
-use meilisearch_types::milli::vector::hf::OverridePooling;
+use meilisearch_types::milli::vector::embedder::hf::OverridePooling;
 use tempfile::TempDir;
 use time::OffsetDateTime;
 use tracing::debug;

@@ -24,7 +24,7 @@ pub type Batch = meilisearch_types::batches::Batch;
 pub type Key = meilisearch_types::keys::Key;
 pub type ChatCompletionSettings = meilisearch_types::features::ChatCompletionSettings;
 pub type RuntimeTogglableFeatures = meilisearch_types::features::RuntimeTogglableFeatures;
-pub type Network = meilisearch_types::features::Network;
+pub type Network = meilisearch_types::enterprise_edition::network::Network;
 pub type Webhooks = meilisearch_types::webhooks::WebhooksDumpView;

 // ===== Other types to clarify the code of the compat module

@@ -5,7 +5,8 @@ use std::path::PathBuf;
 use flate2::write::GzEncoder;
 use flate2::Compression;
 use meilisearch_types::batches::Batch;
-use meilisearch_types::features::{ChatCompletionSettings, Network, RuntimeTogglableFeatures};
+use meilisearch_types::enterprise_edition::network::Network;
+use meilisearch_types::features::{ChatCompletionSettings, RuntimeTogglableFeatures};
 use meilisearch_types::keys::Key;
 use meilisearch_types::settings::{Checked, Settings};
 use meilisearch_types::webhooks::WebhooksDumpView;

@@ -148,11 +148,10 @@ impl File {
         Ok(Self { path: PathBuf::new(), file: None })
     }

-    pub fn persist(self) -> Result<()> {
-        if let Some(file) = self.file {
-            file.persist(&self.path)?;
-        }
-        Ok(())
+    pub fn persist(self) -> Result<Option<StdFile>> {
+        let Some(file) = self.file else { return Ok(None) };
+
+        Ok(Some(file.persist(&self.path)?))
     }
 }

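A hedged sketch of a call site under the new signature; the `finalize` helper is hypothetical, while `File` and its `Result` alias come from the hunk above:

```rust
// Hypothetical caller: `persist` now returns the persisted std::fs::File
// (None for the empty/in-memory case), so the handle can keep being used
// without reopening the path.
fn finalize(update_file: File) -> Result<()> {
    if let Some(std_file) = update_file.persist()? {
        let _ = std_file.sync_all(); // e.g. flush to disk with the same handle
    }
    Ok(())
}
```
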

@@ -7,23 +7,14 @@

 use nom::branch::alt;
 use nom::bytes::complete::tag;
-use nom::character::complete::char;
-use nom::character::complete::multispace0;
-use nom::character::complete::multispace1;
-use nom::combinator::cut;
-use nom::combinator::map;
-use nom::combinator::value;
-use nom::sequence::preceded;
-use nom::sequence::{terminated, tuple};
+use nom::character::complete::{char, multispace0, multispace1};
+use nom::combinator::{cut, map, value};
+use nom::sequence::{preceded, terminated, tuple};
 use Condition::*;

 use crate::error::IResultExt;
-use crate::value::parse_vector_value;
-use crate::value::parse_vector_value_cut;
-use crate::Error;
-use crate::ErrorKind;
-use crate::VectorFilter;
-use crate::{parse_value, FilterCondition, IResult, Span, Token};
+use crate::value::{parse_vector_value, parse_vector_value_cut};
+use crate::{parse_value, Error, ErrorKind, FilterCondition, IResult, Span, Token, VectorFilter};

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum Condition<'a> {

@@ -124,7 +115,7 @@ pub fn parse_not_exists(input: Span) -> IResult<FilterCondition> {
     Ok((input, FilterCondition::Not(Box::new(FilterCondition::Condition { fid: key, op: Exists }))))
 }

-fn parse_vectors(input: Span) -> IResult<(Token, Option<Token>, VectorFilter<'_>)> {
+fn parse_vectors(input: Span) -> IResult<(Token, Option<Token>, VectorFilter)> {
     let (input, _) = multispace0(input)?;
     let (input, fid) = tag("_vectors")(input)?;


@@ -75,7 +75,11 @@ pub enum ExpectedValueKind {
 pub enum ErrorKind<'a> {
     ReservedGeo(&'a str),
     GeoRadius,
+    GeoRadiusArgumentCount(usize),
     GeoBoundingBox,
+    GeoPolygon,
+    GeoPolygonNotEnoughPoints(usize),
+    GeoCoordinatesNotPair(usize),
     MisusedGeoRadius,
     MisusedGeoBoundingBox,
     VectorFilterLeftover,

@@ -189,7 +193,7 @@ impl Display for Error<'_> {
             }
             ErrorKind::InvalidPrimary => {
                 let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
-                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
+                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` {text}")?
             }
             ErrorKind::InvalidEscapedNumber => {
                 writeln!(f, "Found an invalid escaped sequence number: `{}`.", escaped_input)?

@@ -198,11 +202,23 @@ impl Display for Error<'_> {
                 writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
             }
             ErrorKind::GeoRadius => {
-                writeln!(f, "The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.")?
+                writeln!(f, "The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.")?
+            }
+            ErrorKind::GeoRadiusArgumentCount(count) => {
+                writeln!(f, "Was expecting 3 or 4 arguments for `_geoRadius`, but instead found {count}.")?
             }
             ErrorKind::GeoBoundingBox => {
                 writeln!(f, "The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.")?
             }
+            ErrorKind::GeoPolygon => {
+                writeln!(f, "The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.")?
+            }
+            ErrorKind::GeoPolygonNotEnoughPoints(n) => {
+                writeln!(f, "The `_geoPolygon` filter expects at least 3 points but only {n} were specified")?;
+            }
+            ErrorKind::GeoCoordinatesNotPair(number) => {
+                writeln!(f, "Was expecting 2 coordinates but instead found {number}.")?
+            }
             ErrorKind::ReservedGeo(name) => {
                 writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.", name.escape_debug())?
             }

@@ -19,6 +19,7 @@
 //! word = (alphanumeric | _ | - | .)+
 //! geoRadius = "_geoRadius(" WS* float WS* "," WS* float WS* "," float WS* ")"
 //! geoBoundingBox = "_geoBoundingBox([" WS * float WS* "," WS* float WS* "], [" WS* float WS* "," WS* float WS* "]")
+//! geoPolygon = "_geoPolygon([[" WS* float WS* "," WS* float WS* "],+])"
 //! ```
 //!
 //! Other BNF grammar used to handle some specific errors:

@@ -116,7 +117,7 @@ impl<'a> Token<'a> {
         self.span
     }

-    pub fn parse_finite_float(&self) -> Result<f64, Error> {
+    pub fn parse_finite_float(&self) -> Result<f64, Error<'a>> {
         let value: f64 = self.value().parse().map_err(|e| self.as_external_error(e))?;
         if value.is_finite() {
             Ok(value)

@@ -156,8 +157,9 @@ pub enum FilterCondition<'a> {
     Or(Vec<Self>),
     And(Vec<Self>),
     VectorExists { fid: Token<'a>, embedder: Option<Token<'a>>, filter: VectorFilter<'a> },
-    GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a> },
+    GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a>, resolution: Option<Token<'a>> },
     GeoBoundingBox { top_right_point: [Token<'a>; 2], bottom_left_point: [Token<'a>; 2] },
+    GeoPolygon { points: Vec<[Token<'a>; 2]> },
 }

 pub enum TraversedElement<'a> {

@@ -166,7 +168,7 @@ pub enum TraversedElement<'a> {
 }

 impl<'a> FilterCondition<'a> {
-    pub fn use_contains_operator(&self) -> Option<&Token> {
+    pub fn use_contains_operator(&self) -> Option<&Token<'a>> {
         match self {
             FilterCondition::Condition { fid: _, op } => match op {
                 Condition::GreaterThan(_)

@@ -189,11 +191,12 @@ impl<'a> FilterCondition<'a> {
             FilterCondition::VectorExists { .. }
             | FilterCondition::GeoLowerThan { .. }
             | FilterCondition::GeoBoundingBox { .. }
+            | FilterCondition::GeoPolygon { .. }
             | FilterCondition::In { .. } => None,
         }
     }

-    pub fn use_vector_filter(&self) -> Option<&Token> {
+    pub fn use_vector_filter(&self) -> Option<&Token<'a>> {
         match self {
             FilterCondition::Condition { .. } => None,
             FilterCondition::Not(this) => this.use_vector_filter(),

@@ -202,12 +205,13 @@ impl<'a> FilterCondition<'a> {
             }
             FilterCondition::GeoLowerThan { .. }
             | FilterCondition::GeoBoundingBox { .. }
+            | FilterCondition::GeoPolygon { .. }
             | FilterCondition::In { .. } => None,
             FilterCondition::VectorExists { fid, .. } => Some(fid),
         }
     }

-    pub fn fids(&self, depth: usize) -> Box<dyn Iterator<Item = &Token> + '_> {
+    pub fn fids(&self, depth: usize) -> Box<dyn Iterator<Item = &Token<'a>> + '_> {
         if depth == 0 {
             return Box::new(std::iter::empty());
         }

@@ -228,7 +232,7 @@ impl<'a> FilterCondition<'a> {
     }

     /// Returns the first token found at the specified depth, `None` if no token at this depth.
-    pub fn token_at_depth(&self, depth: usize) -> Option<&Token> {
+    pub fn token_at_depth(&self, depth: usize) -> Option<&Token<'a>> {
         match self {
             FilterCondition::Condition { fid, .. } if depth == 0 => Some(fid),
             FilterCondition::Or(subfilters) => {

@@ -396,23 +400,27 @@ fn parse_not(input: Span, depth: usize) -> IResult<FilterCondition> {
 /// If we parse `_geoRadius` we MUST parse the rest of the expression.
 fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
     // we want to allow space BEFORE the _geoRadius but not after
-    let parsed = preceded(
-        tuple((multispace0, word_exact("_geoRadius"))),
-        // if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure
-        cut(delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))),
-    )(input)
-    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoRadius)));
+    let (input, _) = tuple((multispace0, word_exact("_geoRadius")))(input)?;
+    // if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure
+    let parsed =
+        delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))(input)
+            .map_cut(ErrorKind::GeoRadius);

     let (input, args) = parsed?;

-    if args.len() != 3 {
-        return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoRadius)));
+    if !(3..=4).contains(&args.len()) {
+        return Err(Error::failure_from_kind(input, ErrorKind::GeoRadiusArgumentCount(args.len())));
     }

     let res = FilterCondition::GeoLowerThan {
         point: [args[0].into(), args[1].into()],
         radius: args[2].into(),
+        resolution: args.get(3).cloned().map(Token::from),
     };

     Ok((input, res))
 }

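To make the new argument handling concrete, a hedged sketch against the filter parser's public entry point (assuming `FilterCondition::parse`; the wrapping function and coordinates are invented):

```rust
fn demo_geo_radius() {
    // Three arguments still parse, and a fourth optional resolution is now
    // accepted; anything outside 3..=4 arguments is rejected.
    let three = FilterCondition::parse("_geoRadius(45.46, 9.19, 2000)");
    let four = FilterCondition::parse("_geoRadius(45.46, 9.19, 2000, 64)");
    assert!(three.is_ok() && four.is_ok());

    let five = FilterCondition::parse("_geoRadius(1, 2, 3, 4, 5)");
    assert!(five.is_err()); // ErrorKind::GeoRadiusArgumentCount(5)
}
```
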
@@ -420,26 +428,33 @@ fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
 /// If we parse `_geoBoundingBox` we MUST parse the rest of the expression.
 fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
     // we want to allow space BEFORE the _geoBoundingBox but not after
-    let parsed = preceded(
-        tuple((multispace0, word_exact("_geoBoundingBox"))),
-        // if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure
-        cut(delimited(
-            char('('),
-            separated_list1(
-                tag(","),
-                ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
-            ),
-            char(')'),
-        )),
-    )(input)
-    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoBoundingBox)));
-
-    let (input, args) = parsed?;
-
-    if args.len() != 2 || args[0].len() != 2 || args[1].len() != 2 {
+    let (input, _) = tuple((multispace0, word_exact("_geoBoundingBox")))(input)?;
+    // if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure
+    let (input, args) = delimited(
+        char('('),
+        separated_list1(
+            tag(","),
+            ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
+        ),
+        char(')'),
+    )(input)
+    .map_cut(ErrorKind::GeoBoundingBox)?;
+
+    if args.len() != 2 {
         return Err(Error::failure_from_kind(input, ErrorKind::GeoBoundingBox));
     }

+    if let Some(offending) = args.iter().find(|a| a.len() != 2) {
+        let context = offending.first().unwrap_or(&input);
+        return Err(Error::failure_from_kind(
+            *context,
+            ErrorKind::GeoCoordinatesNotPair(offending.len()),
+        ));
+    }
+
     let res = FilterCondition::GeoBoundingBox {
         top_right_point: [args[0][0].into(), args[0][1].into()],
         bottom_left_point: [args[1][0].into(), args[1][1].into()],

@@ -447,6 +462,47 @@ fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
     Ok((input, res))
 }

+/// geoPolygon = "_geoPolygon([[" WS* float WS* "," WS* float WS* "],+])"
+/// If we parse `_geoPolygon` we MUST parse the rest of the expression.
+fn parse_geo_polygon(input: Span) -> IResult<FilterCondition> {
+    // we want to allow space BEFORE the _geoPolygon but not after
+    let (input, _) = tuple((multispace0, word_exact("_geoPolygon")))(input)?;
+
+    // if we were able to parse `_geoPolygon` and can't parse the rest of the input we return a failure
+    let (input, args): (_, Vec<Vec<LocatedSpan<_, _>>>) = delimited(
+        char('('),
+        separated_list1(
+            tag(","),
+            ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
+        ),
+        preceded(opt(ws(char(','))), char(')')), // Tolerate trailing comma
+    )(input)
+    .map_cut(ErrorKind::GeoPolygon)?;
+
+    if args.len() < 3 {
+        let context = args.last().and_then(|a| a.last()).unwrap_or(&input);
+        return Err(Error::failure_from_kind(
+            *context,
+            ErrorKind::GeoPolygonNotEnoughPoints(args.len()),
+        ));
+    }
+
+    if let Some(offending) = args.iter().find(|a| a.len() != 2) {
+        let context = offending.first().unwrap_or(&input);
+        return Err(Error::failure_from_kind(
+            *context,
+            ErrorKind::GeoCoordinatesNotPair(offending.len()),
+        ));
+    }
+
+    let res = FilterCondition::GeoPolygon {
+        points: args.into_iter().map(|a| [a[0].into(), a[1].into()]).collect(),
+    };
+    Ok((input, res))
+}
+
 /// geoPoint = WS* "_geoPoint(float WS* "," WS* float WS* "," WS* float)
 fn parse_geo_point(input: Span) -> IResult<FilterCondition> {
     // we want to forbid space BEFORE the _geoPoint but not after
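A hedged sketch of what the new `_geoPolygon` parser accepts and rejects (again assuming `FilterCondition::parse`; the wrapping function and point values are invented):

```rust
fn demo_geo_polygon() {
    // At least three [lat, lng] pairs are required; a trailing comma is tolerated.
    let ok = FilterCondition::parse("_geoPolygon([12, 13], [14, 15], [16, 17],)");
    assert!(ok.is_ok());

    let too_few = FilterCondition::parse("_geoPolygon([12, 13], [14, 15])");
    assert!(too_few.is_err()); // ErrorKind::GeoPolygonNotEnoughPoints(2)

    let bad_pair = FilterCondition::parse("_geoPolygon([1, 2], [1, 2], [1, 2, 3])");
    assert!(bad_pair.is_err()); // ErrorKind::GeoCoordinatesNotPair(3)
}
```
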
@@ -516,8 +572,8 @@ fn parse_primary(input: Span, depth: usize) -> IResult<FilterCondition> {
                 Error::new_from_kind(input, ErrorKind::MissingClosingDelimiter(c.char()))
             }),
         ),
-        parse_geo_radius,
-        parse_geo_bounding_box,
+        // Made a random block of functions because we reached the maximum number of elements per alt
+        alt((parse_geo_radius, parse_geo_bounding_box, parse_geo_polygon)),
         parse_in,
         parse_not_in,
         parse_condition,

@@ -597,9 +653,12 @@ impl std::fmt::Display for FilterCondition<'_> {
                 }
                 write!(f, " EXISTS")
             }
-            FilterCondition::GeoLowerThan { point, radius } => {
+            FilterCondition::GeoLowerThan { point, radius, resolution: None } => {
                 write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
             }
+            FilterCondition::GeoLowerThan { point, radius, resolution: Some(resolution) } => {
+                write!(f, "_geoRadius({}, {}, {}, {})", point[0], point[1], radius, resolution)
+            }
             FilterCondition::GeoBoundingBox {
                 top_right_point: top_left_point,
                 bottom_left_point: bottom_right_point,

@@ -613,6 +672,13 @@ impl std::fmt::Display for FilterCondition<'_> {
                     bottom_right_point[1]
                 )
             }
+            FilterCondition::GeoPolygon { points } => {
+                write!(f, "_geoPolygon([")?;
+                for point in points {
+                    write!(f, "[{}, {}], ", point[0], point[1])?;
+                }
+                write!(f, "])")
+            }
         }
     }
 }

@@ -651,7 +717,7 @@ pub mod tests {
     /// Create a raw [Token]. You must specify the string that appear BEFORE your element followed by your element
     pub fn rtok<'a>(before: &'a str, value: &'a str) -> Token<'a> {
         // if the string is empty we still need to return 1 for the line number
-        let lines = before.is_empty().then_some(1).unwrap_or_else(|| before.lines().count());
+        let lines = if before.is_empty() { 1 } else { before.lines().count() };
         let offset = before.chars().count();
         // the extra field is not checked in the tests so we can set it to nothing
         unsafe { Span::new_from_raw_offset(offset, lines as u32, value, "") }.into()

@@ -776,12 +842,17 @@ pub mod tests {
         insta::assert_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
         insta::assert_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
         insta::assert_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})");
+        insta::assert_snapshot!(p("_geoRadius(12,13,14,1000)"), @"_geoRadius({12}, {13}, {14}, {1000})");

         // Test geo bounding box
         insta::assert_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
         insta::assert_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))");
         insta::assert_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
+
+        // Test geo polygon
+        insta::assert_snapshot!(p("_geoPolygon([12, 13], [14, 15], [16, 17])"), @"_geoPolygon([[{12}, {13}], [{14}, {15}], [{16}, {17}], ])");
+        insta::assert_snapshot!(p("_geoPolygon([12, 13], [14, 15], [-1.2,2939.2], [1,1])"), @"_geoPolygon([[{12}, {13}], [{14}, {15}], [{-1.2}, {2939.2}], [{1}, {1}], ])");

         // Test OR + AND
         insta::assert_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
         insta::assert_snapshot!(p("channel = ponce OR 'dog race' != 'bernese mountain'"), @"OR[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");

@@ -838,50 +909,80 @@ pub mod tests {
         11:12 channel = 🐻 AND followers < 100
         "###);

-        insta::assert_snapshot!(p("'OR'"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
+        insta::assert_snapshot!(p("'OR'"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `\'OR\'`.
         1:5 'OR'
-        "###);
+        ");

         insta::assert_snapshot!(p("OR"), @r###"
         Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
         1:3 OR
         "###);

-        insta::assert_snapshot!(p("channel Ponce"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
+        insta::assert_snapshot!(p("channel Ponce"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `channel Ponce`.
         1:14 channel Ponce
-        "###);
+        ");

-        insta::assert_snapshot!(p("channel = Ponce OR"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
+        insta::assert_snapshot!(p("channel = Ponce OR"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` but instead got nothing.
         19:19 channel = Ponce OR
-        "###);
+        ");

-        insta::assert_snapshot!(p("_geoRadius"), @r###"
-        The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
-        1:11 _geoRadius
-        "###);
+        insta::assert_snapshot!(p("_geoRadius"), @r"
+        The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.
+        11:11 _geoRadius
+        ");

-        insta::assert_snapshot!(p("_geoRadius = 12"), @r###"
-        The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
-        1:16 _geoRadius = 12
-        "###);
+        insta::assert_snapshot!(p("_geoRadius = 12"), @r"
+        The `_geoRadius` filter must be in the form: `_geoRadius(latitude, longitude, radius, optionalResolution)`.
+        11:16 _geoRadius = 12
+        ");

-        insta::assert_snapshot!(p("_geoBoundingBox"), @r###"
-        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
-        1:16 _geoBoundingBox
-        "###);
+        insta::assert_snapshot!(p("_geoBoundingBox"), @r"
+        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
+        16:16 _geoBoundingBox
+        ");

-        insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r###"
-        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
-        1:21 _geoBoundingBox = 12
-        "###);
+        insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r"
+        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
+        16:21 _geoBoundingBox = 12
+        ");

-        insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###"
-        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
-        1:26 _geoBoundingBox(1.0, 1.0)
-        "###);
+        insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r"
+        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
+        17:26 _geoBoundingBox(1.0, 1.0)
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon([1,2,3])"), @r"
+        The `_geoPolygon` filter expects at least 3 points but only 1 were specified
+        18:19 _geoPolygon([1,2,3])
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon(1,2,3)"), @r"
+        The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
+        13:19 _geoPolygon(1,2,3)
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon([1,2],[1,2],[1,2,3])"), @r"
+        Was expecting 2 coordinates but instead found 3.
+        26:27 _geoPolygon([1,2],[1,2],[1,2,3])
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon([1,2],[1,2,3])"), @r"
+        The `_geoPolygon` filter expects at least 3 points but only 2 were specified
+        24:25 _geoPolygon([1,2],[1,2,3])
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon(1)"), @r"
+        The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
+        13:15 _geoPolygon(1)
+        ");
+
+        insta::assert_snapshot!(p("_geoPolygon([1,2)"), @r"
+        The `_geoPolygon` filter doesn't match the expected format: `_geoPolygon([latitude, longitude], [latitude, longitude])`.
+        17:18 _geoPolygon([1,2)
+        ");

         insta::assert_snapshot!(p("_geoPoint(12, 13, 14)"), @r###"
         `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.

@@ -938,15 +1039,15 @@ pub mod tests {
         34:35 channel = mv OR followers >= 1000)
         "###);

-        insta::assert_snapshot!(p("colour NOT EXIST"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
+        insta::assert_snapshot!(p("colour NOT EXIST"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `colour NOT EXIST`.
         1:17 colour NOT EXIST
-        "###);
+        ");

-        insta::assert_snapshot!(p("subscribers 100 TO1000"), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
+        insta::assert_snapshot!(p("subscribers 100 TO1000"), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `subscribers 100 TO1000`.
         1:23 subscribers 100 TO1000
-        "###);
+        ");

         insta::assert_snapshot!(p("channel = ponce ORdog != 'bernese mountain'"), @r###"
         Found unexpected characters at the end of the filter: `ORdog != \'bernese mountain\'`. You probably forgot an `OR` or an `AND` rule.

@@ -1071,38 +1172,38 @@ pub mod tests {
         5:7 NOT OR EXISTS AND EXISTS NOT EXISTS
         "###);

-        insta::assert_snapshot!(p(r#"value NULL"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`.
+        insta::assert_snapshot!(p(r#"value NULL"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NULL`.
         1:11 value NULL
-        "###);
-        insta::assert_snapshot!(p(r#"value NOT NULL"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`.
+        ");
+        insta::assert_snapshot!(p(r#"value NOT NULL"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NOT NULL`.
         1:15 value NOT NULL
-        "###);
-        insta::assert_snapshot!(p(r#"value EMPTY"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`.
+        ");
+        insta::assert_snapshot!(p(r#"value EMPTY"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value EMPTY`.
         1:12 value EMPTY
-        "###);
-        insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`.
+        ");
+        insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value NOT EMPTY`.
         1:16 value NOT EMPTY
-        "###);
-        insta::assert_snapshot!(p(r#"value IS"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS`.
+        ");
+        insta::assert_snapshot!(p(r#"value IS"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS`.
         1:9 value IS
-        "###);
-        insta::assert_snapshot!(p(r#"value IS NOT"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`.
+        ");
+        insta::assert_snapshot!(p(r#"value IS NOT"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS NOT`.
         1:13 value IS NOT
-        "###);
-        insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`.
+        ");
+        insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS EXISTS`.
         1:16 value IS EXISTS
-        "###);
-        insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r###"
-        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`.
+        ");
+        insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r"
+        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, `_geoBoundingBox` or `_geoPolygon` at `value IS NOT EXISTS`.
         1:20 value IS NOT EXISTS
-        "###);
+        ");
     }

     #[test]

@@ -129,6 +129,7 @@ fn main() {
             &mut new_fields_ids_map,
             &|| false,
             Progress::default(),
+            None,
         )
         .unwrap();


@@ -1,3 +1,5 @@
+#![allow(clippy::result_large_err)]
+
 use std::collections::HashMap;
 use std::io;

@@ -147,6 +149,7 @@ impl<'a> Dump<'a> {
             canceled_by: task.canceled_by,
             details: task.details,
             status: task.status,
+            network: task.network,
             kind: match task.kind {
                 KindDump::DocumentImport {
                     primary_key,

@@ -197,9 +200,10 @@ impl<'a> Dump<'a> {
                     index_uid: task.index_uid.ok_or(Error::CorruptedDump)?,
                     primary_key,
                 },
-                KindDump::IndexUpdate { primary_key } => KindWithContent::IndexUpdate {
+                KindDump::IndexUpdate { primary_key, uid } => KindWithContent::IndexUpdate {
                     index_uid: task.index_uid.ok_or(Error::CorruptedDump)?,
                     primary_key,
+                    new_index_uid: uid,
                 },
                 KindDump::IndexSwap { swaps } => KindWithContent::IndexSwap { swaps },
                 KindDump::TaskCancelation { query, tasks } => {

@@ -67,6 +67,8 @@ pub enum Error {
     SwapDuplicateIndexesFound(Vec<String>),
     #[error("Index `{0}` not found.")]
     SwapIndexNotFound(String),
+    #[error("Cannot rename `{0}` to `{1}` as the index already exists. Hint: You can remove `{1}` first and then do your remove.")]
+    SwapIndexFoundDuringRename(String, String),
     #[error("Meilisearch cannot receive write operations because the limit of the task database has been reached. Please delete tasks to continue performing write operations.")]
     NoSpaceLeftInTaskQueue,
     #[error(

@@ -74,6 +76,10 @@ pub enum Error {
         .0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
     )]
     SwapIndexesNotFound(Vec<String>),
+    #[error("The following indexes are being renamed but cannot because their new name conflicts with an already existing index: {}. Renaming doesn't overwrite the other index name.",
+        .0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
+    )]
+    SwapIndexesFoundDuringRename(Vec<String>),
     #[error("Corrupted dump.")]
     CorruptedDump,
     #[error(

@@ -203,6 +209,8 @@ impl Error {
             | Error::SwapIndexNotFound(_)
             | Error::NoSpaceLeftInTaskQueue
             | Error::SwapIndexesNotFound(_)
+            | Error::SwapIndexFoundDuringRename(_, _)
+            | Error::SwapIndexesFoundDuringRename(_)
             | Error::CorruptedDump
             | Error::InvalidTaskDate { .. }
             | Error::InvalidTaskUid { .. }

@@ -271,6 +279,8 @@ impl ErrorCode for Error {
             Error::SwapDuplicateIndexFound(_) => Code::InvalidSwapDuplicateIndexFound,
             Error::SwapIndexNotFound(_) => Code::IndexNotFound,
             Error::SwapIndexesNotFound(_) => Code::IndexNotFound,
+            Error::SwapIndexFoundDuringRename(_, _) => Code::IndexAlreadyExists,
+            Error::SwapIndexesFoundDuringRename(_) => Code::IndexAlreadyExists,
             Error::InvalidTaskDate { field, .. } => (*field).into(),
             Error::InvalidTaskUid { .. } => Code::InvalidTaskUids,
             Error::InvalidBatchUid { .. } => Code::InvalidBatchUids,

@@ -1,6 +1,7 @@
 use std::sync::{Arc, RwLock};

-use meilisearch_types::features::{InstanceTogglableFeatures, Network, RuntimeTogglableFeatures};
+use meilisearch_types::enterprise_edition::network::Network;
+use meilisearch_types::features::{InstanceTogglableFeatures, RuntimeTogglableFeatures};
 use meilisearch_types::heed::types::{SerdeJson, Str};
 use meilisearch_types::heed::{Database, Env, RwTxn, WithoutTls};

@@ -157,6 +158,19 @@ impl RoFeatures {
             .into())
         }
     }
+
+    pub fn check_vector_store_setting(&self, disabled_action: &'static str) -> Result<()> {
+        if self.runtime.vector_store_setting {
+            Ok(())
+        } else {
+            Err(FeatureNotEnabledError {
+                disabled_action,
+                feature: "vector_store_setting",
+                issue_link: "https://github.com/orgs/meilisearch/discussions/860",
+            }
+            .into())
+        }
+    }
 }

 impl FeatureData {
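For illustration, a hedged sketch of how a caller might gate a request on the new check; the `RoFeatures` handle and error plumbing come from the hunk above, while the wrapping function and action string are hypothetical:

```rust
// Hypothetical guard: succeeds only when the runtime flag
// `vector_store_setting` has been toggled on, otherwise surfaces the
// FeatureNotEnabledError constructed in the hunk above.
fn guard_vector_store(features: &RoFeatures) -> Result<()> {
    features.check_vector_store_setting("Setting the `vectorStore` of an index")
}
```
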

@@ -143,10 +143,10 @@ impl IndexStats {
     ///
     /// - rtxn: a RO transaction for the index, obtained from `Index::read_txn()`.
     pub fn new(index: &Index, rtxn: &RoTxn) -> milli::Result<Self> {
-        let arroy_stats = index.arroy_stats(rtxn)?;
+        let vector_store_stats = index.vector_store_stats(rtxn)?;
         Ok(IndexStats {
-            number_of_embeddings: Some(arroy_stats.number_of_embeddings),
-            number_of_embedded_documents: Some(arroy_stats.documents.len()),
+            number_of_embeddings: Some(vector_store_stats.number_of_embeddings),
+            number_of_embedded_documents: Some(vector_store_stats.documents.len()),
             documents_database_stats: index.documents_stats(rtxn)?.unwrap_or_default(),
             number_of_documents: None,
             database_size: index.on_disk_size()?,

@@ -526,6 +526,20 @@ impl IndexMapper {
         Ok(())
     }

+    /// Rename an index.
+    pub fn rename(&self, wtxn: &mut RwTxn, current: &str, new: &str) -> Result<()> {
+        let uuid = self
+            .index_mapping
+            .get(wtxn, current)?
+            .ok_or_else(|| Error::IndexNotFound(current.to_string()))?;
+        if self.index_mapping.get(wtxn, new)?.is_some() {
+            return Err(Error::IndexAlreadyExists(new.to_string()));
+        }
+        self.index_mapping.delete(wtxn, current)?;
+        self.index_mapping.put(wtxn, new, &uuid)?;
+        Ok(())
+    }
+
     /// The stats of an index.
     ///
     /// If available in the cache, they are directly returned.
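A hedged sketch of driving the new `rename` inside a write transaction; the wrapping function, environment handle, and index names are invented for illustration:

```rust
// Hypothetical caller: remaps the uuid from `movies` to `films` in one
// write transaction. Fails with IndexNotFound if `movies` is unmapped and
// IndexAlreadyExists if `films` is already taken (rename never overwrites).
fn rename_index(mapper: &IndexMapper, env: &Env<WithoutTls>) -> Result<()> {
    let mut wtxn = env.write_txn()?;
    mapper.rename(&mut wtxn, "movies", "films")?;
    wtxn.commit()?;
    Ok(())
}
```
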

@@ -230,6 +230,7 @@ pub fn snapshot_task(task: &Task) -> String {
         details,
         status,
         kind,
+        network,
     } = task;
     snap.push('{');
     snap.push_str(&format!("uid: {uid}, "));

@@ -247,6 +248,9 @@ pub fn snapshot_task(task: &Task) -> String {
         snap.push_str(&format!("details: {}, ", &snapshot_details(details)));
     }
     snap.push_str(&format!("kind: {kind:?}"));
+    if let Some(network) = network {
+        snap.push_str(&format!("network: {network:?}, "))
+    }

     snap.push('}');
     snap

@@ -274,8 +278,8 @@ fn snapshot_details(d: &Details) -> String {
         Details::SettingsUpdate { settings } => {
             format!("{{ settings: {settings:?} }}")
         }
-        Details::IndexInfo { primary_key } => {
-            format!("{{ primary_key: {primary_key:?} }}")
+        Details::IndexInfo { primary_key, new_index_uid, old_index_uid } => {
+            format!("{{ primary_key: {primary_key:?}, old_new_uid: {old_index_uid:?}, new_index_uid: {new_index_uid:?} }}")
         }
         Details::DocumentDeletion {
             provided_ids: received_document_ids,
|
|||||||
@@ -1,3 +1,6 @@
|
|||||||
|
// The main Error type is large and boxing the large variant make the pattern matching fails
|
||||||
|
#![allow(clippy::result_large_err)]
|
||||||
|
|
||||||
/*!
|
/*!
|
||||||
This crate defines the index scheduler, which is responsible for:
|
This crate defines the index scheduler, which is responsible for:
|
||||||
1. Keeping references to meilisearch's indexes and mapping them to their
|
1. Keeping references to meilisearch's indexes and mapping them to their
|
||||||
@@ -51,8 +54,9 @@ pub use features::RoFeatures;
|
|||||||
use flate2::bufread::GzEncoder;
|
use flate2::bufread::GzEncoder;
|
||||||
use flate2::Compression;
|
use flate2::Compression;
|
||||||
use meilisearch_types::batches::Batch;
|
use meilisearch_types::batches::Batch;
|
||||||
|
use meilisearch_types::enterprise_edition::network::Network;
|
||||||
use meilisearch_types::features::{
|
use meilisearch_types::features::{
|
||||||
ChatCompletionSettings, InstanceTogglableFeatures, Network, RuntimeTogglableFeatures,
|
ChatCompletionSettings, InstanceTogglableFeatures, RuntimeTogglableFeatures,
|
||||||
};
|
};
|
||||||
use meilisearch_types::heed::byteorder::BE;
|
use meilisearch_types::heed::byteorder::BE;
|
||||||
use meilisearch_types::heed::types::{DecodeIgnore, SerdeJson, Str, I128};
|
use meilisearch_types::heed::types::{DecodeIgnore, SerdeJson, Str, I128};
|
||||||
@@ -64,7 +68,7 @@ use meilisearch_types::milli::vector::{
 };
 use meilisearch_types::milli::{self, Index};
 use meilisearch_types::task_view::TaskView;
-use meilisearch_types::tasks::{KindWithContent, Task};
+use meilisearch_types::tasks::{KindWithContent, Task, TaskNetwork};
 use meilisearch_types::webhooks::{Webhook, WebhooksDumpView, WebhooksView};
 use milli::vector::db::IndexEmbeddingConfig;
 use processing::ProcessingTasks;
@@ -343,7 +347,7 @@ impl IndexScheduler {
         Ok(this)
     }

-    fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
+    fn read_txn(&self) -> Result<RoTxn<'_, WithoutTls>> {
         self.env.read_txn().map_err(|e| e.into())
     }

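Note on the hunk above: the only change is spelling out the previously elided lifetime, so the read transaction's borrow of `self.env` is visible in the signature; behavior is identical. A minimal standalone sketch of the same signature change, using stand-in types rather than heed's real API:

use std::marker::PhantomData;

// Stand-ins for heed's `Env` and `RoTxn<'e, WithoutTls>`: a transaction type
// carrying a lifetime tied to the environment plus a TLS-mode marker.
struct WithoutTls;
struct Env;
struct Txn<'env, Mode>(&'env Env, PhantomData<Mode>);

impl Env {
    // Before: `-> Txn<WithoutTls>` (lifetime silently elided).
    // After:  `-> Txn<'_, WithoutTls>` (same type, borrow made explicit).
    fn read_txn(&self) -> Txn<'_, WithoutTls> {
        Txn(self, PhantomData)
    }
}

fn main() {
    let env = Env;
    let _txn = env.read_txn(); // borrows `env` for the transaction's lifetime
}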
@@ -666,6 +670,16 @@ impl IndexScheduler {
         self.queue.get_task_ids_from_authorized_indexes(&rtxn, query, filters, &processing)
     }

+    pub fn set_task_network(&self, task_id: TaskId, network: TaskNetwork) -> Result<()> {
+        let mut wtxn = self.env.write_txn()?;
+        let mut task =
+            self.queue.tasks.get_task(&wtxn, task_id)?.ok_or(Error::TaskNotFound(task_id))?;
+        task.network = Some(network);
+        self.queue.tasks.all_tasks.put(&mut wtxn, &task_id, &task)?;
+        wtxn.commit()?;
+        Ok(())
+    }
+
     /// Return the batches matching the query from the user's point of view along
     /// with the total number of batches matching the query, ignoring from and limit.
     ///
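The `set_task_network` method added above is a compact read-modify-write: open one write transaction, fetch the task or fail with `TaskNotFound`, set the new `network` field, persist it in `all_tasks`, commit. A hedged caller-side sketch (assuming the crate's `IndexScheduler`, `TaskId`, `TaskNetwork`, and `Result` are in scope; the wrapper function name is hypothetical):

// Sketch only: attach network/origin metadata to an already-registered task.
// Errors propagate exactly as in the method body above: a missing task id,
// or a failed heed write/commit.
fn tag_task_origin(
    scheduler: &IndexScheduler,
    task_id: TaskId,
    origin: TaskNetwork,
) -> Result<()> {
    scheduler.set_task_network(task_id, origin)
}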
@@ -746,7 +760,7 @@ impl IndexScheduler {

     /// Register a new task coming from a dump in the scheduler.
     /// By taking a mutable ref we're pretty sure no one will ever import a dump while actix is running.
-    pub fn register_dumped_task(&mut self) -> Result<Dump> {
+    pub fn register_dumped_task(&mut self) -> Result<Dump<'_>> {
         Dump::new(self)
     }

@@ -795,10 +809,8 @@ impl IndexScheduler {
             .queue
             .tasks
             .get_task(self.rtxn, task_id)
-            .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?
-            .ok_or_else(|| {
-                io::Error::new(io::ErrorKind::Other, Error::CorruptedTaskQueue)
-            })?;
+            .map_err(io::Error::other)?
+            .ok_or_else(|| io::Error::other(Error::CorruptedTaskQueue))?;

         serde_json::to_writer(&mut self.buffer, &TaskView::from_task(&task))?;
         self.buffer.push(b'\n');
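The replacements in the hunk above are pure refactors: `io::Error::other(e)`, stable since Rust 1.74, is shorthand for `io::Error::new(io::ErrorKind::Other, e)`, which also lets the `ok_or_else` closure collapse to one line. A standalone check of the equivalence:

use std::io;

fn main() {
    // Both constructors yield an error of kind `Other` with the same payload;
    // the diff merely switches to the shorter form.
    let long_form = io::Error::new(io::ErrorKind::Other, "corrupted task queue");
    let short_form = io::Error::other("corrupted task queue");
    assert_eq!(long_form.kind(), short_form.kind());
    assert_eq!(long_form.to_string(), short_form.to_string());
}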
@@ -275,19 +275,27 @@ impl BatchQueue {
     pub(crate) fn get_existing_batches(
         &self,
         rtxn: &RoTxn,
-        tasks: impl IntoIterator<Item = BatchId>,
+        batches: impl IntoIterator<Item = BatchId>,
         processing: &ProcessingTasks,
     ) -> Result<Vec<Batch>> {
-        tasks
+        batches
             .into_iter()
             .map(|batch_id| {
                 if Some(batch_id) == processing.batch.as_ref().map(|batch| batch.uid) {
                     let mut batch = processing.batch.as_ref().unwrap().to_batch();
                     batch.progress = processing.get_progress_view();
+                    // Add progress_trace from the current progress state
+                    if let Some(progress) = &processing.progress {
+                        batch.stats.progress_trace = progress
+                            .accumulated_durations()
+                            .into_iter()
+                            .map(|(k, v)| (k, v.into()))
+                            .collect();
+                    }
                     Ok(batch)
                 } else {
                     self.get_batch(rtxn, batch_id)
-                        .and_then(|task| task.ok_or(Error::CorruptedTaskQueue))
+                        .and_then(|batch| batch.ok_or(Error::CorruptedTaskQueue))
                 }
             })
             .collect::<Result<_>>()
@@ -104,6 +104,15 @@ fn query_batches_simple() {
     batches[0].started_at = OffsetDateTime::UNIX_EPOCH;
     assert!(batches[0].enqueued_at.is_some());
     batches[0].enqueued_at = None;
+
+    if !batches[0].stats.progress_trace.is_empty() {
+        batches[0].stats.progress_trace.clear();
+        batches[0]
+            .stats
+            .progress_trace
+            .insert("processing tasks".to_string(), "deterministic_duration".into());
+    }
+
     // Insta cannot snapshot our batches because the batch stats contains an enum as key: https://github.com/mitsuhiko/insta/issues/689
     let batch = serde_json::to_string_pretty(&batches[0]).unwrap();
     snapshot!(batch, @r###"
@@ -122,6 +131,9 @@ fn query_batches_simple() {
     },
     "indexUids": {
       "catto": 1
+    },
+    "progressTrace": {
+      "processing tasks": "deterministic_duration"
     }
   },
   "startedAt": "1970-01-01T00:00:00Z",
@@ -334,11 +346,11 @@ fn query_batches_special_rules() {
     let kind = index_creation_task("doggo", "sheep");
     let _task = index_scheduler.register(kind, None, false).unwrap();
     let kind = KindWithContent::IndexSwap {
-        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()) }],
+        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()), rename: false }],
     };
     let _task = index_scheduler.register(kind, None, false).unwrap();
     let kind = KindWithContent::IndexSwap {
-        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "whalo".to_owned()) }],
+        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "whalo".to_owned()), rename: false }],
     };
     let _task = index_scheduler.register(kind, None, false).unwrap();

@@ -442,7 +454,7 @@ fn query_batches_canceled_by() {
     let kind = index_creation_task("doggo", "sheep");
     let _ = index_scheduler.register(kind, None, false).unwrap();
     let kind = KindWithContent::IndexSwap {
-        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()) }],
+        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()), rename: false }],
     };
     let _task = index_scheduler.register(kind, None, false).unwrap();

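The `rename: false` churn in the two test hunks above follows a field addition: `IndexSwap` literals must now state the flag explicitly, with `false` preserving the historical swap behavior (the semantics of `true` are introduced elsewhere in this change set and not shown in these hunks). The shape the updated tests construct, as a sketch:

// Hedged sketch of the updated literal; `KindWithContent` and `IndexSwap`
// are the crate's own types.
let kind = KindWithContent::IndexSwap {
    swaps: vec![IndexSwap {
        indexes: ("catto".to_owned(), "doggo".to_owned()),
        rename: false, // new field; `false` keeps plain swap semantics
    }],
};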
@@ -279,6 +279,7 @@ impl Queue {
             details: kind.default_details(),
             status: Status::Enqueued,
             kind: kind.clone(),
+            network: None,
         };
         // For deletion and cancelation tasks, we want to make extra sure that they
         // don't attempt to delete/cancel tasks that are newer than themselves.
@@ -309,7 +310,8 @@ impl Queue {
             | self.tasks.status.get(wtxn, &Status::Failed)?.unwrap_or_default()
             | self.tasks.status.get(wtxn, &Status::Canceled)?.unwrap_or_default();

-        let to_delete = RoaringBitmap::from_iter(finished.into_iter().rev().take(100_000));
+        let to_delete =
+            RoaringBitmap::from_sorted_iter(finished.into_iter().take(100_000)).unwrap();

         // /!\ the len must be at least 2 or else we might enter an infinite loop where we only delete
         // the deletion tasks we enqueued ourselves.
@@ -325,7 +327,7 @@ impl Queue {
         );

         // it's safe to unwrap here because we checked the len above
-        let newest_task_id = to_delete.iter().last().unwrap();
+        let newest_task_id = to_delete.iter().next_back().unwrap();
         let last_task_to_delete =
             self.tasks.get_task(wtxn, newest_task_id)?.ok_or(Error::CorruptedTaskQueue)?;

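Two behavioral details in the `Queue` hunks above are easy to miss. The old code collected 100,000 ids from the descending side of `finished` (`.rev().take(...)`, i.e. the highest, most recent task ids), while the new code takes the first 100,000 of the ascending iterator (the lowest, oldest ids); `from_sorted_iter` requires ascending input, hence the `unwrap`. Separately, `.iter().next_back()` reaches the maximum id directly where `.last()` walks the whole iterator. A standalone illustration with the `roaring` crate:

use roaring::RoaringBitmap;

fn main() {
    let finished: RoaringBitmap = (0..10u32).collect();

    // Old selection: reverse the ascending iterator, keeping the highest ids.
    let newest: RoaringBitmap = finished.iter().rev().take(3).collect();
    assert_eq!(newest.iter().collect::<Vec<_>>(), vec![7, 8, 9]);

    // New selection: ascending input keeps the lowest ids, and
    // `from_sorted_iter` errors on unsorted input instead of re-sorting.
    let oldest = RoaringBitmap::from_sorted_iter(finished.iter().take(3)).unwrap();
    assert_eq!(oldest.iter().collect::<Vec<_>>(), vec![0, 1, 2]);

    // `next_back` jumps to the maximum without draining the iterator.
    assert_eq!(finished.iter().next_back(), Some(9));
}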
@@ -6,9 +6,9 @@ source: crates/index-scheduler/src/queue/batches_test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, batch_uid: 1, status: canceled, canceled_by: 3, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, batch_uid: 1, status: canceled, canceled_by: 3, details: { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, batch_uid: 1, status: canceled, canceled_by: 3, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, batch_uid: 1, status: canceled, canceled_by: 3, details: { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }}
 3 {uid: 3, batch_uid: 1, status: succeeded, details: { matched_tasks: 3, canceled_tasks: Some(2), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }}
 ----------------------------------------------------------------------
 ### Status:
@@ -49,7 +49,7 @@ catto: { number_of_documents: 0, field_distribution: {} }
 ----------------------------------------------------------------------
 ### All Batches:
 0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
-1 {uid: 1, details: {"primaryKey":"sheep","matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query","swaps":[{"indexes":["catto","doggo"]}]}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"indexCreation":1,"indexSwap":1,"taskCancelation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `taskCancelation` that cannot be batched with any other task.", }
+1 {uid: 1, details: {"primaryKey":"sheep","matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query","swaps":[{"indexes":["catto","doggo"],"rename":false}]}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"indexCreation":1,"indexSwap":1,"taskCancelation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `taskCancelation` that cannot be batched with any other task.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,9 +6,9 @@ source: crates/index-scheduler/src/queue/batches_test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("plankton") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
-2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("his_own_vomit") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("his_own_vomit") }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("plankton"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
+2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("his_own_vomit"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("his_own_vomit") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@@ -1,13 +1,12 @@
 ---
 source: crates/index-scheduler/src/queue/batches_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]
@@ -1,14 +1,13 @@
 ---
 source: crates/index-scheduler/src/queue/batches_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("plankton") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("plankton"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,]
@@ -1,15 +1,14 @@
 ---
 source: crates/index-scheduler/src/queue/batches_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("plankton") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
-2 {uid: 2, status: enqueued, details: { primary_key: Some("his_own_vomit") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("his_own_vomit") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("plankton"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
+2 {uid: 2, status: enqueued, details: { primary_key: Some("his_own_vomit"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("his_own_vomit") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,2,]
@@ -7,9 +7,9 @@ source: crates/index-scheduler/src/queue/batches_test.rs
 {uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, status: enqueued, details: { primary_key: Some("fish") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, status: enqueued, details: { primary_key: Some("fish"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [1,2,]
@@ -6,9 +6,9 @@ source: crates/index-scheduler/src/queue/batches_test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { primary_key: Some("fish") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { primary_key: Some("fish"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@@ -1,15 +1,14 @@
 ---
 source: crates/index-scheduler/src/queue/batches_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, status: enqueued, details: { primary_key: Some("fish") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, status: enqueued, details: { primary_key: Some("fish"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,2,]
@@ -6,10 +6,10 @@ source: crates/index-scheduler/src/queue/batches_test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }}
-3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `whalo` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { swaps: [IndexSwap { indexes: ("catto", "whalo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "whalo") }] }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }}
+3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `whalo` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { swaps: [IndexSwap { indexes: ("catto", "whalo"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "whalo"), rename: false }] }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@@ -54,8 +54,8 @@ doggo: { number_of_documents: 0, field_distribution: {} }
 ### All Batches:
 0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
 1 {uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
-2 {uid: 2, details: {"swaps":[{"indexes":["catto","doggo"]}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 2 of type `indexSwap` that cannot be batched with any other task.", }
-3 {uid: 3, details: {"swaps":[{"indexes":["catto","whalo"]}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 3 of type `indexSwap` that cannot be batched with any other task.", }
+2 {uid: 2, details: {"swaps":[{"indexes":["catto","doggo"],"rename":false}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 2 of type `indexSwap` that cannot be batched with any other task.", }
+3 {uid: 3, details: {"swaps":[{"indexes":["catto","whalo"],"rename":false}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 3 of type `indexSwap` that cannot be batched with any other task.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -1,16 +1,15 @@
 ---
 source: crates/index-scheduler/src/queue/batches_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, status: enqueued, details: { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }}
-3 {uid: 3, status: enqueued, details: { swaps: [IndexSwap { indexes: ("catto", "whalo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "whalo") }] }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, status: enqueued, details: { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }}
+3 {uid: 3, status: enqueued, details: { swaps: [IndexSwap { indexes: ("catto", "whalo"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "whalo"), rename: false }] }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,2,3,]
@@ -6,9 +6,9 @@ source: crates/index-scheduler/src/queue/tasks_test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, batch_uid: 1, status: canceled, canceled_by: 3, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, batch_uid: 1, status: canceled, canceled_by: 3, details: { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, batch_uid: 1, status: canceled, canceled_by: 3, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, batch_uid: 1, status: canceled, canceled_by: 3, details: { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }}
 3 {uid: 3, batch_uid: 1, status: succeeded, details: { matched_tasks: 3, canceled_tasks: Some(2), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }}
 ----------------------------------------------------------------------
 ### Status:
@@ -49,7 +49,7 @@ catto: { number_of_documents: 0, field_distribution: {} }
 ----------------------------------------------------------------------
 ### All Batches:
 0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
-1 {uid: 1, details: {"primaryKey":"sheep","matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query","swaps":[{"indexes":["catto","doggo"]}]}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"indexCreation":1,"indexSwap":1,"taskCancelation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `taskCancelation` that cannot be batched with any other task.", }
+1 {uid: 1, details: {"primaryKey":"sheep","matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query","swaps":[{"indexes":["catto","doggo"],"rename":false}]}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"indexCreation":1,"indexSwap":1,"taskCancelation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `taskCancelation` that cannot be batched with any other task.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,9 +6,9 @@ source: crates/index-scheduler/src/queue/tasks_test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("plankton") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
-2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("his_own_vomit") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("his_own_vomit") }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("plankton"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
+2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("his_own_vomit"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("his_own_vomit") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@@ -1,13 +1,12 @@
 ---
 source: crates/index-scheduler/src/queue/tasks_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]
@@ -1,14 +1,13 @@
 ---
 source: crates/index-scheduler/src/queue/tasks_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("plankton") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("plankton"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,]
@@ -1,15 +1,14 @@
 ---
 source: crates/index-scheduler/src/queue/tasks_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("plankton") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
-2 {uid: 2, status: enqueued, details: { primary_key: Some("his_own_vomit") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("his_own_vomit") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("bone"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("plankton"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("plankton") }}
+2 {uid: 2, status: enqueued, details: { primary_key: Some("his_own_vomit"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("his_own_vomit") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,2,]
@@ -6,9 +6,9 @@ source: crates/index-scheduler/src/queue/tasks_test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { primary_key: Some("fish") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { primary_key: Some("fish"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@@ -1,15 +1,14 @@
 ---
 source: crates/index-scheduler/src/queue/tasks_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, status: enqueued, details: { primary_key: Some("fish") }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, status: enqueued, details: { primary_key: Some("fish"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "whalo", primary_key: Some("fish") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,2,]
@@ -1,16 +1,15 @@
 ---
 source: crates/index-scheduler/src/queue/tasks_test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
-2 {uid: 2, status: enqueued, details: { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }}
-3 {uid: 3, status: enqueued, details: { swaps: [IndexSwap { indexes: ("catto", "whalo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "whalo") }] }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("sheep"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
+2 {uid: 2, status: enqueued, details: { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo"), rename: false }] }}
+3 {uid: 3, status: enqueued, details: { swaps: [IndexSwap { indexes: ("catto", "whalo"), rename: false }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "whalo"), rename: false }] }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,2,3,]
@@ -1,13 +1,12 @@
 ---
 source: crates/index-scheduler/src/queue/test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 1 {uid: 1, status: enqueued, details: { received_documents: 12, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 12, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { received_documents: 50, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 50, allow_index_creation: true }}
 3 {uid: 3, status: enqueued, details: { received_documents: 5000, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 5000, allow_index_creation: true }}
@@ -1,6 +1,5 @@
 ---
 source: crates/index-scheduler/src/queue/test.rs
-snapshot_kind: text
 ---
 [
   {
@@ -13,7 +12,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",
@@ -1,6 +1,5 @@
 ---
 source: crates/index-scheduler/src/queue/test.rs
-snapshot_kind: text
 ---
 [
   {
@@ -13,7 +12,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "succeeded",
@@ -39,7 +40,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "failed",
@@ -60,7 +63,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
      }
     },
     "status": "enqueued",
@@ -81,7 +86,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",
@@ -1,6 +1,5 @@
 ---
 source: crates/index-scheduler/src/queue/test.rs
-snapshot_kind: text
 ---
 [
   {
@@ -13,7 +12,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",
@@ -34,7 +35,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",
@@ -1,6 +1,5 @@
 ---
 source: crates/index-scheduler/src/queue/test.rs
-snapshot_kind: text
 ---
 [
   {
@@ -13,7 +12,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "succeeded",
@@ -39,7 +40,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "failed",
@@ -60,7 +63,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",
@@ -81,7 +86,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",
|||||||
@@ -1,6 +1,5 @@
 ---
 source: crates/index-scheduler/src/queue/test.rs
-snapshot_kind: text
 ---
 [
   {
@@ -13,7 +12,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "succeeded",
@@ -39,7 +40,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "failed",
@@ -60,7 +63,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",
@@ -81,7 +86,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",

@@ -1,6 +1,5 @@
 ---
 source: crates/index-scheduler/src/queue/test.rs
-snapshot_kind: text
 ---
 [
   {
@@ -13,7 +12,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "succeeded",
@@ -39,7 +40,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "failed",
@@ -60,7 +63,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",
@@ -81,7 +86,9 @@ snapshot_kind: text
     "canceledBy": null,
     "details": {
       "IndexInfo": {
-        "primary_key": null
+        "primary_key": null,
+        "new_index_uid": null,
+        "old_index_uid": null
       }
     },
     "status": "enqueued",

@@ -97,7 +97,22 @@ impl TaskQueue {
         Ok(self.all_tasks.get(rtxn, &task_id)?)
     }

-    pub(crate) fn update_task(&self, wtxn: &mut RwTxn, task: &Task) -> Result<()> {
+    /// Update the inverted task indexes and write the new value of the task.
+    ///
+    /// The passed `task` object typically comes from a previous transaction, so two kinds of modification might have occurred:
+    /// 1. Modification to the `task` object after loading it from the DB (the purpose of this method is to persist these changes)
+    /// 2. Modification to the task committed by another transaction in the DB (an annoying consequence of having lost the original
+    ///    transaction from which the `task` instance was deserialized)
+    ///
+    /// When calling this function, this `task` is modified to take into account any existing `network`
+    /// that may have been added since the task was loaded into memory.
+    ///
+    /// Any other modification to the task that was committed to the DB since the parameter was pulled from the DB will be overwritten.
+    ///
+    /// # Errors
+    ///
+    /// - CorruptedTaskQueue: The task doesn't exist in the database
+    pub(crate) fn update_task(&self, wtxn: &mut RwTxn, task: &mut Task) -> Result<()> {
         let old_task = self.get_task(wtxn, task.uid)?.ok_or(Error::CorruptedTaskQueue)?;
         let reprocessing = old_task.status != Status::Enqueued;

@@ -157,6 +172,12 @@ impl TaskQueue {
             }
         }

+        task.network = match (old_task.network, task.network.take()) {
+            (None, None) => None,
+            (None, Some(network)) | (Some(network), None) => Some(network),
+            (Some(_), Some(network)) => Some(network),
+        };
+
         self.all_tasks.put(wtxn, &task.uid, task)?;
         Ok(())
     }

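Worth pausing on the merge at the end of `update_task`: either side may be missing a `network`, and when both carry one the in-memory value wins. A minimal, self-contained sketch of that precedence rule, using a stand-in `Network` type (the real field type lives in meilisearch-types and is an assumption here):

// Stand-in for the task's `network` metadata; the real type is richer.
#[derive(Debug, Clone, PartialEq)]
struct Network(String);

// Same precedence as the merge in `update_task`: either side may be
// absent, and when both are present the in-memory value overwrites the
// previously committed one.
fn merge_network(committed: Option<Network>, in_memory: Option<Network>) -> Option<Network> {
    match (committed, in_memory) {
        (None, None) => None,
        (None, Some(n)) | (Some(n), None) => Some(n),
        (Some(_), Some(n)) => Some(n), // in-memory wins
    }
}

fn main() {
    // A network committed by another transaction survives when the
    // in-memory task never set one.
    let committed = Some(Network("remote-a".into()));
    assert_eq!(merge_network(committed.clone(), None), committed);
    // But an in-memory network always takes precedence.
    let newer = Some(Network("remote-b".into()));
    assert_eq!(merge_network(committed, newer.clone()), newer);
}
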
@@ -304,11 +304,11 @@ fn query_tasks_special_rules() {
     let kind = index_creation_task("doggo", "sheep");
     let _task = index_scheduler.register(kind, None, false).unwrap();
     let kind = KindWithContent::IndexSwap {
-        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()) }],
+        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()), rename: false }],
     };
     let _task = index_scheduler.register(kind, None, false).unwrap();
     let kind = KindWithContent::IndexSwap {
-        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "whalo".to_owned()) }],
+        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "whalo".to_owned()), rename: false }],
     };
     let _task = index_scheduler.register(kind, None, false).unwrap();

@@ -399,7 +399,7 @@ fn query_tasks_canceled_by() {
     let kind = index_creation_task("doggo", "sheep");
     let _ = index_scheduler.register(kind, None, false).unwrap();
     let kind = KindWithContent::IndexSwap {
-        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()) }],
+        swaps: vec![IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()), rename: false }],
     };
     let _task = index_scheduler.register(kind, None, false).unwrap();

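Every literal `IndexSwap` in the tests now has to spell out the new `rename` flag. A sketch of the swap descriptor as these diffs imply it (field names are taken from the diff; the struct definition itself is paraphrased, not copied from meilisearch-types):

// Paraphrased from the diff: `rename: false` keeps the historical
// swap-two-existing-indexes semantics, while `rename: true` asks the
// scheduler to rename the left index to the (not yet existing) right uid.
#[derive(Debug, Clone, PartialEq)]
pub struct IndexSwap {
    pub indexes: (String, String),
    pub rename: bool,
}

fn main() {
    // Classic swap: both "catto" and "doggo" must already exist.
    let swap = IndexSwap { indexes: ("catto".to_owned(), "doggo".to_owned()), rename: false };
    // Rename: "catto" must exist and "catto_v2" must not.
    let rename = IndexSwap { indexes: ("catto".to_owned(), "catto_v2".to_owned()), rename: true };
    assert_ne!(swap, rename);
}
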
@@ -287,7 +287,7 @@ impl BatchKind {
         };

         match (self, autobatch_kind) {
             // We don't batch any of these operations
             (this, K::IndexCreation | K::IndexUpdate | K::IndexSwap | K::DocumentEdition) => Break((this, BatchStopReason::TaskCannotBeBatched { kind, id })),
             // We must not batch tasks that don't have the same index creation rights if the index doesn't already exist.
             (this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => {

@@ -75,7 +75,11 @@ fn idx_create() -> KindWithContent {
 }

 fn idx_update() -> KindWithContent {
-    KindWithContent::IndexUpdate { index_uid: String::from("doggo"), primary_key: None }
+    KindWithContent::IndexUpdate {
+        index_uid: String::from("doggo"),
+        primary_key: None,
+        new_index_uid: None,
+    }
 }

 fn idx_del() -> KindWithContent {
@@ -84,7 +88,10 @@ fn idx_del() -> KindWithContent {

 fn idx_swap() -> KindWithContent {
     KindWithContent::IndexSwap {
-        swaps: vec![IndexSwap { indexes: (String::from("doggo"), String::from("catto")) }],
+        swaps: vec![IndexSwap {
+            indexes: (String::from("doggo"), String::from("catto")),
+            rename: false,
+        }],
     }
 }

@@ -38,6 +38,7 @@ pub(crate) enum Batch {
     IndexUpdate {
         index_uid: String,
         primary_key: Option<String>,
+        new_index_uid: Option<String>,
         task: Task,
     },
     IndexDeletion {
@@ -65,6 +66,7 @@ pub(crate) enum DocumentOperation {

 /// A [batch](Batch) that combines multiple tasks operating on an index.
 #[derive(Debug)]
+#[allow(clippy::large_enum_variant)]
 pub(crate) enum IndexOperation {
     DocumentOperation {
         index_uid: String,
@@ -405,11 +407,13 @@ impl IndexScheduler {
                 let mut task =
                     self.queue.tasks.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
                 current_batch.processing(Some(&mut task));
-                let primary_key = match &task.kind {
-                    KindWithContent::IndexUpdate { primary_key, .. } => primary_key.clone(),
+                let (primary_key, new_index_uid) = match &task.kind {
+                    KindWithContent::IndexUpdate { primary_key, new_index_uid, .. } => {
+                        (primary_key.clone(), new_index_uid.clone())
+                    }
                     _ => unreachable!(),
                 };
-                Ok(Some(Batch::IndexUpdate { index_uid, primary_key, task }))
+                Ok(Some(Batch::IndexUpdate { index_uid, primary_key, new_index_uid, task }))
             }
             BatchKind::IndexDeletion { ids } => Ok(Some(Batch::IndexDeletion {
                 index_uid,

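The pattern doing the work in the batch-builder hunk above is extracting two optional fields from a struct variant in one match arm. A stand-alone sketch with illustrative types (none of these names are the real meilisearch-types definitions):

// Illustrative stand-in for KindWithContent::IndexUpdate.
#[allow(dead_code)]
enum Kind {
    IndexUpdate { primary_key: Option<String>, new_index_uid: Option<String> },
    Other,
}

fn extract(kind: &Kind) -> (Option<String>, Option<String>) {
    match kind {
        // `..` tolerates extra fields, so the arm keeps compiling if the
        // variant grows again later.
        Kind::IndexUpdate { primary_key, new_index_uid, .. } => {
            (primary_key.clone(), new_index_uid.clone())
        }
        // The scheduler only reaches this point with IndexUpdate tasks.
        _ => unreachable!(),
    }
}

fn main() {
    let kind = Kind::IndexUpdate { primary_key: Some("id".into()), new_index_uid: None };
    assert_eq!(extract(&kind), (Some("id".into()), None));
}
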
@@ -268,7 +268,7 @@ impl IndexScheduler {

                 self.queue
                     .tasks
-                    .update_task(&mut wtxn, &task)
+                    .update_task(&mut wtxn, &mut task)
                     .map_err(|e| Error::UnrecoverableError(Box::new(e)))?;
             }
             if let Some(canceled_by) = canceled_by {
@@ -349,7 +349,7 @@ impl IndexScheduler {

                 self.queue
                     .tasks
-                    .update_task(&mut wtxn, &task)
+                    .update_task(&mut wtxn, &mut task)
                     .map_err(|e| Error::UnrecoverableError(Box::new(e)))?;
             }
         }

@@ -10,6 +10,7 @@ use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status
 use meilisearch_types::versioning::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
 use milli::update::Settings as MilliSettings;
 use roaring::RoaringBitmap;
+use time::OffsetDateTime;

 use super::create_batch::Batch;
 use crate::processing::{
@@ -146,7 +147,6 @@ impl IndexScheduler {
         };

         let mut index_wtxn = index.write_txn()?;
-
         let index_version = index.get_version(&index_wtxn)?.unwrap_or((1, 12, 0));
         let package_version = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH);
         if index_version != package_version {
@@ -224,24 +224,46 @@ impl IndexScheduler {
                 self.index_mapper.create_index(wtxn, &index_uid, None)?;

                 self.process_batch(
-                    Batch::IndexUpdate { index_uid, primary_key, task },
+                    Batch::IndexUpdate { index_uid, primary_key, new_index_uid: None, task },
                     current_batch,
                     progress,
                 )
             }
-            Batch::IndexUpdate { index_uid, primary_key, mut task } => {
+            Batch::IndexUpdate { index_uid, primary_key, new_index_uid, mut task } => {
                 progress.update_progress(UpdateIndexProgress::UpdatingTheIndex);

+                // Get the index (renamed or not)
                 let rtxn = self.env.read_txn()?;
                 let index = self.index_mapper.index(&rtxn, &index_uid)?;
-                if let Some(primary_key) = primary_key.clone() {
-                    let mut index_wtxn = index.write_txn()?;
+                let mut index_wtxn = index.write_txn()?;
+
+                // Handle rename if new_index_uid is provided
+                let final_index_uid = if let Some(new_uid) = &new_index_uid {
+                    if new_uid != &index_uid {
+                        index.set_updated_at(&mut index_wtxn, &OffsetDateTime::now_utc())?;
+
+                        let mut wtxn = self.env.write_txn()?;
+                        self.apply_index_swap(
+                            &mut wtxn, &progress, task.uid, &index_uid, new_uid, true,
+                        )?;
+                        wtxn.commit()?;
+
+                        new_uid.clone()
+                    } else {
+                        new_uid.clone()
+                    }
+                } else {
+                    index_uid.clone()
+                };
+
+                // Handle primary key update if provided
+                if let Some(ref primary_key) = primary_key {
                     let mut builder = MilliSettings::new(
                         &mut index_wtxn,
                         &index,
                         self.index_mapper.indexer_config(),
                     );
-                    builder.set_primary_key(primary_key);
+                    builder.set_primary_key(primary_key.clone());
                     let must_stop_processing = self.scheduler.must_stop_processing.clone();

                     builder
@@ -250,15 +272,20 @@ impl IndexScheduler {
                             &progress,
                             current_batch.embedder_stats.clone(),
                         )
-                        .map_err(|e| Error::from_milli(e, Some(index_uid.to_string())))?;
-                    index_wtxn.commit()?;
+                        .map_err(|e| Error::from_milli(e, Some(final_index_uid.to_string())))?;
                 }

+                index_wtxn.commit()?;
                 // drop rtxn before starting a new wtxn on the same db
                 rtxn.commit()?;

                 task.status = Status::Succeeded;
-                task.details = Some(Details::IndexInfo { primary_key });
+                task.details = Some(Details::IndexInfo {
+                    primary_key: primary_key.clone(),
+                    new_index_uid: new_index_uid.clone(),
+                    // we only display the old index uid if a rename happened => there is a new_index_uid
+                    old_index_uid: new_index_uid.map(|_| index_uid.clone()),
+                });

                 // if the update processed successfully, we're going to store the new
                 // stats of the index. Since the tasks have already been processed and
@@ -268,8 +295,8 @@ impl IndexScheduler {
                 let mut wtxn = self.env.write_txn()?;
                 let index_rtxn = index.read_txn()?;
                 let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn)
-                    .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;
-                self.index_mapper.store_stats_of(&mut wtxn, &index_uid, &stats)?;
+                    .map_err(|e| Error::from_milli(e, Some(final_index_uid.clone())))?;
+                self.index_mapper.store_stats_of(&mut wtxn, &final_index_uid, &stats)?;
                 wtxn.commit()?;
                 Ok(())
             }();
@@ -330,13 +357,18 @@ impl IndexScheduler {
                     unreachable!()
                 };
                 let mut not_found_indexes = BTreeSet::new();
-                for IndexSwap { indexes: (lhs, rhs) } in swaps {
-                    for index in [lhs, rhs] {
-                        let index_exists = self.index_mapper.index_exists(&wtxn, index)?;
-                        if !index_exists {
-                            not_found_indexes.insert(index);
-                        }
+                let mut found_indexes_but_should_not = BTreeSet::new();
+                for IndexSwap { indexes: (lhs, rhs), rename } in swaps {
+                    let index_exists = self.index_mapper.index_exists(&wtxn, lhs)?;
+                    if !index_exists {
+                        not_found_indexes.insert(lhs);
                     }
+                    let index_exists = self.index_mapper.index_exists(&wtxn, rhs)?;
+                    match (index_exists, rename) {
+                        (true, true) => found_indexes_but_should_not.insert((lhs, rhs)),
+                        (false, false) => not_found_indexes.insert(rhs),
+                        (true, false) | (false, true) => true, // random value we don't read it anyway
+                    };
                 }
                 if !not_found_indexes.is_empty() {
                     if not_found_indexes.len() == 1 {
@@ -349,6 +381,23 @@ impl IndexScheduler {
                     ));
                 }
             }
+            if !found_indexes_but_should_not.is_empty() {
+                if found_indexes_but_should_not.len() == 1 {
+                    let (lhs, rhs) = found_indexes_but_should_not
+                        .into_iter()
+                        .next()
+                        .map(|(lhs, rhs)| (lhs.clone(), rhs.clone()))
+                        .unwrap();
+                    return Err(Error::SwapIndexFoundDuringRename(lhs, rhs));
+                } else {
+                    return Err(Error::SwapIndexesFoundDuringRename(
+                        found_indexes_but_should_not
+                            .into_iter()
+                            .map(|(_, rhs)| rhs.to_string())
+                            .collect(),
+                    ));
+                }
+            }
             progress.update_progress(SwappingTheIndexes::SwappingTheIndexes);
             for (step, swap) in swaps.iter().enumerate() {
                 progress.update_progress(VariableNameStep::<SwappingTheIndexes>::new(
@@ -362,6 +411,7 @@ impl IndexScheduler {
                     task.uid,
                     &swap.indexes.0,
                     &swap.indexes.1,
+                    swap.rename,
                 )?;
             }
             wtxn.commit()?;
@@ -451,6 +501,7 @@ impl IndexScheduler {
         task_id: u32,
         lhs: &str,
         rhs: &str,
+        rename: bool,
     ) -> Result<()> {
         progress.update_progress(InnerSwappingTwoIndexes::RetrieveTheTasks);
         // 1. Verify that both lhs and rhs are existing indexes
@@ -458,16 +509,23 @@ impl IndexScheduler {
         if !index_lhs_exists {
             return Err(Error::IndexNotFound(lhs.to_owned()));
         }
-        let index_rhs_exists = self.index_mapper.index_exists(wtxn, rhs)?;
-        if !index_rhs_exists {
-            return Err(Error::IndexNotFound(rhs.to_owned()));
+        if !rename {
+            let index_rhs_exists = self.index_mapper.index_exists(wtxn, rhs)?;
+            if !index_rhs_exists {
+                return Err(Error::IndexNotFound(rhs.to_owned()));
+            }
         }

         // 2. Get the task set for index = name that appeared before the index swap task
         let mut index_lhs_task_ids = self.queue.tasks.index_tasks(wtxn, lhs)?;
         index_lhs_task_ids.remove_range(task_id..);
-        let mut index_rhs_task_ids = self.queue.tasks.index_tasks(wtxn, rhs)?;
-        index_rhs_task_ids.remove_range(task_id..);
+        let index_rhs_task_ids = if rename {
+            let mut index_rhs_task_ids = self.queue.tasks.index_tasks(wtxn, rhs)?;
+            index_rhs_task_ids.remove_range(task_id..);
+            index_rhs_task_ids
+        } else {
+            RoaringBitmap::new()
+        };

         // 3. before_name -> new_name in the task's KindWithContent
         progress.update_progress(InnerSwappingTwoIndexes::UpdateTheTasks);
@@ -496,7 +554,11 @@ impl IndexScheduler {
         })?;

         // 6. Swap in the index mapper
-        self.index_mapper.swap(wtxn, lhs, rhs)?;
+        if rename {
+            self.index_mapper.rename(wtxn, lhs, rhs)?;
+        } else {
+            self.index_mapper.swap(wtxn, lhs, rhs)?;
+        }

         Ok(())
     }

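The `(index_exists, rename)` match above is the entire precondition table for the right-hand side of a swap entry. The same decision, extracted into a standalone sketch with illustrative names (the real code records entries into two sets and later maps them to the scheduler's own error types):

// Outcome of validating the right-hand side of one swap entry,
// mirroring the `(index_exists, rename)` match in the diff.
#[derive(Debug, PartialEq)]
enum RhsCheck {
    Ok,            // valid combination, nothing to report
    AlreadyExists, // rename target exists: rejected during rename
    NotFound,      // swap target missing: rejected as index-not-found
}

fn check_rhs(index_exists: bool, rename: bool) -> RhsCheck {
    match (index_exists, rename) {
        (true, true) => RhsCheck::AlreadyExists, // cannot rename onto an existing index
        (false, false) => RhsCheck::NotFound,    // cannot swap with a missing index
        (true, false) | (false, true) => RhsCheck::Ok,
    }
}

fn main() {
    assert_eq!(check_rhs(true, false), RhsCheck::Ok); // classic swap
    assert_eq!(check_rhs(false, true), RhsCheck::Ok); // rename to a fresh uid
    assert_eq!(check_rhs(true, true), RhsCheck::AlreadyExists);
    assert_eq!(check_rhs(false, false), RhsCheck::NotFound);
}
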
@@ -370,7 +370,7 @@ fn ureq_error_into_error(error: ureq::Error) -> Error {
             }
             Err(e) => e.into(),
         },
-        ureq::Error::Transport(transport) => io::Error::new(io::ErrorKind::Other, transport).into(),
+        ureq::Error::Transport(transport) => io::Error::other(transport).into(),
     }
 }

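`io::Error::other(e)`, stable in the standard library since Rust 1.74, is shorthand for `io::Error::new(io::ErrorKind::Other, e)`, so this hunk is purely cosmetic. Both spellings side by side:

use std::io;

fn main() {
    // Old spelling, still valid.
    let old_style = io::Error::new(io::ErrorKind::Other, "transport failed");
    // New spelling used by the diff.
    let new_style = io::Error::other("transport failed");
    assert_eq!(old_style.kind(), new_style.kind());
}
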
@@ -66,6 +66,11 @@ impl IndexScheduler {
             }
             IndexOperation::DocumentOperation { index_uid, primary_key, operations, mut tasks } => {
                 progress.update_progress(DocumentOperationProgress::RetrievingConfig);

+                let network = self.network();
+
+                let shards = network.shards();
+
                 // TODO: at some point, for better efficiency we might want to reuse the bumpalo for successive batches.
                 // this is made difficult by the fact we're doing private clones of the index scheduler and sending it
                 // to a fresh thread.
@@ -130,6 +135,7 @@ impl IndexScheduler {
                     &mut new_fields_ids_map,
                     &|| must_stop_processing.get(),
                     progress.clone(),
+                    shards.as_ref(),
                 )
                 .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;

@@ -6,9 +6,9 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
-2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "girafos", primary_key: None }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
+2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "girafos", primary_key: None }}
 3 {uid: 3, batch_uid: 3, status: succeeded, details: { deleted_documents: Some(0) }, kind: DocumentClear { index_uid: "doggos" }}
 4 {uid: 4, batch_uid: 4, status: succeeded, details: { deleted_documents: Some(0) }, kind: DocumentClear { index_uid: "cattos" }}
 5 {uid: 5, batch_uid: 5, status: succeeded, details: { deleted_documents: Some(0) }, kind: DocumentClear { index_uid: "girafos" }}

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }}
 ----------------------------------------------------------------------

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, batch_uid: 1, status: succeeded, details: { deleted_documents: Some(0) }, kind: IndexDeletion { index_uid: "doggos" }}
 ----------------------------------------------------------------------

@@ -1,13 +1,12 @@
 ---
 source: crates/index-scheduler/src/scheduler/test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+0 {uid: 0, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]

@@ -1,13 +1,12 @@
 ---
 source: crates/index-scheduler/src/scheduler/test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+0 {uid: 0, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:

@@ -1,13 +1,12 @@
 ---
 source: crates/index-scheduler/src/scheduler/test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+0 {uid: 0, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }}
 ----------------------------------------------------------------------

@@ -7,7 +7,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]

@@ -1,13 +1,12 @@
 ---
 source: crates/index-scheduler/src/scheduler/test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]

@@ -7,8 +7,8 @@ source: crates/index-scheduler/src/scheduler/test.rs
 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_b", primary_key: Some("id") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "index_b", primary_key: Some("id") }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,]

@@ -7,8 +7,8 @@ source: crates/index-scheduler/src/scheduler/test.rs
 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
-1 {uid: 1, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_b", primary_key: Some("id") }}
+0 {uid: 0, status: enqueued, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
+1 {uid: 1, status: enqueued, details: { primary_key: Some("id"), old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "index_b", primary_key: Some("id") }}
 2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "index_a" }}
 ----------------------------------------------------------------------
 ### Status:

@@ -6,8 +6,8 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
-1 {uid: 1, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+1 {uid: 1, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
 2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }}
 ----------------------------------------------------------------------
 ### Status:

@@ -6,8 +6,8 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
 2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }}
 ----------------------------------------------------------------------
 ### Status:

@@ -6,8 +6,8 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
-1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
 2 {uid: 2, batch_uid: 2, status: succeeded, details: { deleted_documents: Some(0) }, kind: IndexDeletion { index_uid: "doggos" }}
 ----------------------------------------------------------------------
 ### Status:

@@ -1,13 +1,12 @@
 ---
 source: crates/index-scheduler/src/scheduler/test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+0 {uid: 0, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]

@@ -1,14 +1,13 @@
 ---
 source: crates/index-scheduler/src/scheduler/test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
-1 {uid: 1, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
+0 {uid: 0, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+1 {uid: 1, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,]

@@ -1,14 +1,13 @@
 ---
 source: crates/index-scheduler/src/scheduler/test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = true
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
-1 {uid: 1, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
+0 {uid: 0, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+1 {uid: 1, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "cattos", primary_key: None }}
 2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: IndexDeletion { index_uid: "doggos" }}
 ----------------------------------------------------------------------
 ### Status:

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
 1 {uid: 1, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }}
 2 {uid: 2, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }}
 3 {uid: 3, status: enqueued, details: { deleted_documents: None }, kind: DocumentClear { index_uid: "doggos" }}

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { deleted_documents: Some(0) }, kind: DocumentClear { index_uid: "doggos" }}
 2 {uid: 2, batch_uid: 2, status: succeeded, details: { deleted_documents: Some(0) }, kind: DocumentClear { index_uid: "doggos" }}
 3 {uid: 3, batch_uid: 3, status: succeeded, details: { deleted_documents: Some(0) }, kind: DocumentClear { index_uid: "doggos" }}

@@ -1,13 +1,12 @@
 ---
 source: crates/index-scheduler/src/scheduler/test.rs
-snapshot_kind: text
 ---
 ### Autobatching Enabled = false
 ### Processing batch None:
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
+0 {uid: 0, status: enqueued, details: { primary_key: None, old_new_uid: None, new_index_uid: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]

Some files were not shown because too many files have changed in this diff.