Mirror of https://github.com/meilisearch/meilisearch.git
Synced 2025-12-08 21:55:42 +00:00

Compare commits: measure-so...add-socket (278 commits)
Commit SHA1s:

5d8726d92d  bca2974266  13025594a8  2c1c33166d  cdb6e3f45a  1d574bd443
37a4fd7f99  3753f87fd8  5675585fe8  af589c85ec  ac919df37d  73b5722896
c94679bde6  89e2d2b2b9  3a7a20c716  fa1db6b721  1ab6fec903  18ac4032aa
d9115b74f0  0fde49640a  4ee65d870e  ef77c7699b  7382fb21e4  e4ace98004
aa7a34ffe8  6728cfbfac  ea6883189e  fdeb47fb54  e66fccc3f2  73e87c152a
75b2f22add  5a74d4729c  e44e7b5e81  a0b3887709  4b4a6c7863  3085092e04
c4efd1df4e  c32282acb1  92070a3578  a90563df3f  466604725e  995394a516
6e37ae8619  657c645603  7f5d0837c3  0566f2549d  0c2661ea90  62dfbd6255
cc669f90d5  b1dc10e771  4b598fa648  71b364286b  86183e0807  78a4b7949d
dc2cb58cf1  e9580fe619  8205254f4c  efdc5739d7  b31e9bea26  7f048b9732
8b4e2c7b17  645a55317a  8caf97db86  b8a74e0464  fd8447c521  f2d187ba3e
79d8a7a51a  86da0e83fe  0704fb71e9  1e4d4e69c4  6ba4baecbf  7f20c13f3f
462a2329f1  afa3ae0cbd  f6483cf15d  bd34ed01d9  74199f328d  1113c42de0
465afe01b2  7d6768e4c4  f77661ec44  b8fd85a46d  fd43c6c404  2564ec1496
b6b73fe41c  6dde41cc46  163f8023a1  2b120b89e4  84f842233d  633537ccd7
e8d7c00d30  3f6301dbc9  ca71b63ed1  2b6952eda1  79f29eed3c  cc45e264ca
5f474a640d  bbaee3dbc6  877717cb26  716817122a  ff523a2357  29c3aca72a
00f8d03f43  50981ea778  c2caff1716  4c355bede7  174d69ff72  52a52f97cf
5de4b48552  df648ce7a6  af8edab21d  c42746c4cd  98b77aec66  54d3ba3357
6e058709f2  0fbf9ea5b1  9f1fb4b425  1120a5296c  a35a339c3d  cac5836f6f
5239ae0297  2fdb1d8018  3c5e363554  da0dd6febf  a197d63ab6  390eadb733
93f0317b94  29ff02f3ff  d9e0df74ea  dc8a662209  6732dd95d7  95da428dc8
38c4be1c8e  91dfab317f  47e3c4b5c3  533f1d4345  7b55462610  f6114a1ff2
7c084b1286  57f9517a98  72cc573e0a  a48b1d5a79  a94a87ee54  4b55ba68bc
23e14138bb  e44325683a  02c2b660f8  f18e9cb7b3  db0cf3b2ed  f6abf01d2c
28da759f11  ea96d19525  d352b1ee83  3f3cebf5f9  b278815617  40e13ceef3
18a2c13e4e  ed19b7c3c3  66bda2ce8a  1ac008926b  c49d892c82  de962a26f3
005204e9e5  1040e5e2b4  80408c92dc  fa1a0beb0c  5aefe7cd17  e6dd66e4a0
6e3839d8b6  cd271b8762  3ce8500d4c  588000d398  92b151607c  42e7499260
41aa1e1424  24ace5c381  21296190a3  03fda78901  30a143f149  4464d319af
580ea2f450  915cf4bae5  9a756cf2c5  36d8684dc8  b12e997c8a  8bf89ec394
ee62d9ce30  0f965d3574  ade54493ab  07c8ed0459  c3cdc407ec  2f10273d14
321639364f  442d06dce7  8f6a98df07  b44e17c4c3  e3ef0ae19e  57f7af77c7
2d16d0aea1  c817718e07  e64d0e0ca8  21aa430b5e  8535dc0be2  72b9005344
420c33132c  9ef710cad4  48f7329a83  ab1ec9ca21  9d6efd92d2  abdb337fd6
1c755c8899  3a42c3134e  5aa6cb3600  9b7764575b  0e68718027  7c3fc8c655
8acd3f50bb  25791e3f46  866922ecc3  f05ea04879  b1b3a1a98b  143d6cde10
c457069367  bb1283222e  7a5a38f870  ded3cd0dd6  68f885f1c4  9372c34dab
6666c57880  b53a019b07  d262b1df32  ed795bc837  993264227d  953d3a44bd
e5345fb0eb  2d9a055fb9  110dc01f40  9719dec443  fa77a949aa  abe128476f
a663e408ad  986991277f  c2c1ba39ee  35567b2137  00c97c7152  d4ea7cc2a9
8532fe8afc  2413592bbf  553440632e  7a347966da  6c598fa06d  8338df0dbe
4654d51e05  22ef2d877f  76bc2c18e8  59115fd058  a918561ac1  70d71581ee
4fbe048cbf  e06fbcc607  04fa44e7eb  90c0a6db7d  d82f8fd904  cc02920f2b
c26bd68de5  80fdea9afc  e3faacd160  988552e178  0d8199f3b7  4b74803dae
d731fa661b  a1beddd5d9  4109182ca4  1a297c048e  ecee0c922f  303e601b87
f6d2c59bca  50b7093f8e
.github/workflows/bench-manual.yml (vendored, 2 changes)

@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
.github/workflows/bench-pr.yml (vendored, 33 changes)

@@ -16,6 +16,37 @@ jobs:
     runs-on: benchmarks
     timeout-minutes: 180 # 3h
     steps:
+      - name: Check permissions
+        id: permission
+        env:
+          PR_AUTHOR: ${{github.event.issue.user.login }}
+          COMMENT_AUTHOR: ${{github.event.comment.user.login }}
+          REPOSITORY: ${{github.repository}}
+          PR_ID: ${{github.event.issue.number}}
+        run: |
+          PR_REPOSITORY=$(gh api /repos/"$REPOSITORY"/pulls/"$PR_ID" --jq .head.repo.full_name)
+          if $(gh api /repos/"$REPOSITORY"/collaborators/"$PR_AUTHOR"/permission --jq .user.permissions.push)
+          then
+            echo "::notice title=Authentication success::PR author authenticated"
+          else
+            echo "::error title=Authentication error::PR author doesn't have push permission on this repository"
+            exit 1
+          fi
+          if $(gh api /repos/"$REPOSITORY"/collaborators/"$COMMENT_AUTHOR"/permission --jq .user.permissions.push)
+          then
+            echo "::notice title=Authentication success::Comment author authenticated"
+          else
+            echo "::error title=Authentication error::Comment author doesn't have push permission on this repository"
+            exit 1
+          fi
+          if [ "$PR_REPOSITORY" = "$REPOSITORY" ]
+          then
+            echo "::notice title=Authentication success::PR started from main repository"
+          else
+            echo "::error title=Authentication error::PR started from a fork"
+            exit 1
+          fi
+
       - name: Check for Command
         id: command
         uses: xt0rted/slash-command-action@v2
@@ -35,7 +66,7 @@ jobs:
           fetch-depth: 0 # fetch full history to be able to get main commit sha
           ref: ${{ steps.comment-branch.outputs.head_ref }}
 
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
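The permission gate added above reduces to one `gh api .../permission` call per user. As a rough illustration only, here is a minimal Rust sketch of the same check, shelling out to the `gh` CLI; the `has_push_permission` helper and the repo/user values are hypothetical, and the sketch assumes an installed, authenticated `gh`:

    use std::process::Command;

    /// Hypothetical helper mirroring the workflow's check: true when `user`
    /// has push permission on `repo` according to the collaborators API.
    /// Assumes the `gh` CLI is installed and authenticated.
    fn has_push_permission(repo: &str, user: &str) -> std::io::Result<bool> {
        let out = Command::new("gh")
            .args([
                "api",
                &format!("/repos/{repo}/collaborators/{user}/permission"),
                "--jq",
                ".user.permissions.push",
            ])
            .output()?;
        // `--jq` prints the extracted boolean as a literal `true` or `false`.
        Ok(out.status.success() && String::from_utf8_lossy(&out.stdout).trim() == "true")
    }

    fn main() -> std::io::Result<()> {
        // Placeholder values, not taken from the diff.
        if !has_push_permission("meilisearch/meilisearch", "some-user")? {
            eprintln!("author doesn't have push permission on this repository");
            std::process::exit(1);
        }
        Ok(())
    }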
.github/workflows/bench-push-indexing.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
.github/workflows/benchmarks-manual.yml (vendored, 2 changes)

@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
.github/workflows/benchmarks-pr.yml (vendored, 33 changes)

@@ -13,7 +13,38 @@ jobs:
     runs-on: benchmarks
     timeout-minutes: 4320 # 72h
     steps:
-      - uses: helix-editor/rust-toolchain@v1
+      - name: Check permissions
+        id: permission
+        env:
+          PR_AUTHOR: ${{github.event.issue.user.login }}
+          COMMENT_AUTHOR: ${{github.event.comment.user.login }}
+          REPOSITORY: ${{github.repository}}
+          PR_ID: ${{github.event.issue.number}}
+        run: |
+          PR_REPOSITORY=$(gh api /repos/"$REPOSITORY"/pulls/"$PR_ID" --jq .head.repo.full_name)
+          if $(gh api /repos/"$REPOSITORY"/collaborators/"$PR_AUTHOR"/permission --jq .user.permissions.push)
+          then
+            echo "::notice title=Authentication success::PR author authenticated"
+          else
+            echo "::error title=Authentication error::PR author doesn't have push permission on this repository"
+            exit 1
+          fi
+          if $(gh api /repos/"$REPOSITORY"/collaborators/"$COMMENT_AUTHOR"/permission --jq .user.permissions.push)
+          then
+            echo "::notice title=Authentication success::Comment author authenticated"
+          else
+            echo "::error title=Authentication error::Comment author doesn't have push permission on this repository"
+            exit 1
+          fi
+          if [ "$PR_REPOSITORY" = "$REPOSITORY" ]
+          then
+            echo "::notice title=Authentication success::PR started from main repository"
+          else
+            echo "::error title=Authentication error::PR started from a fork"
+            exit 1
+          fi
+
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
@@ -16,7 +16,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
.github/workflows/flaky-tests.yml (vendored, 2 changes)

@@ -17,7 +17,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
       - name: Install cargo-flaky
         run: cargo install cargo-flaky
       - name: Run cargo flaky in the dumps
.github/workflows/fuzzer-indexing.yml (vendored, 2 changes)

@@ -12,7 +12,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
.github/workflows/publish-apt-brew-pkg.yml (vendored, 2 changes)

@@ -25,7 +25,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
       - name: Install cargo-deb
         run: cargo install cargo-deb
       - uses: actions/checkout@v3
.github/workflows/publish-binaries.yml (vendored, 8 changes)

@@ -45,7 +45,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
       - name: Build
         run: cargo build --release --locked
       # No need to upload binaries for dry run (cron)
@@ -75,7 +75,7 @@ jobs:
             asset_name: meilisearch-windows-amd64.exe
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
       - name: Build
         run: cargo build --release --locked
       # No need to upload binaries for dry run (cron)
@@ -101,7 +101,7 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v3
       - name: Installing Rust toolchain
-        uses: helix-editor/rust-toolchain@v1
+        uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
           target: ${{ matrix.target }}
@@ -148,7 +148,7 @@ jobs:
           add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
           apt-get update -y && apt-get install -y docker-ce
       - name: Installing Rust toolchain
-        uses: helix-editor/rust-toolchain@v1
+        uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
           target: ${{ matrix.target }}
.github/workflows/test-suite.yml (vendored, 14 changes)

@@ -31,7 +31,7 @@ jobs:
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
       - name: Setup test with Rust stable
-        uses: helix-editor/rust-toolchain@v1
+        uses: dtolnay/rust-toolchain@1.79
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.1
       - name: Run cargo check without any default features
@@ -56,7 +56,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.1
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -81,7 +81,7 @@ jobs:
         run: |
           apt-get update
           apt-get install --assume-yes build-essential curl
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
       - name: Run cargo build with almost all features
         run: |
           cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda)"
@@ -101,7 +101,7 @@ jobs:
         run: |
           apt-get update
           apt-get install --assume-yes build-essential curl
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
       - name: Run cargo tree without default features and check lindera is not present
         run: |
           if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then
@@ -125,7 +125,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.1
       - name: Run tests in debug
@@ -139,7 +139,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
           components: clippy
@@ -156,7 +156,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
           toolchain: nightly-2024-07-09
@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: helix-editor/rust-toolchain@v1
+      - uses: dtolnay/rust-toolchain@1.79
         with:
           profile: minimal
       - name: Install sd
Cargo.lock (generated, 214 changes)

@@ -55,7 +55,7 @@ dependencies = [
  "encoding_rs",
  "flate2",
  "futures-core",
- "h2",
+ "h2 0.3.26",
  "http 0.2.11",
  "httparse",
  "httpdate",
@@ -386,15 +386,16 @@ checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711"
 
 [[package]]
 name = "arroy"
-version = "0.4.0"
+version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ece9e5347e7fdaaea3181dec7f916677ad5f3fcbac183648ce1924eb4aeef9a"
+checksum = "dfc5f272f38fa063bbff0a7ab5219404e221493de005e2b4078c62d626ef567e"
 dependencies = [
  "bytemuck",
  "byteorder",
  "heed",
  "log",
  "memmap2",
+ "nohash",
  "ordered-float",
  "rand",
  "rayon",
@@ -403,6 +404,16 @@ dependencies = [
  "thiserror",
 ]
 
+[[package]]
+name = "assert-json-diff"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12"
+dependencies = [
+ "serde",
+ "serde_json",
+]
+
 [[package]]
 name = "async-trait"
 version = "0.1.81"
@@ -414,6 +425,12 @@
  "syn 2.0.60",
 ]
 
+[[package]]
+name = "atomic-waker"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
+
 [[package]]
 name = "autocfg"
 version = "1.2.0"
@@ -455,7 +472,7 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
 
 [[package]]
 name = "benchmarks"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "anyhow",
  "bytes",
@@ -511,7 +528,7 @@ dependencies = [
  "proc-macro2",
  "quote",
  "regex",
- "rustc-hash",
+ "rustc-hash 1.1.0",
  "shlex",
  "syn 2.0.60",
 ]
@@ -636,7 +653,7 @@ dependencies = [
 
 [[package]]
 name = "build-info"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "anyhow",
  "time",
@@ -917,20 +934,17 @@ dependencies = [
 
 [[package]]
 name = "charabia"
-version = "0.8.12"
+version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9868a22f10dee80498a8a2b6c641d80bf28ea4495fcf71c2dc4836c2dd23958c"
+checksum = "55ff52497324e7d168505a16949ae836c14595606fab94687238d2f6c8d4c798"
 dependencies = [
  "aho-corasick",
- "cow-utils",
  "csv",
- "deunicode",
  "either",
  "fst",
  "irg-kvariants",
  "jieba-rs",
  "lindera",
- "litemap",
  "once_cell",
  "pinyin",
  "serde",
@@ -938,7 +952,6 @@ dependencies = [
  "unicode-normalization",
  "wana_kana",
  "whatlang",
- "zerovec",
 ]
 
 [[package]]
@@ -1129,12 +1142,6 @@ version = "0.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"
 
-[[package]]
-name = "cow-utils"
-version = "0.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "79bb3adfaf5f75d24b01aee375f7555907840fa2800e5ec8fa3b9e2031830173"
-
 [[package]]
 name = "cpufeatures"
 version = "0.2.12"
@@ -1377,6 +1384,24 @@ dependencies = [
  "syn 2.0.60",
 ]
 
+[[package]]
+name = "deadpool"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb84100978c1c7b37f09ed3ce3e5f843af02c2a2c431bae5b19230dad2c1b490"
+dependencies = [
+ "async-trait",
+ "deadpool-runtime",
+ "num_cpus",
+ "tokio",
+]
+
+[[package]]
+name = "deadpool-runtime"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b"
+
 [[package]]
 name = "debugid"
 version = "0.8.0"
@@ -1517,12 +1542,6 @@ dependencies = [
  "syn 2.0.60",
 ]
 
-[[package]]
-name = "deunicode"
-version = "1.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "339544cc9e2c4dc3fc7149fd630c5f22263a4fdf18a98afd0075784968b5cf00"
-
 [[package]]
 name = "digest"
 version = "0.10.7"
@@ -1604,7 +1623,7 @@ dependencies = [
 
 [[package]]
 name = "dump"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "anyhow",
  "big_s",
@@ -1816,7 +1835,7 @@ checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a"
 
 [[package]]
 name = "file-store"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "tempfile",
  "thiserror",
@@ -1838,7 +1857,7 @@ dependencies = [
 
 [[package]]
 name = "filter-parser"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "insta",
  "nom",
@@ -1858,7 +1877,7 @@ dependencies = [
 
 [[package]]
 name = "flatten-serde-json"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "criterion",
  "serde_json",
@@ -1982,7 +2001,7 @@ dependencies = [
 
 [[package]]
 name = "fuzzers"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "arbitrary",
  "clap",
@@ -2231,6 +2250,25 @@ dependencies = [
  "tracing",
 ]
 
+[[package]]
+name = "h2"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "http 1.1.0",
+ "indexmap",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
 [[package]]
 name = "half"
 version = "1.8.2"
@@ -2441,9 +2479,11 @@ dependencies = [
  "bytes",
  "futures-channel",
  "futures-util",
+ "h2 0.4.5",
  "http 1.1.0",
  "http-body",
  "httparse",
+ "httpdate",
  "itoa",
  "pin-project-lite",
  "smallvec",
@@ -2513,7 +2553,7 @@ checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d"
 
 [[package]]
 name = "index-scheduler"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "anyhow",
  "arroy",
@@ -2707,7 +2747,7 @@ dependencies = [
 
 [[package]]
 name = "json-depth-checker"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "criterion",
  "serde_json",
@@ -3223,12 +3263,6 @@ dependencies = [
  "unicode-segmentation",
 ]
 
-[[package]]
-name = "litemap"
-version = "0.7.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704"
-
 [[package]]
 name = "lmdb-master-sys"
 version = "0.2.2"
@@ -3280,15 +3314,6 @@ version = "0.4.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"
 
-[[package]]
-name = "lru"
-version = "0.12.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc"
-dependencies = [
- "hashbrown 0.14.3",
-]
-
 [[package]]
 name = "lzma-rs"
 version = "0.3.0"
@@ -3341,7 +3366,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
 
 [[package]]
 name = "meili-snap"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "insta",
  "md5",
@@ -3350,7 +3375,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "actix-cors",
  "actix-http",
@@ -3390,6 +3415,7 @@ dependencies = [
  "meilisearch-types",
  "mimalloc",
  "mime",
+ "mopa-maintained",
  "num_cpus",
  "obkv",
  "once_cell",
@@ -3432,13 +3458,14 @@ dependencies = [
  "url",
  "urlencoding",
  "uuid",
+ "wiremock",
  "yaup",
  "zip 2.1.3",
 ]
 
 [[package]]
 name = "meilisearch-auth"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "base64 0.22.1",
  "enum-iterator",
@@ -3457,7 +3484,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch-types"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "actix-web",
  "anyhow",
@@ -3487,7 +3514,7 @@ dependencies = [
 
 [[package]]
 name = "meilitool"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "anyhow",
  "clap",
@@ -3495,6 +3522,7 @@ dependencies = [
  "file-store",
  "meilisearch-auth",
  "meilisearch-types",
+ "serde",
  "time",
  "uuid",
 ]
@@ -3517,7 +3545,7 @@ dependencies = [
 
 [[package]]
 name = "milli"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "arroy",
  "big_s",
@@ -3549,7 +3577,6 @@ dependencies = [
  "json-depth-checker",
  "levenshtein_automata",
  "liquid",
- "lru",
  "maplit",
  "md5",
  "meili-snap",
@@ -3655,12 +3682,24 @@ dependencies = [
  "syn 2.0.60",
 ]
 
+[[package]]
+name = "mopa-maintained"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "79b7f3e22167862cc7c95b21a6f326c22e4bf40da59cbf000b368a310173ba11"
+
 [[package]]
 name = "mutually_exclusive_features"
 version = "0.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6d02c0b00610773bb7fc61d85e13d86c7858cbdf00e1a120bfc41bc055dbaa0e"
 
+[[package]]
+name = "nohash"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0f889fb66f7acdf83442c35775764b51fed3c606ab9cee51500dbde2cf528ca"
+
 [[package]]
 name = "nom"
 version = "7.1.3"
@@ -3952,7 +3991,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
 
 [[package]]
 name = "permissive-json-pointer"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "big_s",
  "serde_json",
@@ -4283,7 +4322,7 @@ dependencies = [
  "pin-project-lite",
  "quinn-proto",
  "quinn-udp",
- "rustc-hash",
+ "rustc-hash 1.1.0",
  "rustls",
  "thiserror",
  "tokio",
@@ -4292,14 +4331,14 @@ dependencies = [
 
 [[package]]
 name = "quinn-proto"
-version = "0.11.3"
+version = "0.11.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ddf517c03a109db8100448a4be38d498df8a210a99fe0e1b9eaf39e78c640efe"
+checksum = "fadfaed2cd7f389d0161bb73eeb07b7b78f8691047a6f3e73caaeae55310a4a6"
 dependencies = [
  "bytes",
  "rand",
  "ring",
- "rustc-hash",
+ "rustc-hash 2.0.0",
  "rustls",
  "slab",
  "thiserror",
@@ -4550,9 +4589,8 @@ dependencies = [
 
 [[package]]
 name = "rhai"
-version = "1.19.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61797318be89b1a268a018a92a7657096d83f3ecb31418b9e9c16dcbb043b702"
+version = "1.20.0"
+source = "git+https://github.com/rhaiscript/rhai?rev=ef3df63121d27aacd838f366f2b83fd65f20a1e4#ef3df63121d27aacd838f366f2b83fd65f20a1e4"
 dependencies = [
  "ahash 0.8.11",
  "bitflags 2.6.0",
@@ -4569,8 +4607,7 @@
 [[package]]
 name = "rhai_codegen"
 version = "2.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a5a11a05ee1ce44058fa3d5961d05194fdbe3ad6b40f904af764d81b86450e6b"
+source = "git+https://github.com/rhaiscript/rhai?rev=ef3df63121d27aacd838f366f2b83fd65f20a1e4#ef3df63121d27aacd838f366f2b83fd65f20a1e4"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -4672,6 +4709,12 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
 
+[[package]]
+name = "rustc-hash"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "583034fd73374156e66797ed8e5b0d5690409c9226b22d87cb7f19821c05d152"
+
 [[package]]
 name = "rustc_version"
 version = "0.4.0"
@@ -4810,9 +4853,9 @@ checksum = "a3f0bf26fd526d2a95683cd0f87bf103b8539e2ca1ef48ce002d67aad59aa0b4"
 
 [[package]]
 name = "serde"
-version = "1.0.204"
+version = "1.0.209"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12"
+checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09"
 dependencies = [
  "serde_derive",
 ]
@@ -4828,9 +4871,9 @@ dependencies = [
 
 [[package]]
 name = "serde_derive"
-version = "1.0.204"
+version = "1.0.209"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222"
+checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -5323,7 +5366,7 @@ dependencies = [
  "fancy-regex 0.12.0",
  "lazy_static",
  "parking_lot",
- "rustc-hash",
+ "rustc-hash 1.1.0",
 ]
 
 [[package]]
@@ -6291,6 +6334,30 @@ dependencies = [
  "windows-sys 0.48.0",
 ]
 
+[[package]]
+name = "wiremock"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec874e1eef0df2dcac546057fe5e29186f09c378181cd7b635b4b7bcc98e9d81"
+dependencies = [
+ "assert-json-diff",
+ "async-trait",
+ "base64 0.21.7",
+ "deadpool",
+ "futures",
+ "http 1.1.0",
+ "http-body-util",
+ "hyper",
+ "hyper-util",
+ "log",
+ "once_cell",
+ "regex",
+ "serde",
+ "serde_json",
+ "tokio",
+ "url",
+]
+
 [[package]]
 name = "wyz"
 version = "0.5.1"
@@ -6313,7 +6380,7 @@ dependencies = [
 
 [[package]]
 name = "xtask"
-version = "1.9.0"
+version = "1.11.1"
 dependencies = [
  "anyhow",
  "build-info",
@@ -6436,15 +6503,6 @@ dependencies = [
  "syn 2.0.60",
 ]
 
-[[package]]
-name = "zerovec"
-version = "0.10.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079"
-dependencies = [
- "zerofrom",
-]
-
 [[package]]
 name = "zip"
 version = "1.1.4"
Cargo.toml

@@ -22,7 +22,7 @@ members = [
 ]
 
 [workspace.package]
-version = "1.9.0"
+version = "1.11.1"
 authors = [
   "Quentin de Quelen <quentin@dequelen.me>",
   "Clément Renault <clement@meilisearch.com>",
README.md

@@ -45,14 +45,14 @@ See the list of all our example apps in our [demos repository](https://github.co
 ## ✨ Features
 - **Hybrid search:** Combine the best of both [semantic](https://www.meilisearch.com/docs/learn/experimental/vector_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) & full-text search to get the most relevant results
 - **Search-as-you-type:** Find & display results in less than 50 milliseconds to provide an intuitive experience
-- **[Typo tolerance](https://www.meilisearch.com/docs/learn/configuration/typo_tolerance?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** get relevant matches even when queries contain typos and misspellings
+- **[Typo tolerance](https://www.meilisearch.com/docs/learn/relevancy/typo_tolerance_settings?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** get relevant matches even when queries contain typos and misspellings
 - **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** enhance your users' search experience with custom filters and build a faceted search interface in a few lines of code
 - **[Sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** sort results based on price, date, or pretty much anything else your users need
-- **[Synonym support](https://www.meilisearch.com/docs/learn/configuration/synonyms?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** configure synonyms to include more relevant content in your search results
+- **[Synonym support](https://www.meilisearch.com/docs/learn/relevancy/synonyms?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** configure synonyms to include more relevant content in your search results
 - **[Geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** filter and sort documents based on geographic data
 - **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
 - **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** control which users can access what data with API keys that allow fine-grained permissions handling
-- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** personalize search results for any number of application tenants
+- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/multitenancy_tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** personalize search results for any number of application tenants
 - **Highly Customizable:** customize Meilisearch to your specific needs or use our out-of-the-box and hassle-free presets
 - **[RESTful API](https://www.meilisearch.com/docs/reference/api/overview?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** integrate Meilisearch in your technical stack with our plugins and SDKs
 - **Easy to install, deploy, and maintain**
@@ -286,6 +286,7 @@ pub(crate) mod test {
             pagination: Setting::NotSet,
             embedders: Setting::NotSet,
             search_cutoff_ms: Setting::NotSet,
+            localized_attributes: Setting::NotSet,
             _kind: std::marker::PhantomData,
         };
         settings.check()
@@ -379,6 +379,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
                 v5::Setting::NotSet => v6::Setting::NotSet,
             },
             embedders: v6::Setting::NotSet,
+            localized_attributes: v6::Setting::NotSet,
             search_cutoff_ms: v6::Setting::NotSet,
             _kind: std::marker::PhantomData,
         }
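Both `localized_attributes` hunks above follow the same dump-compatibility rule: a settings field introduced in a newer version is filled in as `NotSet` when converting an older dump, so an import never invents a value. A minimal self-contained sketch of that pattern (the `OldSettings`/`NewSettings` types are illustrative stand-ins, not the actual v5/v6 types):

    // Illustrative stand-ins for the versioned settings types.
    enum Setting<T> {
        Set(T),
        NotSet,
    }

    struct OldSettings {
        pagination: Setting<usize>,
    }

    struct NewSettings {
        pagination: Setting<usize>,
        // Field added in the newer dump version.
        localized_attributes: Setting<Vec<String>>,
    }

    impl From<OldSettings> for NewSettings {
        fn from(old: OldSettings) -> Self {
            NewSettings {
                pagination: old.pagination,
                // An old dump cannot express the new field, so it defaults to NotSet.
                localized_attributes: Setting::NotSet,
            }
        }
    }

    fn main() {
        let new: NewSettings = OldSettings { pagination: Setting::Set(1000) }.into();
        assert!(matches!(new.localized_attributes, Setting::NotSet));
    }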
@@ -255,6 +255,8 @@ pub(crate) mod test {
     }
     "###);
 
+    insta::assert_json_snapshot!(vector_index.settings().unwrap());
+
     {
         let documents: Result<Vec<_>> = vector_index.documents().unwrap().collect();
         let mut documents = documents.unwrap();
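The new `insta::assert_json_snapshot!` call above serializes the settings and compares them against a stored snapshot file, such as the one whose diff follows. A minimal hedged usage sketch (requires the `insta` dev-dependency with its `json` feature; the test name is made up):

    #[cfg(test)]
    mod tests {
        #[test]
        fn settings_snapshot_demo() {
            // On the first run insta records the snapshot; later runs diff against it.
            let settings = serde_json::json!({ "searchCutoffMs": null });
            insta::assert_json_snapshot!(settings);
        }
    }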
@@ -1,783 +1,56 @@
 ---
 source: dump/src/reader/mod.rs
-expression: document
+expression: vector_index.settings().unwrap()
 ---
-{
-  "id": "e3",
-  "desc": "overriden vector + map",
-  "_vectors": {
-    "default": [
-      0.2,
-      0.1,
-      [... 765 further lines of "0.1," elided: the "default" embedding has 768 entries, 0.2 followed by 767 repetitions of 0.1 ...]
-      0.1
-    ],
-    "toto": [
-      0.1
-    ]
-  }
-}
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "sort",
+    "exactness"
+  ],
+  "stopWords": [],
+  "nonSeparatorTokens": [],
+  "separatorTokens": [],
+  "dictionary": [],
+  "synonyms": {},
+  "distinctAttribute": null,
+  "proximityPrecision": "byWord",
+  "typoTolerance": {
+    "enabled": true,
+    "minWordSizeForTypos": {
+      "oneTypo": 5,
+      "twoTypos": 9
+    },
+    "disableOnWords": [],
+    "disableOnAttributes": []
+  },
+  "faceting": {
+    "maxValuesPerFacet": 100,
+    "sortFacetValuesBy": {
+      "*": "alpha"
+    }
+  },
+  "pagination": {
+    "maxTotalHits": 1000
+  },
+  "embedders": {
+    "default": {
+      "source": "huggingFace",
+      "model": "BAAI/bge-base-en-v1.5",
+      "revision": "617ca489d9e86b49b8167676d8220688b99db36e",
+      "documentTemplate": "{% for field in fields %} {{ field.name }}: {{ field.value }}\n{% endfor %}"
+    }
+  },
+  "searchCutoffMs": null
+}
Three file diffs suppressed because they are too large.
@@ -0,0 +1,780 @@
+---
+source: dump/src/reader/mod.rs
+expression: document
+---
+{
+  "id": "e0",
+  "desc": "overriden vector",
+  "_vectors": {
+    "default": [
+      0.1,
+      [... 766 further lines of "0.1," elided: the "default" embedding has 768 entries, every one 0.1 ...]
+      0.1
+    ]
+  }
+}
@@ -27,6 +27,7 @@ pub enum Condition<'a> {
     LowerThanOrEqual(Token<'a>),
     Between { from: Token<'a>, to: Token<'a> },
     Contains { keyword: Token<'a>, word: Token<'a> },
+    StartsWith { keyword: Token<'a>, word: Token<'a> },
 }

 /// condition = value ("==" | ">" ...) value

@@ -121,6 +122,34 @@ pub fn parse_not_contains(input: Span) -> IResult<FilterCondition> {
     ))
 }

+/// starts with = value "STARTS WITH" value
+pub fn parse_starts_with(input: Span) -> IResult<FilterCondition> {
+    let (input, (fid, starts_with, value)) =
+        tuple((parse_value, tag("STARTS WITH"), cut(parse_value)))(input)?;
+    Ok((
+        input,
+        FilterCondition::Condition {
+            fid,
+            op: StartsWith { keyword: Token { span: starts_with, value: None }, word: value },
+        },
+    ))
+}
+
+/// not starts with = value "NOT" WS+ "STARTS WITH" value
+pub fn parse_not_starts_with(input: Span) -> IResult<FilterCondition> {
+    let keyword = tuple((tag("NOT"), multispace1, tag("STARTS WITH")));
+    let (input, (fid, (_not, _spaces, starts_with), value)) =
+        tuple((parse_value, keyword, cut(parse_value)))(input)?;
+
+    Ok((
+        input,
+        FilterCondition::Not(Box::new(FilterCondition::Condition {
+            fid,
+            op: StartsWith { keyword: Token { span: starts_with, value: None }, word: value },
+        })),
+    ))
+}
+
 /// to = value value "TO" WS+ value
 pub fn parse_to(input: Span) -> IResult<FilterCondition> {
     let (input, (key, from, _, _, to)) =
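The two parsers above register the new `STARTS WITH` and `NOT STARTS WITH` operators. For illustration, a minimal sketch of how they would surface through the crate's public API — the `FilterCondition::parse` entry point is taken from the `pub use` lines later in this diff, and the `Debug` output is an assumption:

    // Sketch only: the exact return type and Debug derive are assumed.
    use filter_parser::FilterCondition;

    fn main() {
        // Parses into Condition::StartsWith { keyword, word }.
        let cond = FilterCondition::parse("subscribers STARTS WITH 'hel'");
        println!("{cond:?}");

        // The negated form wraps the same condition in FilterCondition::Not.
        let not_cond = FilterCondition::parse("subscribers NOT STARTS WITH 'hel'");
        println!("{not_cond:?}");
    }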
@@ -146,7 +146,7 @@ impl<'a> Display for Error<'a> {
         }
         ErrorKind::InvalidPrimary => {
             let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
-            writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
+            writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
         }
         ErrorKind::InvalidEscapedNumber => {
             writeln!(f, "Found an invalid escaped sequence number: `{}`.", escaped_input)?

@@ -49,7 +49,7 @@ use std::fmt::Debug;
 pub use condition::{parse_condition, parse_to, Condition};
 use condition::{
     parse_contains, parse_exists, parse_is_empty, parse_is_not_empty, parse_is_not_null,
-    parse_is_null, parse_not_contains, parse_not_exists,
+    parse_is_null, parse_not_contains, parse_not_exists, parse_not_starts_with, parse_starts_with,
 };
 use error::{cut_with_err, ExpectedValueKind, NomErrorExt};
 pub use error::{Error, ErrorKind};

@@ -166,7 +166,8 @@ impl<'a> FilterCondition<'a> {
                 | Condition::LowerThan(_)
                 | Condition::LowerThanOrEqual(_)
                 | Condition::Between { .. } => None,
-                Condition::Contains { keyword, word: _ } => Some(keyword),
+                Condition::Contains { keyword, word: _ }
+                | Condition::StartsWith { keyword, word: _ } => Some(keyword),
             },
             FilterCondition::Not(this) => this.use_contains_operator(),
             FilterCondition::Or(seq) | FilterCondition::And(seq) => {

@@ -484,6 +485,8 @@ fn parse_primary(input: Span, depth: usize) -> IResult<FilterCondition> {
         parse_to,
         parse_contains,
         parse_not_contains,
+        parse_starts_with,
+        parse_not_starts_with,
         // the next lines are only for error handling and are written at the end to have the least possible performance impact
         parse_geo,
         parse_geo_distance,

@@ -567,6 +570,7 @@ impl<'a> std::fmt::Display for Condition<'a> {
             Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"),
             Condition::Between { from, to } => write!(f, "{from} TO {to}"),
             Condition::Contains { word, keyword: _ } => write!(f, "CONTAINS {word}"),
+            Condition::StartsWith { word, keyword: _ } => write!(f, "STARTS WITH {word}"),
         }
     }
 }

@@ -680,6 +684,13 @@ pub mod tests {
     insta::assert_snapshot!(p("NOT subscribers NOT CONTAINS 'hello'"), @"{subscribers} CONTAINS {hello}");
     insta::assert_snapshot!(p("subscribers NOT CONTAINS 'hello'"), @"NOT ({subscribers} CONTAINS {hello})");

+    // Test STARTS WITH + NOT STARTS WITH
+    insta::assert_snapshot!(p("subscribers STARTS WITH 'hel'"), @"{subscribers} STARTS WITH {hel}");
+    insta::assert_snapshot!(p("NOT subscribers STARTS WITH 'hel'"), @"NOT ({subscribers} STARTS WITH {hel})");
+    insta::assert_snapshot!(p("subscribers NOT STARTS WITH hel"), @"NOT ({subscribers} STARTS WITH {hel})");
+    insta::assert_snapshot!(p("NOT subscribers NOT STARTS WITH 'hel'"), @"{subscribers} STARTS WITH {hel}");
+    insta::assert_snapshot!(p("subscribers NOT STARTS WITH 'hel'"), @"NOT ({subscribers} STARTS WITH {hel})");
+
     // Test nested NOT
     insta::assert_snapshot!(p("NOT NOT NOT NOT x = 5"), @"{x} = {5}");
     insta::assert_snapshot!(p("NOT NOT (NOT NOT x = 5)"), @"{x} = {5}");

@@ -751,7 +762,7 @@ pub mod tests {
     "###);

     insta::assert_snapshot!(p("'OR'"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
     1:5 'OR'
     "###);

@@ -761,12 +772,12 @@ pub mod tests {
     "###);

     insta::assert_snapshot!(p("channel Ponce"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
     1:14 channel Ponce
     "###);

     insta::assert_snapshot!(p("channel = Ponce OR"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
     19:19 channel = Ponce OR
     "###);

@@ -851,12 +862,12 @@ pub mod tests {
     "###);

     insta::assert_snapshot!(p("colour NOT EXIST"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
     1:17 colour NOT EXIST
     "###);

     insta::assert_snapshot!(p("subscribers 100 TO1000"), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
     1:23 subscribers 100 TO1000
     "###);

@@ -919,35 +930,35 @@ pub mod tests {
     "###);

     insta::assert_snapshot!(p(r#"value NULL"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`.
     1:11 value NULL
     "###);
     insta::assert_snapshot!(p(r#"value NOT NULL"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`.
     1:15 value NOT NULL
     "###);
     insta::assert_snapshot!(p(r#"value EMPTY"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`.
     1:12 value EMPTY
     "###);
     insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`.
     1:16 value NOT EMPTY
     "###);
     insta::assert_snapshot!(p(r#"value IS"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `value IS`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS`.
     1:9 value IS
     "###);
     insta::assert_snapshot!(p(r#"value IS NOT"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`.
     1:13 value IS NOT
     "###);
     insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`.
     1:16 value IS EXISTS
     "###);
     insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r###"
-    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`.
+    Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `CONTAINS`, `NOT CONTAINS`, `STARTS WITH`, `NOT STARTS WITH`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`.
     1:20 value IS NOT EXISTS
     "###);
 }

@@ -212,6 +212,8 @@ fn is_keyword(s: &str) -> bool {
         | "NULL"
         | "EMPTY"
         | "CONTAINS"
+        | "STARTS"
+        | "WITH"
         | "_geoRadius"
         | "_geoBoundingBox"
     )
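Because `STARTS` and `WITH` join the reserved keywords above, a bare value spelled exactly like them can no longer appear unquoted in a filter. A hedged sketch of the expected behavior (the exact error variant is an assumption):

    use filter_parser::FilterCondition;

    fn main() {
        // A reserved word used as a bare value is expected to be rejected...
        assert!(FilterCondition::parse("field = WITH").is_err());
        // ...while the quoted form remains a plain string value.
        assert!(FilterCondition::parse("field = 'WITH'").is_ok());
    }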
@@ -40,7 +40,7 @@ ureq = "2.10.0"
 uuid = { version = "1.10.0", features = ["serde", "v4"] }

 [dev-dependencies]
-arroy = "0.4.0"
+arroy = "0.5.0"
 big_s = "1.0.2"
 crossbeam = "0.8.4"
 insta = { version = "1.39.0", features = ["json", "redactions"] }

@@ -25,8 +25,9 @@ enum AutobatchKind {
         primary_key: Option<String>,
     },
     DocumentEdition,
-    DocumentDeletion,
-    DocumentDeletionByFilter,
+    DocumentDeletion {
+        by_filter: bool,
+    },
     DocumentClear,
     Settings {
         allow_index_creation: bool,

@@ -65,10 +66,12 @@ impl From<KindWithContent> for AutobatchKind {
                 ..
             } => AutobatchKind::DocumentImport { method, allow_index_creation, primary_key },
             KindWithContent::DocumentEdition { .. } => AutobatchKind::DocumentEdition,
-            KindWithContent::DocumentDeletion { .. } => AutobatchKind::DocumentDeletion,
+            KindWithContent::DocumentDeletion { .. } => {
+                AutobatchKind::DocumentDeletion { by_filter: false }
+            }
             KindWithContent::DocumentClear { .. } => AutobatchKind::DocumentClear,
             KindWithContent::DocumentDeletionByFilter { .. } => {
-                AutobatchKind::DocumentDeletionByFilter
+                AutobatchKind::DocumentDeletion { by_filter: true }
             }
             KindWithContent::SettingsUpdate { allow_index_creation, is_deletion, .. } => {
                 AutobatchKind::Settings {

@@ -105,9 +108,7 @@ pub enum BatchKind {
     },
     DocumentDeletion {
         deletion_ids: Vec<TaskId>,
-    },
-    DocumentDeletionByFilter {
-        id: TaskId,
+        includes_by_filter: bool,
     },
     ClearAndSettings {
         other: Vec<TaskId>,

@@ -205,12 +206,13 @@ impl BatchKind {
                 allow_index_creation,
             ),
             K::DocumentEdition => (Break(BatchKind::DocumentEdition { id: task_id }), false),
-            K::DocumentDeletion => {
-                (Continue(BatchKind::DocumentDeletion { deletion_ids: vec![task_id] }), false)
-            }
-            K::DocumentDeletionByFilter => {
-                (Break(BatchKind::DocumentDeletionByFilter { id: task_id }), false)
-            }
+            K::DocumentDeletion { by_filter: includes_by_filter } => (
+                Continue(BatchKind::DocumentDeletion {
+                    deletion_ids: vec![task_id],
+                    includes_by_filter,
+                }),
+                false,
+            ),
             K::Settings { allow_index_creation } => (
                 Continue(BatchKind::Settings { allow_index_creation, settings_ids: vec![task_id] }),
                 allow_index_creation,

@@ -228,7 +230,7 @@ impl BatchKind {

         match (self, kind) {
             // We don't batch any of these operations
-            (this, K::IndexCreation | K::IndexUpdate | K::IndexSwap | K::DocumentEdition | K::DocumentDeletionByFilter) => Break(this),
+            (this, K::IndexCreation | K::IndexUpdate | K::IndexSwap | K::DocumentEdition) => Break(this),
             // We must not batch tasks that don't have the same index creation rights if the index doesn't already exist.
             (this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => {
                 Break(this)

@@ -264,7 +266,7 @@ impl BatchKind {
             // The index deletion can batch with everything but must stop after
             (
                 BatchKind::DocumentClear { mut ids }
-                | BatchKind::DocumentDeletion { deletion_ids: mut ids }
+                | BatchKind::DocumentDeletion { deletion_ids: mut ids, includes_by_filter: _ }
                | BatchKind::DocumentOperation { method: _, allow_index_creation: _, primary_key: _, operation_ids: mut ids }
                | BatchKind::Settings { allow_index_creation: _, settings_ids: mut ids },
                K::IndexDeletion,

@@ -284,7 +286,7 @@ impl BatchKind {

             (
                 BatchKind::DocumentClear { mut ids },
-                K::DocumentClear | K::DocumentDeletion,
+                K::DocumentClear | K::DocumentDeletion { by_filter: _ },
             ) => {
                 ids.push(id);
                 Continue(BatchKind::DocumentClear { ids })

@@ -328,7 +330,7 @@ impl BatchKind {
             }
             (
                 BatchKind::DocumentOperation { method, allow_index_creation, primary_key, mut operation_ids },
-                K::DocumentDeletion,
+                K::DocumentDeletion { by_filter: false },
             ) => {
                 operation_ids.push(id);

@@ -339,6 +341,13 @@ impl BatchKind {
                     operation_ids,
                 })
             }
+            // We can't batch a document operation with a delete by filter
+            (
+                this @ BatchKind::DocumentOperation { .. },
+                K::DocumentDeletion { by_filter: true },
+            ) => {
+                Break(this)
+            }
             // but we can't autobatch documents if it's not the same kind
             // this match branch MUST be AFTER the previous one
             (

@@ -357,13 +366,18 @@ impl BatchKind {
                 operation_ids,
             }),

-            (BatchKind::DocumentDeletion { mut deletion_ids }, K::DocumentClear) => {
+            (BatchKind::DocumentDeletion { mut deletion_ids, includes_by_filter: _ }, K::DocumentClear) => {
                 deletion_ids.push(id);
                 Continue(BatchKind::DocumentClear { ids: deletion_ids })
             }
+            // we can't autobatch the deletion and import if the document deletion contained a filter
+            (
+                this @ BatchKind::DocumentDeletion { deletion_ids: _, includes_by_filter: true },
+                K::DocumentImport { .. }
+            ) => Break(this),
             // we can autobatch the deletion and import if the index already exists
             (
-                BatchKind::DocumentDeletion { mut deletion_ids },
+                BatchKind::DocumentDeletion { mut deletion_ids, includes_by_filter: false },
                 K::DocumentImport { method, allow_index_creation, primary_key }
             ) if index_already_exists => {
                 deletion_ids.push(id);

@@ -377,7 +391,7 @@ impl BatchKind {
             }
             // we can autobatch the deletion and import if both can't create an index
             (
-                BatchKind::DocumentDeletion { mut deletion_ids },
+                BatchKind::DocumentDeletion { mut deletion_ids, includes_by_filter: false },
                 K::DocumentImport { method, allow_index_creation, primary_key }
             ) if !allow_index_creation => {
                 deletion_ids.push(id);

@@ -396,9 +410,9 @@ impl BatchKind {
             ) => {
                 Break(this)
             }
-            (BatchKind::DocumentDeletion { mut deletion_ids }, K::DocumentDeletion) => {
+            (BatchKind::DocumentDeletion { mut deletion_ids, includes_by_filter }, K::DocumentDeletion { by_filter }) => {
                 deletion_ids.push(id);
-                Continue(BatchKind::DocumentDeletion { deletion_ids })
+                Continue(BatchKind::DocumentDeletion { deletion_ids, includes_by_filter: includes_by_filter | by_filter })
             }
             (this @ BatchKind::DocumentDeletion { .. }, K::Settings { .. }) => Break(this),
|
||||
}),
|
||||
(
|
||||
this @ BatchKind::Settings { .. },
|
||||
K::DocumentImport { .. } | K::DocumentDeletion,
|
||||
K::DocumentImport { .. } | K::DocumentDeletion { .. },
|
||||
) => Break(this),
|
||||
(
|
||||
BatchKind::Settings { mut settings_ids, allow_index_creation },
|
||||
@@ -443,7 +457,7 @@ impl BatchKind {
|
||||
settings_ids,
|
||||
allow_index_creation,
|
||||
},
|
||||
K::DocumentDeletion,
|
||||
K::DocumentDeletion { .. },
|
||||
) => {
|
||||
other.push(id);
|
||||
Continue(BatchKind::ClearAndSettings {
|
||||
@@ -505,7 +519,7 @@ impl BatchKind {
|
||||
// this MUST be AFTER the two previous branch
|
||||
(
|
||||
this @ BatchKind::SettingsAndDocumentOperation { .. },
|
||||
K::DocumentDeletion | K::DocumentImport { .. },
|
||||
K::DocumentDeletion { .. } | K::DocumentImport { .. },
|
||||
) => Break(this),
|
||||
(
|
||||
BatchKind::SettingsAndDocumentOperation { mut settings_ids, method, allow_index_creation,primary_key, operation_ids },
|
||||
@@ -525,8 +539,7 @@ impl BatchKind {
|
||||
| BatchKind::IndexDeletion { .. }
|
||||
| BatchKind::IndexUpdate { .. }
|
||||
| BatchKind::IndexSwap { .. }
|
||||
| BatchKind::DocumentEdition { .. }
|
||||
| BatchKind::DocumentDeletionByFilter { .. },
|
||||
| BatchKind::DocumentEdition { .. },
|
||||
_,
|
||||
) => {
|
||||
unreachable!()
|
||||
@@ -616,6 +629,13 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
fn doc_del_fil() -> KindWithContent {
|
||||
KindWithContent::DocumentDeletionByFilter {
|
||||
index_uid: String::from("doggo"),
|
||||
filter_expr: serde_json::json!("cuteness > 100"),
|
||||
}
|
||||
}
|
||||
|
||||
fn doc_clr() -> KindWithContent {
|
||||
KindWithContent::DocumentClear { index_uid: String::from("doggo") }
|
||||
}
|
||||
@@ -676,10 +696,16 @@ mod tests {
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1, 2] }, false))");
|
||||
|
||||
// we can autobatch one or multiple DocumentDeletion together
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: false }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2], includes_by_filter: false }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: false }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2], includes_by_filter: false }, false))");
|
||||
|
||||
// we can autobatch one or multiple DocumentDeletionByFilter together
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_del_fil(), doc_del_fil()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del_fil()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del_fil(), doc_del_fil(), doc_del_fil()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2], includes_by_filter: true }, false))");
|
||||
|
||||
// we can autobatch one or multiple Settings together
|
||||
debug_snapshot!(autobatch_from(true, None, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
|
||||
@@ -722,25 +748,63 @@ mod tests {
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||
|
||||
// But we can't autobatch document addition with document deletion by filter
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del_fil()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del_fil()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_del_fil()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_del_fil()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del_fil()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del_fil()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del_fil()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del_fil()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, None), doc_del_fil()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, None), doc_del_fil()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, None), doc_del_fil()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, None), doc_del_fil()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del_fil()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del_fil()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del_fil()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del_fil()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
|
||||
// And the other way around
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(UpdateDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(UpdateDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del_fil(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del_fil(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del_fil(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del_fil(), doc_imp(UpdateDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_document_operation_dont_autobatch_with_other() {
|
||||
// addition, updates and deletion can't batch together
|
||||
// addition, updates and deletion by filter can't batch together
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del_fil()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del_fil()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_create()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_create()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: false }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_update()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_update()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: false }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_swap()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_swap()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: false }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: true }, false))");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -807,6 +871,7 @@ mod tests {
|
||||
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_del_fil()]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
@@ -816,6 +881,7 @@ mod tests {
|
||||
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_del_fil()]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");
|
||||
@@ -827,6 +893,7 @@ mod tests {
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del_fil(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
@@ -836,6 +903,7 @@ mod tests {
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del_fil(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
|
||||
debug_snapshot!(autobatch_from(false,None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
|
||||
@@ -901,10 +969,10 @@ mod tests {
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||
|
||||
// batch deletion and addition
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: false }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: false }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: false }, false))");
|
||||
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0], includes_by_filter: false }, false))");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -110,9 +110,9 @@ pub(crate) enum IndexOperation {
|
||||
index_uid: String,
|
||||
task: Task,
|
||||
},
|
||||
IndexDocumentDeletionByFilter {
|
||||
DocumentDeletion {
|
||||
index_uid: String,
|
||||
task: Task,
|
||||
tasks: Vec<Task>,
|
||||
},
|
||||
DocumentClear {
|
||||
index_uid: String,
|
||||
@@ -165,11 +165,11 @@ impl Batch {
|
||||
Batch::IndexOperation { op, .. } => match op {
|
||||
IndexOperation::DocumentOperation { tasks, .. }
|
||||
| IndexOperation::Settings { tasks, .. }
|
||||
| IndexOperation::DocumentDeletion { tasks, .. }
|
||||
| IndexOperation::DocumentClear { tasks, .. } => {
|
||||
RoaringBitmap::from_iter(tasks.iter().map(|task| task.uid))
|
||||
}
|
||||
IndexOperation::DocumentEdition { task, .. }
|
||||
| IndexOperation::IndexDocumentDeletionByFilter { task, .. } => {
|
||||
IndexOperation::DocumentEdition { task, .. } => {
|
||||
RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
|
||||
}
|
||||
IndexOperation::SettingsAndDocumentOperation {
|
||||
@@ -234,7 +234,7 @@ impl IndexOperation {
|
||||
match self {
|
||||
IndexOperation::DocumentOperation { index_uid, .. }
|
||||
| IndexOperation::DocumentEdition { index_uid, .. }
|
||||
| IndexOperation::IndexDocumentDeletionByFilter { index_uid, .. }
|
||||
| IndexOperation::DocumentDeletion { index_uid, .. }
|
||||
| IndexOperation::DocumentClear { index_uid, .. }
|
||||
| IndexOperation::Settings { index_uid, .. }
|
||||
| IndexOperation::DocumentClearAndSetting { index_uid, .. }
|
||||
@@ -252,8 +252,8 @@ impl fmt::Display for IndexOperation {
|
||||
IndexOperation::DocumentEdition { .. } => {
|
||||
f.write_str("IndexOperation::DocumentEdition")
|
||||
}
|
||||
IndexOperation::IndexDocumentDeletionByFilter { .. } => {
|
||||
f.write_str("IndexOperation::IndexDocumentDeletionByFilter")
|
||||
IndexOperation::DocumentDeletion { .. } => {
|
||||
f.write_str("IndexOperation::DocumentDeletion")
|
||||
}
|
||||
IndexOperation::DocumentClear { .. } => f.write_str("IndexOperation::DocumentClear"),
|
||||
IndexOperation::Settings { .. } => f.write_str("IndexOperation::Settings"),
|
||||
@@ -289,21 +289,6 @@ impl IndexScheduler {
|
||||
},
|
||||
must_create_index,
|
||||
})),
|
||||
BatchKind::DocumentDeletionByFilter { id } => {
|
||||
let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
|
||||
match &task.kind {
|
||||
KindWithContent::DocumentDeletionByFilter { index_uid, .. } => {
|
||||
Ok(Some(Batch::IndexOperation {
|
||||
op: IndexOperation::IndexDocumentDeletionByFilter {
|
||||
index_uid: index_uid.clone(),
|
||||
task,
|
||||
},
|
||||
must_create_index: false,
|
||||
}))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
BatchKind::DocumentEdition { id } => {
|
||||
let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
|
||||
match &task.kind {
|
||||
@@ -366,30 +351,11 @@ impl IndexScheduler {
|
||||
must_create_index,
|
||||
}))
|
||||
}
|
||||
BatchKind::DocumentDeletion { deletion_ids } => {
|
||||
BatchKind::DocumentDeletion { deletion_ids, includes_by_filter: _ } => {
|
||||
let tasks = self.get_existing_tasks(rtxn, deletion_ids)?;
|
||||
|
||||
let mut operations = Vec::with_capacity(tasks.len());
|
||||
let mut documents_counts = Vec::with_capacity(tasks.len());
|
||||
for task in &tasks {
|
||||
match task.kind {
|
||||
KindWithContent::DocumentDeletion { ref documents_ids, .. } => {
|
||||
operations.push(DocumentOperation::Delete(documents_ids.clone()));
|
||||
documents_counts.push(documents_ids.len() as u64);
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Some(Batch::IndexOperation {
|
||||
op: IndexOperation::DocumentOperation {
|
||||
index_uid,
|
||||
primary_key: None,
|
||||
method: IndexDocumentsMethod::ReplaceDocuments,
|
||||
documents_counts,
|
||||
operations,
|
||||
tasks,
|
||||
},
|
||||
op: IndexOperation::DocumentDeletion { index_uid, tasks },
|
||||
must_create_index,
|
||||
}))
|
||||
}
|
||||
@@ -1439,7 +1405,7 @@ impl IndexScheduler {
|
||||
{
|
||||
(original_filter, context, function)
|
||||
} else {
|
||||
// In the case of a `documentDeleteByFilter` the details MUST be set
|
||||
// In the case of a `documentEdition` the details MUST be set
|
||||
unreachable!();
|
||||
};
|
||||
|
||||
@@ -1469,52 +1435,102 @@ impl IndexScheduler {
|
||||
|
||||
Ok(vec![task])
|
||||
}
|
||||
IndexOperation::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
|
||||
let filter =
|
||||
if let KindWithContent::DocumentDeletionByFilter { filter_expr, .. } =
|
||||
&task.kind
|
||||
{
|
||||
filter_expr
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
let deleted_documents = delete_document_by_filter(
|
||||
index_wtxn,
|
||||
filter,
|
||||
self.index_mapper.indexer_config(),
|
||||
self.must_stop_processing.clone(),
|
||||
index,
|
||||
);
|
||||
let original_filter = if let Some(Details::DocumentDeletionByFilter {
|
||||
original_filter,
|
||||
deleted_documents: _,
|
||||
}) = task.details
|
||||
{
|
||||
original_filter
|
||||
} else {
|
||||
// In the case of a `documentDeleteByFilter` the details MUST be set
|
||||
unreachable!();
|
||||
};
|
||||
IndexOperation::DocumentDeletion { mut tasks, index_uid: _ } => {
|
||||
let mut to_delete = RoaringBitmap::new();
|
||||
let external_documents_ids = index.external_documents_ids();
|
||||
|
||||
match deleted_documents {
|
||||
Ok(deleted_documents) => {
|
||||
task.status = Status::Succeeded;
|
||||
task.details = Some(Details::DocumentDeletionByFilter {
|
||||
original_filter,
|
||||
deleted_documents: Some(deleted_documents),
|
||||
});
|
||||
}
|
||||
Err(e) => {
|
||||
task.status = Status::Failed;
|
||||
task.details = Some(Details::DocumentDeletionByFilter {
|
||||
original_filter,
|
||||
deleted_documents: Some(0),
|
||||
});
|
||||
task.error = Some(e.into());
|
||||
for task in tasks.iter_mut() {
|
||||
let before = to_delete.len();
|
||||
task.status = Status::Succeeded;
|
||||
|
||||
match &task.kind {
|
||||
KindWithContent::DocumentDeletion { index_uid: _, documents_ids } => {
|
||||
for id in documents_ids {
|
||||
if let Some(id) = external_documents_ids.get(index_wtxn, id)? {
|
||||
to_delete.insert(id);
|
||||
}
|
||||
}
|
||||
let will_be_removed = to_delete.len() - before;
|
||||
task.details = Some(Details::DocumentDeletion {
|
||||
provided_ids: documents_ids.len(),
|
||||
deleted_documents: Some(will_be_removed),
|
||||
});
|
||||
}
|
||||
KindWithContent::DocumentDeletionByFilter { index_uid: _, filter_expr } => {
|
||||
let before = to_delete.len();
|
||||
let filter = match Filter::from_json(filter_expr) {
|
||||
Ok(filter) => filter,
|
||||
Err(err) => {
|
||||
// theorically, this should be catched by deserr before reaching the index-scheduler and cannot happens
|
||||
task.status = Status::Failed;
|
||||
task.error = match err {
|
||||
milli::Error::UserError(
|
||||
milli::UserError::InvalidFilterExpression { .. },
|
||||
) => Some(
|
||||
Error::from(err)
|
||||
.with_custom_error_code(Code::InvalidDocumentFilter)
|
||||
.into(),
|
||||
),
|
||||
e => Some(e.into()),
|
||||
};
|
||||
None
|
||||
}
|
||||
};
|
||||
if let Some(filter) = filter {
|
||||
let candidates =
|
||||
filter.evaluate(index_wtxn, index).map_err(|err| match err {
|
||||
milli::Error::UserError(
|
||||
milli::UserError::InvalidFilter(_),
|
||||
) => Error::from(err)
|
||||
.with_custom_error_code(Code::InvalidDocumentFilter),
|
||||
e => e.into(),
|
||||
});
|
||||
match candidates {
|
||||
Ok(candidates) => to_delete |= candidates,
|
||||
Err(err) => {
|
||||
task.status = Status::Failed;
|
||||
task.error = Some(err.into());
|
||||
}
|
||||
};
|
||||
}
|
||||
let will_be_removed = to_delete.len() - before;
|
||||
if let Some(Details::DocumentDeletionByFilter {
|
||||
original_filter: _,
|
||||
deleted_documents,
|
||||
}) = &mut task.details
|
||||
{
|
||||
*deleted_documents = Some(will_be_removed);
|
||||
} else {
|
||||
// In the case of a `documentDeleteByFilter` the details MUST be set
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(vec![task])
|
||||
let config = IndexDocumentsConfig {
|
||||
update_method: IndexDocumentsMethod::ReplaceDocuments,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let must_stop_processing = self.must_stop_processing.clone();
|
||||
let mut builder = milli::update::IndexDocuments::new(
|
||||
index_wtxn,
|
||||
index,
|
||||
self.index_mapper.indexer_config(),
|
||||
config,
|
||||
|indexing_step| tracing::debug!(update = ?indexing_step),
|
||||
|| must_stop_processing.get(),
|
||||
)?;
|
||||
|
||||
let (new_builder, _count) =
|
||||
builder.remove_documents_from_db_no_batch(&to_delete)?;
|
||||
builder = new_builder;
|
||||
|
||||
let _ = builder.execute()?;
|
||||
|
||||
Ok(tasks)
|
||||
}
|
||||
IndexOperation::Settings { index_uid: _, settings, mut tasks } => {
|
||||
let indexer_config = self.index_mapper.indexer_config();
|
||||
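The rewritten arm above collects everything to remove — explicit ids resolved through `external_documents_ids` and candidates from evaluated filters — into a single `RoaringBitmap`, then issues one deletion. A reduced sketch of that accumulation pattern (the heed transaction and milli types are elided):

    use roaring::RoaringBitmap;

    fn main() {
        let mut to_delete = RoaringBitmap::new();

        // Internal ids resolved one by one from the external-ids mapping...
        for internal_id in [3u32, 7, 9] {
            to_delete.insert(internal_id);
        }

        // ...and filter candidates OR-ed in, like `to_delete |= candidates`.
        let candidates: RoaringBitmap = (7u32..11).collect();
        to_delete |= candidates;

        // A single index update then removes everything collected: {3, 7, 8, 9, 10}.
        assert_eq!(to_delete.len(), 5);
    }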
@@ -1718,46 +1734,6 @@ impl IndexScheduler {
         }
     }

-    fn delete_document_by_filter<'a>(
-        wtxn: &mut RwTxn<'a>,
-        filter: &serde_json::Value,
-        indexer_config: &IndexerConfig,
-        must_stop_processing: MustStopProcessing,
-        index: &'a Index,
-    ) -> Result<u64> {
-        let filter = Filter::from_json(filter)?;
-        Ok(if let Some(filter) = filter {
-            let candidates = filter.evaluate(wtxn, index).map_err(|err| match err {
-                milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
-                    Error::from(err).with_custom_error_code(Code::InvalidDocumentFilter)
-                }
-                e => e.into(),
-            })?;
-
-            let config = IndexDocumentsConfig {
-                update_method: IndexDocumentsMethod::ReplaceDocuments,
-                ..Default::default()
-            };
-
-            let mut builder = milli::update::IndexDocuments::new(
-                wtxn,
-                index,
-                indexer_config,
-                config,
-                |indexing_step| tracing::debug!(update = ?indexing_step),
-                || must_stop_processing.get(),
-            )?;
-
-            let (new_builder, count) = builder.remove_documents_from_db_no_batch(&candidates)?;
-            builder = new_builder;
-
-            let _ = builder.execute()?;
-            count
-        } else {
-            0
-        })
-    }
-
     fn edit_documents_by_function<'a>(
         wtxn: &mut RwTxn<'a>,
         filter: &Option<serde_json::Value>,

@@ -87,7 +87,7 @@ impl RoFeatures {
             Ok(())
         } else {
             Err(FeatureNotEnabledError {
-                disabled_action: "Using `CONTAINS` in a filter",
+                disabled_action: "Using `CONTAINS` or `STARTS WITH` in a filter",
                 feature: "contains filter",
                 issue_link: "https://github.com/orgs/meilisearch/discussions/763",
             }
|
||||
/// Association of every field name with the number of times it occurs in the documents.
|
||||
pub field_distribution: FieldDistribution,
|
||||
/// Creation date of the index.
|
||||
#[serde(with = "time::serde::rfc3339")]
|
||||
pub created_at: OffsetDateTime,
|
||||
/// Date of the last update of the index.
|
||||
#[serde(with = "time::serde::rfc3339")]
|
||||
pub updated_at: OffsetDateTime,
|
||||
}
|
||||
|
||||
|
||||
@@ -11,6 +11,9 @@ use crate::index_mapper::IndexMapper;
|
||||
use crate::{IndexScheduler, Kind, Status, BEI128};
|
||||
|
||||
pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
|
||||
// Since we'll snapshot the index right afterward, we don't need to ensure it's internally consistent for every run.
|
||||
// We can only do it for the release run, where the function runs way faster.
|
||||
#[cfg(not(debug_assertions))]
|
||||
scheduler.assert_internally_consistent();
|
||||
|
||||
let IndexScheduler {
|
||||
|
||||
@@ -35,6 +35,7 @@ pub type TaskId = u32;
use std::collections::{BTreeMap, HashMap};
use std::io::{self, BufReader, Read};
use std::ops::{Bound, RangeBounds};
+use std::panic::{catch_unwind, AssertUnwindSafe};
use std::path::{Path, PathBuf};
use std::sync::atomic::Ordering::{self, Relaxed};
use std::sync::atomic::{AtomicBool, AtomicU32};
@@ -612,19 +613,24 @@ impl IndexScheduler {
                #[cfg(test)]
                run.breakpoint(Breakpoint::Init);

-                run.wake_up.wait();
+                run.wake_up.wait_timeout(std::time::Duration::from_secs(60));

                loop {
-                    match run.tick() {
-                        Ok(TickOutcome::TickAgain(_)) => (),
-                        Ok(TickOutcome::WaitForSignal) => run.wake_up.wait(),
-                        Err(e) => {
+                    let ret = catch_unwind(AssertUnwindSafe(|| run.tick()));
+                    match ret {
+                        Ok(Ok(TickOutcome::TickAgain(_))) => (),
+                        Ok(Ok(TickOutcome::WaitForSignal)) => run.wake_up.wait(),
+                        Ok(Err(e)) => {
                            tracing::error!("{e}");
                            // Wait one second when an irrecoverable error occurs.
                            if !e.is_recoverable() {
                                std::thread::sleep(Duration::from_secs(1));
                            }
                        }
+                        Err(_panic) => {
+                            tracing::error!("Internal error: Unexpected panic in the `IndexScheduler::run` method.");
+                        }
                    }
                }
            })
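
The run loop now survives panics inside a tick: `catch_unwind` turns a panic into an `Err` value, so the scheduler thread logs it and keeps going instead of dying. A self-contained sketch of the same pattern (names hypothetical):

use std::panic::{catch_unwind, AssertUnwindSafe};

enum TickOutcome { TickAgain, WaitForSignal }

fn tick() -> Result<TickOutcome, String> {
    Ok(TickOutcome::TickAgain)
}

fn run_one_iteration() {
    match catch_unwind(AssertUnwindSafe(tick)) {
        Ok(Ok(TickOutcome::TickAgain)) => (),
        Ok(Ok(TickOutcome::WaitForSignal)) => { /* park until woken */ }
        Ok(Err(e)) => eprintln!("tick failed: {e}"),
        Err(_panic) => eprintln!("unexpected panic in tick"),
    }
}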
@@ -1257,7 +1263,7 @@ impl IndexScheduler {
                #[cfg(test)]
                self.maybe_fail(tests::FailureLocation::UpdatingTaskAfterProcessBatchFailure)?;

-                tracing::info!("Batch failed {}", error);
+                tracing::error!("Batch failed {}", error);

                self.update_task(&mut wtxn, &task)
                    .map_err(|e| Error::TaskDatabaseUpdate(Box::new(e)))?;
@@ -1471,7 +1477,7 @@ impl IndexScheduler {
            .map(
                |IndexEmbeddingConfig {
                     name,
-                     config: milli::vector::EmbeddingConfig { embedder_options, prompt },
+                     config: milli::vector::EmbeddingConfig { embedder_options, prompt, quantized },
                     ..
                 }| {
                    let prompt =
@@ -1480,7 +1486,10 @@ impl IndexScheduler {
                    {
                        let embedders = self.embedders.read().unwrap();
                        if let Some(embedder) = embedders.get(&embedder_options) {
-                            return Ok((name, (embedder.clone(), prompt)));
+                            return Ok((
+                                name,
+                                (embedder.clone(), prompt, quantized.unwrap_or_default()),
+                            ));
                        }
                    }

@@ -1494,7 +1503,7 @@ impl IndexScheduler {
                        let mut embedders = self.embedders.write().unwrap();
                        embedders.insert(embedder_options, embedder.clone());
                    }
-                    Ok((name, (embedder, prompt)))
+                    Ok((name, (embedder, prompt, quantized.unwrap_or_default())))
                },
            )
            .collect();
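
The lookup above uses the classic read-then-write cache: take the shared read lock for the fast path, and only take the exclusive write lock to insert a freshly built embedder on a miss. A minimal sketch of that pattern (hypothetical names, std only):

use std::collections::HashMap;
use std::sync::{Arc, RwLock};

fn get_or_build(cache: &RwLock<HashMap<String, Arc<String>>>, key: &str) -> Arc<String> {
    // Fast path: shared read lock.
    if let Some(hit) = cache.read().unwrap().get(key) {
        return hit.clone();
    }
    // Slow path: build outside the lock, then insert under the write lock.
    let built = Arc::new(format!("embedder for {key}"));
    cache.write().unwrap().insert(key.to_string(), built.clone());
    built
}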
@@ -1758,6 +1767,7 @@ mod tests {
    use crossbeam::channel::RecvTimeoutError;
    use file_store::File;
+    use insta::assert_json_snapshot;
    use maplit::btreeset;
    use meili_snap::{json_string, snapshot};
    use meilisearch_auth::AuthFilter;
    use meilisearch_types::document_formats::DocumentFormatError;
@@ -2000,11 +2010,13 @@ mod tests {
        fn advance_till(&mut self, breakpoints: impl IntoIterator<Item = Breakpoint>) {
            for breakpoint in breakpoints {
                let b = self.advance();
-                let state = snapshot_index_scheduler(&self.index_scheduler);
                assert_eq!(
-                    b, breakpoint,
-                    "Was expecting the breakpoint `{:?}` but instead got `{:?}`.\n{state}",
-                    breakpoint, b
+                    b,
+                    breakpoint,
+                    "Was expecting the breakpoint `{:?}` but instead got `{:?}`.\n{}",
+                    breakpoint,
+                    b,
+                    snapshot_index_scheduler(&self.index_scheduler)
                );
            }
        }
@@ -2028,7 +2040,6 @@ mod tests {
        // Wait for one successful batch.
        #[track_caller]
        fn advance_one_successful_batch(&mut self) {
-            self.index_scheduler.assert_internally_consistent();
            self.advance_till([Start, BatchCreated]);
            loop {
                match self.advance() {
@@ -2047,7 +2058,6 @@ mod tests {
            }

            self.advance_till([AfterProcessing]);
-            self.index_scheduler.assert_internally_consistent();
        }

        // Wait for one failed batch.
@@ -2547,6 +2557,117 @@ mod tests {
        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
    }

    #[test]
    fn fail_in_process_batch_for_document_deletion() {
        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);

        use meilisearch_types::settings::{Settings, Unchecked};
        let mut new_settings: Box<Settings<Unchecked>> = Box::default();
        new_settings.filterable_attributes = Setting::Set(btreeset!(S("catto")));

        index_scheduler
            .register(
                KindWithContent::SettingsUpdate {
                    index_uid: S("doggos"),
                    new_settings,
                    is_deletion: false,
                    allow_index_creation: true,
                },
                None,
                false,
            )
            .unwrap();

        let content = r#"[
            { "id": 1, "doggo": "jean bob" },
            { "id": 2, "catto": "jorts" },
            { "id": 3, "doggo": "bork" }
        ]"#;

        let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap();
        let documents_count = read_json(content.as_bytes(), &mut file).unwrap();
        file.persist().unwrap();
        index_scheduler
            .register(
                KindWithContent::DocumentAdditionOrUpdate {
                    index_uid: S("doggos"),
                    primary_key: Some(S("id")),
                    method: ReplaceDocuments,
                    content_file: uuid,
                    documents_count,
                    allow_index_creation: true,
                },
                None,
                false,
            )
            .unwrap();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_setting_and_document_addition");

        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_adding_the_settings");
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_adding_the_documents");

        index_scheduler
            .register(
                KindWithContent::DocumentDeletion {
                    index_uid: S("doggos"),
                    documents_ids: vec![S("1")],
                },
                None,
                false,
            )
            .unwrap();
        // This one should not be caught by Meilisearch, but it's still nice to handle it because if we ever break the filters it could happen.
        index_scheduler
            .register(
                KindWithContent::DocumentDeletionByFilter {
                    index_uid: S("doggos"),
                    filter_expr: serde_json::json!(true),
                },
                None,
                false,
            )
            .unwrap();
        // Should fail because `id` is not a filterable attribute.
        index_scheduler
            .register(
                KindWithContent::DocumentDeletionByFilter {
                    index_uid: S("doggos"),
                    filter_expr: serde_json::json!("id = 2"),
                },
                None,
                false,
            )
            .unwrap();
        index_scheduler
            .register(
                KindWithContent::DocumentDeletionByFilter {
                    index_uid: S("doggos"),
                    filter_expr: serde_json::json!("catto EXISTS"),
                },
                None,
                false,
            )
            .unwrap();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_document_deletions");

        // Everything should be batched together.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_removing_the_documents");

        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
        let documents = index
            .all_documents(&rtxn)
            .unwrap()
            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
            .collect::<Vec<_>>();
        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents_remaining_should_only_be_bork");
    }

    #[test]
    fn do_not_batch_task_of_different_indexes() {
        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
@@ -3047,6 +3168,8 @@ mod tests {
            api_key: Setting::Set(S("My super secret")),
            url: Setting::Set(S("http://localhost:7777")),
            dimensions: Setting::Set(4),
+            request: Setting::Set(serde_json::json!("{{text}}")),
+            response: Setting::Set(serde_json::json!("{{embedding}}")),
            ..Default::default()
        };
        embedders.insert(S("default"), Setting::Set(embedding_settings));
@@ -5006,6 +5129,8 @@ mod tests {
            api_key: Setting::Set(S("My super secret")),
            url: Setting::Set(S("http://localhost:7777")),
            dimensions: Setting::Set(384),
+            request: Setting::Set(serde_json::json!("{{text}}")),
+            response: Setting::Set(serde_json::json!("{{embedding}}")),
            ..Default::default()
        };
        embedders.insert(S("A_fakerest"), Setting::Set(embedding_settings));
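
The `rest` embedder now carries explicit `request` and `response` templates, where `{{text}}` marks where the input goes and `{{embedding}}` marks where the vector is found in the reply. A sketch of the corresponding settings payload as JSON, built with `serde_json` (field names taken from the task.details snapshots below; not an exhaustive field list):

use serde_json::json;

fn main() {
    // Field names mirror the task.details snapshots below.
    let embedder_settings = json!({
        "source": "rest",
        "apiKey": "My super secret",
        "dimensions": 384,
        "url": "http://localhost:7777",
        "request": "{{text}}",
        "response": "{{embedding}}",
    });
    println!("{embedder_settings:#}");
}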
@@ -5075,10 +5200,11 @@ mod tests {
            let simple_hf_name = name.clone();

            let configs = index_scheduler.embedders(configs).unwrap();
-            let (hf_embedder, _) = configs.get(&simple_hf_name).unwrap();
-            let beagle_embed = hf_embedder.embed_one(S("Intel the beagle best doggo")).unwrap();
-            let lab_embed = hf_embedder.embed_one(S("Max the lab best doggo")).unwrap();
-            let patou_embed = hf_embedder.embed_one(S("kefir the patou best doggo")).unwrap();
+            let (hf_embedder, _, _) = configs.get(&simple_hf_name).unwrap();
+            let beagle_embed =
+                hf_embedder.embed_one(S("Intel the beagle best doggo"), None).unwrap();
+            let lab_embed = hf_embedder.embed_one(S("Max the lab best doggo"), None).unwrap();
+            let patou_embed = hf_embedder.embed_one(S("kefir the patou best doggo"), None).unwrap();
            (fakerest_name, simple_hf_name, beagle_embed, lab_embed, patou_embed)
        };
@@ -5393,7 +5519,11 @@ mod tests {
                ),
                prompt: PromptData {
                    template: "{{doc.doggo}}",
+                    max_bytes: Some(
+                        400,
+                    ),
                },
+                quantized: None,
            },
            user_provided: RoaringBitmap<[1, 2]>,
        },
@@ -5406,28 +5536,8 @@ mod tests {

        // the document with the id 3 should keep its original embedding
        let docid = index.external_documents_ids.get(&rtxn, "3").unwrap().unwrap();
-        let mut embeddings = Vec::new();
-
-        'vectors: for i in 0..=u8::MAX {
-            let reader = arroy::Reader::open(&rtxn, i as u16, index.vector_arroy)
-                .map(Some)
-                .or_else(|e| match e {
-                    arroy::Error::MissingMetadata(_) => Ok(None),
-                    e => Err(e),
-                })
-                .transpose();
-
-            let Some(reader) = reader else {
-                break 'vectors;
-            };
-
-            let embedding = reader.unwrap().item_vector(&rtxn, docid).unwrap();
-            if let Some(embedding) = embedding {
-                embeddings.push(embedding)
-            } else {
-                break 'vectors;
-            }
-        }
+        let embeddings = index.embeddings(&rtxn, docid).unwrap();
+        let embeddings = &embeddings["my_doggo_embedder"];

        snapshot!(embeddings.len(), @"1");
        assert!(embeddings[0].iter().all(|i| *i == 3.0), "{:?}", embeddings[0]);
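
The hand-rolled loop over every arroy tree is replaced by a single accessor that returns each document's embeddings keyed by embedder name. A rough sketch of the shape the test now relies on (hypothetical stand-in types, not the milli API):

use std::collections::HashMap;

// Stand-in for `index.embeddings(&rtxn, docid)`.
fn embeddings_of(_docid: u32) -> HashMap<String, Vec<Vec<f32>>> {
    HashMap::from([(String::from("my_doggo_embedder"), vec![vec![3.0; 4]])])
}

fn main() {
    let embeddings = embeddings_of(3);
    let vectors = &embeddings["my_doggo_embedder"];
    assert_eq!(vectors.len(), 1);
    assert!(vectors[0].iter().all(|v| *v == 3.0));
}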
@@ -5607,8 +5717,12 @@ mod tests {
                },
            ),
            prompt: PromptData {
-                template: "{% for field in fields %} {{ field.name }}: {{ field.value }}\n{% endfor %}",
+                template: "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
+                max_bytes: Some(
+                    400,
+                ),
            },
+            quantized: None,
        },
        user_provided: RoaringBitmap<[0]>,
    },
@@ -5647,8 +5761,12 @@ mod tests {
                },
            ),
            prompt: PromptData {
-                template: "{% for field in fields %} {{ field.name }}: {{ field.value }}\n{% endfor %}",
+                template: "{% for field in fields %}{% if field.is_searchable and field.value != nil %}{{ field.name }}: {{ field.value }}\n{% endif %}{% endfor %}",
+                max_bytes: Some(
+                    400,
+                ),
            },
+            quantized: None,
        },
        user_provided: RoaringBitmap<[]>,
    },
@@ -8,7 +8,9 @@ expression: task.details
    "source": "rest",
    "apiKey": "MyXXXX...",
    "dimensions": 384,
-    "url": "http://localhost:7777"
+    "url": "http://localhost:7777",
+    "request": "{{text}}",
+    "response": "{{embedding}}"
  },
  "B_small_hf": {
    "source": "huggingFace",
@@ -8,16 +8,8 @@ expression: fakerest_config.embedder_options
    "distribution": null,
    "dimensions": 384,
    "url": "http://localhost:7777",
-    "query": null,
-    "input_field": [
-      "input"
-    ],
-    "path_to_embeddings": [
-      "data"
-    ],
-    "embedding_object": [
-      "embedding"
-    ],
-    "input_type": "text"
+    "request": "{{text}}",
+    "response": "{{embedding}}",
+    "headers": {}
  }
}
@@ -8,7 +8,9 @@ expression: task.details
    "source": "rest",
    "apiKey": "MyXXXX...",
    "dimensions": 384,
-    "url": "http://localhost:7777"
+    "url": "http://localhost:7777",
+    "request": "{{text}}",
+    "response": "{{embedding}}"
  },
  "B_small_hf": {
    "source": "huggingFace",

@@ -8,7 +8,9 @@ expression: task.details
    "source": "rest",
    "apiKey": "MyXXXX...",
    "dimensions": 4,
-    "url": "http://localhost:7777"
+    "url": "http://localhost:7777",
+    "request": "{{text}}",
+    "response": "{{embedding}}"
  }
}
}
@@ -1,6 +1,6 @@
---
source: index-scheduler/src/lib.rs
-expression: embedding_config.embedder_options
+expression: config.embedder_options
---
{
  "Rest": {
@@ -8,16 +8,8 @@ expression: embedding_config.embedder_options
    "distribution": null,
    "dimensions": 4,
    "url": "http://localhost:7777",
-    "query": null,
-    "input_field": [
-      "input"
-    ],
-    "path_to_embeddings": [
-      "data"
-    ],
-    "embedding_object": [
-      "embedding"
-    ],
-    "input_type": "text"
+    "request": "{{text}}",
+    "response": "{{embedding}}",
+    "headers": {}
  }
}
@@ -8,7 +8,9 @@ expression: task.details
    "source": "rest",
    "apiKey": "MyXXXX...",
    "dimensions": 4,
-    "url": "http://localhost:7777"
+    "url": "http://localhost:7777",
+    "request": "{{text}}",
+    "response": "{{embedding}}"
  }
}
}
@@ -0,0 +1,44 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [1,]
"settingsUpdate" [0,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,]
----------------------------------------------------------------------
### Index Mapper:
doggos: { number_of_documents: 3, field_distribution: {"catto": 1, "doggo": 2, "id": 3} }

----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@@ -0,0 +1,43 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,]
succeeded [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [1,]
"settingsUpdate" [0,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,]
----------------------------------------------------------------------
### Index Mapper:
doggos: { number_of_documents: 0, field_distribution: {} }

----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000

----------------------------------------------------------------------
@@ -0,0 +1,43 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,]
succeeded [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [1,]
"settingsUpdate" [0,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,]
----------------------------------------------------------------------
### Index Mapper:
doggos: { number_of_documents: 0, field_distribution: {} }

----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000

----------------------------------------------------------------------
@@ -0,0 +1,56 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_document_ids: 1, deleted_documents: Some(1) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1"] }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Invalid type for filter subexpression: expected: String, Array, found: true.", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: true, deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: Bool(true) }}
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Attribute `id` is not filterable. Available filterable attributes are: `catto`.\n1:3 id = 2", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: "id = 2", deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("id = 2") }}
5 {uid: 5, status: succeeded, details: { original_filter: "catto EXISTS", deleted_documents: Some(1) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("catto EXISTS") }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [0,1,2,5,]
failed [3,4,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [1,]
"documentDeletion" [2,3,4,5,]
"settingsUpdate" [0,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
doggos: { number_of_documents: 1, field_distribution: {"doggo": 1, "id": 1} }

----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,3,4,5,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,3,4,5,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@@ -0,0 +1,9 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "id": 3,
    "doggo": "bork"
  }
]
@@ -0,0 +1,53 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_document_ids: 1, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1"] }}
3 {uid: 3, status: enqueued, details: { original_filter: true, deleted_documents: None }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: Bool(true) }}
4 {uid: 4, status: enqueued, details: { original_filter: "id = 2", deleted_documents: None }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("id = 2") }}
5 {uid: 5, status: enqueued, details: { original_filter: "catto EXISTS", deleted_documents: None }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("catto EXISTS") }}
----------------------------------------------------------------------
### Status:
enqueued [2,3,4,5,]
succeeded [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [1,]
"documentDeletion" [2,3,4,5,]
"settingsUpdate" [0,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
doggos: { number_of_documents: 3, field_distribution: {"catto": 1, "doggo": 2, "id": 3} }

----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@@ -0,0 +1,39 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [1,]
"settingsUpdate" [0,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,]
----------------------------------------------------------------------
### Index Mapper:

----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000

----------------------------------------------------------------------
@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
-0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
@@ -46,4 +46,3 @@ doggos: { number_of_documents: 1, field_distribution: {"_vectors": 1, "breed": 1
### File Store:

----------------------------------------------------------------------
-
@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
-0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
@@ -45,4 +45,3 @@ doggos: { number_of_documents: 1, field_distribution: {"_vectors": 1, "breed": 1
00000000-0000-0000-0000-000000000001

----------------------------------------------------------------------
-
@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
-0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
@@ -42,4 +42,3 @@ doggos: { number_of_documents: 1, field_distribution: {"_vectors": 1, "breed": 1
### File Store:

----------------------------------------------------------------------
-
@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
|
||||
[]
|
||||
----------------------------------------------------------------------
|
||||
### All Tasks:
|
||||
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: UpdateDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
@@ -41,4 +41,3 @@ doggos: { number_of_documents: 0, field_distribution: {} }
00000000-0000-0000-0000-000000000000

----------------------------------------------------------------------


@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,]
@@ -33,4 +33,3 @@ doggos [0,]
### File Store:

----------------------------------------------------------------------


@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), url: NotSet, query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"A_fakerest": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(384), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet }), "B_small_hf": Set(EmbeddingSettings { source: Set(HuggingFace), model: Set("sentence-transformers/all-MiniLM-L6-v2"), revision: Set("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"), api_key: NotSet, dimensions: NotSet, binary_quantized: NotSet, document_template: Set("{{doc.doggo}} the {{doc.breed}} best doggo"), document_template_max_bytes: NotSet, url: NotSet, request: NotSet, response: NotSet, headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
@@ -37,4 +37,3 @@ doggos: { number_of_documents: 0, field_distribution: {} }
### File Store:

----------------------------------------------------------------------


@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,]
@@ -33,4 +33,3 @@ doggos [0,]
### File Store:

----------------------------------------------------------------------


@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), document_template: NotSet, url: Set("http://localhost:7777"), query: NotSet, input_field: NotSet, path_to_embeddings: NotSet, embedding_object: NotSet, input_type: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
0 {uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
@@ -37,4 +37,3 @@ doggos: { number_of_documents: 0, field_distribution: {} }
### File Store:

----------------------------------------------------------------------


@@ -66,3 +66,8 @@ khmer = ["milli/khmer"]
vietnamese = ["milli/vietnamese"]
# force swedish character recomposition
swedish-recomposition = ["milli/swedish-recomposition"]
# allow german tokenization
german = ["milli/german"]
# allow turkish normalization
turkish = ["milli/turkish"]

@@ -238,8 +238,14 @@ InvalidIndexLimit , InvalidRequest , BAD_REQUEST ;
InvalidIndexOffset , InvalidRequest , BAD_REQUEST ;
InvalidIndexPrimaryKey , InvalidRequest , BAD_REQUEST ;
InvalidIndexUid , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchFacets , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchFacetsByIndex , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchFacetOrder , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchFederated , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchFederationOptions , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchMaxValuesPerFacet , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchMergeFacets , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchQueryFacets , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchQueryPagination , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchQueryRankingRules , InvalidRequest , BAD_REQUEST ;
InvalidMultiSearchWeight , InvalidRequest , BAD_REQUEST ;
@@ -256,6 +262,7 @@ InvalidSearchCropLength , InvalidRequest , BAD_REQUEST ;
InvalidSearchCropMarker , InvalidRequest , BAD_REQUEST ;
InvalidSearchFacets , InvalidRequest , BAD_REQUEST ;
InvalidSearchSemanticRatio , InvalidRequest , BAD_REQUEST ;
InvalidSearchLocales , InvalidRequest , BAD_REQUEST ;
InvalidFacetSearchFacetName , InvalidRequest , BAD_REQUEST ;
InvalidSimilarId , InvalidRequest , BAD_REQUEST ;
InvalidSearchFilter , InvalidRequest , BAD_REQUEST ;
@@ -297,6 +304,7 @@ InvalidSettingsSeparatorTokens , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDictionary , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ;
InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ;
InvalidSettingsLocalizedAttributes , InvalidRequest , BAD_REQUEST ;
InvalidState , Internal , INTERNAL_SERVER_ERROR ;
InvalidStoreFile , Internal , INTERNAL_SERVER_ERROR ;
InvalidSwapDuplicateIndexFound , InvalidRequest , BAD_REQUEST ;
@@ -386,7 +394,11 @@ impl ErrorCode for milli::Error {
| UserError::InvalidOpenAiModelDimensionsMax { .. }
| UserError::InvalidSettingsDimensions { .. }
| UserError::InvalidUrl { .. }
| UserError::InvalidPrompt(_) => Code::InvalidSettingsEmbedders,
| UserError::InvalidSettingsDocumentTemplateMaxBytes { .. }
| UserError::InvalidPrompt(_)
| UserError::InvalidDisableBinaryQuantization { .. } => {
Code::InvalidSettingsEmbedders
}
UserError::TooManyEmbedders(_) => Code::InvalidSettingsEmbedders,
UserError::InvalidPromptForEmbeddings(..) => Code::InvalidSettingsEmbedders,
UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,

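For orientation, each row above pairs an error name with an error type and an HTTP status; on the wire the name is rendered in snake_case inside the JSON error object. Below is a minimal, self-contained sketch of that mapping. The real table is generated by a macro in meilisearch-types, so the helper and the printed shape here are illustrative only:

```rust
// Illustrative only: mirrors how a row such as
// `InvalidMultiSearchFacetOrder, InvalidRequest, BAD_REQUEST` surfaces
// as an `invalid_multi_search_facet_order` error answered with HTTP 400.
fn snake_case(name: &str) -> String {
    let mut out = String::new();
    for (i, c) in name.chars().enumerate() {
        if c.is_uppercase() {
            if i != 0 {
                out.push('_');
            }
            out.extend(c.to_lowercase());
        } else {
            out.push(c);
        }
    }
    out
}

fn main() {
    let (name, error_type, status) = ("InvalidMultiSearchFacetOrder", "invalid_request", 400);
    println!(
        "{{\"code\":\"{}\",\"type\":\"{}\",\"status\":{}}}",
        snake_case(name),
        error_type,
        status
    );
}
```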
@@ -1,3 +1,4 @@
use std::borrow::Borrow;
use std::error::Error;
use std::fmt;
use std::str::FromStr;
@@ -8,7 +9,7 @@ use crate::error::{Code, ErrorCode};

/// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400
/// bytes long
#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
#[derive(Debug, Clone, PartialEq, Eq, Deserr, PartialOrd, Ord)]
#[deserr(try_from(String) = IndexUid::try_from -> IndexUidFormatError)]
pub struct IndexUid(String);

@@ -70,6 +71,12 @@ impl From<IndexUid> for String {
}
}

impl Borrow<String> for IndexUid {
fn borrow(&self) -> &String {
&self.0
}
}

#[derive(Debug)]
pub struct IndexUidFormatError {
pub invalid_uid: String,
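The new `Borrow<String>` impl, together with the added `PartialOrd`/`Ord` derives, lets ordered collections keyed by `IndexUid` be queried with a plain `&String`, without building a temporary `IndexUid`. A minimal sketch, using a local stand-in for the type and skipping its validation:

```rust
use std::borrow::Borrow;
use std::collections::BTreeMap;

// Stand-in for meilisearch_types::index_uid::IndexUid, reduced to what the
// Borrow impl above needs; the real type validates its contents.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
struct IndexUid(String);

impl Borrow<String> for IndexUid {
    fn borrow(&self) -> &String {
        &self.0
    }
}

fn main() {
    let mut stats: BTreeMap<IndexUid, u64> = BTreeMap::new();
    stats.insert(IndexUid("doggos".to_string()), 1);

    // Thanks to Borrow<String>, the lookup key can be a &String; the new
    // Ord derive is what allows IndexUid to be a BTreeMap key at all.
    let key = "doggos".to_string();
    assert_eq!(stats.get(&key), Some(&1));
}
```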
@@ -7,6 +7,7 @@ pub mod features;
pub mod index_uid;
pub mod index_uid_pattern;
pub mod keys;
pub mod locales;
pub mod settings;
pub mod star_or;
pub mod task_view;

meilisearch-types/src/locales.rs (new file, 166 lines)
@@ -0,0 +1,166 @@
use deserr::Deserr;
use milli::LocalizedAttributesRule;
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, PartialEq, Eq, Deserr, Serialize, Deserialize)]
#[deserr(rename_all = camelCase)]
#[serde(rename_all = "camelCase")]
pub struct LocalizedAttributesRuleView {
pub attribute_patterns: Vec<String>,
pub locales: Vec<Locale>,
}

impl From<LocalizedAttributesRule> for LocalizedAttributesRuleView {
fn from(rule: LocalizedAttributesRule) -> Self {
Self {
attribute_patterns: rule.attribute_patterns,
locales: rule.locales.into_iter().map(|l| l.into()).collect(),
}
}
}

impl From<LocalizedAttributesRuleView> for LocalizedAttributesRule {
fn from(view: LocalizedAttributesRuleView) -> Self {
Self {
attribute_patterns: view.attribute_patterns,
locales: view.locales.into_iter().map(|l| l.into()).collect(),
}
}
}

/// Generate a Locale enum and its From and Into implementations for milli::tokenizer::Language.
///
/// This enum implements `Deserr` in order to be used in the API.
macro_rules! make_locale {
($(($iso_639_1:ident, $iso_639_1_str:expr) => ($iso_639_3:ident, $iso_639_3_str:expr),)+) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr, Serialize, Deserialize, Ord, PartialOrd)]
#[deserr(rename_all = camelCase)]
#[serde(rename_all = "camelCase")]
pub enum Locale {
$($iso_639_1,)+
$($iso_639_3,)+
Cmn,
}

impl From<milli::tokenizer::Language> for Locale {
fn from(other: milli::tokenizer::Language) -> Locale {
match other {
$(milli::tokenizer::Language::$iso_639_3 => Locale::$iso_639_3,)+
milli::tokenizer::Language::Cmn => Locale::Cmn,
}
}
}

impl From<Locale> for milli::tokenizer::Language {
fn from(other: Locale) -> milli::tokenizer::Language {
match other {
$(Locale::$iso_639_1 => milli::tokenizer::Language::$iso_639_3,)+
$(Locale::$iso_639_3 => milli::tokenizer::Language::$iso_639_3,)+
Locale::Cmn => milli::tokenizer::Language::Cmn,
}
}
}

impl std::str::FromStr for Locale {
type Err = LocaleFormatError;

fn from_str(s: &str) -> Result<Self, Self::Err> {
let locale = match s {
$($iso_639_1_str => Locale::$iso_639_1,)+
$($iso_639_3_str => Locale::$iso_639_3,)+
"cmn" => Locale::Cmn,
_ => return Err(LocaleFormatError { invalid_locale: s.to_string() }),
};

Ok(locale)
}
}

#[derive(Debug)]
pub struct LocaleFormatError {
pub invalid_locale: String,
}

impl std::fmt::Display for LocaleFormatError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut valid_locales = [$($iso_639_1_str),+,$($iso_639_3_str),+,"cmn"];
valid_locales.sort_by(|left, right| left.len().cmp(&right.len()).then(left.cmp(right)));
write!(f, "Unsupported locale `{}`, expected one of {}", self.invalid_locale, valid_locales.join(", "))
}
}

impl std::error::Error for LocaleFormatError {}
};
}

make_locale!(
(Af, "af") => (Afr, "afr"),
(Ak, "ak") => (Aka, "aka"),
(Am, "am") => (Amh, "amh"),
(Ar, "ar") => (Ara, "ara"),
(Az, "az") => (Aze, "aze"),
(Be, "be") => (Bel, "bel"),
(Bn, "bn") => (Ben, "ben"),
(Bg, "bg") => (Bul, "bul"),
(Ca, "ca") => (Cat, "cat"),
(Cs, "cs") => (Ces, "ces"),
(Da, "da") => (Dan, "dan"),
(De, "de") => (Deu, "deu"),
(El, "el") => (Ell, "ell"),
(En, "en") => (Eng, "eng"),
(Eo, "eo") => (Epo, "epo"),
(Et, "et") => (Est, "est"),
(Fi, "fi") => (Fin, "fin"),
(Fr, "fr") => (Fra, "fra"),
(Gu, "gu") => (Guj, "guj"),
(He, "he") => (Heb, "heb"),
(Hi, "hi") => (Hin, "hin"),
(Hr, "hr") => (Hrv, "hrv"),
(Hu, "hu") => (Hun, "hun"),
(Hy, "hy") => (Hye, "hye"),
(Id, "id") => (Ind, "ind"),
(It, "it") => (Ita, "ita"),
(Jv, "jv") => (Jav, "jav"),
(Ja, "ja") => (Jpn, "jpn"),
(Kn, "kn") => (Kan, "kan"),
(Ka, "ka") => (Kat, "kat"),
(Km, "km") => (Khm, "khm"),
(Ko, "ko") => (Kor, "kor"),
(La, "la") => (Lat, "lat"),
(Lv, "lv") => (Lav, "lav"),
(Lt, "lt") => (Lit, "lit"),
(Ml, "ml") => (Mal, "mal"),
(Mr, "mr") => (Mar, "mar"),
(Mk, "mk") => (Mkd, "mkd"),
(My, "my") => (Mya, "mya"),
(Ne, "ne") => (Nep, "nep"),
(Nl, "nl") => (Nld, "nld"),
(Nb, "nb") => (Nob, "nob"),
(Or, "or") => (Ori, "ori"),
(Pa, "pa") => (Pan, "pan"),
(Fa, "fa") => (Pes, "pes"),
(Pl, "pl") => (Pol, "pol"),
(Pt, "pt") => (Por, "por"),
(Ro, "ro") => (Ron, "ron"),
(Ru, "ru") => (Rus, "rus"),
(Si, "si") => (Sin, "sin"),
(Sk, "sk") => (Slk, "slk"),
(Sl, "sl") => (Slv, "slv"),
(Sn, "sn") => (Sna, "sna"),
(Es, "es") => (Spa, "spa"),
(Sr, "sr") => (Srp, "srp"),
(Sv, "sv") => (Swe, "swe"),
(Ta, "ta") => (Tam, "tam"),
(Te, "te") => (Tel, "tel"),
(Tl, "tl") => (Tgl, "tgl"),
(Th, "th") => (Tha, "tha"),
(Tk, "tk") => (Tuk, "tuk"),
(Tr, "tr") => (Tur, "tur"),
(Uk, "uk") => (Ukr, "ukr"),
(Ur, "ur") => (Urd, "urd"),
(Uz, "uz") => (Uzb, "uzb"),
(Vi, "vi") => (Vie, "vie"),
(Yi, "yi") => (Yid, "yid"),
(Zh, "zh") => (Zho, "zho"),
(Zu, "zu") => (Zul, "zul"),
);
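The generated `FromStr` accepts both the ISO 639-1 and ISO 639-3 spellings of a language, so `fr` and `fra` are interchangeable in the API. A hand-expanded miniature of the generated code for a single pair (the real enum covers every pair listed above plus `Cmn`):

```rust
// A hand-expanded miniature of what `make_locale!` produces for one pair,
// to show the parsing behaviour; error handling is simplified to a String.
#[derive(Debug, PartialEq)]
enum Locale {
    Fr,  // ISO 639-1 variant
    Fra, // ISO 639-3 variant
}

impl std::str::FromStr for Locale {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "fr" => Ok(Locale::Fr),   // two-letter code
            "fra" => Ok(Locale::Fra), // three-letter code
            other => Err(format!("Unsupported locale `{other}`")),
        }
    }
}

fn main() {
    // Both spellings are accepted, exactly like the generated FromStr above;
    // in the real code both map to the same milli language.
    assert_eq!("fr".parse::<Locale>(), Ok(Locale::Fr));
    assert_eq!("fra".parse::<Locale>(), Ok(Locale::Fra));
    assert!("zz".parse::<Locale>().is_err());
}
```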
@@ -17,6 +17,7 @@ use serde::{Deserialize, Serialize, Serializer};
use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::facet_values_sort::FacetValuesSort;
use crate::locales::LocalizedAttributesRuleView;

/// The maximum number of results that the engine
/// will be able to return in one search call.
@@ -198,6 +199,9 @@ pub struct Settings<T> {
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSearchCutoffMs>)]
pub search_cutoff_ms: Setting<u64>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsLocalizedAttributes>)]
pub localized_attributes: Setting<Vec<LocalizedAttributesRuleView>>,

#[serde(skip)]
#[deserr(skip)]
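Given the camelCase renames above, a settings payload carrying these rules would plausibly look like the following. This shape is inferred from the struct definitions, not taken from an API reference, and the field values (attribute names, patterns) are made up:

```rust
use serde_json::json;

fn main() {
    // Hypothetical payload: one rule binds attribute patterns to locales.
    // Field names come from the #[serde(rename_all = "camelCase")] structs;
    // "jpn" is the camelCase rendering of the Jpn locale variant.
    let settings = json!({
        "localizedAttributes": [
            { "attributePatterns": ["title_ja", "desc_*"], "locales": ["jpn"] }
        ]
    });
    println!("{settings}");
}
```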
@@ -261,6 +265,7 @@ impl Settings<Checked> {
pagination: Setting::Reset,
embedders: Setting::Reset,
search_cutoff_ms: Setting::Reset,
localized_attributes: Setting::Reset,
_kind: PhantomData,
}
}
@@ -284,7 +289,8 @@ impl Settings<Checked> {
pagination,
embedders,
search_cutoff_ms,
..
localized_attributes: localized_attributes_rules,
_kind,
} = self;

Settings {
@@ -305,6 +311,7 @@ impl Settings<Checked> {
pagination,
embedders,
search_cutoff_ms,
localized_attributes: localized_attributes_rules,
_kind: PhantomData,
}
}
@@ -352,6 +359,7 @@ impl Settings<Unchecked> {
pagination: self.pagination,
embedders: self.embedders,
search_cutoff_ms: self.search_cutoff_ms,
localized_attributes: self.localized_attributes,
_kind: PhantomData,
}
}
@@ -402,6 +410,7 @@ pub fn apply_settings_to_builder(
pagination,
embedders,
search_cutoff_ms,
localized_attributes: localized_attributes_rules,
_kind,
} = settings;

@@ -485,6 +494,13 @@ pub fn apply_settings_to_builder(
Setting::NotSet => (),
}

match localized_attributes_rules {
Setting::Set(ref rules) => builder
.set_localized_attributes_rules(rules.iter().cloned().map(|r| r.into()).collect()),
Setting::Reset => builder.reset_localized_attributes_rules(),
Setting::NotSet => (),
}

match typo_tolerance {
Setting::Set(ref value) => {
match value.enabled {
@@ -679,6 +695,8 @@ pub fn settings(

let search_cutoff_ms = index.search_cutoff(rtxn)?;

let localized_attributes_rules = index.localized_attributes_rules(rtxn)?;

let mut settings = Settings {
displayed_attributes: match displayed_attributes {
Some(attrs) => Setting::Set(attrs),
@@ -711,6 +729,10 @@ pub fn settings(
Some(cutoff) => Setting::Set(cutoff),
None => Setting::Reset,
},
localized_attributes: match localized_attributes_rules {
Some(rules) => Setting::Set(rules.into_iter().map(|r| r.into()).collect()),
None => Setting::Reset,
},
_kind: PhantomData,
};

@@ -902,6 +924,7 @@ pub(crate) mod test {
faceting: Setting::NotSet,
pagination: Setting::NotSet,
embedders: Setting::NotSet,
localized_attributes: Setting::NotSet,
search_cutoff_ms: Setting::NotSet,
_kind: PhantomData::<Unchecked>,
};
@@ -930,6 +953,7 @@ pub(crate) mod test {
faceting: Setting::NotSet,
pagination: Setting::NotSet,
embedders: Setting::NotSet,
localized_attributes: Setting::NotSet,
search_cutoff_ms: Setting::NotSet,
_kind: PhantomData::<Unchecked>,
};

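Most hunks above repeat the same three-way match on `Setting`. A self-contained sketch of those semantics, with a local enum standing in for the real `Setting<T>` from milli:

```rust
// Local stand-in mirroring the tri-state logic the hunks above match on:
// Set = overwrite, Reset = back to the default, NotSet = leave untouched.
#[derive(Debug)]
enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}

fn apply(current: &mut Option<Vec<String>>, update: Setting<Vec<String>>) {
    match update {
        Setting::Set(rules) => *current = Some(rules), // overwrite
        Setting::Reset => *current = None,             // clear to default
        Setting::NotSet => (),                         // no change
    }
}

fn main() {
    let mut rules = None;
    apply(&mut rules, Setting::Set(vec!["title_ja".to_string()]));
    assert_eq!(rules, Some(vec!["title_ja".to_string()]));
    apply(&mut rules, Setting::NotSet); // omitted field: untouched
    assert_eq!(rules, Some(vec!["title_ja".to_string()]));
    apply(&mut rules, Setting::Reset); // explicit null: reset
    assert_eq!(rules, None);
}
```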
@@ -10,38 +10,52 @@ static VERSION_MINOR: &str = env!("CARGO_PKG_VERSION_MINOR");
static VERSION_PATCH: &str = env!("CARGO_PKG_VERSION_PATCH");

/// Persists the version of the current Meilisearch binary to a VERSION file
pub fn create_version_file(db_path: &Path) -> io::Result<()> {
pub fn create_current_version_file(db_path: &Path) -> io::Result<()> {
create_version_file(db_path, VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
}

pub fn create_version_file(
db_path: &Path,
major: &str,
minor: &str,
patch: &str,
) -> io::Result<()> {
let version_path = db_path.join(VERSION_FILE_NAME);
fs::write(version_path, format!("{}.{}.{}", VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH))
fs::write(version_path, format!("{}.{}.{}", major, minor, patch))
}

/// Ensures the Meilisearch version is compatible with the database; returns an error if the versions mismatch.
pub fn check_version_file(db_path: &Path) -> anyhow::Result<()> {
let version_path = db_path.join(VERSION_FILE_NAME);
let (major, minor, patch) = get_version(db_path)?;

match fs::read_to_string(version_path) {
Ok(version) => {
let version_components = version.split('.').collect::<Vec<_>>();
let (major, minor, patch) = match &version_components[..] {
[major, minor, patch] => (major.to_string(), minor.to_string(), patch.to_string()),
_ => return Err(VersionFileError::MalformedVersionFile.into()),
};

if major != VERSION_MAJOR || minor != VERSION_MINOR {
return Err(VersionFileError::VersionMismatch { major, minor, patch }.into());
}
}
Err(error) => {
return match error.kind() {
ErrorKind::NotFound => Err(VersionFileError::MissingVersionFile.into()),
_ => Err(error.into()),
}
}
if major != VERSION_MAJOR || minor != VERSION_MINOR {
return Err(VersionFileError::VersionMismatch { major, minor, patch }.into());
}

Ok(())
}

pub fn get_version(db_path: &Path) -> Result<(String, String, String), VersionFileError> {
let version_path = db_path.join(VERSION_FILE_NAME);

match fs::read_to_string(version_path) {
Ok(version) => parse_version(&version),
Err(error) => match error.kind() {
ErrorKind::NotFound => Err(VersionFileError::MissingVersionFile),
_ => Err(error.into()),
},
}
}

pub fn parse_version(version: &str) -> Result<(String, String, String), VersionFileError> {
let version_components = version.split('.').collect::<Vec<_>>();
let (major, minor, patch) = match &version_components[..] {
[major, minor, patch] => (major.to_string(), minor.to_string(), patch.to_string()),
_ => return Err(VersionFileError::MalformedVersionFile),
};
Ok((major, minor, patch))
}

#[derive(thiserror::Error, Debug)]
pub enum VersionFileError {
#[error(
@@ -58,4 +72,7 @@ pub enum VersionFileError {
env!("CARGO_PKG_VERSION").to_string()
)]
VersionMismatch { major: String, minor: String, patch: String },

#[error(transparent)]
IoError(#[from] std::io::Error),
}

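The VERSION file is plain `major.minor.patch` text, and only the major and minor components participate in the compatibility check. A self-contained restatement of `parse_version` with a usage sketch (error type simplified to `String`):

```rust
// Restates the parse_version logic above with a simplified error type,
// and shows the round-trip against the `major.minor.patch` format.
fn parse_version(version: &str) -> Result<(String, String, String), String> {
    let version_components = version.split('.').collect::<Vec<_>>();
    match &version_components[..] {
        [major, minor, patch] => {
            Ok((major.to_string(), minor.to_string(), patch.to_string()))
        }
        _ => Err("malformed version file".to_string()),
    }
}

fn main() {
    // What create_version_file(db_path, "1", "9", "0") would write:
    let contents = format!("{}.{}.{}", "1", "9", "0");
    assert_eq!(
        parse_version(&contents),
        Ok(("1".into(), "9".into(), "0".into()))
    );
    // A two-component file is malformed; check_version_file then only
    // compares major and minor against the running binary.
    assert!(parse_version("1.9").is_err());
}
```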
@@ -75,7 +75,7 @@ reqwest = { version = "0.12.5", features = [
rustls = { version = "0.23.11", features = ["ring"], default-features = false }
rustls-pki-types = { version = "1.7.0", features = ["alloc"] }
rustls-pemfile = "2.1.2"
segment = { version = "0.2.4", optional = true }
segment = { version = "0.2.4" }
serde = { version = "1.0.204", features = ["derive"] }
serde_json = { version = "1.0.120", features = ["preserve_order"] }
sha2 = "0.10.8"
@@ -104,6 +104,7 @@ tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
tracing-actix-web = "0.7.11"
build-info = { version = "1.7.0", path = "../build-info" }
roaring = "0.10.2"
mopa-maintained = "0.2.3"

[dev-dependencies]
actix-rt = "2.10.0"
@@ -114,6 +115,7 @@ maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
temp-env = "0.3.6"
urlencoding = "2.1.3"
wiremock = "0.6.0"
yaup = "0.3.1"

[build-dependencies]
@@ -130,8 +132,7 @@ tempfile = { version = "3.10.1", optional = true }
zip = { version = "2.1.3", optional = true }

[features]
default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]
analytics = ["segment"]
default = ["meilisearch-types/all-tokenizations", "mini-dashboard"]
mini-dashboard = [
"static-files",
"anyhow",
@@ -152,7 +153,9 @@ greek = ["meilisearch-types/greek"]
khmer = ["meilisearch-types/khmer"]
vietnamese = ["meilisearch-types/vietnamese"]
swedish-recomposition = ["meilisearch-types/swedish-recomposition"]
german = ["meilisearch-types/german"]
turkish = ["meilisearch-types/turkish"]

[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.14/build.zip"
sha1 = "592d1b5a3459d621d0aae1dded8fe3154f5c38fe"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.15/build.zip"
sha1 = "d057600b4a839a2e0c0be7a372cd1b2683f3ca7e"

@@ -1,109 +0,0 @@
use std::any::Any;
use std::sync::Arc;

use actix_web::HttpRequest;
use meilisearch_types::InstanceUid;
use serde_json::Value;

use super::{find_user_id, Analytics, DocumentDeletionKind, DocumentFetchKind};
use crate::routes::indexes::documents::{DocumentEditionByFunction, UpdateDocumentsQuery};
use crate::Opt;

pub struct MockAnalytics {
instance_uid: Option<InstanceUid>,
}

#[derive(Default)]
pub struct SearchAggregator;

#[allow(dead_code)]
impl SearchAggregator {
pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
Self
}

pub fn succeed(&mut self, _: &dyn Any) {}
}

#[derive(Default)]
pub struct SimilarAggregator;

#[allow(dead_code)]
impl SimilarAggregator {
pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
Self
}

pub fn succeed(&mut self, _: &dyn Any) {}
}

#[derive(Default)]
pub struct MultiSearchAggregator;

#[allow(dead_code)]
impl MultiSearchAggregator {
pub fn from_federated_search(_: &dyn Any, _: &dyn Any) -> Self {
Self
}

pub fn succeed(&mut self) {}
}

#[derive(Default)]
pub struct FacetSearchAggregator;

#[allow(dead_code)]
impl FacetSearchAggregator {
pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
Self
}

pub fn succeed(&mut self, _: &dyn Any) {}
}

impl MockAnalytics {
#[allow(clippy::new_ret_no_self)]
pub fn new(opt: &Opt) -> Arc<dyn Analytics> {
let instance_uid = find_user_id(&opt.db_path);
Arc::new(Self { instance_uid })
}
}

impl Analytics for MockAnalytics {
fn instance_uid(&self) -> Option<&meilisearch_types::InstanceUid> {
self.instance_uid.as_ref()
}

// These methods are noop and should be optimized out
fn publish(&self, _event_name: String, _send: Value, _request: Option<&HttpRequest>) {}
fn get_search(&self, _aggregate: super::SearchAggregator) {}
fn post_search(&self, _aggregate: super::SearchAggregator) {}
fn get_similar(&self, _aggregate: super::SimilarAggregator) {}
fn post_similar(&self, _aggregate: super::SimilarAggregator) {}
fn post_multi_search(&self, _aggregate: super::MultiSearchAggregator) {}
fn post_facet_search(&self, _aggregate: super::FacetSearchAggregator) {}
fn add_documents(
&self,
_documents_query: &UpdateDocumentsQuery,
_index_creation: bool,
_request: &HttpRequest,
) {
}
fn delete_documents(&self, _kind: DocumentDeletionKind, _request: &HttpRequest) {}
fn update_documents(
&self,
_documents_query: &UpdateDocumentsQuery,
_index_creation: bool,
_request: &HttpRequest,
) {
}
fn update_documents_by_function(
&self,
_documents_query: &DocumentEditionByFunction,
_index_creation: bool,
_request: &HttpRequest,
) {
}
fn get_fetch_documents(&self, _documents_query: &DocumentFetchKind, _request: &HttpRequest) {}
fn post_fetch_documents(&self, _documents_query: &DocumentFetchKind, _request: &HttpRequest) {}
}

@@ -1,44 +1,45 @@
mod mock_analytics;
#[cfg(feature = "analytics")]
mod segment_analytics;
pub mod segment_analytics;

use std::fs;
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;

use actix_web::HttpRequest;
use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::InstanceUid;
pub use mock_analytics::MockAnalytics;
use mopa::mopafy;
use once_cell::sync::Lazy;
use platform_dirs::AppDirs;
use serde_json::Value;

use crate::routes::indexes::documents::{DocumentEditionByFunction, UpdateDocumentsQuery};

// if the analytics feature is disabled
// the `SegmentAnalytics` points to the mock instead of the real analytics
#[cfg(not(feature = "analytics"))]
pub type SegmentAnalytics = mock_analytics::MockAnalytics;
#[cfg(not(feature = "analytics"))]
pub type SearchAggregator = mock_analytics::SearchAggregator;
#[cfg(not(feature = "analytics"))]
pub type SimilarAggregator = mock_analytics::SimilarAggregator;
#[cfg(not(feature = "analytics"))]
pub type MultiSearchAggregator = mock_analytics::MultiSearchAggregator;
#[cfg(not(feature = "analytics"))]
pub type FacetSearchAggregator = mock_analytics::FacetSearchAggregator;

// if the analytics feature is enabled we use the real analytics
#[cfg(feature = "analytics")]
pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
#[cfg(feature = "analytics")]
pub type SearchAggregator = segment_analytics::SearchAggregator;
#[cfg(feature = "analytics")]
pub type SimilarAggregator = segment_analytics::SimilarAggregator;
#[cfg(feature = "analytics")]
pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator;
#[cfg(feature = "analytics")]
pub type FacetSearchAggregator = segment_analytics::FacetSearchAggregator;

use crate::Opt;

/// A macro used to quickly define events that don't aggregate or send anything besides an empty event with its name.
#[macro_export]
macro_rules! empty_analytics {
($struct_name:ident, $event_name:literal) => {
#[derive(Default)]
struct $struct_name {}

impl $crate::analytics::Aggregate for $struct_name {
fn event_name(&self) -> &'static str {
$event_name
}

fn aggregate(self: Box<Self>, _other: Box<Self>) -> Box<Self> {
self
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::json!({})
}
}
};
}
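As a concrete illustration, this is roughly what `empty_analytics!(IndexCreatedAggregate, "Index Created")` would expand to. The struct and event name are hypothetical, and the `Aggregate` trait (defined later in this diff) is inlined without the mopa machinery so the sketch stands alone:

```rust
// Simplified local copy of the Aggregate trait, without mopa::Any.
trait Aggregate: 'static + Send {
    fn event_name(&self) -> &'static str;
    fn aggregate(self: Box<Self>, other: Box<Self>) -> Box<Self>
    where
        Self: Sized;
    fn into_event(self: Box<Self>) -> serde_json::Value;
}

// Hand-expanded output of the macro for a hypothetical event.
#[derive(Default)]
struct IndexCreatedAggregate {}

impl Aggregate for IndexCreatedAggregate {
    fn event_name(&self) -> &'static str {
        "Index Created" // only the name is sent; the payload stays empty
    }

    fn aggregate(self: Box<Self>, _other: Box<Self>) -> Box<Self> {
        self // two empty events collapse into one
    }

    fn into_event(self: Box<Self>) -> serde_json::Value {
        serde_json::json!({})
    }
}

fn main() {
    let event = Box::new(IndexCreatedAggregate::default());
    assert_eq!(event.event_name(), "Index Created");
    assert_eq!(event.into_event(), serde_json::json!({}));
}
```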

/// The Meilisearch config dir:
/// `~/.config/Meilisearch` on *NIX or *BSD.
@@ -78,60 +79,88 @@ pub enum DocumentFetchKind {
Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
}

pub trait Analytics: Sync + Send {
fn instance_uid(&self) -> Option<&InstanceUid>;
/// To send an event to segment, your event must be able to aggregate itself with another event of the same type.
pub trait Aggregate: 'static + mopa::Any + Send {
/// The name of the event that will be sent to segment.
fn event_name(&self) -> &'static str;

/// Will be called every time an event has been used twice before segment flushed its buffer.
fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self>
where
Self: Sized;

/// Converts your structure to the final event that'll be sent to segment.
fn into_event(self: Box<Self>) -> serde_json::Value;
}

mopafy!(Aggregate);

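A sketch of a non-trivial aggregate built on this trait: when two events of the same type land before the buffer is flushed, `aggregate` folds them together. The struct, fields, and event name are hypothetical, and the trait is restated locally (again without mopa) so the example compiles on its own:

```rust
trait Aggregate: 'static + Send {
    fn event_name(&self) -> &'static str;
    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self>
    where
        Self: Sized;
    fn into_event(self: Box<Self>) -> serde_json::Value;
}

#[derive(Default)]
struct DocumentsAdded {
    requests: u64,
    documents: u64,
}

impl Aggregate for DocumentsAdded {
    fn event_name(&self) -> &'static str {
        "Documents Added" // hypothetical event name
    }

    fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
        // Called when a second event of the same type arrives before the
        // buffer is flushed: fold the new event into the running totals.
        self.requests += new.requests;
        self.documents += new.documents;
        self
    }

    fn into_event(self: Box<Self>) -> serde_json::Value {
        serde_json::json!({ "requests": self.requests, "documents": self.documents })
    }
}

fn main() {
    let a = Box::new(DocumentsAdded { requests: 1, documents: 10 });
    let b = Box::new(DocumentsAdded { requests: 1, documents: 32 });
    let merged = a.aggregate(b);
    assert_eq!(
        merged.into_event(),
        serde_json::json!({ "requests": 2, "documents": 42 })
    );
}
```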
/// Helper trait to define multiple aggregates with the same content but a different name.
/// Commonly used when you must aggregate a search with POST or with GET, for example.
pub trait AggregateMethod: 'static + Default + Send {
fn event_name() -> &'static str;
}

/// A macro used to quickly define multiple aggregate methods with their names.
/// Usage:
/// ```rust
/// use meilisearch::aggregate_methods;
///
/// aggregate_methods!(
/// SearchGET => "Documents Searched GET",
/// SearchPOST => "Documents Searched POST",
/// );
/// ```
#[macro_export]
macro_rules! aggregate_methods {
($method:ident => $event_name:literal) => {
#[derive(Default)]
pub struct $method {}

impl $crate::analytics::AggregateMethod for $method {
fn event_name() -> &'static str {
$event_name
}
}
};
($($method:ident => $event_name:literal,)+) => {
$(
aggregate_methods!($method => $event_name);
)+

};
}

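How the marker types produced by `aggregate_methods!` are typically consumed: one generic aggregator reports under different event names depending on a zero-sized method parameter. The `SearchAggregator` here is a simplified stand-in, not the real one from segment_analytics:

```rust
use std::marker::PhantomData;

trait AggregateMethod: 'static + Default + Send {
    fn event_name() -> &'static str;
}

// What `aggregate_methods!(SearchGET => "Documents Searched GET", ...)`
// expands to for each marker:
#[derive(Default)]
pub struct SearchGET {}
impl AggregateMethod for SearchGET {
    fn event_name() -> &'static str {
        "Documents Searched GET"
    }
}

#[derive(Default)]
pub struct SearchPOST {}
impl AggregateMethod for SearchPOST {
    fn event_name() -> &'static str {
        "Documents Searched POST"
    }
}

// One aggregator with identical content, parameterized by the method marker.
#[derive(Default)]
struct SearchAggregator<M: AggregateMethod> {
    total_received: u64,
    marker: PhantomData<M>,
}

impl<M: AggregateMethod> SearchAggregator<M> {
    fn event_name(&self) -> &'static str {
        M::event_name()
    }
}

fn main() {
    let get: SearchAggregator<SearchGET> = SearchAggregator::default();
    let post: SearchAggregator<SearchPOST> = SearchAggregator::default();
    assert_eq!(get.total_received, 0);
    assert_eq!(get.event_name(), "Documents Searched GET");
    assert_eq!(post.event_name(), "Documents Searched POST");
}
```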
#[derive(Clone)]
pub struct Analytics {
segment: Option<Arc<SegmentAnalytics>>,
}

impl Analytics {
pub async fn new(
opt: &Opt,
index_scheduler: Arc<IndexScheduler>,
auth_controller: Arc<AuthController>,
) -> Self {
if opt.no_analytics {
Self { segment: None }
} else {
Self { segment: SegmentAnalytics::new(opt, index_scheduler, auth_controller).await }
}
}

pub fn no_analytics() -> Self {
Self { segment: None }
}

pub fn instance_uid(&self) -> Option<&InstanceUid> {
self.segment.as_ref().map(|segment| segment.instance_uid.as_ref())
}

/// The method used to publish most analytics that do not need to be batched every hour
fn publish(&self, event_name: String, send: Value, request: Option<&HttpRequest>);

/// This method should be called to aggregate a get search
fn get_search(&self, aggregate: SearchAggregator);

/// This method should be called to aggregate a post search
fn post_search(&self, aggregate: SearchAggregator);

/// This method should be called to aggregate a get similar request
fn get_similar(&self, aggregate: SimilarAggregator);

/// This method should be called to aggregate a post similar request
fn post_similar(&self, aggregate: SimilarAggregator);

/// This method should be called to aggregate a post array of searches
fn post_multi_search(&self, aggregate: MultiSearchAggregator);

/// This method should be called to aggregate post facet values searches
fn post_facet_search(&self, aggregate: FacetSearchAggregator);

// this method should be called to aggregate an add documents request
fn add_documents(
&self,
documents_query: &UpdateDocumentsQuery,
index_creation: bool,
request: &HttpRequest,
);

// this method should be called to aggregate a fetch documents request
fn get_fetch_documents(&self, documents_query: &DocumentFetchKind, request: &HttpRequest);

// this method should be called to aggregate a fetch documents request
fn post_fetch_documents(&self, documents_query: &DocumentFetchKind, request: &HttpRequest);

// this method should be called to aggregate a delete documents request
fn delete_documents(&self, kind: DocumentDeletionKind, request: &HttpRequest);

// this method should be called to batch an update documents request
fn update_documents(
&self,
documents_query: &UpdateDocumentsQuery,
index_creation: bool,
request: &HttpRequest,
);

// this method should be called to batch an update documents by function request
fn update_documents_by_function(
&self,
documents_query: &DocumentEditionByFunction,
index_creation: bool,
request: &HttpRequest,
);
pub fn publish<T: Aggregate>(&self, event: T, request: &HttpRequest) {
if let Some(ref segment) = self.segment {
let _ = segment.sender.try_send(segment_analytics::Message::new(event, request));
}
}
}

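The new `publish` replaces the per-endpoint trait methods above: events go through a bounded channel and are silently dropped when it is full (`let _ = ... try_send(...)`), so analytics can never block a request handler. A sketch of that flow, with std types standing in for the segment sender and message:

```rust
use std::sync::mpsc;

struct Analytics {
    // Stand-in for Option<Arc<SegmentAnalytics>>; None means --no-analytics.
    segment: Option<mpsc::SyncSender<String>>,
}

impl Analytics {
    fn publish(&self, event_name: &str) {
        if let Some(ref sender) = self.segment {
            // Errors (channel full or closed) are deliberately ignored so
            // a slow analytics consumer never blocks the HTTP handler.
            let _ = sender.try_send(event_name.to_string());
        }
    }
}

fn main() {
    let (tx, rx) = mpsc::sync_channel(16);
    let enabled = Analytics { segment: Some(tx) };
    enabled.publish("Documents Searched POST");
    assert_eq!(rx.try_recv().ok().as_deref(), Some("Documents Searched POST"));

    // With analytics disabled the sender is absent and publish is a no-op.
    let disabled = Analytics { segment: None };
    disabled.publish("Documents Searched POST");
}
```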
File diff suppressed because it is too large
@@ -4,6 +4,7 @@ use byte_unit::{Byte, UnitType};
use meilisearch_types::document_formats::{DocumentFormatError, PayloadType};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError};
use meilisearch_types::milli::OrderBy;
use serde_json::Value;
use tokio::task::JoinError;

@@ -27,10 +28,20 @@ pub enum MeilisearchHttpError {
EmptyFilter,
#[error("Invalid syntax for the filter parameter: `expected {}, found: {1}`.", .0.join(", "))]
InvalidExpression(&'static [&'static str], Value),
#[error("Using `federationOptions` is not allowed in a non-federated search.\n Hint: remove `federationOptions` from query #{0} or add `federation: {{}}` to the request.")]
#[error("Using `federationOptions` is not allowed in a non-federated search.\n - Hint: remove `federationOptions` from query #{0} or add `federation` to the request.")]
FederationOptionsInNonFederatedRequest(usize),
#[error("Inside `.queries[{0}]`: Using pagination options is not allowed in federated queries.\n Hint: remove `{1}` from query #{0} or remove `federation: {{}}` from the request")]
#[error("Inside `.queries[{0}]`: Using pagination options is not allowed in federated queries.\n - Hint: remove `{1}` from query #{0} or remove `federation` from the request\n - Hint: pass `federation.limit` and `federation.offset` for pagination in federated search")]
PaginationInFederatedQuery(usize, &'static str),
#[error("Inside `.queries[{0}]`: Using facet options is not allowed in federated queries.\n - Hint: remove `facets` from query #{0} or remove `federation` from the request\n - Hint: pass `federation.facetsByIndex.{1}: {2:?}` for facets in federated search")]
FacetsInFederatedQuery(usize, String, Vec<String>),
#[error("Inconsistent order for values in facet `{facet}`: index `{previous_uid}` orders {previous_facet_order}, but index `{current_uid}` orders {index_facet_order}.\n - Hint: Remove `federation.mergeFacets` or change `faceting.sortFacetValuesBy` to be consistent in settings.")]
InconsistentFacetOrder {
facet: String,
previous_facet_order: OrderBy,
previous_uid: String,
index_facet_order: OrderBy,
current_uid: String,
},
#[error("A {0} payload is missing.")]
MissingPayload(PayloadType),
#[error("Too many search requests running at the same time: {0}. Retry after 10s.")]
@@ -61,7 +72,7 @@ pub enum MeilisearchHttpError {
DocumentFormat(#[from] DocumentFormatError),
#[error(transparent)]
Join(#[from] JoinError),
#[error("Invalid request: missing `hybrid` parameter when both `q` and `vector` are present.")]
#[error("Invalid request: missing `hybrid` parameter when `vector` is present.")]
MissingSearchHybrid,
}

@@ -96,6 +107,10 @@ impl ErrorCode for MeilisearchHttpError {
MeilisearchHttpError::PaginationInFederatedQuery(_, _) => {
Code::InvalidMultiSearchQueryPagination
}
MeilisearchHttpError::FacetsInFederatedQuery(..) => Code::InvalidMultiSearchQueryFacets,
MeilisearchHttpError::InconsistentFacetOrder { .. } => {
Code::InvalidMultiSearchFacetOrder
}
}
}
}
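
The reworked hints above steer users away from per-query pagination and toward `federation.limit` and `federation.offset`. As an illustration of the request shape the hints describe (a hypothetical body, inferred from the hint text rather than shown in this diff):

    use serde_json::json;

    // Pagination lives on the top-level `federation` object,
    // not on the individual queries.
    let body = json!({
        "federation": { "limit": 20, "offset": 40 },
        "queries": [
            { "indexUid": "movies", "q": "shoe" },
            { "indexUid": "books", "q": "shoe" }
        ]
    });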
@@ -13,11 +13,10 @@ pub mod search_queue;

use std::fs::File;
use std::io::{BufReader, BufWriter};
use std::num::NonZeroUsize;
use std::path::Path;
use std::str::FromStr;
use std::sync::Arc;
use std::thread::{self, available_parallelism};
use std::thread;
use std::time::Duration;

use actix_cors::Cors;
@@ -37,7 +36,7 @@ use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchR
use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
use meilisearch_types::settings::apply_settings_to_builder;
use meilisearch_types::tasks::KindWithContent;
use meilisearch_types::versioning::{check_version_file, create_version_file};
use meilisearch_types::versioning::{check_version_file, create_current_version_file};
use meilisearch_types::{compression, milli, VERSION_FILE_NAME};
pub use option::Opt;
use option::ScheduleSnapshot;
@@ -118,9 +117,10 @@ pub type LogStderrType = tracing_subscriber::filter::Filtered<
pub fn create_app(
index_scheduler: Data<IndexScheduler>,
auth_controller: Data<AuthController>,
search_queue: Data<SearchQueue>,
opt: Opt,
logs: (LogRouteHandle, LogStderrHandle),
analytics: Arc<dyn Analytics>,
analytics: Data<Analytics>,
enable_dashboard: bool,
) -> actix_web::App<
impl ServiceFactory<
@@ -137,6 +137,7 @@ pub fn create_app(
s,
index_scheduler.clone(),
auth_controller.clone(),
search_queue.clone(),
&opt,
logs,
analytics.clone(),
@@ -318,7 +319,7 @@ fn open_or_create_database_unchecked(
match (
index_scheduler_builder(),
auth_controller.map_err(anyhow::Error::from),
create_version_file(&opt.db_path).map_err(anyhow::Error::from),
create_current_version_file(&opt.db_path).map_err(anyhow::Error::from),
) {
(Ok(i), Ok(a), Ok(())) => Ok((i, a)),
(Err(e), _, _) | (_, Err(e), _) | (_, _, Err(e)) => {
@@ -469,20 +470,17 @@ pub fn configure_data(
config: &mut web::ServiceConfig,
index_scheduler: Data<IndexScheduler>,
auth: Data<AuthController>,
search_queue: Data<SearchQueue>,
opt: &Opt,
(logs_route, logs_stderr): (LogRouteHandle, LogStderrHandle),
analytics: Arc<dyn Analytics>,
analytics: Data<Analytics>,
) {
let search_queue = SearchQueue::new(
opt.experimental_search_queue_size,
available_parallelism().unwrap_or(NonZeroUsize::new(2).unwrap()),
);
let http_payload_size_limit = opt.http_payload_size_limit.as_u64() as usize;
config
.app_data(index_scheduler)
.app_data(auth)
.app_data(web::Data::new(search_queue))
.app_data(web::Data::from(analytics))
.app_data(search_queue)
.app_data(analytics)
.app_data(web::Data::new(logs_route))
.app_data(web::Data::new(logs_stderr))
.app_data(web::Data::new(opt.clone()))
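
`configure_data` now receives the already-built `Data<SearchQueue>` and `Data<Analytics>` instead of constructing the queue and re-wrapping the analytics itself. A minimal sketch of the actix-web mechanism being leaned on here (toy type, not the actual Meilisearch one):

    use std::sync::Arc;
    use actix_web::web::Data;

    struct Analytics;

    fn main() {
        // `Data<T>` is a shared `Arc<T>` handle: build it once up front...
        let analytics: Data<Analytics> = Data::from(Arc::new(Analytics));
        // ...then clone it cheaply into each worker and register it as app data.
        let per_worker = analytics.clone();
        drop(per_worker);
    }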
@@ -1,8 +1,11 @@
use std::env;
use std::io::{stderr, LineWriter, Write};
use std::num::NonZeroUsize;
use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc;
use std::thread::available_parallelism;
use std::time::Duration;

use actix_web::http::KeepAlive;
use actix_web::web::Data;
@@ -11,6 +14,7 @@ use index_scheduler::IndexScheduler;
use is_terminal::IsTerminal;
use meilisearch::analytics::Analytics;
use meilisearch::option::LogMode;
use meilisearch::search_queue::SearchQueue;
use meilisearch::{
analytics, create_app, setup_meilisearch, LogRouteHandle, LogRouteType, LogStderrHandle,
LogStderrType, Opt, SubscriberForSecondLayer,
@@ -72,6 +76,19 @@ fn on_panic(info: &std::panic::PanicInfo) {

#[actix_web::main]
async fn main() -> anyhow::Result<()> {
try_main().await.inspect_err(|error| {
tracing::error!(%error);
let mut current = error.source();
let mut depth = 0;
while let Some(source) = current {
tracing::info!(%source, depth, "Error caused by");
current = source.source();
depth += 1;
}
})
}

async fn try_main() -> anyhow::Result<()> {
let (opt, config_read_from) = Opt::try_build()?;

std::panic::set_hook(Box::new(on_panic));
@@ -107,19 +124,12 @@ async fn main() -> anyhow::Result<()> {

let (index_scheduler, auth_controller) = setup_meilisearch(&opt)?;

#[cfg(all(not(debug_assertions), feature = "analytics"))]
let analytics = if !opt.no_analytics {
analytics::SegmentAnalytics::new(&opt, index_scheduler.clone(), auth_controller.clone())
.await
} else {
analytics::MockAnalytics::new(&opt)
};
#[cfg(any(debug_assertions, not(feature = "analytics")))]
let analytics = analytics::MockAnalytics::new(&opt);
let analytics =
analytics::Analytics::new(&opt, index_scheduler.clone(), auth_controller.clone()).await;

print_launch_resume(&opt, analytics.clone(), config_read_from);

run_http(index_scheduler, auth_controller, opt, log_handle, analytics).await?;
run_http(index_scheduler, auth_controller, opt, log_handle, Arc::new(analytics)).await?;

Ok(())
}
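
`main` now walks and logs the full error chain before exiting instead of printing only the top-level error. The same pattern works for any `std::error::Error`; a self-contained sketch:

    use std::error::Error;

    fn log_error_chain(error: &(dyn Error + 'static)) {
        eprintln!("error: {error}");
        let mut current = error.source();
        let mut depth = 0;
        while let Some(source) = current {
            eprintln!("caused by ({depth}): {source}");
            current = source.source();
            depth += 1;
        }
    }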
@@ -129,17 +139,30 @@ async fn run_http(
auth_controller: Arc<AuthController>,
opt: Opt,
logs: (LogRouteHandle, LogStderrHandle),
analytics: Arc<dyn Analytics>,
analytics: Arc<Analytics>,
) -> anyhow::Result<()> {
let enable_dashboard = &opt.env == "development";
let opt_clone = opt.clone();
let index_scheduler = Data::from(index_scheduler);
let auth_controller = Data::from(auth_controller);
let analytics = Data::from(analytics);
let search_queue = SearchQueue::new(
opt.experimental_search_queue_size,
available_parallelism()
.unwrap_or(NonZeroUsize::new(2).unwrap())
.checked_mul(opt.experimental_nb_searches_per_core)
.unwrap_or(NonZeroUsize::MAX),
)
.with_time_to_abort(Duration::from_secs(
usize::from(opt.experimental_drop_search_after) as u64
));
let search_queue = Data::new(search_queue);
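
The queue's parallelism is now the detected core count multiplied by `experimental_nb_searches_per_core`, saturating at `NonZeroUsize::MAX` instead of overflowing. The arithmetic in isolation (defaults taken from the option.rs changes later in this diff):

    use std::num::NonZeroUsize;
    use std::thread::available_parallelism;

    fn search_parallelism(nb_searches_per_core: NonZeroUsize) -> NonZeroUsize {
        available_parallelism()
            .unwrap_or(NonZeroUsize::new(2).unwrap()) // assume 2 cores if detection fails
            .checked_mul(nb_searches_per_core)
            .unwrap_or(NonZeroUsize::MAX) // saturate rather than overflow
    }

    // e.g. 8 cores x the default of 4 searches per core = 32 concurrent searches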
let http_server = HttpServer::new(move || {
create_app(
index_scheduler.clone(),
auth_controller.clone(),
search_queue.clone(),
opt.clone(),
logs.clone(),
analytics.clone(),
@@ -158,11 +181,7 @@ async fn run_http(
Ok(())
}

pub fn print_launch_resume(
opt: &Opt,
analytics: Arc<dyn Analytics>,
config_read_from: Option<PathBuf>,
) {
pub fn print_launch_resume(opt: &Opt, analytics: Analytics, config_read_from: Option<PathBuf>) {
let build_info = build_info::BuildInfo::from_build();

let protocol =
@@ -204,7 +223,6 @@ pub fn print_launch_resume(
eprintln!("Prototype:\t\t{:?}", prototype);
}

#[cfg(all(not(debug_assertions), feature = "analytics"))]
{
if !opt.no_analytics {
eprintln!(
@@ -55,16 +55,17 @@ where
let index_scheduler = req.app_data::<Data<IndexScheduler>>().unwrap();
let features = index_scheduler.features();

let request_path = req.path();
let request_pattern = req.match_pattern();
let metric_path = request_pattern.as_ref().map_or(request_path, String::as_str).to_string();
let request_method = req.method().to_string();

if features.check_metrics().is_ok() {
let request_path = req.path();
let is_registered_resource = req.resource_map().has_resource(request_path);
if is_registered_resource {
let request_pattern = req.match_pattern();
let metric_path = request_pattern.as_ref().map_or(request_path, String::as_str);
let request_method = req.method().to_string();
histogram_timer = Some(
crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
.with_label_values(&[&request_method, metric_path])
.with_label_values(&[&request_method, &metric_path])
.start_timer(),
);
}
@@ -76,11 +77,7 @@ where
let res = fut.await?;

crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
.with_label_values(&[
res.request().method().as_str(),
res.request().path(),
res.status().as_str(),
])
.with_label_values(&[&request_method, &metric_path, res.status().as_str()])
.inc();

if let Some(histogram_timer) = histogram_timer {
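
Both metrics now label requests by the matched route pattern instead of the raw path. This keeps Prometheus label cardinality bounded: `/indexes/movies/search` and `/indexes/books/search` collapse into a single `/indexes/{index_uid}/search` series. The distinction, in isolation (actix-web API, simplified):

    // For a request to /indexes/movies/search inside an actix-web middleware:
    // req.path()          -> "/indexes/movies/search" (one label per index: unbounded)
    // req.match_pattern() -> Some("/indexes/{index_uid}/search") (one label per route: bounded)
    let metric_path = req.match_pattern().unwrap_or_else(|| req.path().to_string());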
@@ -2,7 +2,7 @@ use std::env::VarError;
use std::ffi::OsStr;
use std::fmt::Display;
use std::io::{BufReader, Read};
use std::num::ParseIntError;
use std::num::{NonZeroUsize, ParseIntError};
use std::ops::Deref;
use std::path::PathBuf;
use std::str::FromStr;
@@ -29,7 +29,6 @@ const MEILI_MASTER_KEY: &str = "MEILI_MASTER_KEY";
const MEILI_ENV: &str = "MEILI_ENV";
const MEILI_TASK_WEBHOOK_URL: &str = "MEILI_TASK_WEBHOOK_URL";
const MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER: &str = "MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER";
#[cfg(feature = "analytics")]
const MEILI_NO_ANALYTICS: &str = "MEILI_NO_ANALYTICS";
const MEILI_HTTP_PAYLOAD_SIZE_LIMIT: &str = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT";
const MEILI_SSL_CERT_PATH: &str = "MEILI_SSL_CERT_PATH";
@@ -55,6 +54,8 @@ const MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE: &str = "MEILI_EXPERIMENTAL_ENABLE_LO
const MEILI_EXPERIMENTAL_CONTAINS_FILTER: &str = "MEILI_EXPERIMENTAL_CONTAINS_FILTER";
const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS";
const MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE: &str = "MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE";
const MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER: &str = "MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER";
const MEILI_EXPERIMENTAL_NB_SEARCHES_PER_CORE: &str = "MEILI_EXPERIMENTAL_NB_SEARCHES_PER_CORE";
const MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE: &str =
"MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE";
const MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS: &str =
@@ -208,7 +209,6 @@ pub struct Opt {
/// Meilisearch automatically collects data from all instances that do not opt out using this flag.
/// All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted
/// at any time.
#[cfg(feature = "analytics")]
#[serde(default)] // we can't send true
#[clap(long, env = MEILI_NO_ANALYTICS)]
pub no_analytics: bool,
@@ -357,10 +357,26 @@ pub struct Opt {
/// Lets you customize the size of the search queue. Meilisearch processes your search requests as fast as possible but once the
/// queue is full it starts returning HTTP 503, Service Unavailable.
/// The default value is 1000.
#[clap(long, env = MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE, default_value_t = 1000)]
#[serde(default)]
#[clap(long, env = MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE, default_value_t = default_experimental_search_queue_size())]
#[serde(default = "default_experimental_search_queue_size")]
pub experimental_search_queue_size: usize,

/// Experimental drop search after. For more information, see: <https://github.com/orgs/meilisearch/discussions/783>
///
/// Lets you customize after how many seconds Meilisearch should consider a search request irrelevant and drop it.
/// The default value is 60.
#[clap(long, env = MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER, default_value_t = default_drop_search_after())]
#[serde(default = "default_drop_search_after")]
pub experimental_drop_search_after: NonZeroUsize,

/// Experimental number of searches per core. For more information, see: <https://github.com/orgs/meilisearch/discussions/784>
///
/// Lets you customize how many search requests can run on each core concurrently.
/// The default value is 4.
#[clap(long, env = MEILI_EXPERIMENTAL_NB_SEARCHES_PER_CORE, default_value_t = default_nb_searches_per_core())]
#[serde(default = "default_nb_searches_per_core")]
pub experimental_nb_searches_per_core: NonZeroUsize,

/// Experimental logs mode feature. For more information, see: <https://github.com/orgs/meilisearch/discussions/723>
///
/// Change the mode of the logs on the console.
@@ -407,7 +423,6 @@ pub struct Opt {

impl Opt {
/// Whether analytics should be enabled or not.
#[cfg(all(not(debug_assertions), feature = "analytics"))]
pub fn analytics(&self) -> bool {
!self.no_analytics
}
@@ -487,11 +502,12 @@ impl Opt {
ignore_missing_dump: _,
ignore_dump_if_db_exists: _,
config_file_path: _,
#[cfg(feature = "analytics")]
no_analytics,
experimental_contains_filter,
experimental_enable_metrics,
experimental_search_queue_size,
experimental_drop_search_after,
experimental_nb_searches_per_core,
experimental_logs_mode,
experimental_enable_logs_route,
experimental_replication_parameters,
@@ -513,10 +529,7 @@ impl Opt {
);
}

#[cfg(feature = "analytics")]
{
export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
}
export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
export_to_env_if_not_present(
MEILI_HTTP_PAYLOAD_SIZE_LIMIT,
http_payload_size_limit.to_string(),
@@ -559,6 +572,14 @@ impl Opt {
MEILI_EXPERIMENTAL_SEARCH_QUEUE_SIZE,
experimental_search_queue_size.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER,
experimental_drop_search_after.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_NB_SEARCHES_PER_CORE,
experimental_nb_searches_per_core.to_string(),
);
export_to_env_if_not_present(
MEILI_EXPERIMENTAL_LOGS_MODE,
experimental_logs_mode.to_string(),
@@ -890,6 +911,18 @@ fn default_dump_dir() -> PathBuf {
PathBuf::from(DEFAULT_DUMP_DIR)
}

fn default_experimental_search_queue_size() -> usize {
1000
}

fn default_drop_search_after() -> NonZeroUsize {
NonZeroUsize::new(60).unwrap()
}

fn default_nb_searches_per_core() -> NonZeroUsize {
NonZeroUsize::new(4).unwrap()
}
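
Each new experimental option wires the same default function into both clap (`default_value_t`) and serde (`#[serde(default = ...)]`), so the CLI flag, the environment variable, and the config file all agree on one value. A stripped-down sketch of the pattern:

    use std::num::NonZeroUsize;
    use clap::Parser;
    use serde::Deserialize;

    fn default_drop_search_after() -> NonZeroUsize {
        NonZeroUsize::new(60).unwrap()
    }

    #[derive(Parser, Deserialize)]
    struct Opts {
        // One default function serves both the CLI parser and the config file.
        #[clap(long, env = "MEILI_EXPERIMENTAL_DROP_SEARCH_AFTER", default_value_t = default_drop_search_after())]
        #[serde(default = "default_drop_search_after")]
        experimental_drop_search_after: NonZeroUsize,
    }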

/// Indicates if a snapshot was scheduled, and if yes with which interval.
#[derive(Debug, Default, Copy, Clone, Deserialize, Serialize)]
pub enum ScheduleSnapshot {
@@ -4,7 +4,6 @@ use index_scheduler::IndexScheduler;
use meilisearch_auth::AuthController;
use meilisearch_types::error::ResponseError;
use meilisearch_types::tasks::KindWithContent;
use serde_json::json;
use tracing::debug;

use crate::analytics::Analytics;
@@ -18,14 +17,16 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));
}

crate::empty_analytics!(DumpAnalytics, "Dump Created");

pub async fn create_dump(
index_scheduler: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, Data<IndexScheduler>>,
auth_controller: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, Data<AuthController>>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish("Dump Created".to_string(), json!({}), Some(&req));
analytics.publish(DumpAnalytics::default(), &req);

let task = KindWithContent::DumpCreation {
keys: auth_controller.list_keys()?,
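
The `empty_analytics!` macro used above is not expanded anywhere in this diff; given that `DumpAnalytics::default()` is published with no fields, it presumably generates a unit struct wired into the `Aggregate` trait, along these lines (an assumption, not the macro's verified output):

    // Hypothetical expansion of `empty_analytics!(DumpAnalytics, "Dump Created")`.
    #[derive(Default, serde::Serialize)]
    struct DumpAnalytics;

    impl Aggregate for DumpAnalytics {
        fn event_name(&self) -> &'static str {
            "Dump Created"
        }

        fn aggregate(self: Box<Self>, _new: Box<Self>) -> Box<Self> {
            self
        }

        fn into_event(self: Box<Self>) -> serde_json::Value {
            serde_json::json!({})
        }
    }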
@@ -6,10 +6,10 @@ use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::actions;
use serde_json::json;
use serde::Serialize;
use tracing::debug;

use crate::analytics::Analytics;
use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
@@ -17,7 +17,7 @@ use crate::extractors::sequential_extractor::SeqHandler;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
web::resource("")
.route(web::get().to(SeqHandler(get_features)))
.route(web::get().to(get_features))
.route(web::patch().to(SeqHandler(patch_features))),
);
}
@@ -27,12 +27,9 @@ async fn get_features(
ActionPolicy<{ actions::EXPERIMENTAL_FEATURES_GET }>,
Data<IndexScheduler>,
>,
req: HttpRequest,
analytics: Data<dyn Analytics>,
) -> HttpResponse {
let features = index_scheduler.features();

analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
let features = features.runtime_features();
debug!(returns = ?features, "Get features");
HttpResponse::Ok().json(features)
@@ -53,6 +50,35 @@ pub struct RuntimeTogglableFeatures {
pub contains_filter: Option<bool>,
}

#[derive(Serialize)]
pub struct PatchExperimentalFeatureAnalytics {
vector_store: bool,
metrics: bool,
logs_route: bool,
edit_documents_by_function: bool,
contains_filter: bool,
}

impl Aggregate for PatchExperimentalFeatureAnalytics {
fn event_name(&self) -> &'static str {
"Experimental features Updated"
}

fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
Box::new(Self {
vector_store: new.vector_store,
metrics: new.metrics,
logs_route: new.logs_route,
edit_documents_by_function: new.edit_documents_by_function,
contains_filter: new.contains_filter,
})
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}
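
Note the merge strategy: unlike the boolean-OR and set-union aggregators later in this diff, this one keeps only the `new` values, so a batch of PATCH events reports the most recent toggles rather than an accumulation. A toy demonstration of the difference (same module, so the private fields are reachable):

    // Last-write-wins: merging metrics=true then metrics=false reports false.
    let first = Box::new(PatchExperimentalFeatureAnalytics {
        vector_store: false,
        metrics: true,
        logs_route: false,
        edit_documents_by_function: false,
        contains_filter: false,
    });
    let second = Box::new(PatchExperimentalFeatureAnalytics {
        vector_store: false,
        metrics: false,
        logs_route: false,
        edit_documents_by_function: false,
        contains_filter: false,
    });
    assert!(!first.aggregate(second).metrics);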
async fn patch_features(
index_scheduler: GuardedData<
ActionPolicy<{ actions::EXPERIMENTAL_FEATURES_UPDATE }>,
@@ -60,7 +86,7 @@ async fn patch_features(
>,
new_features: AwebJson<RuntimeTogglableFeatures, DeserrJsonError>,
req: HttpRequest,
analytics: Data<dyn Analytics>,
analytics: Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let features = index_scheduler.features();
debug!(parameters = ?new_features, "Patch features");
@@ -89,15 +115,14 @@ async fn patch_features(
} = new_features;

analytics.publish(
"Experimental features Updated".to_string(),
json!({
"vector_store": vector_store,
"metrics": metrics,
"logs_route": logs_route,
"edit_documents_by_function": edit_documents_by_function,
"contains_filter": contains_filter,
}),
Some(&req),
PatchExperimentalFeatureAnalytics {
vector_store,
metrics,
logs_route,
edit_documents_by_function,
contains_filter,
},
&req,
);
index_scheduler.put_runtime_features(new_features)?;
debug!(returns = ?new_features, "Patch features");
@@ -1,4 +1,6 @@
use std::collections::HashSet;
use std::io::ErrorKind;
use std::marker::PhantomData;

use actix_web::http::header::CONTENT_TYPE;
use actix_web::web::Data;
@@ -23,14 +25,14 @@ use meilisearch_types::tasks::KindWithContent;
use meilisearch_types::{milli, Document, Index};
use mime::Mime;
use once_cell::sync::Lazy;
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tempfile::tempfile;
use tokio::fs::File;
use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
use tracing::debug;

use crate::analytics::{Analytics, DocumentDeletionKind, DocumentFetchKind};
use crate::analytics::{Aggregate, AggregateMethod, Analytics};
use crate::error::MeilisearchHttpError;
use crate::error::PayloadError::ReceivePayload;
use crate::extractors::authentication::policies::*;
@@ -41,7 +43,7 @@ use crate::routes::{
get_task_id, is_dry_run, PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT,
};
use crate::search::{parse_filter, RetrieveVectors};
use crate::Opt;
use crate::{aggregate_methods, Opt};

static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()]
@@ -100,12 +102,84 @@ pub struct GetDocument {
retrieve_vectors: Param<bool>,
}

aggregate_methods!(
DocumentsGET => "Documents Fetched GET",
DocumentsPOST => "Documents Fetched POST",
);
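
Like `empty_analytics!`, the `aggregate_methods!` macro is not expanded in this diff; from its use as a type parameter and the `Method::event_name()` calls below, it plausibly generates one uninhabited marker type per route variant (again an assumption):

    // Hypothetical expansion of the macro call above.
    pub enum DocumentsGET {}
    pub enum DocumentsPOST {}

    impl AggregateMethod for DocumentsGET {
        fn event_name() -> &'static str {
            "Documents Fetched GET"
        }
    }

    impl AggregateMethod for DocumentsPOST {
        fn event_name() -> &'static str {
            "Documents Fetched POST"
        }
    }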
#[derive(Serialize)]
pub struct DocumentsFetchAggregator<Method: AggregateMethod> {
// a call on ../documents/:doc_id
per_document_id: bool,
// if a filter was used
per_filter: bool,

#[serde(rename = "vector.retrieve_vectors")]
retrieve_vectors: bool,

// pagination
#[serde(rename = "pagination.max_limit")]
max_limit: usize,
#[serde(rename = "pagination.max_offset")]
max_offset: usize,

marker: std::marker::PhantomData<Method>,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum DocumentFetchKind {
PerDocumentId { retrieve_vectors: bool },
Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
}

impl<Method: AggregateMethod> DocumentsFetchAggregator<Method> {
pub fn from_query(query: &DocumentFetchKind) -> Self {
let (limit, offset, retrieve_vectors) = match query {
DocumentFetchKind::PerDocumentId { retrieve_vectors } => (1, 0, *retrieve_vectors),
DocumentFetchKind::Normal { limit, offset, retrieve_vectors, .. } => {
(*limit, *offset, *retrieve_vectors)
}
};

Self {
per_document_id: matches!(query, DocumentFetchKind::PerDocumentId { .. }),
per_filter: matches!(query, DocumentFetchKind::Normal { with_filter, .. } if *with_filter),
max_limit: limit,
max_offset: offset,
retrieve_vectors,

marker: PhantomData,
}
}
}

impl<Method: AggregateMethod> Aggregate for DocumentsFetchAggregator<Method> {
fn event_name(&self) -> &'static str {
Method::event_name()
}

fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
Box::new(Self {
per_document_id: self.per_document_id | new.per_document_id,
per_filter: self.per_filter | new.per_filter,
retrieve_vectors: self.retrieve_vectors | new.retrieve_vectors,
max_limit: self.max_limit.max(new.max_limit),
max_offset: self.max_offset.max(new.max_offset),
marker: PhantomData,
})
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}

pub async fn get_document(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
document_param: web::Path<DocumentParam>,
params: AwebQueryParameter<GetDocument, DeserrQueryParamError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let DocumentParam { index_uid, document_id } = document_param.into_inner();
debug!(parameters = ?params, "Get document");
@@ -117,8 +191,15 @@ pub async fn get_document(
let features = index_scheduler.features();
let retrieve_vectors = RetrieveVectors::new(param_retrieve_vectors.0, features)?;

analytics.get_fetch_documents(
&DocumentFetchKind::PerDocumentId { retrieve_vectors: param_retrieve_vectors.0 },
analytics.publish(
DocumentsFetchAggregator::<DocumentsGET> {
retrieve_vectors: param_retrieve_vectors.0,
per_document_id: true,
per_filter: false,
max_limit: 0,
max_offset: 0,
marker: PhantomData,
},
&req,
);

@@ -129,17 +210,52 @@ pub async fn get_document(
Ok(HttpResponse::Ok().json(document))
}

#[derive(Serialize)]
pub struct DocumentsDeletionAggregator {
per_document_id: bool,
clear_all: bool,
per_batch: bool,
per_filter: bool,
}

impl Aggregate for DocumentsDeletionAggregator {
fn event_name(&self) -> &'static str {
"Documents Deleted"
}

fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
Box::new(Self {
per_document_id: self.per_document_id | new.per_document_id,
clear_all: self.clear_all | new.clear_all,
per_batch: self.per_batch | new.per_batch,
per_filter: self.per_filter | new.per_filter,
})
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}

pub async fn delete_document(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
path: web::Path<DocumentParam>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let DocumentParam { index_uid, document_id } = path.into_inner();
let index_uid = IndexUid::try_from(index_uid)?;

analytics.delete_documents(DocumentDeletionKind::PerDocumentId, &req);
analytics.publish(
DocumentsDeletionAggregator {
per_document_id: true,
clear_all: false,
per_batch: false,
per_filter: false,
},
&req,
);

let task = KindWithContent::DocumentDeletion {
index_uid: index_uid.to_string(),
@@ -190,17 +306,19 @@ pub async fn documents_by_query_post(
index_uid: web::Path<String>,
body: AwebJson<BrowseQuery, DeserrJsonError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let body = body.into_inner();
debug!(parameters = ?body, "Get documents POST");

analytics.post_fetch_documents(
&DocumentFetchKind::Normal {
with_filter: body.filter.is_some(),
limit: body.limit,
offset: body.offset,
analytics.publish(
DocumentsFetchAggregator::<DocumentsPOST> {
per_filter: body.filter.is_some(),
retrieve_vectors: body.retrieve_vectors,
max_limit: body.limit,
max_offset: body.offset,
per_document_id: false,
marker: PhantomData,
},
&req,
);
@@ -213,7 +331,7 @@ pub async fn get_documents(
index_uid: web::Path<String>,
params: AwebQueryParameter<BrowseQueryGet, DeserrQueryParamError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!(parameters = ?params, "Get documents GET");

@@ -235,12 +353,14 @@ pub async fn get_documents(
filter,
};

analytics.get_fetch_documents(
&DocumentFetchKind::Normal {
with_filter: query.filter.is_some(),
limit: query.limit,
offset: query.offset,
analytics.publish(
DocumentsFetchAggregator::<DocumentsGET> {
per_filter: query.filter.is_some(),
retrieve_vectors: query.retrieve_vectors,
max_limit: query.limit,
max_offset: query.offset,
per_document_id: false,
marker: PhantomData,
},
&req,
);
@@ -298,6 +418,39 @@ fn from_char_csv_delimiter(
}
}

aggregate_methods!(
Replaced => "Documents Added",
Updated => "Documents Updated",
);

#[derive(Serialize)]
pub struct DocumentsAggregator<T: AggregateMethod> {
payload_types: HashSet<String>,
primary_key: HashSet<String>,
index_creation: bool,
#[serde(skip)]
method: PhantomData<T>,
}

impl<Method: AggregateMethod> Aggregate for DocumentsAggregator<Method> {
fn event_name(&self) -> &'static str {
Method::event_name()
}

fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
Box::new(Self {
payload_types: self.payload_types.union(&new.payload_types).cloned().collect(),
primary_key: self.primary_key.union(&new.primary_key).cloned().collect(),
index_creation: self.index_creation | new.index_creation,
method: PhantomData,
})
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(self).unwrap_or_default()
}
}
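
Because `payload_types` and `primary_key` are merged with set union, a run of mixed uploads collapses into one event listing every content type and primary key seen. The union step in isolation (values invented for illustration):

    use std::collections::HashSet;

    let a: HashSet<String> = HashSet::from(["application/json".to_string()]);
    let b: HashSet<String> = HashSet::from(["text/csv".to_string()]);
    // Same merge as `DocumentsAggregator::aggregate`.
    let merged: HashSet<String> = a.union(&b).cloned().collect();
    assert_eq!(merged.len(), 2);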
pub async fn replace_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
@@ -305,16 +458,32 @@ pub async fn replace_documents(
body: Payload,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;

debug!(parameters = ?params, "Replace documents");
let params = params.into_inner();

analytics.add_documents(
&params,
index_scheduler.index_exists(&index_uid).map_or(true, |x| !x),
let mut content_types = HashSet::new();
let content_type = req
.headers()
.get(CONTENT_TYPE)
.and_then(|s| s.to_str().ok())
.unwrap_or("unknown")
.to_string();
content_types.insert(content_type);
let mut primary_keys = HashSet::new();
if let Some(primary_key) = params.primary_key.clone() {
primary_keys.insert(primary_key);
}
analytics.publish(
DocumentsAggregator::<Replaced> {
payload_types: content_types,
primary_key: primary_keys,
index_creation: index_scheduler.index_exists(&index_uid).map_or(true, |x| !x),
method: PhantomData,
},
&req,
);

@@ -346,16 +515,32 @@ pub async fn update_documents(
body: Payload,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;

let params = params.into_inner();
debug!(parameters = ?params, "Update documents");

analytics.add_documents(
&params,
index_scheduler.index_exists(&index_uid).map_or(true, |x| !x),
let mut content_types = HashSet::new();
let content_type = req
.headers()
.get(CONTENT_TYPE)
.and_then(|s| s.to_str().ok())
.unwrap_or("unknown")
.to_string();
content_types.insert(content_type);
let mut primary_keys = HashSet::new();
if let Some(primary_key) = params.primary_key.clone() {
primary_keys.insert(primary_key);
}
analytics.publish(
DocumentsAggregator::<Updated> {
payload_types: content_types,
primary_key: primary_keys,
index_creation: index_scheduler.index_exists(&index_uid).map_or(true, |x| !x),
method: PhantomData,
},
&req,
);

@@ -524,12 +709,20 @@ pub async fn delete_documents_batch(
body: web::Json<Vec<Value>>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!(parameters = ?body, "Delete documents by batch");
let index_uid = IndexUid::try_from(index_uid.into_inner())?;

analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
analytics.publish(
DocumentsDeletionAggregator {
per_batch: true,
per_document_id: false,
clear_all: false,
per_filter: false,
},
&req,
);

let ids = body
.iter()
@@ -562,14 +755,22 @@ pub async fn delete_documents_by_filter(
body: AwebJson<DocumentDeletionByFilter, DeserrJsonError>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!(parameters = ?body, "Delete documents by filter");
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let index_uid = index_uid.into_inner();
let filter = body.into_inner().filter;

analytics.delete_documents(DocumentDeletionKind::PerFilter, &req);
analytics.publish(
DocumentsDeletionAggregator {
per_filter: true,
per_document_id: false,
clear_all: false,
per_batch: false,
},
&req,
);

// we ensure the filter is well formed before enqueuing it
crate::search::parse_filter(&filter, Code::InvalidDocumentFilter, index_scheduler.features())?
@@ -599,13 +800,41 @@ pub struct DocumentEditionByFunction {
pub function: String,
}

#[derive(Serialize)]
struct EditDocumentsByFunctionAggregator {
// Set to true if at least one request was filtered
filtered: bool,
// Set to true if at least one request contained a context
with_context: bool,

index_creation: bool,
}

impl Aggregate for EditDocumentsByFunctionAggregator {
fn event_name(&self) -> &'static str {
"Documents Edited By Function"
}

fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
Box::new(Self {
filtered: self.filtered | new.filtered,
with_context: self.with_context | new.with_context,
index_creation: self.index_creation | new.index_creation,
})
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}

pub async fn edit_documents_by_function(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ALL }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: AwebJson<DocumentEditionByFunction, DeserrJsonError>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!(parameters = ?params, "Edit documents by function");

@@ -617,9 +846,12 @@ pub async fn edit_documents_by_function(
let index_uid = index_uid.into_inner();
let params = params.into_inner();

analytics.update_documents_by_function(
&params,
index_scheduler.index(&index_uid).is_err(),
analytics.publish(
EditDocumentsByFunctionAggregator {
filtered: params.filter.is_some(),
with_context: params.context.is_some(),
index_creation: index_scheduler.index(&index_uid).is_err(),
},
&req,
);

@@ -670,10 +902,18 @@ pub async fn clear_all_documents(
index_uid: web::Path<String>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
analytics.delete_documents(DocumentDeletionKind::ClearAll, &req);
analytics.publish(
DocumentsDeletionAggregator {
clear_all: true,
per_document_id: false,
per_batch: false,
per_filter: false,
},
&req,
);

let task = KindWithContent::DocumentClear { index_uid: index_uid.to_string() };
let uid = get_task_id(&req, &opt)?;
@@ -1,3 +1,5 @@
use std::collections::{BinaryHeap, HashSet};

use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::actix_web::AwebJson;
@@ -6,17 +8,19 @@ use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::locales::Locale;
use serde_json::Value;
use tracing::debug;

use crate::analytics::{Analytics, FacetSearchAggregator};
use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::routes::indexes::search::search_kind;
use crate::search::{
add_search_rules, perform_facet_search, HybridQuery, MatchingStrategy, RankingScoreThreshold,
SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG,
DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
add_search_rules, perform_facet_search, FacetSearchResult, HybridQuery, MatchingStrategy,
RankingScoreThreshold, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
DEFAULT_SEARCH_OFFSET,
};
use crate::search_queue::SearchQueue;

@@ -48,6 +52,110 @@ pub struct FacetSearchQuery {
pub attributes_to_search_on: Option<Vec<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchRankingScoreThreshold>, default)]
pub ranking_score_threshold: Option<RankingScoreThreshold>,
#[deserr(default, error = DeserrJsonError<InvalidSearchLocales>, default)]
pub locales: Option<Vec<Locale>>,
}

#[derive(Default)]
pub struct FacetSearchAggregator {
// requests
total_received: usize,
total_succeeded: usize,
time_spent: BinaryHeap<usize>,

// The set of all facetNames that were used
facet_names: HashSet<String>,

// Has any parameter other than the facetName or facetQuery ones been provided?
additional_search_parameters_provided: bool,
}

impl FacetSearchAggregator {
#[allow(clippy::field_reassign_with_default)]
pub fn from_query(query: &FacetSearchQuery) -> Self {
let FacetSearchQuery {
facet_query: _,
facet_name,
vector,
q,
filter,
matching_strategy,
attributes_to_search_on,
hybrid,
ranking_score_threshold,
locales,
} = query;

Self {
total_received: 1,
facet_names: Some(facet_name.clone()).into_iter().collect(),
additional_search_parameters_provided: q.is_some()
|| vector.is_some()
|| filter.is_some()
|| *matching_strategy != MatchingStrategy::default()
|| attributes_to_search_on.is_some()
|| hybrid.is_some()
|| ranking_score_threshold.is_some()
|| locales.is_some(),
..Default::default()
}
}

pub fn succeed(&mut self, result: &FacetSearchResult) {
let FacetSearchResult { facet_hits: _, facet_query: _, processing_time_ms } = result;
self.total_succeeded = 1;
self.time_spent.push(*processing_time_ms as usize);
}
}

impl Aggregate for FacetSearchAggregator {
fn event_name(&self) -> &'static str {
"Facet Searched POST"
}

fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
for time in new.time_spent {
self.time_spent.push(time);
}

Box::new(Self {
total_received: self.total_received.saturating_add(new.total_received),
total_succeeded: self.total_succeeded.saturating_add(new.total_succeeded),
time_spent: self.time_spent,
facet_names: self.facet_names.union(&new.facet_names).cloned().collect(),
additional_search_parameters_provided: self.additional_search_parameters_provided
| new.additional_search_parameters_provided,
})
}

fn into_event(self: Box<Self>) -> serde_json::Value {
let Self {
total_received,
total_succeeded,
time_spent,
facet_names,
additional_search_parameters_provided,
} = *self;
// the index of the 99th percentile value
let percentile_99th = 0.99 * (total_succeeded as f64 - 1.) + 1.;
// we get all the values in sorted order
let time_spent = time_spent.into_sorted_vec();
// we are only interested in the slowest value of the 99% fastest results
let time_spent = time_spent.get(percentile_99th as usize);

serde_json::json!({
"requests": {
"99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
"total_succeeded": total_succeeded,
"total_failed": total_received.saturating_sub(total_succeeded), // just to be sure we never panic
"total_received": total_received,
},
"facets": {
"total_distinct_facet_count": facet_names.len(),
"additional_search_parameters_provided": additional_search_parameters_provided,
},
})
}
}
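
The percentile computation above deserves a close reading: with `n` successful requests it sorts the observed timings, computes the index `0.99 * (n - 1) + 1`, truncates it, and reads that slot; `get` returns `None` instead of panicking when the index falls past the end (e.g. when `n` is 0 or 1). On a toy data set:

    // Same arithmetic as `into_event`, with 200 sorted timings 1..=200.
    let total_succeeded: usize = 200;
    let time_spent: Vec<usize> = (1..=total_succeeded).collect();
    let percentile_99th = 0.99 * (total_succeeded as f64 - 1.) + 1.; // 198.01
    let p99 = time_spent.get(percentile_99th as usize); // index 198
    assert_eq!(p99, Some(&199));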
pub async fn search(
@@ -56,17 +164,18 @@ pub async fn search(
index_uid: web::Path<String>,
params: AwebJson<FacetSearchQuery, DeserrJsonError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;

let query = params.into_inner();
debug!(parameters = ?query, "Facet search");

let mut aggregate = FacetSearchAggregator::from_query(&query, &req);
let mut aggregate = FacetSearchAggregator::from_query(&query);

let facet_query = query.facet_query.clone();
let facet_name = query.facet_name.clone();
let locales = query.locales.clone().map(|l| l.into_iter().map(Into::into).collect());
let mut search_query = SearchQuery::from(query);

// Tenant token search_rules.
@@ -77,7 +186,7 @@ pub async fn search(
let index = index_scheduler.index(&index_uid)?;
let features = index_scheduler.features();
let search_kind = search_kind(&search_query, &index_scheduler, &index, features)?;
let _permit = search_queue.try_get_search_permit().await?;
let permit = search_queue.try_get_search_permit().await?;
let search_result = tokio::task::spawn_blocking(move || {
perform_facet_search(
&index,
@@ -86,14 +195,17 @@ pub async fn search(
facet_name,
search_kind,
index_scheduler.features(),
locales,
)
})
.await?;
.await;
permit.drop().await;
let search_result = search_result?;

if let Ok(ref search_result) = search_result {
aggregate.succeed(search_result);
}
analytics.post_facet_search(aggregate);
analytics.publish(aggregate, &req);

let search_result = search_result?;
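
This hunk introduces a pattern repeated for every search route in this diff: the `?` moves off the spawned task so that the queue permit is released and the aggregate is published even when the blocking task fails; only then does the error propagate. The shape of it, as a self-contained sketch (the `Permit` type stands in for the search queue's real permit, whose explicit async release is assumed from the `permit.drop().await` calls):

    use tokio::task;

    struct Permit;

    impl Permit {
        // Stand-in for the search queue's explicit async release.
        async fn drop(self) {}
    }

    async fn run_with_permit<T: Send + 'static>(
        permit: Permit,
        job: impl FnOnce() -> T + Send + 'static,
    ) -> Result<T, task::JoinError> {
        let result = task::spawn_blocking(job).await; // no `?` yet
        permit.drop().await; // give the queue slot back even on failure
        result // only now propagate the error
    }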
@@ -113,6 +225,7 @@ impl From<FacetSearchQuery> for SearchQuery {
attributes_to_search_on,
hybrid,
ranking_score_threshold,
locales,
} = value;

SearchQuery {
@@ -141,6 +254,7 @@ impl From<FacetSearchQuery> for SearchQuery {
attributes_to_search_on,
hybrid,
ranking_score_threshold,
locales,
}
}
}
@@ -1,3 +1,4 @@
use std::collections::BTreeSet;
use std::convert::Infallible;

use actix_web::web::Data;
@@ -13,12 +14,11 @@ use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::{self, FieldDistribution, Index};
use meilisearch_types::tasks::KindWithContent;
use serde::Serialize;
use serde_json::json;
use time::OffsetDateTime;
use tracing::debug;

use super::{get_task_id, Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use crate::analytics::Analytics;
use crate::analytics::{Aggregate, Analytics};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::sequential_extractor::SeqHandler;
@@ -28,8 +28,11 @@ use crate::Opt;
pub mod documents;
pub mod facet_search;
pub mod search;
mod search_analytics;
pub mod settings;
mod settings_analytics;
pub mod similar;
mod similar_analytics;

pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(
@@ -123,12 +126,31 @@ pub struct IndexCreateRequest {
primary_key: Option<String>,
}

#[derive(Serialize)]
struct IndexCreatedAggregate {
primary_key: BTreeSet<String>,
}

impl Aggregate for IndexCreatedAggregate {
fn event_name(&self) -> &'static str {
"Index Created"
}

fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
Box::new(Self { primary_key: self.primary_key.union(&new.primary_key).cloned().collect() })
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}

pub async fn create_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
body: AwebJson<IndexCreateRequest, DeserrJsonError>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!(parameters = ?body, "Create index");
let IndexCreateRequest { primary_key, uid } = body.into_inner();
@@ -136,9 +158,8 @@ pub async fn create_index(
let allow_index_creation = index_scheduler.filters().allow_index_creation(&uid);
if allow_index_creation {
analytics.publish(
"Index Created".to_string(),
json!({ "primary_key": primary_key }),
Some(&req),
IndexCreatedAggregate { primary_key: primary_key.iter().cloned().collect() },
&req,
);

let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
@@ -194,21 +215,38 @@ pub async fn get_index(
Ok(HttpResponse::Ok().json(index_view))
}

#[derive(Serialize)]
struct IndexUpdatedAggregate {
primary_key: BTreeSet<String>,
}

impl Aggregate for IndexUpdatedAggregate {
fn event_name(&self) -> &'static str {
"Index Updated"
}

fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
Box::new(Self { primary_key: self.primary_key.union(&new.primary_key).cloned().collect() })
}

fn into_event(self: Box<Self>) -> serde_json::Value {
serde_json::to_value(*self).unwrap_or_default()
}
}
pub async fn update_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
body: AwebJson<UpdateIndexRequest, DeserrJsonError>,
req: HttpRequest,
opt: web::Data<Opt>,
analytics: web::Data<dyn Analytics>,
analytics: web::Data<Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!(parameters = ?body, "Update index");
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let body = body.into_inner();
analytics.publish(
"Index Updated".to_string(),
json!({ "primary_key": body.primary_key }),
Some(&req),
IndexUpdatedAggregate { primary_key: body.primary_key.iter().cloned().collect() },
&req,
);

let task = KindWithContent::IndexUpdate {

@@ -7,17 +7,19 @@ use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::locales::Locale;
use meilisearch_types::milli;
use meilisearch_types::serde_cs::vec::CS;
use serde_json::Value;
use tracing::debug;

use crate::analytics::{Analytics, SearchAggregator};
use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
use crate::routes::indexes::search_analytics::{SearchAggregator, SearchGET, SearchPOST};
use crate::search::{
add_search_rules, perform_search, HybridQuery, MatchingStrategy, RankingScoreThreshold,
RetrieveVectors, SearchKind, SearchQuery, SemanticRatio, DEFAULT_CROP_LENGTH,
@@ -89,6 +91,8 @@ pub struct SearchQueryGet {
pub hybrid_semantic_ratio: Option<SemanticRatioGet>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchRankingScoreThreshold>)]
pub ranking_score_threshold: Option<RankingScoreThresholdGet>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchLocales>)]
pub locales: Option<CS<Locale>>,
}

#[derive(Debug, Clone, Copy, PartialEq, deserr::Deserr)]
@@ -125,8 +129,10 @@ impl std::ops::Deref for SemanticRatioGet {
}
}

impl From<SearchQueryGet> for SearchQuery {
fn from(other: SearchQueryGet) -> Self {
impl TryFrom<SearchQueryGet> for SearchQuery {
type Error = ResponseError;

fn try_from(other: SearchQueryGet) -> Result<Self, Self::Error> {
let filter = match other.filter {
Some(f) => match serde_json::from_str(&f) {
Ok(v) => Some(v),
@@ -137,19 +143,28 @@ impl From<SearchQueryGet> for SearchQuery {

let hybrid = match (other.hybrid_embedder, other.hybrid_semantic_ratio) {
(None, None) => None,
(None, Some(semantic_ratio)) => {
Some(HybridQuery { semantic_ratio: *semantic_ratio, embedder: None })
(None, Some(_)) => {
return Err(ResponseError::from_msg(
"`hybridEmbedder` is mandatory when `hybridSemanticRatio` is present".into(),
meilisearch_types::error::Code::InvalidHybridQuery,
));
}
(Some(embedder), None) => {
Some(HybridQuery { semantic_ratio: DEFAULT_SEMANTIC_RATIO(), embedder })
}
(Some(embedder), None) => Some(HybridQuery {
semantic_ratio: DEFAULT_SEMANTIC_RATIO(),
embedder: Some(embedder),
}),
(Some(embedder), Some(semantic_ratio)) => {
Some(HybridQuery { semantic_ratio: *semantic_ratio, embedder: Some(embedder) })
Some(HybridQuery { semantic_ratio: *semantic_ratio, embedder })
}
};

Self {
if other.vector.is_some() && hybrid.is_none() {
return Err(ResponseError::from_msg(
"`hybridEmbedder` is mandatory when `vector` is present".into(),
meilisearch_types::error::Code::MissingSearchHybrid,
));
}

Ok(Self {
q: other.q,
vector: other.vector.map(CS::into_inner),
offset: other.offset.0,
@@ -175,7 +190,8 @@ impl From<SearchQueryGet> for SearchQuery {
attributes_to_search_on: other.attributes_to_search_on.map(|o| o.into_iter().collect()),
hybrid,
ranking_score_threshold: other.ranking_score_threshold.map(|o| o.0),
}
locales: other.locales.map(|o| o.into_iter().collect()),
})
}
}
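
With the switch from `From` to `TryFrom`, malformed GET parameters now surface as typed errors instead of being silently accepted. The two rules the conversion enforces, reduced to their bare logic (signatures simplified to booleans and strings for illustration):

    fn validate_hybrid(
        embedder: Option<&str>,
        semantic_ratio: Option<f32>,
        has_vector: bool,
    ) -> Result<(), &'static str> {
        if embedder.is_none() && semantic_ratio.is_some() {
            return Err("`hybridEmbedder` is mandatory when `hybridSemanticRatio` is present");
        }
        if has_vector && embedder.is_none() && semantic_ratio.is_none() {
            return Err("`hybridEmbedder` is mandatory when `vector` is present");
        }
        Ok(())
    }

    assert!(validate_hybrid(None, Some(0.5), false).is_err());
    assert!(validate_hybrid(Some("default"), Some(0.5), true).is_ok());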
@@ -210,34 +226,36 @@ pub async fn search_with_url_query(
     index_uid: web::Path<String>,
     params: AwebQueryParameter<SearchQueryGet, DeserrQueryParamError>,
     req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     debug!(parameters = ?params, "Search get");
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
-    let mut query: SearchQuery = params.into_inner().into();
+    let mut query: SearchQuery = params.into_inner().try_into()?;
 
     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
         add_search_rules(&mut query.filter, search_rules);
     }
 
-    let mut aggregate = SearchAggregator::from_query(&query, &req);
+    let mut aggregate = SearchAggregator::<SearchGET>::from_query(&query);
 
     let index = index_scheduler.index(&index_uid)?;
     let features = index_scheduler.features();
 
     let search_kind = search_kind(&query, index_scheduler.get_ref(), &index, features)?;
     let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)?;
-    let _permit = search_queue.try_get_search_permit().await?;
+    let permit = search_queue.try_get_search_permit().await?;
     let search_result = tokio::task::spawn_blocking(move || {
         perform_search(&index, query, search_kind, retrieve_vector, index_scheduler.features())
     })
-    .await?;
+    .await;
+    permit.drop().await;
+    let search_result = search_result?;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
     }
-    analytics.get_search(aggregate);
+    analytics.publish(aggregate, &req);
 
     let search_result = search_result?;
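
Note the permit handling: the `JoinHandle` is awaited without `?`, so the permit is always handed back to the search queue before the join result (including a panic in the blocking task) is propagated. A minimal sketch of the release-before-propagate pattern, assuming the tokio crate and using a hypothetical `Permit` type in place of the real search-queue permit:

    struct Permit;
    impl Permit {
        async fn drop(self) { /* hand the slot back to the queue */ }
    }

    async fn run_guarded<T: Send + 'static>(
        permit: Permit,
        job: impl FnOnce() -> T + Send + 'static,
    ) -> Result<T, tokio::task::JoinError> {
        // Await the JoinHandle *without* `?` so a failed blocking task
        // cannot skip the explicit release below.
        let result = tokio::task::spawn_blocking(job).await;
        permit.drop().await;
        result
    }

With the earlier `let _permit = ...; .await?;` shape, an early return would only release the permit through its destructor, and the explicit `drop().await` variant lets the queue be notified asynchronously instead.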
@@ -251,7 +269,7 @@ pub async fn search_with_post(
     index_uid: web::Path<String>,
     params: AwebJson<SearchQuery, DeserrJsonError>,
     req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
@@ -263,7 +281,7 @@ pub async fn search_with_post(
         add_search_rules(&mut query.filter, search_rules);
     }
 
-    let mut aggregate = SearchAggregator::from_query(&query, &req);
+    let mut aggregate = SearchAggregator::<SearchPOST>::from_query(&query);
 
     let index = index_scheduler.index(&index_uid)?;
 
@@ -272,18 +290,20 @@ pub async fn search_with_post(
     let search_kind = search_kind(&query, index_scheduler.get_ref(), &index, features)?;
     let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?;
 
-    let _permit = search_queue.try_get_search_permit().await?;
+    let permit = search_queue.try_get_search_permit().await?;
     let search_result = tokio::task::spawn_blocking(move || {
         perform_search(&index, query, search_kind, retrieve_vectors, index_scheduler.features())
     })
-    .await?;
+    .await;
+    permit.drop().await;
+    let search_result = search_result?;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
         if search_result.degraded {
             MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
         }
     }
-    analytics.post_search(aggregate);
+    analytics.publish(aggregate, &req);
 
     let search_result = search_result?;
@@ -304,44 +324,36 @@ pub fn search_kind(
         features.check_vector("Passing `hybrid` as a parameter")?;
     }
 
-    // regardless of anything, always do a keyword search when we don't have a vector and the query is whitespace or missing
-    if query.vector.is_none() {
-        match &query.q {
-            Some(q) if q.trim().is_empty() => return Ok(SearchKind::KeywordOnly),
-            None => return Ok(SearchKind::KeywordOnly),
-            _ => {}
-        }
-    }
-
-    match &query.hybrid {
-        Some(HybridQuery { semantic_ratio, embedder }) if **semantic_ratio == 1.0 => {
-            Ok(SearchKind::semantic(
-                index_scheduler,
-                index,
-                embedder.as_deref(),
-                query.vector.as_ref().map(Vec::len),
-            )?)
-        }
-        Some(HybridQuery { semantic_ratio, embedder: _ }) if **semantic_ratio == 0.0 => {
+    // handle with care: the order of cases matters, and the semantics are subtle
+    match (query.q.as_deref(), &query.hybrid, query.vector.as_deref()) {
+        // empty query, no vector => placeholder search
+        (Some(q), _, None) if q.trim().is_empty() => Ok(SearchKind::KeywordOnly),
+        // no query, no vector => placeholder search
+        (None, _, None) => Ok(SearchKind::KeywordOnly),
+        // hybrid.semantic_ratio == 1.0 => vector
+        (_, Some(HybridQuery { semantic_ratio, embedder }), v) if **semantic_ratio == 1.0 => {
+            SearchKind::semantic(index_scheduler, index, embedder, v.map(|v| v.len()))
+        }
+        // hybrid.semantic_ratio == 0.0 => keyword
+        (_, Some(HybridQuery { semantic_ratio, embedder: _ }), _) if **semantic_ratio == 0.0 => {
             Ok(SearchKind::KeywordOnly)
         }
-        Some(HybridQuery { semantic_ratio, embedder }) => Ok(SearchKind::hybrid(
+        // no query, hybrid, vector => semantic
+        (None, Some(HybridQuery { semantic_ratio: _, embedder }), Some(v)) => {
+            SearchKind::semantic(index_scheduler, index, embedder, Some(v.len()))
+        }
+        // query, no hybrid, no vector => keyword
+        (Some(_), None, None) => Ok(SearchKind::KeywordOnly),
+        // query, hybrid, maybe vector => hybrid
+        (Some(_), Some(HybridQuery { semantic_ratio, embedder }), v) => SearchKind::hybrid(
             index_scheduler,
             index,
-            embedder.as_deref(),
+            embedder,
             **semantic_ratio,
-            query.vector.as_ref().map(Vec::len),
-        )?),
-        None => match (query.q.as_deref(), query.vector.as_deref()) {
-            (_query, None) => Ok(SearchKind::KeywordOnly),
-            (None, Some(_vector)) => Ok(SearchKind::semantic(
-                index_scheduler,
-                index,
-                None,
-                query.vector.as_ref().map(Vec::len),
-            )?),
-            (Some(_), Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
-        },
+            v.map(|v| v.len()),
+        ),
+
+        (_, None, Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
     }
 }
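
The rewritten `search_kind` reads as a dispatch table over the `(q, hybrid, vector)` triple, with arms checked top to bottom. A simplified, self-contained model of the routing, with plain types standing in for the real `SearchQuery`/`HybridQuery` (a sketch of the decision logic, not the real signatures):

    #[derive(Debug, PartialEq)]
    enum Kind { Keyword, Semantic, Hybrid }

    // `ratio` models hybrid.semantic_ratio; `has_vector` models query.vector.
    fn kind(q: Option<&str>, ratio: Option<f32>, has_vector: bool) -> Result<Kind, &'static str> {
        match (q, ratio, has_vector) {
            (Some(q), _, false) if q.trim().is_empty() => Ok(Kind::Keyword), // placeholder
            (None, _, false) => Ok(Kind::Keyword),                           // placeholder
            (_, Some(r), _) if r == 1.0 => Ok(Kind::Semantic),
            (_, Some(r), _) if r == 0.0 => Ok(Kind::Keyword),
            (None, Some(_), true) => Ok(Kind::Semantic),
            (Some(_), None, false) => Ok(Kind::Keyword),
            (Some(_), Some(_), _) => Ok(Kind::Hybrid),
            (_, None, true) => Err("`hybridEmbedder` is mandatory when `vector` is present"),
        }
    }

    fn main() {
        assert_eq!(kind(Some("hello"), Some(0.5), false), Ok(Kind::Hybrid));
        assert_eq!(kind(None, None, true), Err("`hybridEmbedder` is mandatory when `vector` is present"));
    }

Because the `semantic_ratio == 1.0` and `== 0.0` guards sit above the generic hybrid arm, the extremes degrade cleanly to pure vector or pure keyword search.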
meilisearch/src/routes/indexes/search_analytics.rs (new file, 485 additions)
@@ -0,0 +1,485 @@
use once_cell::sync::Lazy;
use regex::Regex;
use serde_json::{json, Value};
use std::collections::{BTreeSet, BinaryHeap, HashMap};

use meilisearch_types::locales::Locale;

use crate::{
    aggregate_methods,
    analytics::{Aggregate, AggregateMethod},
    search::{
        SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
        DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
        DEFAULT_SEMANTIC_RATIO,
    },
};

aggregate_methods!(
    SearchGET => "Documents Searched GET",
    SearchPOST => "Documents Searched POST",
);
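
`aggregate_methods!` presumably expands each name into a zero-sized marker type whose `event_name()` returns the given string, so a single generic `SearchAggregator<Method>` can emit differently named events for the GET and POST routes. A hedged sketch of what such a macro could look like (the real definition lives in the analytics module and may differ):

    // Hypothetical reconstruction, for illustration only.
    pub trait AggregateMethod: 'static {
        fn event_name() -> &'static str;
    }

    macro_rules! aggregate_methods {
        ($($ty:ident => $name:literal,)*) => {
            $(
                #[derive(Default)]
                pub struct $ty;
                impl AggregateMethod for $ty {
                    fn event_name() -> &'static str { $name }
                }
            )*
        };
    }

    aggregate_methods!(
        SearchGET => "Documents Searched GET",
        SearchPOST => "Documents Searched POST",
    );

The marker is carried in the aggregator only as `PhantomData<Method>`, so it costs nothing at runtime.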
#[derive(Default)]
pub struct SearchAggregator<Method: AggregateMethod> {
    // requests
    total_received: usize,
    total_succeeded: usize,
    total_degraded: usize,
    total_used_negative_operator: usize,
    time_spent: BinaryHeap<usize>,

    // sort
    sort_with_geo_point: bool,
    // every time a request has a sort, this field must be incremented by the number of criteria it contains
    sort_sum_of_criteria_terms: usize,
    // every time a request has a sort, this field must be incremented by one
    sort_total_number_of_criteria: usize,

    // distinct
    distinct: bool,

    // filter
    filter_with_geo_radius: bool,
    filter_with_geo_bounding_box: bool,
    // every time a request has a filter, this field must be incremented by the number of terms it contains
    filter_sum_of_criteria_terms: usize,
    // every time a request has a filter, this field must be incremented by one
    filter_total_number_of_criteria: usize,
    used_syntax: HashMap<String, usize>,

    // attributes_to_search_on
    // every time a search is done using attributes_to_search_on
    attributes_to_search_on_total_number_of_uses: usize,

    // q
    // The maximum number of terms in a q request
    max_terms_number: usize,

    // vector
    // The maximum number of floats in a vector request
    max_vector_size: usize,
    // Whether the semantic ratio passed to a hybrid search differs from the default ratio.
    semantic_ratio: bool,
    hybrid: bool,
    retrieve_vectors: bool,

    // every time a search is done, we increment the counter linked to the used settings
    matching_strategy: HashMap<String, usize>,

    // list of the unique Locales passed as a parameter
    locales: BTreeSet<Locale>,

    // pagination
    max_limit: usize,
    max_offset: usize,
    finite_pagination: usize,

    // formatting
    max_attributes_to_retrieve: usize,
    max_attributes_to_highlight: usize,
    highlight_pre_tag: bool,
    highlight_post_tag: bool,
    max_attributes_to_crop: usize,
    crop_marker: bool,
    show_matches_position: bool,
    crop_length: bool,

    // facets
    facets_sum_of_terms: usize,
    facets_total_number_of_facets: usize,

    // scoring
    show_ranking_score: bool,
    show_ranking_score_details: bool,
    ranking_score_threshold: bool,

    marker: std::marker::PhantomData<Method>,
}

impl<Method: AggregateMethod> SearchAggregator<Method> {
    #[allow(clippy::field_reassign_with_default)]
    pub fn from_query(query: &SearchQuery) -> Self {
        let SearchQuery {
            q,
            vector,
            offset,
            limit,
            page,
            hits_per_page,
            attributes_to_retrieve: _,
            retrieve_vectors,
            attributes_to_crop: _,
            crop_length,
            attributes_to_highlight: _,
            show_matches_position,
            show_ranking_score,
            show_ranking_score_details,
            filter,
            sort,
            distinct,
            facets: _,
            highlight_pre_tag,
            highlight_post_tag,
            crop_marker,
            matching_strategy,
            attributes_to_search_on,
            hybrid,
            ranking_score_threshold,
            locales,
        } = query;

        let mut ret = Self::default();

        ret.total_received = 1;

        if let Some(ref sort) = sort {
            ret.sort_total_number_of_criteria = 1;
            ret.sort_with_geo_point = sort.iter().any(|s| s.contains("_geoPoint("));
            ret.sort_sum_of_criteria_terms = sort.len();
        }

        ret.distinct = distinct.is_some();

        if let Some(ref filter) = filter {
            static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
            ret.filter_total_number_of_criteria = 1;

            let syntax = match filter {
                Value::String(_) => "string".to_string(),
                Value::Array(values) => {
                    if values.iter().map(|v| v.to_string()).any(|s| RE.is_match(&s)) {
                        "mixed".to_string()
                    } else {
                        "array".to_string()
                    }
                }
                _ => "none".to_string(),
            };
            // tally which filter syntax was used
            ret.used_syntax.insert(syntax, 1);

            let stringified_filters = filter.to_string();
            ret.filter_with_geo_radius = stringified_filters.contains("_geoRadius(");
            ret.filter_with_geo_bounding_box = stringified_filters.contains("_geoBoundingBox(");
            ret.filter_sum_of_criteria_terms = RE.split(&stringified_filters).count();
        }

        // attributes_to_search_on
        if attributes_to_search_on.is_some() {
            ret.attributes_to_search_on_total_number_of_uses = 1;
        }

        if let Some(ref q) = q {
            ret.max_terms_number = q.split_whitespace().count();
        }

        if let Some(ref vector) = vector {
            ret.max_vector_size = vector.len();
        }
        ret.retrieve_vectors |= retrieve_vectors;

        if query.is_finite_pagination() {
            let limit = hits_per_page.unwrap_or_else(DEFAULT_SEARCH_LIMIT);
            ret.max_limit = limit;
            ret.max_offset = page.unwrap_or(1).saturating_sub(1) * limit;
            ret.finite_pagination = 1;
        } else {
            ret.max_limit = *limit;
            ret.max_offset = *offset;
            ret.finite_pagination = 0;
        }

        ret.matching_strategy.insert(format!("{:?}", matching_strategy), 1);

        if let Some(locales) = locales {
            ret.locales = locales.iter().copied().collect();
        }

        ret.highlight_pre_tag = *highlight_pre_tag != DEFAULT_HIGHLIGHT_PRE_TAG();
        ret.highlight_post_tag = *highlight_post_tag != DEFAULT_HIGHLIGHT_POST_TAG();
        ret.crop_marker = *crop_marker != DEFAULT_CROP_MARKER();
        ret.crop_length = *crop_length != DEFAULT_CROP_LENGTH();
        ret.show_matches_position = *show_matches_position;

        ret.show_ranking_score = *show_ranking_score;
        ret.show_ranking_score_details = *show_ranking_score_details;
        ret.ranking_score_threshold = ranking_score_threshold.is_some();

        if let Some(hybrid) = hybrid {
            ret.semantic_ratio = hybrid.semantic_ratio != DEFAULT_SEMANTIC_RATIO();
            ret.hybrid = true;
        }

        ret
    }

    pub fn succeed(&mut self, result: &SearchResult) {
        let SearchResult {
            hits: _,
            query: _,
            processing_time_ms,
            hits_info: _,
            semantic_hit_count: _,
            facet_distribution: _,
            facet_stats: _,
            degraded,
            used_negative_operator,
        } = result;

        self.total_succeeded = self.total_succeeded.saturating_add(1);
        if *degraded {
            self.total_degraded = self.total_degraded.saturating_add(1);
        }
        if *used_negative_operator {
            self.total_used_negative_operator = self.total_used_negative_operator.saturating_add(1);
        }
        self.time_spent.push(*processing_time_ms as usize);
    }
}

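Taken together, the lifecycle is: build an aggregator from the incoming query, mark it succeeded with the result, then hand it to the analytics sink, which folds it into the running totals through `Aggregate::aggregate`. A self-contained miniature of that flow (simplified types, not the real ones):

    // Miniature of the aggregator lifecycle used by the search routes.
    #[derive(Default)]
    struct Agg { received: usize, succeeded: usize, max_terms: usize }

    impl Agg {
        fn from_query(terms: usize) -> Self {
            Agg { received: 1, max_terms: terms, ..Default::default() }
        }
        fn succeed(&mut self) {
            self.succeeded = self.succeeded.saturating_add(1);
        }
        // Same merge rules as Aggregate::aggregate below: counters add, maxima take max.
        fn aggregate(mut self, new: Agg) -> Agg {
            self.received = self.received.saturating_add(new.received);
            self.succeeded = self.succeeded.saturating_add(new.succeeded);
            self.max_terms = self.max_terms.max(new.max_terms);
            self
        }
    }

    fn main() {
        let mut a = Agg::from_query(4);
        a.succeed();
        let mut b = Agg::from_query(9);
        b.succeed();
        let total = a.aggregate(b);
        assert_eq!((total.received, total.succeeded, total.max_terms), (2, 2, 9));
    }
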
impl<Method: AggregateMethod> Aggregate for SearchAggregator<Method> {
    fn event_name(&self) -> &'static str {
        Method::event_name()
    }

    fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
        let Self {
            total_received,
            total_succeeded,
            mut time_spent,
            sort_with_geo_point,
            sort_sum_of_criteria_terms,
            sort_total_number_of_criteria,
            distinct,
            filter_with_geo_radius,
            filter_with_geo_bounding_box,
            filter_sum_of_criteria_terms,
            filter_total_number_of_criteria,
            used_syntax,
            attributes_to_search_on_total_number_of_uses,
            max_terms_number,
            max_vector_size,
            retrieve_vectors,
            matching_strategy,
            max_limit,
            max_offset,
            finite_pagination,
            max_attributes_to_retrieve,
            max_attributes_to_highlight,
            highlight_pre_tag,
            highlight_post_tag,
            max_attributes_to_crop,
            crop_marker,
            show_matches_position,
            crop_length,
            facets_sum_of_terms,
            facets_total_number_of_facets,
            show_ranking_score,
            show_ranking_score_details,
            semantic_ratio,
            hybrid,
            total_degraded,
            total_used_negative_operator,
            ranking_score_threshold,
            mut locales,
            marker: _,
        } = *new;

        // request
        self.total_received = self.total_received.saturating_add(total_received);
        self.total_succeeded = self.total_succeeded.saturating_add(total_succeeded);
        self.total_degraded = self.total_degraded.saturating_add(total_degraded);
        self.total_used_negative_operator =
            self.total_used_negative_operator.saturating_add(total_used_negative_operator);
        self.time_spent.append(&mut time_spent);

        // sort
        self.sort_with_geo_point |= sort_with_geo_point;
        self.sort_sum_of_criteria_terms =
            self.sort_sum_of_criteria_terms.saturating_add(sort_sum_of_criteria_terms);
        self.sort_total_number_of_criteria =
            self.sort_total_number_of_criteria.saturating_add(sort_total_number_of_criteria);

        // distinct
        self.distinct |= distinct;

        // filter
        self.filter_with_geo_radius |= filter_with_geo_radius;
        self.filter_with_geo_bounding_box |= filter_with_geo_bounding_box;
        self.filter_sum_of_criteria_terms =
            self.filter_sum_of_criteria_terms.saturating_add(filter_sum_of_criteria_terms);
        self.filter_total_number_of_criteria =
            self.filter_total_number_of_criteria.saturating_add(filter_total_number_of_criteria);
        for (key, value) in used_syntax.into_iter() {
            let used_syntax = self.used_syntax.entry(key).or_insert(0);
            *used_syntax = used_syntax.saturating_add(value);
        }

        // attributes_to_search_on
        self.attributes_to_search_on_total_number_of_uses = self
            .attributes_to_search_on_total_number_of_uses
            .saturating_add(attributes_to_search_on_total_number_of_uses);

        // q
        self.max_terms_number = self.max_terms_number.max(max_terms_number);

        // vector
        self.max_vector_size = self.max_vector_size.max(max_vector_size);
        self.retrieve_vectors |= retrieve_vectors;
        self.semantic_ratio |= semantic_ratio;
        self.hybrid |= hybrid;

        // pagination
        self.max_limit = self.max_limit.max(max_limit);
        self.max_offset = self.max_offset.max(max_offset);
        self.finite_pagination += finite_pagination;

        // formatting
        self.max_attributes_to_retrieve =
            self.max_attributes_to_retrieve.max(max_attributes_to_retrieve);
        self.max_attributes_to_highlight =
            self.max_attributes_to_highlight.max(max_attributes_to_highlight);
        self.highlight_pre_tag |= highlight_pre_tag;
        self.highlight_post_tag |= highlight_post_tag;
        self.max_attributes_to_crop = self.max_attributes_to_crop.max(max_attributes_to_crop);
        self.crop_marker |= crop_marker;
        self.show_matches_position |= show_matches_position;
        self.crop_length |= crop_length;

        // facets
        self.facets_sum_of_terms = self.facets_sum_of_terms.saturating_add(facets_sum_of_terms);
        self.facets_total_number_of_facets =
            self.facets_total_number_of_facets.saturating_add(facets_total_number_of_facets);

        // matching strategy
        for (key, value) in matching_strategy.into_iter() {
            let matching_strategy = self.matching_strategy.entry(key).or_insert(0);
            *matching_strategy = matching_strategy.saturating_add(value);
        }

        // scoring
        self.show_ranking_score |= show_ranking_score;
        self.show_ranking_score_details |= show_ranking_score_details;
        self.ranking_score_threshold |= ranking_score_threshold;

        // locales
        self.locales.append(&mut locales);

        self
    }

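The merge rules are uniform: counters add with saturation, maxima take `max`, booleans OR together, and the histogram-like maps add per key. The map rule in isolation, as a runnable snippet:

    use std::collections::HashMap;

    // Per-key saturating addition, as used for `used_syntax` and
    // `matching_strategy` in aggregate() above.
    fn merge_counts(mine: &mut HashMap<String, usize>, theirs: HashMap<String, usize>) {
        for (key, value) in theirs {
            let slot = mine.entry(key).or_insert(0);
            *slot = slot.saturating_add(value);
        }
    }

    fn main() {
        let mut acc = HashMap::from([("string".to_string(), 2)]);
        merge_counts(&mut acc, HashMap::from([("string".to_string(), 1), ("array".to_string(), 1)]));
        assert_eq!(acc["string"], 3);
        assert_eq!(acc["array"], 1);
    }
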
    fn into_event(self: Box<Self>) -> serde_json::Value {
        let Self {
            total_received,
            total_succeeded,
            time_spent,
            sort_with_geo_point,
            sort_sum_of_criteria_terms,
            sort_total_number_of_criteria,
            distinct,
            filter_with_geo_radius,
            filter_with_geo_bounding_box,
            filter_sum_of_criteria_terms,
            filter_total_number_of_criteria,
            used_syntax,
            attributes_to_search_on_total_number_of_uses,
            max_terms_number,
            max_vector_size,
            retrieve_vectors,
            matching_strategy,
            max_limit,
            max_offset,
            finite_pagination,
            max_attributes_to_retrieve,
            max_attributes_to_highlight,
            highlight_pre_tag,
            highlight_post_tag,
            max_attributes_to_crop,
            crop_marker,
            show_matches_position,
            crop_length,
            facets_sum_of_terms,
            facets_total_number_of_facets,
            show_ranking_score,
            show_ranking_score_details,
            semantic_ratio,
            hybrid,
            total_degraded,
            total_used_negative_operator,
            ranking_score_threshold,
            locales,
            marker: _,
        } = *self;

        // drain the heap into ascending order
        let time_spent = time_spent.into_sorted_vec();
        // the index of the 99th percentile value
        let percentile_99th = time_spent.len() * 99 / 100;
        // we only keep the slowest value among the 99% fastest results
        let time_spent = time_spent.get(percentile_99th);

        json!({
            "requests": {
                "99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
                "total_succeeded": total_succeeded,
                "total_failed": total_received.saturating_sub(total_succeeded), // just to be sure we never panic
                "total_received": total_received,
                "total_degraded": total_degraded,
                "total_used_negative_operator": total_used_negative_operator,
            },
            "sort": {
                "with_geoPoint": sort_with_geo_point,
                "avg_criteria_number": format!("{:.2}", sort_sum_of_criteria_terms as f64 / sort_total_number_of_criteria as f64),
            },
            "distinct": distinct,
            "filter": {
                "with_geoRadius": filter_with_geo_radius,
                "with_geoBoundingBox": filter_with_geo_bounding_box,
                "avg_criteria_number": format!("{:.2}", filter_sum_of_criteria_terms as f64 / filter_total_number_of_criteria as f64),
                "most_used_syntax": used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
            },
            "attributes_to_search_on": {
                "total_number_of_uses": attributes_to_search_on_total_number_of_uses,
            },
            "q": {
                "max_terms_number": max_terms_number,
            },
            "vector": {
                "max_vector_size": max_vector_size,
                "retrieve_vectors": retrieve_vectors,
            },
            "hybrid": {
                "enabled": hybrid,
                "semantic_ratio": semantic_ratio,
            },
            "pagination": {
               "max_limit": max_limit,
               "max_offset": max_offset,
               "most_used_navigation": if finite_pagination > (total_received / 2) { "exhaustive" } else { "estimated" },
            },
            "formatting": {
                "max_attributes_to_retrieve": max_attributes_to_retrieve,
                "max_attributes_to_highlight": max_attributes_to_highlight,
                "highlight_pre_tag": highlight_pre_tag,
                "highlight_post_tag": highlight_post_tag,
                "max_attributes_to_crop": max_attributes_to_crop,
                "crop_marker": crop_marker,
                "show_matches_position": show_matches_position,
                "crop_length": crop_length,
            },
            "facets": {
                "avg_facets_number": format!("{:.2}", facets_sum_of_terms as f64 / facets_total_number_of_facets as f64),
            },
            "matching_strategy": {
                "most_used_strategy": matching_strategy.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
            },
            "locales": locales,
            "scoring": {
                "show_ranking_score": show_ranking_score,
                "show_ranking_score_details": show_ranking_score_details,
                "ranking_score_threshold": ranking_score_threshold,
            },
        })
    }
}
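
A note on the percentile arithmetic: once the heap is drained into ascending order, index `len * 99 / 100` picks the value below which roughly 99% of requests fall; on an empty vector `.get(0)` returns `None`, so the event reports a null response time instead of panicking. A quick check:

    fn percentile_99th_index(len: usize) -> usize {
        len * 99 / 100
    }

    fn main() {
        assert_eq!(percentile_99th_index(50), 49);   // 50 samples: the slowest one
        assert_eq!(percentile_99th_index(200), 198); // 200 samples: second slowest
        assert_eq!(percentile_99th_index(0), 0);     // empty: .get(0) yields None
    }
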
@@ -1,15 +1,14 @@
+use super::settings_analytics::*;
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::AwebJson;
 use index_scheduler::IndexScheduler;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
-use meilisearch_types::facet_values_sort::FacetValuesSort;
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::milli::update::Setting;
-use meilisearch_types::settings::{settings, RankingRuleView, SecretPolicy, Settings, Unchecked};
+use meilisearch_types::settings::{settings, SecretPolicy, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
-use serde_json::json;
 use tracing::debug;
 
 use crate::analytics::Analytics;
@@ -20,7 +19,7 @@ use crate::Opt;
 
 #[macro_export]
 macro_rules! make_setting_route {
-    ($route:literal, $update_verb:ident, $type:ty, $err_ty:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
+    ($route:literal, $update_verb:ident, $type:ty, $err_ty:ty, $attr:ident, $camelcase_attr:literal, $analytics:ident) => {
         pub mod $attr {
             use actix_web::web::Data;
             use actix_web::{web, HttpRequest, HttpResponse, Resource};
@@ -80,7 +79,7 @@ macro_rules! make_setting_route {
             body: deserr::actix_web::AwebJson<Option<$type>, $err_ty>,
             req: HttpRequest,
             opt: web::Data<Opt>,
-            $analytics_var: web::Data<dyn Analytics>,
+            analytics: web::Data<Analytics>,
         ) -> std::result::Result<HttpResponse, ResponseError> {
             let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
@@ -88,7 +87,10 @@ macro_rules! make_setting_route {
             debug!(parameters = ?body, "Update settings");
 
-            #[allow(clippy::redundant_closure_call)]
-            $analytics(&body, &req);
+            analytics.publish(
+                $crate::routes::indexes::settings_analytics::$analytics::new(body.as_ref()).into_settings(),
+                &req,
+            );
 
             let new_settings = Settings {
                 $attr: match body {
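
The macro used to take a free-form closure (`$analytics:expr`) invoked with the body and request; it now takes a type name (`$analytics:ident`) and builds a strongly typed per-setting analytics struct that is lifted into `SettingsAnalytics`. A self-contained miniature of why the `ident` matcher matters here, since an identifier (unlike an arbitrary expression) can name a concrete type inside the expansion:

    // Illustrative stand-in for one per-setting analytics type.
    struct FilterableAttributesStats { total: usize }

    macro_rules! make_route {
        ($analytics:ident, $n:expr) => {
            // The ident can be used as a type in a struct literal.
            $analytics { total: $n }
        };
    }

    fn main() {
        let stats = make_route!(FilterableAttributesStats, 7);
        assert_eq!(stats.total, 7);
    }

As the hunks below show, this shrinks every `make_setting_route!` call site from a ~15-line closure to a single type name.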
@@ -160,21 +162,7 @@ make_setting_route!(
     >,
     filterable_attributes,
     "filterableAttributes",
-    analytics,
-    |setting: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "FilterableAttributes Updated".to_string(),
-            json!({
-                "filterable_attributes": {
-                    "total": setting.as_ref().map(|filter| filter.len()).unwrap_or(0),
-                    "has_geo": setting.as_ref().map(|filter| filter.contains("_geo")).unwrap_or(false),
-                }
-            }),
-            Some(req),
-        );
-    }
+    FilterableAttributesAnalytics
 );
 
 make_setting_route!(
@@ -186,21 +174,7 @@ make_setting_route!(
     >,
     sortable_attributes,
     "sortableAttributes",
-    analytics,
-    |setting: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "SortableAttributes Updated".to_string(),
-            json!({
-                "sortable_attributes": {
-                    "total": setting.as_ref().map(|sort| sort.len()),
-                    "has_geo": setting.as_ref().map(|sort| sort.contains("_geo")),
-                },
-            }),
-            Some(req),
-        );
-    }
+    SortableAttributesAnalytics
 );
 
 make_setting_route!(
@@ -212,21 +186,7 @@ make_setting_route!(
     >,
     displayed_attributes,
     "displayedAttributes",
-    analytics,
-    |displayed: &Option<Vec<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "DisplayedAttributes Updated".to_string(),
-            json!({
-                "displayed_attributes": {
-                    "total": displayed.as_ref().map(|displayed| displayed.len()),
-                    "with_wildcard": displayed.as_ref().map(|displayed| displayed.iter().any(|displayed| displayed == "*")),
-                },
-            }),
-            Some(req),
-        );
-    }
+    DisplayedAttributesAnalytics
 );
 
 make_setting_route!(
@@ -238,40 +198,7 @@ make_setting_route!(
     >,
     typo_tolerance,
     "typoTolerance",
-    analytics,
-    |setting: &Option<meilisearch_types::settings::TypoSettings>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "TypoTolerance Updated".to_string(),
-            json!({
-                "typo_tolerance": {
-                    "enabled": setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
-                    "disable_on_attributes": setting
-                        .as_ref()
-                        .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
-                    "disable_on_words": setting
-                        .as_ref()
-                        .and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
-                    "min_word_size_for_one_typo": setting
-                        .as_ref()
-                        .and_then(|s| s.min_word_size_for_typos
-                            .as_ref()
-                            .set()
-                            .map(|s| s.one_typo.set()))
-                        .flatten(),
-                    "min_word_size_for_two_typos": setting
-                        .as_ref()
-                        .and_then(|s| s.min_word_size_for_typos
-                            .as_ref()
-                            .set()
-                            .map(|s| s.two_typos.set()))
-                        .flatten(),
-                },
-            }),
-            Some(req),
-        );
-    }
+    TypoToleranceAnalytics
 );
 
 make_setting_route!(
@@ -283,21 +210,7 @@ make_setting_route!(
     >,
     searchable_attributes,
     "searchableAttributes",
-    analytics,
-    |setting: &Option<Vec<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "SearchableAttributes Updated".to_string(),
-            json!({
-                "searchable_attributes": {
-                    "total": setting.as_ref().map(|searchable| searchable.len()),
-                    "with_wildcard": setting.as_ref().map(|searchable| searchable.iter().any(|searchable| searchable == "*")),
-                },
-            }),
-            Some(req),
-        );
-    }
+    SearchableAttributesAnalytics
 );
 
 make_setting_route!(
@@ -309,20 +222,7 @@ make_setting_route!(
     >,
     stop_words,
     "stopWords",
-    analytics,
-    |stop_words: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "StopWords Updated".to_string(),
-            json!({
-                "stop_words": {
-                    "total": stop_words.as_ref().map(|stop_words| stop_words.len()),
-                },
-            }),
-            Some(req),
-        );
-    }
+    StopWordsAnalytics
 );
 
 make_setting_route!(
@@ -334,20 +234,7 @@ make_setting_route!(
     >,
     non_separator_tokens,
     "nonSeparatorTokens",
-    analytics,
-    |non_separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "nonSeparatorTokens Updated".to_string(),
-            json!({
-                "non_separator_tokens": {
-                    "total": non_separator_tokens.as_ref().map(|non_separator_tokens| non_separator_tokens.len()),
-                },
-            }),
-            Some(req),
-        );
-    }
+    NonSeparatorTokensAnalytics
 );
 
 make_setting_route!(
@@ -359,20 +246,7 @@ make_setting_route!(
     >,
     separator_tokens,
     "separatorTokens",
-    analytics,
-    |separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "separatorTokens Updated".to_string(),
-            json!({
-                "separator_tokens": {
-                    "total": separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()),
-                },
-            }),
-            Some(req),
-        );
-    }
+    SeparatorTokensAnalytics
 );
 
 make_setting_route!(
@@ -384,20 +258,7 @@ make_setting_route!(
     >,
     dictionary,
     "dictionary",
-    analytics,
-    |dictionary: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "dictionary Updated".to_string(),
-            json!({
-                "dictionary": {
-                    "total": dictionary.as_ref().map(|dictionary| dictionary.len()),
-                },
-            }),
-            Some(req),
-        );
-    }
+    DictionaryAnalytics
 );
 
 make_setting_route!(
@@ -409,20 +270,7 @@ make_setting_route!(
     >,
     synonyms,
     "synonyms",
-    analytics,
-    |synonyms: &Option<std::collections::BTreeMap<String, Vec<String>>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "Synonyms Updated".to_string(),
-            json!({
-                "synonyms": {
-                    "total": synonyms.as_ref().map(|synonyms| synonyms.len()),
-                },
-            }),
-            Some(req),
-        );
-    }
+    SynonymsAnalytics
 );
 
 make_setting_route!(
@@ -434,19 +282,7 @@ make_setting_route!(
     >,
     distinct_attribute,
     "distinctAttribute",
-    analytics,
-    |distinct: &Option<String>, req: &HttpRequest| {
-        use serde_json::json;
-        analytics.publish(
-            "DistinctAttribute Updated".to_string(),
-            json!({
-                "distinct_attribute": {
-                    "set": distinct.is_some(),
-                }
-            }),
-            Some(req),
-        );
-    }
+    DistinctAttributeAnalytics
 );
 
 make_setting_route!(
@@ -458,20 +294,19 @@ make_setting_route!(
     >,
     proximity_precision,
     "proximityPrecision",
-    analytics,
-    |precision: &Option<meilisearch_types::settings::ProximityPrecisionView>, req: &HttpRequest| {
-        use serde_json::json;
-        analytics.publish(
-            "ProximityPrecision Updated".to_string(),
-            json!({
-                "proximity_precision": {
-                    "set": precision.is_some(),
-                    "value": precision.unwrap_or_default(),
-                }
-            }),
-            Some(req),
-        );
-    }
+    ProximityPrecisionAnalytics
+);
+
+make_setting_route!(
+    "/localized-attributes",
+    put,
+    Vec<meilisearch_types::locales::LocalizedAttributesRuleView>,
+    meilisearch_types::deserr::DeserrJsonError<
+        meilisearch_types::error::deserr_codes::InvalidSettingsLocalizedAttributes,
+    >,
+    localized_attributes,
+    "localizedAttributes",
+    LocalesAnalytics
 );
 
 make_setting_route!(
@@ -483,26 +318,7 @@ make_setting_route!(
     >,
     ranking_rules,
     "rankingRules",
-    analytics,
-    |setting: &Option<Vec<meilisearch_types::settings::RankingRuleView>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "RankingRules Updated".to_string(),
-            json!({
-                "ranking_rules": {
-                    "words_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Words))),
-                    "typo_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Typo))),
-                    "proximity_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Proximity))),
-                    "attribute_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Attribute))),
-                    "sort_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Sort))),
-                    "exactness_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Exactness))),
-                    "values": setting.as_ref().map(|rr| rr.iter().filter(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Asc(_) | meilisearch_types::settings::RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
-                }
-            }),
-            Some(req),
-        );
-    }
+    RankingRulesAnalytics
 );
 
 make_setting_route!(
@@ -514,25 +330,7 @@ make_setting_route!(
     >,
     faceting,
     "faceting",
-    analytics,
-    |setting: &Option<meilisearch_types::settings::FacetingSettings>, req: &HttpRequest| {
-        use serde_json::json;
-        use meilisearch_types::facet_values_sort::FacetValuesSort;
-
-        analytics.publish(
-            "Faceting Updated".to_string(),
-            json!({
-                "faceting": {
-                    "max_values_per_facet": setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
-                    "sort_facet_values_by_star_count": setting.as_ref().and_then(|s| {
-                        s.sort_facet_values_by.as_ref().set().map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
-                    }),
-                    "sort_facet_values_by_total": setting.as_ref().and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
-                },
-            }),
-            Some(req),
-        );
-    }
+    FacetingAnalytics
 );
 
 make_setting_route!(
@@ -544,20 +342,7 @@ make_setting_route!(
     >,
     pagination,
     "pagination",
-    analytics,
-    |setting: &Option<meilisearch_types::settings::PaginationSettings>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "Pagination Updated".to_string(),
-            json!({
-                "pagination": {
-                    "max_total_hits": setting.as_ref().and_then(|s| s.max_total_hits.set()),
-                },
-            }),
-            Some(req),
-        );
-    }
+    PaginationAnalytics
 );
 
 make_setting_route!(
@@ -569,60 +354,9 @@ make_setting_route!(
     >,
     embedders,
     "embedders",
-    analytics,
-    |setting: &Option<std::collections::BTreeMap<String, Setting<meilisearch_types::milli::vector::settings::EmbeddingSettings>>>, req: &HttpRequest| {
-
-
-        analytics.publish(
-            "Embedders Updated".to_string(),
-            serde_json::json!({"embedders": crate::routes::indexes::settings::embedder_analytics(setting.as_ref())}),
-            Some(req),
-        );
-    }
+    EmbeddersAnalytics
 );
 
-fn embedder_analytics(
-    setting: Option<
-        &std::collections::BTreeMap<
-            String,
-            Setting<meilisearch_types::milli::vector::settings::EmbeddingSettings>,
-        >,
-    >,
-) -> serde_json::Value {
-    let mut sources = std::collections::HashSet::new();
-
-    if let Some(s) = &setting {
-        for source in s
-            .values()
-            .filter_map(|config| config.clone().set())
-            .filter_map(|config| config.source.set())
-        {
-            use meilisearch_types::milli::vector::settings::EmbedderSource;
-            match source {
-                EmbedderSource::OpenAi => sources.insert("openAi"),
-                EmbedderSource::HuggingFace => sources.insert("huggingFace"),
-                EmbedderSource::UserProvided => sources.insert("userProvided"),
-                EmbedderSource::Ollama => sources.insert("ollama"),
-                EmbedderSource::Rest => sources.insert("rest"),
-            };
-        }
-    };
-
-    let document_template_used = setting.as_ref().map(|map| {
-        map.values()
-            .filter_map(|config| config.clone().set())
-            .any(|config| config.document_template.set().is_some())
-    });
-
-    json!(
-        {
-            "total": setting.as_ref().map(|s| s.len()),
-            "sources": sources,
-            "document_template_used": document_template_used,
-        }
-    )
-}
 
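The removed free function's logic presumably moves into `EmbeddersAnalytics::new` in the new settings_analytics module (only its merge half is visible in this diff). Its core, reduced to a standalone sketch with a stand-in enum, since the real `EmbedderSource` lives in milli:

    use std::collections::HashSet;

    // Stand-in for meilisearch_types::milli::vector::settings::EmbedderSource.
    enum Source { OpenAi, HuggingFace, UserProvided, Ollama, Rest }

    // Record each distinct source kind once, as a stable string.
    fn source_names(configured: &[Source]) -> HashSet<&'static str> {
        let mut sources = HashSet::new();
        for source in configured {
            sources.insert(match source {
                Source::OpenAi => "openAi",
                Source::HuggingFace => "huggingFace",
                Source::UserProvided => "userProvided",
                Source::Ollama => "ollama",
                Source::Rest => "rest",
            });
        }
        sources
    }

    fn main() {
        let names = source_names(&[Source::Rest, Source::Rest, Source::OpenAi]);
        assert_eq!(names.len(), 2);
    }
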
 make_setting_route!(
     "/search-cutoff-ms",
     put,
@@ -632,14 +366,7 @@ make_setting_route!(
     >,
     search_cutoff_ms,
     "searchCutoffMs",
-    analytics,
-    |setting: &Option<u64>, req: &HttpRequest| {
-        analytics.publish(
-            "Search Cutoff Updated".to_string(),
-            serde_json::json!({"search_cutoff_ms": setting }),
-            Some(req),
-        );
-    }
+    SearchCutoffMsAnalytics
 );
 
 macro_rules! generate_configure {
@@ -660,6 +387,7 @@ generate_configure!(
     filterable_attributes,
     sortable_attributes,
     displayed_attributes,
+    localized_attributes,
     searchable_attributes,
     distinct_attribute,
     proximity_precision,
@@ -682,7 +410,7 @@ pub async fn update_all(
     body: AwebJson<Settings<Unchecked>, DeserrJsonError>,
     req: HttpRequest,
     opt: web::Data<Opt>,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
@@ -691,103 +419,45 @@ pub async fn update_all(
     let new_settings = validate_settings(new_settings, &index_scheduler)?;
 
     analytics.publish(
-        "Settings Updated".to_string(),
-        json!({
-           "ranking_rules": {
-                "words_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Words))),
-                "typo_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Typo))),
-                "proximity_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Proximity))),
-                "attribute_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Attribute))),
-                "sort_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Sort))),
-                "exactness_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Exactness))),
-                "values": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().filter(|s| !matches!(s, RankingRuleView::Asc(_) | RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
-            },
-            "searchable_attributes": {
-                "total": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()),
-                "with_wildcard": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.iter().any(|searchable| searchable == "*")),
-            },
-            "displayed_attributes": {
-                "total": new_settings.displayed_attributes.as_ref().set().map(|displayed| displayed.len()),
-                "with_wildcard": new_settings.displayed_attributes.as_ref().set().map(|displayed| displayed.iter().any(|displayed| displayed == "*")),
-            },
-            "sortable_attributes": {
-                "total": new_settings.sortable_attributes.as_ref().set().map(|sort| sort.len()),
-                "has_geo": new_settings.sortable_attributes.as_ref().set().map(|sort| sort.iter().any(|s| s == "_geo")),
-            },
-            "filterable_attributes": {
-                "total": new_settings.filterable_attributes.as_ref().set().map(|filter| filter.len()),
-                "has_geo": new_settings.filterable_attributes.as_ref().set().map(|filter| filter.iter().any(|s| s == "_geo")),
-            },
-            "distinct_attribute": {
-                "set": new_settings.distinct_attribute.as_ref().set().is_some()
-            },
-            "proximity_precision": {
-                "set": new_settings.proximity_precision.as_ref().set().is_some(),
-                "value": new_settings.proximity_precision.as_ref().set().copied().unwrap_or_default()
-            },
-            "typo_tolerance": {
-                "enabled": new_settings.typo_tolerance
-                    .as_ref()
-                    .set()
-                    .and_then(|s| s.enabled.as_ref().set())
-                    .copied(),
-                "disable_on_attributes": new_settings.typo_tolerance
-                    .as_ref()
-                    .set()
-                    .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
-                "disable_on_words": new_settings.typo_tolerance
-                    .as_ref()
-                    .set()
-                    .and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
-                "min_word_size_for_one_typo": new_settings.typo_tolerance
-                    .as_ref()
-                    .set()
-                    .and_then(|s| s.min_word_size_for_typos
-                        .as_ref()
-                        .set()
-                        .map(|s| s.one_typo.set()))
-                    .flatten(),
-                "min_word_size_for_two_typos": new_settings.typo_tolerance
-                    .as_ref()
-                    .set()
-                    .and_then(|s| s.min_word_size_for_typos
-                        .as_ref()
-                        .set()
-                        .map(|s| s.two_typos.set()))
-                    .flatten(),
-            },
-            "faceting": {
-                "max_values_per_facet": new_settings.faceting
-                    .as_ref()
-                    .set()
-                    .and_then(|s| s.max_values_per_facet.as_ref().set()),
-                "sort_facet_values_by_star_count": new_settings.faceting
-                    .as_ref()
-                    .set()
-                    .and_then(|s| {
-                        s.sort_facet_values_by.as_ref().set().map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
-                    }),
-                "sort_facet_values_by_total": new_settings.faceting
-                    .as_ref()
-                    .set()
-                    .and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
-            },
-            "pagination": {
-                "max_total_hits": new_settings.pagination
-                    .as_ref()
-                    .set()
-                    .and_then(|s| s.max_total_hits.as_ref().set()),
-            },
-            "stop_words": {
-                "total": new_settings.stop_words.as_ref().set().map(|stop_words| stop_words.len()),
-            },
-            "synonyms": {
-                "total": new_settings.synonyms.as_ref().set().map(|synonyms| synonyms.len()),
-            },
-            "embedders": crate::routes::indexes::settings::embedder_analytics(new_settings.embedders.as_ref().set()),
-            "search_cutoff_ms": new_settings.search_cutoff_ms.as_ref().set(),
-        }),
-        Some(&req),
+        SettingsAnalytics {
+            ranking_rules: RankingRulesAnalytics::new(new_settings.ranking_rules.as_ref().set()),
+            searchable_attributes: SearchableAttributesAnalytics::new(
+                new_settings.searchable_attributes.as_ref().set(),
+            ),
+            displayed_attributes: DisplayedAttributesAnalytics::new(
+                new_settings.displayed_attributes.as_ref().set(),
+            ),
+            sortable_attributes: SortableAttributesAnalytics::new(
+                new_settings.sortable_attributes.as_ref().set(),
+            ),
+            filterable_attributes: FilterableAttributesAnalytics::new(
+                new_settings.filterable_attributes.as_ref().set(),
+            ),
+            distinct_attribute: DistinctAttributeAnalytics::new(
+                new_settings.distinct_attribute.as_ref().set(),
+            ),
+            proximity_precision: ProximityPrecisionAnalytics::new(
+                new_settings.proximity_precision.as_ref().set(),
+            ),
+            typo_tolerance: TypoToleranceAnalytics::new(new_settings.typo_tolerance.as_ref().set()),
+            faceting: FacetingAnalytics::new(new_settings.faceting.as_ref().set()),
+            pagination: PaginationAnalytics::new(new_settings.pagination.as_ref().set()),
+            stop_words: StopWordsAnalytics::new(new_settings.stop_words.as_ref().set()),
+            synonyms: SynonymsAnalytics::new(new_settings.synonyms.as_ref().set()),
+            embedders: EmbeddersAnalytics::new(new_settings.embedders.as_ref().set()),
+            search_cutoff_ms: SearchCutoffMsAnalytics::new(
+                new_settings.search_cutoff_ms.as_ref().set(),
+            ),
+            locales: LocalesAnalytics::new(new_settings.localized_attributes.as_ref().set()),
+            dictionary: DictionaryAnalytics::new(new_settings.dictionary.as_ref().set()),
+            separator_tokens: SeparatorTokensAnalytics::new(
+                new_settings.separator_tokens.as_ref().set(),
+            ),
+            non_separator_tokens: NonSeparatorTokensAnalytics::new(
+                new_settings.non_separator_tokens.as_ref().set(),
+            ),
+        },
+        &req,
     );
 
     let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid);
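
Every field above goes through the same `as_ref().set()` chain. Assuming milli's three-state `Setting` type, a minimal reconstruction of just the two accessors used here (the real type carries more machinery):

    // Illustrative reconstruction of the Setting accessor pattern.
    enum Setting<T> { Set(T), Reset, NotSet }

    impl<T> Setting<T> {
        // Borrow the inner value without consuming the settings payload.
        fn as_ref(&self) -> Setting<&T> {
            match self {
                Setting::Set(t) => Setting::Set(t),
                Setting::Reset => Setting::Reset,
                Setting::NotSet => Setting::NotSet,
            }
        }
        // Collapse to Option: only an explicit `Set` carries a value.
        fn set(self) -> Option<T> {
            match self {
                Setting::Set(t) => Some(t),
                Setting::Reset | Setting::NotSet => None,
            }
        }
    }

    fn main() {
        let s: Setting<u32> = Setting::Set(7);
        assert_eq!(s.as_ref().set(), Some(&7));
        assert_eq!(Setting::<u32>::Reset.set(), None);
    }

So `new_settings.faceting.as_ref().set()` yields an `Option<&FacetingSettings>` that the typed analytics constructors consume directly.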
meilisearch/src/routes/indexes/settings_analytics.rs (new file, 621 additions)
@@ -0,0 +1,621 @@
//! All the structures used to make the analytics on the settings work.
//! The signatures of the `new` functions are not very rust idiomatic because they must match the types received
//! through the sub-settings route directly without any manipulation.
//! This is why we often use an `Option<&Vec<_>>` instead of an `Option<&[_]>`.

use meilisearch_types::locales::{Locale, LocalizedAttributesRuleView};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::vector::settings::EmbeddingSettings;
use meilisearch_types::settings::{
    FacetingSettings, PaginationSettings, ProximityPrecisionView, TypoSettings,
};
use meilisearch_types::{facet_values_sort::FacetValuesSort, settings::RankingRuleView};
use serde::Serialize;
use std::collections::{BTreeMap, BTreeSet, HashSet};

use crate::analytics::Aggregate;

#[derive(Serialize, Default)]
pub struct SettingsAnalytics {
    pub ranking_rules: RankingRulesAnalytics,
    pub searchable_attributes: SearchableAttributesAnalytics,
    pub displayed_attributes: DisplayedAttributesAnalytics,
    pub sortable_attributes: SortableAttributesAnalytics,
    pub filterable_attributes: FilterableAttributesAnalytics,
    pub distinct_attribute: DistinctAttributeAnalytics,
    pub proximity_precision: ProximityPrecisionAnalytics,
    pub typo_tolerance: TypoToleranceAnalytics,
    pub faceting: FacetingAnalytics,
    pub pagination: PaginationAnalytics,
    pub stop_words: StopWordsAnalytics,
    pub synonyms: SynonymsAnalytics,
    pub embedders: EmbeddersAnalytics,
    pub search_cutoff_ms: SearchCutoffMsAnalytics,
    pub locales: LocalesAnalytics,
    pub dictionary: DictionaryAnalytics,
    pub separator_tokens: SeparatorTokensAnalytics,
    pub non_separator_tokens: NonSeparatorTokensAnalytics,
}

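Each sub-struct below carries an `into_settings` helper that lifts it into a full `SettingsAnalytics` with every other field defaulted; that is what the reworked `make_setting_route!` publishes. A self-contained miniature of the pattern:

    // A sparse event type whose sub-parts can each be lifted into the
    // full struct with everything else defaulted.
    #[derive(Default)]
    struct SettingsEvent { stop_words_total: Option<usize>, synonyms_total: Option<usize> }

    struct StopWords { total: Option<usize> }

    impl StopWords {
        fn into_settings(self) -> SettingsEvent {
            SettingsEvent { stop_words_total: self.total, ..Default::default() }
        }
    }

    fn main() {
        let event = StopWords { total: Some(3) }.into_settings();
        assert_eq!(event.stop_words_total, Some(3));
        assert_eq!(event.synonyms_total, None); // untouched fields stay default
    }
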
impl Aggregate for SettingsAnalytics {
    fn event_name(&self) -> &'static str {
        "Settings Updated"
    }

    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
        Box::new(Self {
            ranking_rules: RankingRulesAnalytics {
                words_position: new
                    .ranking_rules
                    .words_position
                    .or(self.ranking_rules.words_position),
                typo_position: new.ranking_rules.typo_position.or(self.ranking_rules.typo_position),
                proximity_position: new
                    .ranking_rules
                    .proximity_position
                    .or(self.ranking_rules.proximity_position),
                attribute_position: new
                    .ranking_rules
                    .attribute_position
                    .or(self.ranking_rules.attribute_position),
                sort_position: new.ranking_rules.sort_position.or(self.ranking_rules.sort_position),
                exactness_position: new
                    .ranking_rules
                    .exactness_position
                    .or(self.ranking_rules.exactness_position),
                values: new.ranking_rules.values.or(self.ranking_rules.values),
            },
            searchable_attributes: SearchableAttributesAnalytics {
                total: new.searchable_attributes.total.or(self.searchable_attributes.total),
                with_wildcard: new
                    .searchable_attributes
                    .with_wildcard
                    .or(self.searchable_attributes.with_wildcard),
            },
            displayed_attributes: DisplayedAttributesAnalytics {
                total: new.displayed_attributes.total.or(self.displayed_attributes.total),
                with_wildcard: new
                    .displayed_attributes
                    .with_wildcard
                    .or(self.displayed_attributes.with_wildcard),
            },
            sortable_attributes: SortableAttributesAnalytics {
                total: new.sortable_attributes.total.or(self.sortable_attributes.total),
                has_geo: new.sortable_attributes.has_geo.or(self.sortable_attributes.has_geo),
            },
            filterable_attributes: FilterableAttributesAnalytics {
                total: new.filterable_attributes.total.or(self.filterable_attributes.total),
                has_geo: new.filterable_attributes.has_geo.or(self.filterable_attributes.has_geo),
            },
            distinct_attribute: DistinctAttributeAnalytics {
                set: self.distinct_attribute.set | new.distinct_attribute.set,
            },
            proximity_precision: ProximityPrecisionAnalytics {
                set: self.proximity_precision.set | new.proximity_precision.set,
                value: new.proximity_precision.value.or(self.proximity_precision.value),
            },
            typo_tolerance: TypoToleranceAnalytics {
                enabled: new.typo_tolerance.enabled.or(self.typo_tolerance.enabled),
                disable_on_attributes: new
                    .typo_tolerance
                    .disable_on_attributes
                    .or(self.typo_tolerance.disable_on_attributes),
                disable_on_words: new
                    .typo_tolerance
                    .disable_on_words
                    .or(self.typo_tolerance.disable_on_words),
                min_word_size_for_one_typo: new
                    .typo_tolerance
                    .min_word_size_for_one_typo
                    .or(self.typo_tolerance.min_word_size_for_one_typo),
                min_word_size_for_two_typos: new
                    .typo_tolerance
                    .min_word_size_for_two_typos
                    .or(self.typo_tolerance.min_word_size_for_two_typos),
            },
            faceting: FacetingAnalytics {
                max_values_per_facet: new
                    .faceting
                    .max_values_per_facet
                    .or(self.faceting.max_values_per_facet),
                sort_facet_values_by_star_count: new
                    .faceting
                    .sort_facet_values_by_star_count
                    .or(self.faceting.sort_facet_values_by_star_count),
                sort_facet_values_by_total: new
                    .faceting
                    .sort_facet_values_by_total
                    .or(self.faceting.sort_facet_values_by_total),
            },
            pagination: PaginationAnalytics {
                max_total_hits: new.pagination.max_total_hits.or(self.pagination.max_total_hits),
            },
            stop_words: StopWordsAnalytics {
                total: new.stop_words.total.or(self.stop_words.total),
            },
            synonyms: SynonymsAnalytics { total: new.synonyms.total.or(self.synonyms.total) },
            embedders: EmbeddersAnalytics {
                total: new.embedders.total.or(self.embedders.total),
                sources: match (self.embedders.sources, new.embedders.sources) {
                    (None, None) => None,
                    (Some(sources), None) | (None, Some(sources)) => Some(sources),
                    (Some(this), Some(other)) => Some(this.union(&other).cloned().collect()),
                },
                document_template_used: match (
                    self.embedders.document_template_used,
                    new.embedders.document_template_used,
                ) {
                    (None, None) => None,
                    (Some(used), None) | (None, Some(used)) => Some(used),
                    (Some(this), Some(other)) => Some(this | other),
                },
                document_template_max_bytes: match (
                    self.embedders.document_template_max_bytes,
                    new.embedders.document_template_max_bytes,
                ) {
                    (None, None) => None,
                    (Some(bytes), None) | (None, Some(bytes)) => Some(bytes),
                    (Some(this), Some(other)) => Some(this.max(other)),
                },
                binary_quantization_used: match (
                    self.embedders.binary_quantization_used,
                    new.embedders.binary_quantization_used,
                ) {
                    (None, None) => None,
                    (Some(bq), None) | (None, Some(bq)) => Some(bq),
                    (Some(this), Some(other)) => Some(this | other),
                },
            },
            search_cutoff_ms: SearchCutoffMsAnalytics {
                search_cutoff_ms: new
                    .search_cutoff_ms
                    .search_cutoff_ms
                    .or(self.search_cutoff_ms.search_cutoff_ms),
            },
            locales: LocalesAnalytics { locales: new.locales.locales.or(self.locales.locales) },
            dictionary: DictionaryAnalytics {
                total: new.dictionary.total.or(self.dictionary.total),
            },
            separator_tokens: SeparatorTokensAnalytics {
                total: new.separator_tokens.total.or(self.separator_tokens.total),
            },
            non_separator_tokens: NonSeparatorTokensAnalytics {
                total: new.non_separator_tokens.total.or(self.non_separator_tokens.total),
            },
        })
    }

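The merge strategy here differs from the search aggregator: settings are state, not counters, so `aggregate()` resolves each field with `Option::or`, keeping the newest observation and falling back to the accumulated one. In isolation:

    fn main() {
        // `new.field.or(self.field)`: the newer aggregate wins when it
        // observed a value, otherwise the old value is kept.
        let (old, new): (Option<usize>, Option<usize>) = (Some(2), Some(5));
        assert_eq!(new.or(old), Some(5)); // newest observation wins
        let (old, new): (Option<usize>, Option<usize>) = (Some(2), None);
        assert_eq!(new.or(old), Some(2)); // fall back to the accumulated value
    }
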
fn into_event(self: Box<Self>) -> serde_json::Value {
|
||||
serde_json::to_value(*self).unwrap_or_default()
|
||||
}
|
||||
}

#[derive(Serialize, Default)]
pub struct RankingRulesAnalytics {
    pub words_position: Option<usize>,
    pub typo_position: Option<usize>,
    pub proximity_position: Option<usize>,
    pub attribute_position: Option<usize>,
    pub sort_position: Option<usize>,
    pub exactness_position: Option<usize>,
    pub values: Option<String>,
}

impl RankingRulesAnalytics {
    pub fn new(rr: Option<&Vec<RankingRuleView>>) -> Self {
        RankingRulesAnalytics {
            words_position: rr.as_ref().and_then(|rr| {
                rr.iter()
                    .position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Words))
            }),
            typo_position: rr.as_ref().and_then(|rr| {
                rr.iter()
                    .position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Typo))
            }),
            proximity_position: rr.as_ref().and_then(|rr| {
                rr.iter().position(|s| {
                    matches!(s, meilisearch_types::settings::RankingRuleView::Proximity)
                })
            }),
            attribute_position: rr.as_ref().and_then(|rr| {
                rr.iter().position(|s| {
                    matches!(s, meilisearch_types::settings::RankingRuleView::Attribute)
                })
            }),
            sort_position: rr.as_ref().and_then(|rr| {
                rr.iter()
                    .position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Sort))
            }),
            exactness_position: rr.as_ref().and_then(|rr| {
                rr.iter().position(|s| {
                    matches!(s, meilisearch_types::settings::RankingRuleView::Exactness)
                })
            }),
            values: rr.as_ref().map(|rr| {
                rr.iter()
                    .filter(|s| {
                        matches!(
                            s,
                            meilisearch_types::settings::RankingRuleView::Asc(_)
                                | meilisearch_types::settings::RankingRuleView::Desc(_)
                        )
                    })
                    .map(|x| x.to_string())
                    .collect::<Vec<_>>()
                    .join(", ")
            }),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { ranking_rules: self, ..Default::default() }
    }
}
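
The constructor records, for each built-in rule, its index in the user's ordering via `Iterator::position`, and stringifies only the custom `Asc`/`Desc` rules. A reduced sketch with a stand-in enum (the real `RankingRuleView` lives in meilisearch_types):

// Simplified stand-in for RankingRuleView, only to show the shape of the data.
#[derive(Debug)]
enum Rule {
    Words,
    Typo,
    Sort,
    Asc(String),
}

fn main() {
    let rules = vec![Rule::Words, Rule::Typo, Rule::Asc("price".into()), Rule::Sort];
    // Index of the typo rule in the user's ordering, as captured by `typo_position`.
    let typo_position = rules.iter().position(|r| matches!(r, Rule::Typo));
    // Custom (Asc/Desc) rules are stringified and joined, as in `values`.
    let values = rules
        .iter()
        .filter(|r| matches!(r, Rule::Asc(_)))
        .map(|r| format!("{r:?}"))
        .collect::<Vec<_>>()
        .join(", ");
    assert_eq!(typo_position, Some(1));
    assert_eq!(values, r#"Asc("price")"#);
}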

#[derive(Serialize, Default)]
pub struct SearchableAttributesAnalytics {
    pub total: Option<usize>,
    pub with_wildcard: Option<bool>,
}

impl SearchableAttributesAnalytics {
    pub fn new(setting: Option<&Vec<String>>) -> Self {
        Self {
            total: setting.as_ref().map(|searchable| searchable.len()),
            with_wildcard: setting
                .as_ref()
                .map(|searchable| searchable.iter().any(|searchable| searchable == "*")),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { searchable_attributes: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct DisplayedAttributesAnalytics {
    pub total: Option<usize>,
    pub with_wildcard: Option<bool>,
}

impl DisplayedAttributesAnalytics {
    pub fn new(displayed: Option<&Vec<String>>) -> Self {
        Self {
            total: displayed.as_ref().map(|displayed| displayed.len()),
            with_wildcard: displayed
                .as_ref()
                .map(|displayed| displayed.iter().any(|displayed| displayed == "*")),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { displayed_attributes: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct SortableAttributesAnalytics {
    pub total: Option<usize>,
    pub has_geo: Option<bool>,
}

impl SortableAttributesAnalytics {
    pub fn new(setting: Option<&BTreeSet<String>>) -> Self {
        Self {
            total: setting.as_ref().map(|sort| sort.len()),
            has_geo: setting.as_ref().map(|sort| sort.contains("_geo")),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { sortable_attributes: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct FilterableAttributesAnalytics {
    pub total: Option<usize>,
    pub has_geo: Option<bool>,
}

impl FilterableAttributesAnalytics {
    pub fn new(setting: Option<&BTreeSet<String>>) -> Self {
        Self {
            total: setting.as_ref().map(|filter| filter.len()),
            has_geo: setting.as_ref().map(|filter| filter.contains("_geo")),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { filterable_attributes: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct DistinctAttributeAnalytics {
    pub set: bool,
}

impl DistinctAttributeAnalytics {
    pub fn new(distinct: Option<&String>) -> Self {
        Self { set: distinct.is_some() }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { distinct_attribute: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct ProximityPrecisionAnalytics {
    pub set: bool,
    pub value: Option<ProximityPrecisionView>,
}

impl ProximityPrecisionAnalytics {
    pub fn new(precision: Option<&ProximityPrecisionView>) -> Self {
        Self { set: precision.is_some(), value: precision.cloned() }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { proximity_precision: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct TypoToleranceAnalytics {
    pub enabled: Option<bool>,
    pub disable_on_attributes: Option<bool>,
    pub disable_on_words: Option<bool>,
    pub min_word_size_for_one_typo: Option<u8>,
    pub min_word_size_for_two_typos: Option<u8>,
}

impl TypoToleranceAnalytics {
    pub fn new(setting: Option<&TypoSettings>) -> Self {
        Self {
            enabled: setting.as_ref().map(|s| !matches!(s.enabled, Setting::Set(false))),
            disable_on_attributes: setting
                .as_ref()
                .and_then(|s| s.disable_on_attributes.as_ref().set().map(|m| !m.is_empty())),
            disable_on_words: setting
                .as_ref()
                .and_then(|s| s.disable_on_words.as_ref().set().map(|m| !m.is_empty())),
            min_word_size_for_one_typo: setting
                .as_ref()
                .and_then(|s| s.min_word_size_for_typos.as_ref().set().map(|s| s.one_typo.set()))
                .flatten(),
            min_word_size_for_two_typos: setting
                .as_ref()
                .and_then(|s| s.min_word_size_for_typos.as_ref().set().map(|s| s.two_typos.set()))
                .flatten(),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { typo_tolerance: self, ..Default::default() }
    }
}
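
These constructors lean on the three-state `Setting` type and its `set()` accessor: only an explicit `Set` value reaches the analytics payload. A self-contained sketch with a stand-in enum, assuming the usual `Set`/`Reset`/`NotSet` shape of the real type in meilisearch_types:

// Stand-in mirroring the `set()` accessor used by the constructors above.
#[allow(dead_code)]
enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}

impl<T> Setting<T> {
    fn set(self) -> Option<T> {
        match self {
            Setting::Set(v) => Some(v),
            // `Reset` and `NotSet` both collapse to `None`.
            _ => None,
        }
    }
}

fn main() {
    assert_eq!(Setting::Set(5u8).set(), Some(5));
    assert_eq!(Setting::<u8>::Reset.set(), None);
    assert_eq!(Setting::<u8>::NotSet.set(), None);
}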

#[derive(Serialize, Default)]
pub struct FacetingAnalytics {
    pub max_values_per_facet: Option<usize>,
    pub sort_facet_values_by_star_count: Option<bool>,
    pub sort_facet_values_by_total: Option<usize>,
}

impl FacetingAnalytics {
    pub fn new(setting: Option<&FacetingSettings>) -> Self {
        Self {
            max_values_per_facet: setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
            sort_facet_values_by_star_count: setting.as_ref().and_then(|s| {
                s.sort_facet_values_by
                    .as_ref()
                    .set()
                    .map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
            }),
            sort_facet_values_by_total: setting
                .as_ref()
                .and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { faceting: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct PaginationAnalytics {
    pub max_total_hits: Option<usize>,
}

impl PaginationAnalytics {
    pub fn new(setting: Option<&PaginationSettings>) -> Self {
        Self { max_total_hits: setting.as_ref().and_then(|s| s.max_total_hits.set()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { pagination: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct StopWordsAnalytics {
    pub total: Option<usize>,
}

impl StopWordsAnalytics {
    pub fn new(stop_words: Option<&BTreeSet<String>>) -> Self {
        Self { total: stop_words.as_ref().map(|stop_words| stop_words.len()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { stop_words: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct SynonymsAnalytics {
    pub total: Option<usize>,
}

impl SynonymsAnalytics {
    pub fn new(synonyms: Option<&BTreeMap<String, Vec<String>>>) -> Self {
        Self { total: synonyms.as_ref().map(|synonyms| synonyms.len()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { synonyms: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct EmbeddersAnalytics {
    // merged by keeping the last set value
    pub total: Option<usize>,
    // merged by taking the union of the sources
    pub sources: Option<HashSet<String>>,
    // merged with |=
    pub document_template_used: Option<bool>,
    // merged by taking the max
    pub document_template_max_bytes: Option<usize>,
    // merged with |=
    pub binary_quantization_used: Option<bool>,
}

impl EmbeddersAnalytics {
    pub fn new(setting: Option<&BTreeMap<String, Setting<EmbeddingSettings>>>) -> Self {
        let mut sources = std::collections::HashSet::new();

        if let Some(s) = &setting {
            for source in s
                .values()
                .filter_map(|config| config.clone().set())
                .filter_map(|config| config.source.set())
            {
                use meilisearch_types::milli::vector::settings::EmbedderSource;
                match source {
                    EmbedderSource::OpenAi => sources.insert("openAi".to_string()),
                    EmbedderSource::HuggingFace => sources.insert("huggingFace".to_string()),
                    EmbedderSource::UserProvided => sources.insert("userProvided".to_string()),
                    EmbedderSource::Ollama => sources.insert("ollama".to_string()),
                    EmbedderSource::Rest => sources.insert("rest".to_string()),
                };
            }
        };

        Self {
            total: setting.as_ref().map(|s| s.len()),
            sources: Some(sources),
            document_template_used: setting.as_ref().map(|map| {
                map.values()
                    .filter_map(|config| config.clone().set())
                    .any(|config| config.document_template.set().is_some())
            }),
            document_template_max_bytes: setting.as_ref().and_then(|map| {
                map.values()
                    .filter_map(|config| config.clone().set())
                    .filter_map(|config| config.document_template_max_bytes.set())
                    .max()
            }),
            binary_quantization_used: setting.as_ref().map(|map| {
                map.values()
                    .filter_map(|config| config.clone().set())
                    .any(|config| config.binary_quantized.set().is_some())
            }),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { embedders: self, ..Default::default() }
    }
}
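
`EmbeddersAnalytics::new` repeatedly reduces over the embedder map: keep the explicitly configured values, then fold them into a single statistic (`any`, `max`, or a set union). A reduced sketch of that shape, with hypothetical sample data:

use std::collections::BTreeMap;

fn main() {
    // Per-embedder configured value, or None when the setting is absent.
    let max_bytes: BTreeMap<&str, Option<usize>> =
        BTreeMap::from([("default", Some(400)), ("large", Some(2000)), ("rest", None)]);
    // Keep configured values only, then take the max across embedders,
    // mirroring `document_template_max_bytes` above.
    let document_template_max_bytes = max_bytes.values().filter_map(|b| *b).max();
    assert_eq!(document_template_max_bytes, Some(2000));
}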

#[derive(Serialize, Default)]
#[serde(transparent)]
pub struct SearchCutoffMsAnalytics {
    pub search_cutoff_ms: Option<u64>,
}

impl SearchCutoffMsAnalytics {
    pub fn new(setting: Option<&u64>) -> Self {
        Self { search_cutoff_ms: setting.copied() }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { search_cutoff_ms: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
#[serde(transparent)]
pub struct LocalesAnalytics {
    pub locales: Option<BTreeSet<Locale>>,
}

impl LocalesAnalytics {
    pub fn new(rules: Option<&Vec<LocalizedAttributesRuleView>>) -> Self {
        LocalesAnalytics {
            locales: rules.as_ref().map(|rules| {
                rules
                    .iter()
                    .flat_map(|rule| rule.locales.iter().cloned())
                    .collect::<std::collections::BTreeSet<_>>()
            }),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { locales: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct DictionaryAnalytics {
    pub total: Option<usize>,
}

impl DictionaryAnalytics {
    pub fn new(dictionary: Option<&BTreeSet<String>>) -> Self {
        Self { total: dictionary.as_ref().map(|dictionary| dictionary.len()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { dictionary: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct SeparatorTokensAnalytics {
    pub total: Option<usize>,
}

impl SeparatorTokensAnalytics {
    pub fn new(separator_tokens: Option<&BTreeSet<String>>) -> Self {
        Self { total: separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()) }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { separator_tokens: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct NonSeparatorTokensAnalytics {
    pub total: Option<usize>,
}

impl NonSeparatorTokensAnalytics {
    pub fn new(non_separator_tokens: Option<&BTreeSet<String>>) -> Self {
        Self {
            total: non_separator_tokens
                .as_ref()
                .map(|non_separator_tokens| non_separator_tokens.len()),
        }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { non_separator_tokens: self, ..Default::default() }
    }
}
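
Every analytics type above ends with the same `into_settings` lifting: a route patches exactly one field of the large `SettingsAnalytics` and leaves every other field at its `None` default through struct update syntax, so sparse snapshots can later be merged. A reduced stand-in (not the real type) showing that pattern:

#[derive(Default, Debug)]
struct Snapshot {
    stop_words: Option<usize>,
    synonyms: Option<usize>,
}

fn main() {
    // Patch one field, default the rest: `..Default::default()`.
    let only_stop_words = Snapshot { stop_words: Some(12), ..Default::default() };
    assert_eq!(only_stop_words.stop_words, Some(12));
    assert!(only_stop_words.synonyms.is_none());
}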

@@ -13,9 +13,10 @@ use serde_json::Value;
 use tracing::debug;
 
 use super::ActionPolicy;
-use crate::analytics::{Analytics, SimilarAggregator};
+use crate::analytics::Analytics;
 use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
+use crate::routes::indexes::similar_analytics::{SimilarAggregator, SimilarGET, SimilarPOST};
 use crate::search::{
     add_search_rules, perform_similar, RankingScoreThresholdSimilar, RetrieveVectors, SearchKind,
     SimilarQuery, SimilarResult, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
@@ -34,13 +35,13 @@ pub async fn similar_get(
     index_uid: web::Path<String>,
     params: AwebQueryParameter<SimilarQueryGet, DeserrQueryParamError>,
     req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let query = params.0.try_into()?;
 
-    let mut aggregate = SimilarAggregator::from_query(&query, &req);
+    let mut aggregate = SimilarAggregator::<SimilarGET>::from_query(&query);
 
     debug!(parameters = ?query, "Similar get");
 
@@ -49,7 +50,7 @@ pub async fn similar_get(
     if let Ok(similar) = &similar {
         aggregate.succeed(similar);
     }
-    analytics.get_similar(aggregate);
+    analytics.publish(aggregate, &req);
 
     let similar = similar?;
 
@@ -62,21 +63,21 @@ pub async fn similar_post(
     index_uid: web::Path<String>,
     params: AwebJson<SimilarQuery, DeserrJsonError>,
     req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let query = params.into_inner();
     debug!(parameters = ?query, "Similar post");
 
-    let mut aggregate = SimilarAggregator::from_query(&query, &req);
+    let mut aggregate = SimilarAggregator::<SimilarPOST>::from_query(&query);
 
     let similar = similar(index_scheduler, index_uid, query).await;
 
     if let Ok(similar) = &similar {
         aggregate.succeed(similar);
     }
-    analytics.post_similar(aggregate);
+    analytics.publish(aggregate, &req);
 
     let similar = similar?;
 
@@ -102,8 +103,8 @@ async fn similar(
 
     let index = index_scheduler.index(&index_uid)?;
 
-    let (embedder_name, embedder) =
-        SearchKind::embedder(&index_scheduler, &index, query.embedder.as_deref(), None)?;
+    let (embedder_name, embedder, quantized) =
+        SearchKind::embedder(&index_scheduler, &index, &query.embedder, None)?;
 
     tokio::task::spawn_blocking(move || {
         perform_similar(
@@ -111,6 +112,7 @@ async fn similar(
             query,
             embedder_name,
             embedder,
+            quantized,
             retrieve_vectors,
             index_scheduler.features(),
         )
@@ -139,8 +141,8 @@ pub struct SimilarQueryGet {
     show_ranking_score_details: Param<bool>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSimilarRankingScoreThreshold>, default)]
     pub ranking_score_threshold: Option<RankingScoreThresholdGet>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidEmbedder>)]
-    pub embedder: Option<String>,
+    #[deserr(error = DeserrQueryParamError<InvalidEmbedder>)]
+    pub embedder: String,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, deserr::Deserr)]

meilisearch/src/routes/indexes/similar_analytics.rs (new file, 235 lines)
@@ -0,0 +1,235 @@
use std::collections::{BinaryHeap, HashMap};

use once_cell::sync::Lazy;
use regex::Regex;
use serde_json::{json, Value};

use crate::{
    aggregate_methods,
    analytics::{Aggregate, AggregateMethod},
    search::{SimilarQuery, SimilarResult},
};

aggregate_methods!(
    SimilarPOST => "Similar POST",
    SimilarGET => "Similar GET",
);

#[derive(Default)]
pub struct SimilarAggregator<Method: AggregateMethod> {
    // requests
    total_received: usize,
    total_succeeded: usize,
    time_spent: BinaryHeap<usize>,

    // filter
    filter_with_geo_radius: bool,
    filter_with_geo_bounding_box: bool,
    // every time a request has a filter, this field must be incremented by the number of terms it contains
    filter_sum_of_criteria_terms: usize,
    // every time a request has a filter, this field must be incremented by one
    filter_total_number_of_criteria: usize,
    used_syntax: HashMap<String, usize>,

    // whether the request asked for the documents' vectors to be returned
    retrieve_vectors: bool,

    // pagination
    max_limit: usize,
    max_offset: usize,

    // formatting
    max_attributes_to_retrieve: usize,

    // scoring
    show_ranking_score: bool,
    show_ranking_score_details: bool,
    ranking_score_threshold: bool,

    marker: std::marker::PhantomData<Method>,
}

impl<Method: AggregateMethod> SimilarAggregator<Method> {
    #[allow(clippy::field_reassign_with_default)]
    pub fn from_query(query: &SimilarQuery) -> Self {
        let SimilarQuery {
            id: _,
            embedder: _,
            offset,
            limit,
            attributes_to_retrieve: _,
            retrieve_vectors,
            show_ranking_score,
            show_ranking_score_details,
            filter,
            ranking_score_threshold,
        } = query;

        let mut ret = Self::default();

        ret.total_received = 1;

        if let Some(ref filter) = filter {
            static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
            ret.filter_total_number_of_criteria = 1;

            let syntax = match filter {
                Value::String(_) => "string".to_string(),
                Value::Array(values) => {
                    if values.iter().map(|v| v.to_string()).any(|s| RE.is_match(&s)) {
                        "mixed".to_string()
                    } else {
                        "array".to_string()
                    }
                }
                _ => "none".to_string(),
            };
            // record which filter syntax this request used
            ret.used_syntax.insert(syntax, 1);

            let stringified_filters = filter.to_string();
            ret.filter_with_geo_radius = stringified_filters.contains("_geoRadius(");
            ret.filter_with_geo_bounding_box = stringified_filters.contains("_geoBoundingBox(");
            ret.filter_sum_of_criteria_terms = RE.split(&stringified_filters).count();
        }

        ret.max_limit = *limit;
        ret.max_offset = *offset;

        ret.show_ranking_score = *show_ranking_score;
        ret.show_ranking_score_details = *show_ranking_score_details;
        ret.ranking_score_threshold = ranking_score_threshold.is_some();

        ret.retrieve_vectors = *retrieve_vectors;

        ret
    }

    pub fn succeed(&mut self, result: &SimilarResult) {
        let SimilarResult { id: _, hits: _, processing_time_ms, hits_info: _ } = result;

        self.total_succeeded = self.total_succeeded.saturating_add(1);

        self.time_spent.push(*processing_time_ms as usize);
    }
}

impl<Method: AggregateMethod> Aggregate for SimilarAggregator<Method> {
    fn event_name(&self) -> &'static str {
        Method::event_name()
    }

    /// Aggregate one [SimilarAggregator] into another.
    fn aggregate(mut self: Box<Self>, new: Box<Self>) -> Box<Self> {
        let Self {
            total_received,
            total_succeeded,
            mut time_spent,
            filter_with_geo_radius,
            filter_with_geo_bounding_box,
            filter_sum_of_criteria_terms,
            filter_total_number_of_criteria,
            used_syntax,
            max_limit,
            max_offset,
            max_attributes_to_retrieve,
            show_ranking_score,
            show_ranking_score_details,
            ranking_score_threshold,
            retrieve_vectors,
            marker: _,
        } = *new;

        // request
        self.total_received = self.total_received.saturating_add(total_received);
        self.total_succeeded = self.total_succeeded.saturating_add(total_succeeded);
        self.time_spent.append(&mut time_spent);

        // filter
        self.filter_with_geo_radius |= filter_with_geo_radius;
        self.filter_with_geo_bounding_box |= filter_with_geo_bounding_box;
        self.filter_sum_of_criteria_terms =
            self.filter_sum_of_criteria_terms.saturating_add(filter_sum_of_criteria_terms);
        self.filter_total_number_of_criteria =
            self.filter_total_number_of_criteria.saturating_add(filter_total_number_of_criteria);
        for (key, value) in used_syntax.into_iter() {
            let used_syntax = self.used_syntax.entry(key).or_insert(0);
            *used_syntax = used_syntax.saturating_add(value);
        }

        self.retrieve_vectors |= retrieve_vectors;

        // pagination
        self.max_limit = self.max_limit.max(max_limit);
        self.max_offset = self.max_offset.max(max_offset);

        // formatting
        self.max_attributes_to_retrieve =
            self.max_attributes_to_retrieve.max(max_attributes_to_retrieve);

        // scoring
        self.show_ranking_score |= show_ranking_score;
        self.show_ranking_score_details |= show_ranking_score_details;
        self.ranking_score_threshold |= ranking_score_threshold;

        self
    }

    fn into_event(self: Box<Self>) -> serde_json::Value {
        let Self {
            total_received,
            total_succeeded,
            time_spent,
            filter_with_geo_radius,
            filter_with_geo_bounding_box,
            filter_sum_of_criteria_terms,
            filter_total_number_of_criteria,
            used_syntax,
            max_limit,
            max_offset,
            max_attributes_to_retrieve,
            show_ranking_score,
            show_ranking_score_details,
            ranking_score_threshold,
            retrieve_vectors,
            marker: _,
        } = *self;

        // we get all the values in sorted (ascending) order
        let time_spent = time_spent.into_sorted_vec();
        // the index of the 99th percentile value
        let percentile_99th = time_spent.len() * 99 / 100;
        // we are only interested in the slowest value of the 99% fastest results
        let time_spent = time_spent.get(percentile_99th);

        json!({
            "requests": {
                "99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
                "total_succeeded": total_succeeded,
                "total_failed": total_received.saturating_sub(total_succeeded), // just to be sure we never panic
                "total_received": total_received,
            },
            "filter": {
                "with_geoRadius": filter_with_geo_radius,
                "with_geoBoundingBox": filter_with_geo_bounding_box,
                "avg_criteria_number": format!("{:.2}", filter_sum_of_criteria_terms as f64 / filter_total_number_of_criteria as f64),
                "most_used_syntax": used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
            },
            "vector": {
                "retrieve_vectors": retrieve_vectors,
            },
            "pagination": {
                "max_limit": max_limit,
                "max_offset": max_offset,
            },
            "formatting": {
                "max_attributes_to_retrieve": max_attributes_to_retrieve,
            },
            "scoring": {
                "show_ranking_score": show_ranking_score,
                "show_ranking_score_details": show_ranking_score_details,
                "ranking_score_threshold": ranking_score_threshold,
            }
        })
    }
}
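
`into_event` above reports the 99th-percentile response time by draining the `BinaryHeap` into ascending order and indexing at `len * 99 / 100`. A minimal sketch of that computation with synthetic timings:

use std::collections::BinaryHeap;

fn main() {
    // 200 synthetic processing times, in milliseconds.
    let times: BinaryHeap<usize> = (1..=200_usize).collect();
    // Drain the heap into an ascending vector, as in `into_event`.
    let sorted = times.into_sorted_vec();
    // Index of the 99th percentile value: 200 * 99 / 100 == 198.
    let idx = sorted.len() * 99 / 100;
    // The slowest value among the 99% fastest responses.
    assert_eq!(sorted[idx], 199);
}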

@@ -25,6 +25,7 @@ pub mod indexes;
 mod logs;
 mod metrics;
 mod multi_search;
+mod multi_search_analytics;
 mod snapshot;
 mod swap_indexes;
 pub mod tasks;

@@ -9,7 +9,7 @@ use meilisearch_types::keys::actions;
 use serde::Serialize;
 use tracing::debug;
 
-use crate::analytics::{Analytics, MultiSearchAggregator};
+use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::policies::ActionPolicy;
 use crate::extractors::authentication::{AuthenticationError, GuardedData};
@@ -21,6 +21,8 @@ use crate::search::{
 };
 use crate::search_queue::SearchQueue;
 
+use super::multi_search_analytics::MultiSearchAggregator;
+
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::post().to(SeqHandler(multi_search_with_post))));
 }
@@ -35,15 +37,15 @@ pub async fn multi_search_with_post(
     search_queue: Data<SearchQueue>,
     params: AwebJson<FederatedSearch, DeserrJsonError>,
     req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     // Since we don't want to process half of the search requests and then get a permit refused
     // we're going to get one permit for the whole duration of the multi-search request.
-    let _permit = search_queue.try_get_search_permit().await?;
+    let permit = search_queue.try_get_search_permit().await?;
 
     let federated_search = params.into_inner();
 
-    let mut multi_aggregate = MultiSearchAggregator::from_federated_search(&federated_search, &req);
+    let mut multi_aggregate = MultiSearchAggregator::from_federated_search(&federated_search);
 
     let FederatedSearch { mut queries, federation } = federated_search;
 
@@ -81,12 +83,13 @@ pub async fn multi_search_with_post(
                     perform_federated_search(&index_scheduler, queries, federation, features)
                 })
                 .await;
+                permit.drop().await;
 
                 if let Ok(Ok(_)) = search_result {
                     multi_aggregate.succeed();
                 }
 
-                analytics.post_multi_search(multi_aggregate);
+                analytics.publish(multi_aggregate, &req);
                 HttpResponse::Ok().json(search_result??)
             }
             None => {
@@ -143,11 +146,12 @@ pub async fn multi_search_with_post(
                 Ok(search_results)
             }
             .await;
+            permit.drop().await;
 
             if search_results.is_ok() {
                 multi_aggregate.succeed();
            }
-            analytics.post_multi_search(multi_aggregate);
+            analytics.publish(multi_aggregate, &req);
 
             let search_results = search_results.map_err(|(mut err, query_index)| {
                 // Add the query index that failed as context for the error message.

meilisearch/src/routes/multi_search_analytics.rs (new file, 170 lines)
@@ -0,0 +1,170 @@
use std::collections::HashSet;

use serde_json::json;

use crate::{
    analytics::Aggregate,
    search::{FederatedSearch, SearchQueryWithIndex},
};

#[derive(Default)]
pub struct MultiSearchAggregator {
    // requests
    total_received: usize,
    total_succeeded: usize,

    // sum of the number of distinct indexes in each single request, use with total_received to compute an avg
    total_distinct_index_count: usize,
    // number of queries with a single index, use with total_received to compute a proportion
    total_single_index: usize,

    // sum of the number of search queries in the requests, use with total_received to compute an average
    total_search_count: usize,

    // scoring
    show_ranking_score: bool,
    show_ranking_score_details: bool,

    // federation
    use_federation: bool,
}

impl MultiSearchAggregator {
    pub fn from_federated_search(federated_search: &FederatedSearch) -> Self {
        let use_federation = federated_search.federation.is_some();

        let distinct_indexes: HashSet<_> = federated_search
            .queries
            .iter()
            .map(|query| {
                let query = &query;
                // make sure we get a compilation error if a field gets added to / removed from SearchQueryWithIndex
                let SearchQueryWithIndex {
                    index_uid,
                    federation_options: _,
                    q: _,
                    vector: _,
                    offset: _,
                    limit: _,
                    page: _,
                    hits_per_page: _,
                    attributes_to_retrieve: _,
                    retrieve_vectors: _,
                    attributes_to_crop: _,
                    crop_length: _,
                    attributes_to_highlight: _,
                    show_ranking_score: _,
                    show_ranking_score_details: _,
                    show_matches_position: _,
                    filter: _,
                    sort: _,
                    distinct: _,
                    facets: _,
                    highlight_pre_tag: _,
                    highlight_post_tag: _,
                    crop_marker: _,
                    matching_strategy: _,
                    attributes_to_search_on: _,
                    hybrid: _,
                    ranking_score_threshold: _,
                    locales: _,
                } = query;

                index_uid.as_str()
            })
            .collect();

        let show_ranking_score =
            federated_search.queries.iter().any(|query| query.show_ranking_score);
        let show_ranking_score_details =
            federated_search.queries.iter().any(|query| query.show_ranking_score_details);

        Self {
            total_received: 1,
            total_succeeded: 0,
            total_distinct_index_count: distinct_indexes.len(),
            total_single_index: if distinct_indexes.len() == 1 { 1 } else { 0 },
            total_search_count: federated_search.queries.len(),
            show_ranking_score,
            show_ranking_score_details,
            use_federation,
        }
    }

    pub fn succeed(&mut self) {
        self.total_succeeded = self.total_succeeded.saturating_add(1);
    }
}

impl Aggregate for MultiSearchAggregator {
    fn event_name(&self) -> &'static str {
        "Documents Searched by Multi-Search POST"
    }

    /// Aggregate one [MultiSearchAggregator] into another.
    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
        // write the aggregate in a way that will cause a compilation error if a field is added.

        // get ownership of self, replacing it by a default value.
        let this = *self;

        let total_received = this.total_received.saturating_add(new.total_received);
        let total_succeeded = this.total_succeeded.saturating_add(new.total_succeeded);
        let total_distinct_index_count =
            this.total_distinct_index_count.saturating_add(new.total_distinct_index_count);
        let total_single_index = this.total_single_index.saturating_add(new.total_single_index);
        let total_search_count = this.total_search_count.saturating_add(new.total_search_count);
        let show_ranking_score = this.show_ranking_score || new.show_ranking_score;
        let show_ranking_score_details =
            this.show_ranking_score_details || new.show_ranking_score_details;
        let use_federation = this.use_federation || new.use_federation;

        Box::new(Self {
            total_received,
            total_succeeded,
            total_distinct_index_count,
            total_single_index,
            total_search_count,
            show_ranking_score,
            show_ranking_score_details,
            use_federation,
        })
    }

    fn into_event(self: Box<Self>) -> serde_json::Value {
        let Self {
            total_received,
            total_succeeded,
            total_distinct_index_count,
            total_single_index,
            total_search_count,
            show_ranking_score,
            show_ranking_score_details,
            use_federation,
        } = *self;

        json!({
            "requests": {
                "total_succeeded": total_succeeded,
                "total_failed": total_received.saturating_sub(total_succeeded), // just to be sure we never panic
                "total_received": total_received,
            },
            "indexes": {
                "total_single_index": total_single_index,
                "total_distinct_index_count": total_distinct_index_count,
                "avg_distinct_index_count": (total_distinct_index_count as f64) / (total_received as f64), // total_received is never 0 here: we return early otherwise
            },
            "searches": {
                "total_search_count": total_search_count,
                "avg_search_count": (total_search_count as f64) / (total_received as f64),
            },
            "scoring": {
                "show_ranking_score": show_ranking_score,
                "show_ranking_score_details": show_ranking_score_details,
            },
            "federation": {
                "use_federation": use_federation,
            }
        })
    }
}
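
The averaged metrics fall out of plain counters: totals are summed each time two aggregators merge, and the division only happens once, at `into_event` time. A small sketch of that arithmetic with hypothetical values:

fn main() {
    // Two requests: one with 3 queries, one with 5.
    let (received_a, searches_a) = (1_usize, 3_usize);
    let (received_b, searches_b) = (1_usize, 5_usize);
    // Aggregation sums the counters.
    let total_received = received_a + received_b;
    let total_search_count = searches_a + searches_b;
    // The event computes the average at serialization time.
    let avg_search_count = total_search_count as f64 / total_received as f64;
    assert_eq!(avg_search_count, 4.0);
}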

@@ -3,7 +3,6 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
-use serde_json::json;
 use tracing::debug;
 
 use crate::analytics::Analytics;
@@ -17,13 +16,15 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::post().to(SeqHandler(create_snapshot))));
 }
 
+crate::empty_analytics!(SnapshotAnalytics, "Snapshot Created");
+
 pub async fn create_snapshot(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SNAPSHOTS_CREATE }>, Data<IndexScheduler>>,
     req: HttpRequest,
     opt: web::Data<Opt>,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    analytics.publish("Snapshot Created".to_string(), json!({}), Some(&req));
+    analytics.publish(SnapshotAnalytics::default(), &req);
 
     let task = KindWithContent::SnapshotCreation;
     let uid = get_task_id(&req, &opt)?;

@@ -8,10 +8,10 @@ use meilisearch_types::error::deserr_codes::InvalidSwapIndexes;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::tasks::{IndexSwap, KindWithContent};
-use serde_json::json;
+use serde::Serialize;
 
 use super::{get_task_id, is_dry_run, SummarizedTaskView};
-use crate::analytics::Analytics;
+use crate::analytics::{Aggregate, Analytics};
 use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::{AuthenticationError, GuardedData};
@@ -29,21 +29,36 @@ pub struct SwapIndexesPayload {
     indexes: Vec<IndexUid>,
 }
 
+#[derive(Serialize)]
+struct IndexSwappedAnalytics {
+    swap_operation_number: usize,
+}
+
+impl Aggregate for IndexSwappedAnalytics {
+    fn event_name(&self) -> &'static str {
+        "Indexes Swapped"
+    }
+
+    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
+        Box::new(Self {
+            swap_operation_number: self.swap_operation_number.max(new.swap_operation_number),
+        })
+    }
+
+    fn into_event(self: Box<Self>) -> serde_json::Value {
+        serde_json::to_value(*self).unwrap_or_default()
+    }
+}
+
 pub async fn swap_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
     params: AwebJson<Vec<SwapIndexesPayload>, DeserrJsonError>,
     req: HttpRequest,
     opt: web::Data<Opt>,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let params = params.into_inner();
-    analytics.publish(
-        "Indexes Swapped".to_string(),
-        json!({
-            "swap_operation_number": params.len(),
-        }),
-        Some(&req),
-    );
+    analytics.publish(IndexSwappedAnalytics { swap_operation_number: params.len() }, &req);
     let filters = index_scheduler.filters();
 
     let mut swaps = vec![];

@@ -12,18 +12,17 @@ use meilisearch_types::star_or::{OptionStarOr, OptionStarOrList};
 use meilisearch_types::task_view::TaskView;
 use meilisearch_types::tasks::{Kind, KindWithContent, Status};
 use serde::Serialize;
-use serde_json::json;
 use time::format_description::well_known::Rfc3339;
 use time::macros::format_description;
 use time::{Date, Duration, OffsetDateTime, Time};
 use tokio::task;
 
 use super::{get_task_id, is_dry_run, SummarizedTaskView};
-use crate::analytics::Analytics;
+use crate::analytics::{Aggregate, AggregateMethod, Analytics};
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::Opt;
+use crate::{aggregate_methods, Opt};
 
 const DEFAULT_LIMIT: u32 = 20;
 
@@ -158,12 +157,69 @@ impl TaskDeletionOrCancelationQuery {
     }
 }
 
+aggregate_methods!(
+    CancelTasks => "Tasks Canceled",
+    DeleteTasks => "Tasks Deleted",
+);
+
+#[derive(Serialize)]
+struct TaskFilterAnalytics<Method: AggregateMethod> {
+    filtered_by_uid: bool,
+    filtered_by_index_uid: bool,
+    filtered_by_type: bool,
+    filtered_by_status: bool,
+    filtered_by_canceled_by: bool,
+    filtered_by_before_enqueued_at: bool,
+    filtered_by_after_enqueued_at: bool,
+    filtered_by_before_started_at: bool,
+    filtered_by_after_started_at: bool,
+    filtered_by_before_finished_at: bool,
+    filtered_by_after_finished_at: bool,
+
+    #[serde(skip)]
+    marker: std::marker::PhantomData<Method>,
+}
+
+impl<Method: AggregateMethod + 'static> Aggregate for TaskFilterAnalytics<Method> {
+    fn event_name(&self) -> &'static str {
+        Method::event_name()
+    }
+
+    fn aggregate(self: Box<Self>, new: Box<Self>) -> Box<Self> {
+        Box::new(Self {
+            filtered_by_uid: self.filtered_by_uid | new.filtered_by_uid,
+            filtered_by_index_uid: self.filtered_by_index_uid | new.filtered_by_index_uid,
+            filtered_by_type: self.filtered_by_type | new.filtered_by_type,
+            filtered_by_status: self.filtered_by_status | new.filtered_by_status,
+            filtered_by_canceled_by: self.filtered_by_canceled_by | new.filtered_by_canceled_by,
+            filtered_by_before_enqueued_at: self.filtered_by_before_enqueued_at
+                | new.filtered_by_before_enqueued_at,
+            filtered_by_after_enqueued_at: self.filtered_by_after_enqueued_at
+                | new.filtered_by_after_enqueued_at,
+            filtered_by_before_started_at: self.filtered_by_before_started_at
+                | new.filtered_by_before_started_at,
+            filtered_by_after_started_at: self.filtered_by_after_started_at
+                | new.filtered_by_after_started_at,
+            filtered_by_before_finished_at: self.filtered_by_before_finished_at
+                | new.filtered_by_before_finished_at,
+            filtered_by_after_finished_at: self.filtered_by_after_finished_at
+                | new.filtered_by_after_finished_at,
+
+            marker: std::marker::PhantomData,
+        })
+    }
+
+    fn into_event(self: Box<Self>) -> serde_json::Value {
+        serde_json::to_value(*self).unwrap_or_default()
+    }
+}
+
 async fn cancel_tasks(
     index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
     params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
     req: HttpRequest,
     opt: web::Data<Opt>,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let params = params.into_inner();
 
@@ -172,21 +228,22 @@ async fn cancel_tasks(
     }
 
     analytics.publish(
-        "Tasks Canceled".to_string(),
-        json!({
-            "filtered_by_uid": params.uids.is_some(),
-            "filtered_by_index_uid": params.index_uids.is_some(),
-            "filtered_by_type": params.types.is_some(),
-            "filtered_by_status": params.statuses.is_some(),
-            "filtered_by_canceled_by": params.canceled_by.is_some(),
-            "filtered_by_before_enqueued_at": params.before_enqueued_at.is_some(),
-            "filtered_by_after_enqueued_at": params.after_enqueued_at.is_some(),
-            "filtered_by_before_started_at": params.before_started_at.is_some(),
-            "filtered_by_after_started_at": params.after_started_at.is_some(),
-            "filtered_by_before_finished_at": params.before_finished_at.is_some(),
-            "filtered_by_after_finished_at": params.after_finished_at.is_some(),
-        }),
-        Some(&req),
+        TaskFilterAnalytics::<CancelTasks> {
+            filtered_by_uid: params.uids.is_some(),
+            filtered_by_index_uid: params.index_uids.is_some(),
+            filtered_by_type: params.types.is_some(),
+            filtered_by_status: params.statuses.is_some(),
+            filtered_by_canceled_by: params.canceled_by.is_some(),
+            filtered_by_before_enqueued_at: params.before_enqueued_at.is_some(),
+            filtered_by_after_enqueued_at: params.after_enqueued_at.is_some(),
+            filtered_by_before_started_at: params.before_started_at.is_some(),
+            filtered_by_after_started_at: params.after_started_at.is_some(),
+            filtered_by_before_finished_at: params.before_finished_at.is_some(),
+            filtered_by_after_finished_at: params.after_finished_at.is_some(),
+
+            marker: std::marker::PhantomData,
+        },
+        &req,
     );
 
     let query = params.into_query();
@@ -214,7 +271,7 @@ async fn delete_tasks(
     params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
     req: HttpRequest,
     opt: web::Data<Opt>,
-    analytics: web::Data<dyn Analytics>,
+    analytics: web::Data<Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let params = params.into_inner();
 
@@ -223,22 +280,24 @@ async fn delete_tasks(
     }
 
     analytics.publish(
-        "Tasks Deleted".to_string(),
-        json!({
-            "filtered_by_uid": params.uids.is_some(),
-            "filtered_by_index_uid": params.index_uids.is_some(),
-            "filtered_by_type": params.types.is_some(),
-            "filtered_by_status": params.statuses.is_some(),
-            "filtered_by_canceled_by": params.canceled_by.is_some(),
-            "filtered_by_before_enqueued_at": params.before_enqueued_at.is_some(),
-            "filtered_by_after_enqueued_at": params.after_enqueued_at.is_some(),
-            "filtered_by_before_started_at": params.before_started_at.is_some(),
-            "filtered_by_after_started_at": params.after_started_at.is_some(),
-            "filtered_by_before_finished_at": params.before_finished_at.is_some(),
-            "filtered_by_after_finished_at": params.after_finished_at.is_some(),
-        }),
-        Some(&req),
+        TaskFilterAnalytics::<DeleteTasks> {
+            filtered_by_uid: params.uids.is_some(),
+            filtered_by_index_uid: params.index_uids.is_some(),
+            filtered_by_type: params.types.is_some(),
+            filtered_by_status: params.statuses.is_some(),
+            filtered_by_canceled_by: params.canceled_by.is_some(),
+            filtered_by_before_enqueued_at: params.before_enqueued_at.is_some(),
+            filtered_by_after_enqueued_at: params.after_enqueued_at.is_some(),
+            filtered_by_before_started_at: params.before_started_at.is_some(),
+            filtered_by_after_started_at: params.after_started_at.is_some(),
+            filtered_by_before_finished_at: params.before_finished_at.is_some(),
+            filtered_by_after_finished_at: params.after_finished_at.is_some(),
+
+            marker: std::marker::PhantomData,
+        },
+        &req,
     );
 
     let query = params.into_query();
 
     let (tasks, _) = index_scheduler.get_task_ids_from_authorized_indexes(
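
`TaskFilterAnalytics` is generic over a marker type produced by `aggregate_methods!`, so one implementation serves both the cancel and delete events. The macro definition is not part of this diff; a guess at the shape it expands to, using a hypothetical trait and marker:

// Assumed trait shape: the event name is carried at the type level.
trait AggregateMethod {
    fn event_name() -> &'static str;
}

// One zero-sized marker type per event, as `CancelTasks => "Tasks Canceled"`
// suggests.
#[derive(Default)]
struct CancelTasks;

impl AggregateMethod for CancelTasks {
    fn event_name() -> &'static str {
        "Tasks Canceled"
    }
}

fn main() {
    // A generic aggregator can then report its event name without any state.
    assert_eq!(CancelTasks::event_name(), "Tasks Canceled");
}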

@@ -9,20 +9,24 @@ use std::vec::{IntoIter, Vec};
 
 use actix_http::StatusCode;
 use index_scheduler::{IndexScheduler, RoFeatures};
+use indexmap::IndexMap;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::deserr_codes::{
-    InvalidMultiSearchWeight, InvalidSearchLimit, InvalidSearchOffset,
+    InvalidMultiSearchFacetsByIndex, InvalidMultiSearchMaxValuesPerFacet,
+    InvalidMultiSearchMergeFacets, InvalidMultiSearchWeight, InvalidSearchLimit,
+    InvalidSearchOffset,
 };
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::milli::score_details::{ScoreDetails, ScoreValue};
-use meilisearch_types::milli::{self, DocumentId, TimeBudget};
+use meilisearch_types::milli::{self, DocumentId, OrderBy, TimeBudget};
 use roaring::RoaringBitmap;
 use serde::Serialize;
 
 use super::ranking_rules::{self, RankingRules};
 use super::{
-    prepare_search, AttributesFormat, HitMaker, HitsInfo, RetrieveVectors, SearchHit, SearchKind,
-    SearchQuery, SearchQueryWithIndex,
+    compute_facet_distribution_stats, prepare_search, AttributesFormat, ComputedFacets, FacetStats,
+    HitMaker, HitsInfo, RetrieveVectors, SearchHit, SearchKind, SearchQuery, SearchQueryWithIndex,
 };
 use crate::error::MeilisearchHttpError;
 use crate::routes::indexes::search::search_kind;
@@ -73,6 +77,17 @@ pub struct Federation {
     pub limit: usize,
     #[deserr(default = super::DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
     pub offset: usize,
+    #[deserr(default, error = DeserrJsonError<InvalidMultiSearchFacetsByIndex>)]
+    pub facets_by_index: BTreeMap<IndexUid, Option<Vec<String>>>,
+    #[deserr(default, error = DeserrJsonError<InvalidMultiSearchMergeFacets>)]
+    pub merge_facets: Option<MergeFacets>,
 }
 
+#[derive(Copy, Clone, Debug, deserr::Deserr, Default)]
+#[deserr(error = DeserrJsonError<InvalidMultiSearchMergeFacets>, rename_all = camelCase, deny_unknown_fields)]
+pub struct MergeFacets {
+    #[deserr(default, error = DeserrJsonError<InvalidMultiSearchMaxValuesPerFacet>)]
+    pub max_values_per_facet: Option<usize>,
+}
+
 #[derive(Debug, deserr::Deserr)]
@@ -82,7 +97,7 @@ pub struct FederatedSearch {
     #[deserr(default)]
     pub federation: Option<Federation>,
 }
-#[derive(Serialize, Clone, PartialEq)]
+#[derive(Serialize, Clone)]
 #[serde(rename_all = "camelCase")]
 pub struct FederatedSearchResult {
     pub hits: Vec<SearchHit>,
@@ -93,6 +108,13 @@ pub struct FederatedSearchResult {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub semantic_hit_count: Option<u32>,
 
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub facet_distribution: Option<BTreeMap<String, IndexMap<String, u64>>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub facet_stats: Option<BTreeMap<String, FacetStats>>,
+    #[serde(skip_serializing_if = "FederatedFacets::is_empty")]
+    pub facets_by_index: FederatedFacets,
+
     // These fields are only used for analytics purposes
     #[serde(skip)]
     pub degraded: bool,
@@ -109,6 +131,9 @@ impl fmt::Debug for FederatedSearchResult {
             semantic_hit_count,
             degraded,
             used_negative_operator,
+            facet_distribution,
+            facet_stats,
+            facets_by_index,
         } = self;
 
         let mut debug = f.debug_struct("SearchResult");
@@ -122,9 +147,18 @@ impl fmt::Debug for FederatedSearchResult {
         if *degraded {
             debug.field("degraded", degraded);
         }
+        if let Some(facet_distribution) = facet_distribution {
+            debug.field("facet_distribution", &facet_distribution);
+        }
+        if let Some(facet_stats) = facet_stats {
+            debug.field("facet_stats", &facet_stats);
+        }
         if let Some(semantic_hit_count) = semantic_hit_count {
             debug.field("semantic_hit_count", &semantic_hit_count);
         }
+        if !facets_by_index.is_empty() {
+            debug.field("facets_by_index", &facets_by_index);
+        }
 
         debug.finish()
     }
@@ -313,16 +347,104 @@ struct SearchHitByIndex {
 }
 
 struct SearchResultByIndex {
     index: String,
     hits: Vec<SearchHitByIndex>,
-    candidates: RoaringBitmap,
+    estimated_total_hits: usize,
     degraded: bool,
     used_negative_operator: bool,
+    facets: Option<ComputedFacets>,
 }
 
+#[derive(Debug, Clone, Default, Serialize)]
+pub struct FederatedFacets(pub BTreeMap<String, ComputedFacets>);
+
+impl FederatedFacets {
+    pub fn insert(&mut self, index: String, facets: Option<ComputedFacets>) {
+        if let Some(facets) = facets {
+            self.0.insert(index, facets);
+        }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+
+    pub fn merge(
+        self,
+        MergeFacets { max_values_per_facet }: MergeFacets,
+        facet_order: BTreeMap<String, (String, OrderBy)>,
+    ) -> Option<ComputedFacets> {
+        if self.is_empty() {
+            return None;
+        }
+
+        let mut distribution: BTreeMap<String, _> = Default::default();
+        let mut stats: BTreeMap<String, FacetStats> = Default::default();
+
+        for facets_by_index in self.0.into_values() {
+            for (facet, index_distribution) in facets_by_index.distribution {
+                match distribution.entry(facet) {
+                    std::collections::btree_map::Entry::Vacant(entry) => {
+                        entry.insert(index_distribution);
+                    }
+                    std::collections::btree_map::Entry::Occupied(mut entry) => {
+                        let distribution = entry.get_mut();
+
+                        for (value, index_count) in index_distribution {
+                            distribution
+                                .entry(value)
+                                .and_modify(|count| *count += index_count)
+                                .or_insert(index_count);
+                        }
+                    }
+                }
+            }
+
+            for (facet, index_stats) in facets_by_index.stats {
+                match stats.entry(facet) {
+                    std::collections::btree_map::Entry::Vacant(entry) => {
+                        entry.insert(index_stats);
+                    }
+                    std::collections::btree_map::Entry::Occupied(mut entry) => {
+                        let stats = entry.get_mut();
+
+                        stats.min = f64::min(stats.min, index_stats.min);
+                        stats.max = f64::max(stats.max, index_stats.max);
+                    }
+                }
+            }
+        }
+
+        // fixup order
+        for (facet, values) in &mut distribution {
+            let order_by = facet_order.get(facet).map(|(_, order)| *order).unwrap_or_default();
+
+            match order_by {
+                OrderBy::Lexicographic => {
+                    values.sort_unstable_by(|left, _, right, _| left.cmp(right))
+                }
+                OrderBy::Count => {
+                    values.sort_unstable_by(|_, left, _, right| {
+                        left.cmp(right)
+                            // biggest first
+                            .reverse()
+                    })
+                }
+            }
+
+            if let Some(max_values_per_facet) = max_values_per_facet {
+                values.truncate(max_values_per_facet)
+            };
+        }
+
+        Some(ComputedFacets { distribution, stats })
+    }
+}
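
`FederatedFacets::merge` folds the per-index facet counts into one distribution: the first index seeds each facet's map, and later indexes add their counts value by value through the `Entry` API. A reduced sketch of that merge with hypothetical counts:

use std::collections::BTreeMap;

fn main() {
    // Per-index facet value counts for the same facet, e.g. "color".
    let index_a = BTreeMap::from([("red", 3u64), ("blue", 1)]);
    let index_b = BTreeMap::from([("red", 2u64), ("green", 4)]);
    // Seed with the first index, then add the second one value by value.
    let mut merged: BTreeMap<&str, u64> = index_a;
    for (value, count) in index_b {
        *merged.entry(value).or_insert(0) += count;
    }
    assert_eq!(merged["red"], 5);
    assert_eq!(merged["green"], 4);
}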
|
||||
|
||||
pub fn perform_federated_search(
|
||||
index_scheduler: &IndexScheduler,
|
||||
queries: Vec<SearchQueryWithIndex>,
|
||||
federation: Federation,
|
||||
mut federation: Federation,
|
||||
features: RoFeatures,
|
||||
) -> Result<FederatedSearchResult, ResponseError> {
|
||||
let before_search = std::time::Instant::now();
|
||||
@@ -342,6 +464,16 @@ pub fn perform_federated_search(
|
||||
.into());
|
||||
}
|
||||
|
||||
if let Some(facets) = federated_query.has_facets() {
|
||||
let facets = facets.to_owned();
|
||||
return Err(MeilisearchHttpError::FacetsInFederatedQuery(
|
||||
query_index,
|
||||
federated_query.index_uid.into_inner(),
|
||||
facets,
|
||||
)
|
||||
.into());
|
||||
}
|
||||
|
||||
let (index_uid, query, federation_options) = federated_query.into_index_query_federation();
|
||||
|
||||
queries_by_index.entry(index_uid.into_inner()).or_default().push(QueryByIndex {
|
||||
@@ -353,13 +485,24 @@ pub fn perform_federated_search(
|
||||
|
||||
// 2. perform queries, merge and make hits index by index
|
||||
let required_hit_count = federation.limit + federation.offset;
|
||||
|
||||
// In step (2), semantic_hit_count will be set to Some(0) if any search kind uses semantic
|
||||
// Then in step (3), we'll update its value if there is any semantic search
|
||||
let mut semantic_hit_count = None;
|
||||
let mut results_by_index = Vec::with_capacity(queries_by_index.len());
|
||||
let mut previous_query_data: Option<(RankingRules, usize, String)> = None;
|
||||
|
||||
// remember the order and name of first index for each facet when merging with index settings
|
||||
// to detect if the order is inconsistent for a facet.
|
||||
let mut facet_order: Option<BTreeMap<String, (String, OrderBy)>> = match federation.merge_facets
|
||||
{
|
||||
Some(MergeFacets { .. }) => Some(Default::default()),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
for (index_uid, queries) in queries_by_index {
|
||||
let first_query_index = queries.first().map(|query| query.query_index);
|
||||
|
||||
let index = match index_scheduler.index(&index_uid) {
|
||||
Ok(index) => index,
|
||||
Err(err) => {
|
||||
@@ -367,9 +510,8 @@ pub fn perform_federated_search(
|
||||
// Patch the HTTP status code to 400 as it defaults to 404 for `index_not_found`, but
|
||||
// here the resource not found is not part of the URL.
|
||||
err.code = StatusCode::BAD_REQUEST;
|
||||
if let Some(query) = queries.first() {
|
||||
err.message =
|
||||
format!("Inside `.queries[{}]`: {}", query.query_index, err.message);
|
||||
if let Some(query_index) = first_query_index {
|
||||
err.message = format!("Inside `.queries[{}]`: {}", query_index, err.message);
|
||||
}
|
||||
return Err(err);
|
||||
}
|
||||
@@ -380,9 +522,6 @@ pub fn perform_federated_search(
|
||||
|
||||
let criteria = index.criteria(&rtxn)?;
|
||||
|
||||
// stuff we need for the hitmaker
|
||||
let script_lang_map = index.script_language(&rtxn)?;
|
||||
|
||||
let dictionary = index.dictionary(&rtxn)?;
|
||||
let dictionary: Option<Vec<_>> =
|
||||
dictionary.as_ref().map(|x| x.iter().map(String::as_str).collect());
|
||||
@@ -397,6 +536,23 @@ pub fn perform_federated_search(
|
||||
let mut used_negative_operator = false;
|
||||
let mut candidates = RoaringBitmap::new();
|
||||
|
||||
let facets_by_index = federation.facets_by_index.remove(&index_uid).flatten();
|
||||
|
||||
// TODO: recover the max size + facets_by_index as return value of this function so as not to ask it for all queries
|
||||
if let Err(mut error) =
|
||||
check_facet_order(&mut facet_order, &index_uid, &facets_by_index, &index, &rtxn)
|
||||
{
|
||||
error.message = format!(
|
||||
"Inside `.federation.facetsByIndex.{index_uid}`: {error}{}",
|
||||
if let Some(query_index) = first_query_index {
|
||||
format!("\n - Note: index `{index_uid}` used in `.queries[{query_index}]`")
|
||||
} else {
|
||||
Default::default()
|
||||
}
|
||||
);
|
||||
return Err(error);
|
||||
}
|
||||
|
||||
// 2.1. Compute all candidates for each query in the index
|
||||
let mut results_by_query = Vec::with_capacity(queries.len());
|
||||
|
||||
@@ -494,6 +650,7 @@ pub fn perform_federated_search(
|
||||
sort: query.sort,
|
||||
show_ranking_score: query.show_ranking_score,
|
||||
show_ranking_score_details: query.show_ranking_score_details,
|
||||
locales: query.locales.map(|l| l.iter().copied().map(Into::into).collect()),
|
||||
};
|
||||
|
||||
let milli::SearchResult {
|
||||
@@ -509,11 +666,7 @@ pub fn perform_federated_search(
|
||||
degraded |= query_degraded;
|
||||
used_negative_operator |= query_used_negative_operator;
|
||||
|
||||
let tokenizer = HitMaker::tokenizer(
|
||||
&script_lang_map,
|
||||
dictionary.as_deref(),
|
||||
separators.as_deref(),
|
||||
);
|
||||
let tokenizer = HitMaker::tokenizer(dictionary.as_deref(), separators.as_deref());
|
||||
|
||||
let formatter_builder = HitMaker::formatter_builder(matching_words, tokenizer);
|
||||
|
||||
@@ -568,34 +721,116 @@ pub fn perform_federated_search(
            .collect();

        let merged_result = merged_result?;

+       let estimated_total_hits = candidates.len() as usize;
+
+       let facets = facets_by_index
+           .map(|facets_by_index| {
+               compute_facet_distribution_stats(
+                   &facets_by_index,
+                   &index,
+                   &rtxn,
+                   candidates,
+                   super::Route::MultiSearch,
+               )
+           })
+           .transpose()
+           .map_err(|mut error| {
+               error.message = format!(
+                   "Inside `.federation.facetsByIndex.{index_uid}`: {}{}",
+                   error.message,
+                   if let Some(query_index) = first_query_index {
+                       format!("\n - Note: index `{index_uid}` used in `.queries[{query_index}]`")
+                   } else {
+                       Default::default()
+                   }
+               );
+               error
+           })?;

        results_by_index.push(SearchResultByIndex {
            index: index_uid,
            hits: merged_result,
-           candidates,
+           estimated_total_hits,
            degraded,
            used_negative_operator,
+           facets,
        });
    }
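The facet computation above leans on the `Option`-of-`Result` shuffle: compute only when facets were requested, lift the inner error out with `transpose()`, then decorate it with the index context. A self-contained sketch of the same shape, with a stub standing in for `compute_facet_distribution_stats` (names are illustrative):

fn facets_with_context(
    requested: Option<Vec<String>>,
    index_uid: &str,
) -> Result<Option<usize>, String> {
    requested
        .map(|facets| {
            // stand-in for the real facet-distribution computation
            if facets.is_empty() {
                Err("invalid facet list".to_string())
            } else {
                Ok(facets.len())
            }
        })
        .transpose() // Option<Result<T, E>> -> Result<Option<T>, E>
        .map_err(|e| format!("Inside `.federation.facetsByIndex.{index_uid}`: {e}"))
}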
+   // bonus step, make sure to return an error if an index wants a non-faceted field, even if no query actually uses that index.
+   for (index_uid, facets) in federation.facets_by_index {
+       let index = match index_scheduler.index(&index_uid) {
+           Ok(index) => index,
+           Err(err) => {
+               let mut err = ResponseError::from(err);
+               // Patch the HTTP status code to 400 as it defaults to 404 for `index_not_found`, but
+               // here the resource not found is not part of the URL.
+               err.code = StatusCode::BAD_REQUEST;
+               err.message = format!(
+                   "Inside `.federation.facetsByIndex.{index_uid}`: {}\n - Note: index `{index_uid}` is not used in queries",
+                   err.message
+               );
+               return Err(err);
+           }
+       };
+
+       // Important: this is the only transaction we'll use for this index during this federated search
+       let rtxn = index.read_txn()?;
+
+       if let Err(mut error) =
+           check_facet_order(&mut facet_order, &index_uid, &facets, &index, &rtxn)
+       {
+           error.message = format!(
+               "Inside `.federation.facetsByIndex.{index_uid}`: {error}\n - Note: index `{index_uid}` is not used in queries",
+           );
+           return Err(error);
+       }
+
+       if let Some(facets) = facets {
+           if let Err(mut error) = compute_facet_distribution_stats(
+               &facets,
+               &index,
+               &rtxn,
+               Default::default(),
+               super::Route::MultiSearch,
+           ) {
+               error.message =
+                   format!("Inside `.federation.facetsByIndex.{index_uid}`: {}\n - Note: index `{index_uid}` is not used in queries", error.message);
+               return Err(error);
+           }
+       }
+   }
    // 3. merge hits and metadata across indexes
    // 3.1 merge metadata
-   let (estimated_total_hits, degraded, used_negative_operator) = {
+   let (estimated_total_hits, degraded, used_negative_operator, facets) = {
        let mut estimated_total_hits = 0;
        let mut degraded = false;
        let mut used_negative_operator = false;

+       let mut facets: FederatedFacets = FederatedFacets::default();

        for SearchResultByIndex {
            index,
            hits: _,
-           candidates,
+           estimated_total_hits: estimated_total_hits_by_index,
+           facets: facets_by_index,
            degraded: degraded_by_index,
            used_negative_operator: used_negative_operator_by_index,
-       } in &results_by_index
+       } in &mut results_by_index
        {
-           estimated_total_hits += candidates.len() as usize;
+           estimated_total_hits += *estimated_total_hits_by_index;
            degraded |= *degraded_by_index;
            used_negative_operator |= *used_negative_operator_by_index;

+           let facets_by_index = std::mem::take(facets_by_index);
+           let index = std::mem::take(index);
+
+           facets.insert(index, facets_by_index);
        }

-       (estimated_total_hits, degraded, used_negative_operator)
+       (estimated_total_hits, degraded, used_negative_operator, facets)
    };
    // 3.2 merge hits
@@ -612,6 +847,20 @@ pub fn perform_federated_search(
        .map(|hit| hit.hit)
        .collect();

+   let (facet_distribution, facet_stats, facets_by_index) =
+       match federation.merge_facets.zip(facet_order) {
+           Some((merge_facets, facet_order)) => {
+               let facets = facets.merge(merge_facets, facet_order);
+
+               let (facet_distribution, facet_stats) = facets
+                   .map(|ComputedFacets { distribution, stats }| (distribution, stats))
+                   .unzip();
+
+               (facet_distribution, facet_stats, FederatedFacets::default())
+           }
+           None => (None, None, facets),
+       };
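The match above relies on two std helpers: `Option::zip`, which yields `Some((a, b))` only when both sides are `Some`, and `Option::unzip`, which splits an `Option<(A, B)>` back into `(Option<A>, Option<B>)`. A tiny self-contained demonstration:

fn main() {
    let merge_facets: Option<&str> = Some("byFacet");
    let facet_order: Option<u32> = Some(1);
    assert_eq!(merge_facets.zip(facet_order), Some(("byFacet", 1)));
    assert_eq!(merge_facets.zip(None::<u32>), None); // one None side kills the pair

    let computed: Option<(Vec<u64>, f64)> = Some((vec![7], 0.5));
    let (distribution, stats) = computed.unzip();
    assert_eq!(distribution, Some(vec![7]));
    assert_eq!(stats, Some(0.5));
}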
    let search_result = FederatedSearchResult {
        hits: merged_hits,
        processing_time_ms: before_search.elapsed().as_millis(),
@@ -623,7 +872,39 @@ pub fn perform_federated_search(
        semantic_hit_count,
        degraded,
        used_negative_operator,
+       facet_distribution,
+       facet_stats,
+       facets_by_index,
    };

    Ok(search_result)
}
+fn check_facet_order(
+    facet_order: &mut Option<BTreeMap<String, (String, OrderBy)>>,
+    current_index: &str,
+    facets_by_index: &Option<Vec<String>>,
+    index: &milli::Index,
+    rtxn: &milli::heed::RoTxn<'_>,
+) -> Result<(), ResponseError> {
+    if let (Some(facet_order), Some(facets_by_index)) = (facet_order, facets_by_index) {
+        let index_facet_order = index.sort_facet_values_by(rtxn)?;
+        for facet in facets_by_index {
+            let index_facet_order = index_facet_order.get(facet);
+            let (previous_index, previous_facet_order) = facet_order
+                .entry(facet.to_owned())
+                .or_insert_with(|| (current_index.to_owned(), index_facet_order));
+            if previous_facet_order != &index_facet_order {
+                return Err(MeilisearchHttpError::InconsistentFacetOrder {
+                    facet: facet.clone(),
+                    previous_facet_order: *previous_facet_order,
+                    previous_uid: previous_index.clone(),
+                    current_uid: current_index.to_owned(),
+                    index_facet_order,
+                }
+                .into());
+            }
+        }
+    };
+    Ok(())
+}
@@ -1,6 +1,6 @@
use core::fmt;
use std::cmp::min;
-use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::str::FromStr;
use std::sync::Arc;
use std::time::{Duration, Instant};
@@ -15,16 +15,17 @@ use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::heed::RoTxn;
use meilisearch_types::index_uid::IndexUid;
+use meilisearch_types::locales::Locale;
use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy};
use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors;
use meilisearch_types::milli::vector::Embedder;
use meilisearch_types::milli::{FacetValueHit, OrderBy, SearchForFacetValues, TimeBudget};
use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
use meilisearch_types::{milli, Document};
-use milli::tokenizer::TokenizerBuilder;
+use milli::tokenizer::{Language, TokenizerBuilder};
use milli::{
-    AscDesc, FieldId, FieldsIdsMap, Filter, FormatOptions, Index, MatchBounds, MatcherBuilder,
-    SortError, TermsMatchingStrategy, DEFAULT_VALUES_PER_FACET,
+    AscDesc, FieldId, FieldsIdsMap, Filter, FormatOptions, Index, LocalizedAttributesRule,
+    MatchBounds, MatcherBuilder, SortError, TermsMatchingStrategy, DEFAULT_VALUES_PER_FACET,
};
use regex::Regex;
use serde::Serialize;
@@ -100,6 +101,8 @@ pub struct SearchQuery {
    pub attributes_to_search_on: Option<Vec<String>>,
    #[deserr(default, error = DeserrJsonError<InvalidSearchRankingScoreThreshold>, default)]
    pub ranking_score_threshold: Option<RankingScoreThreshold>,
+   #[deserr(default, error = DeserrJsonError<InvalidSearchLocales>, default)]
+   pub locales: Option<Vec<Locale>>,
}

#[derive(Debug, Clone, Copy, PartialEq, Deserr)]
@@ -169,6 +172,7 @@ impl fmt::Debug for SearchQuery {
            matching_strategy,
            attributes_to_search_on,
            ranking_score_threshold,
+           locales,
        } = self;

        let mut debug = f.debug_struct("SearchQuery");
@@ -250,6 +254,10 @@ impl fmt::Debug for SearchQuery {
            debug.field("ranking_score_threshold", &ranking_score_threshold);
        }

+       if let Some(locales) = locales {
+           debug.field("locales", &locales);
+       }

        debug.finish()
    }
}
@@ -259,58 +267,54 @@ impl fmt::Debug for SearchQuery {
pub struct HybridQuery {
    #[deserr(default, error = DeserrJsonError<InvalidSearchSemanticRatio>, default)]
    pub semantic_ratio: SemanticRatio,
-   #[deserr(default, error = DeserrJsonError<InvalidEmbedder>, default)]
-   pub embedder: Option<String>,
+   #[deserr(error = DeserrJsonError<InvalidEmbedder>)]
+   pub embedder: String,
}

#[derive(Clone)]
pub enum SearchKind {
    KeywordOnly,
-   SemanticOnly { embedder_name: String, embedder: Arc<Embedder> },
-   Hybrid { embedder_name: String, embedder: Arc<Embedder>, semantic_ratio: f32 },
+   SemanticOnly { embedder_name: String, embedder: Arc<Embedder>, quantized: bool },
+   Hybrid { embedder_name: String, embedder: Arc<Embedder>, quantized: bool, semantic_ratio: f32 },
}

impl SearchKind {
    pub(crate) fn semantic(
        index_scheduler: &index_scheduler::IndexScheduler,
        index: &Index,
-       embedder_name: Option<&str>,
+       embedder_name: &str,
        vector_len: Option<usize>,
    ) -> Result<Self, ResponseError> {
-       let (embedder_name, embedder) =
+       let (embedder_name, embedder, quantized) =
            Self::embedder(index_scheduler, index, embedder_name, vector_len)?;
-       Ok(Self::SemanticOnly { embedder_name, embedder })
+       Ok(Self::SemanticOnly { embedder_name, embedder, quantized })
    }

    pub(crate) fn hybrid(
        index_scheduler: &index_scheduler::IndexScheduler,
        index: &Index,
-       embedder_name: Option<&str>,
+       embedder_name: &str,
        semantic_ratio: f32,
        vector_len: Option<usize>,
    ) -> Result<Self, ResponseError> {
-       let (embedder_name, embedder) =
+       let (embedder_name, embedder, quantized) =
            Self::embedder(index_scheduler, index, embedder_name, vector_len)?;
-       Ok(Self::Hybrid { embedder_name, embedder, semantic_ratio })
+       Ok(Self::Hybrid { embedder_name, embedder, quantized, semantic_ratio })
    }

    pub(crate) fn embedder(
        index_scheduler: &index_scheduler::IndexScheduler,
        index: &Index,
-       embedder_name: Option<&str>,
+       embedder_name: &str,
        vector_len: Option<usize>,
-   ) -> Result<(String, Arc<Embedder>), ResponseError> {
+   ) -> Result<(String, Arc<Embedder>, bool), ResponseError> {
        let embedder_configs = index.embedding_configs(&index.read_txn()?)?;
        let embedders = index_scheduler.embedders(embedder_configs)?;

-       let embedder_name = embedder_name.unwrap_or_else(|| embedders.get_default_embedder_name());
-
-       let embedder = embedders.get(embedder_name);
-
-       let embedder = embedder
+       let (embedder, _, quantized) = embedders
+           .get(embedder_name)
            .ok_or(milli::UserError::InvalidEmbedder(embedder_name.to_owned()))
-           .map_err(milli::Error::from)?
-           .0;
+           .map_err(milli::Error::from)?;

        if let Some(vector_len) = vector_len {
            if vector_len != embedder.dimensions() {
@@ -324,7 +328,7 @@ impl SearchKind {
            }
        }

-       Ok((embedder_name.to_owned(), embedder))
+       Ok((embedder_name.to_owned(), embedder, quantized))
    }
}
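A hypothetical call site for the new signatures, not part of this diff: with `embedder` now a required `String` on the query, there is no default-embedder fallback left to resolve. `index_scheduler`, `index`, and `query` are assumed to be in scope, and `SemanticRatio` is assumed to deref to `f32`:

let search_kind = match &query.hybrid {
    Some(hybrid) => SearchKind::hybrid(
        &index_scheduler,
        &index,
        &hybrid.embedder,
        *hybrid.semantic_ratio,
        // if the caller supplied a raw vector, its length is checked against the embedder
        query.vector.as_ref().map(Vec::len),
    )?,
    None => SearchKind::KeywordOnly,
};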
@@ -425,15 +429,14 @@ pub struct SearchQueryWithIndex {
    pub attributes_to_search_on: Option<Vec<String>>,
    #[deserr(default, error = DeserrJsonError<InvalidSearchRankingScoreThreshold>, default)]
    pub ranking_score_threshold: Option<RankingScoreThreshold>,
+   #[deserr(default, error = DeserrJsonError<InvalidSearchLocales>, default)]
+   pub locales: Option<Vec<Locale>>,

    #[deserr(default)]
    pub federation_options: Option<FederationOptions>,
}

impl SearchQueryWithIndex {
-   pub fn has_federation_options(&self) -> bool {
-       self.federation_options.is_some()
-   }
    pub fn has_pagination(&self) -> Option<&'static str> {
        if self.offset.is_some() {
            Some("offset")
@@ -448,6 +451,10 @@ impl SearchQueryWithIndex {
        }
    }

+   pub fn has_facets(&self) -> Option<&[String]> {
+       self.facets.as_deref().filter(|v| !v.is_empty())
+   }

    pub fn into_index_query_federation(self) -> (IndexUid, SearchQuery, Option<FederationOptions>) {
        let SearchQueryWithIndex {
            index_uid,
@@ -477,6 +484,7 @@ impl SearchQueryWithIndex {
            attributes_to_search_on,
            hybrid,
            ranking_score_threshold,
+           locales,
        } = self;
        (
            index_uid,
@@ -506,6 +514,7 @@ impl SearchQueryWithIndex {
                attributes_to_search_on,
                hybrid,
                ranking_score_threshold,
+               locales,
                // do not use ..Default::default() here,
                // rather add any missing field from `SearchQuery` to `SearchQueryWithIndex`
            },
@@ -525,8 +534,8 @@ pub struct SimilarQuery {
    pub limit: usize,
    #[deserr(default, error = DeserrJsonError<InvalidSimilarFilter>)]
    pub filter: Option<Value>,
-   #[deserr(default, error = DeserrJsonError<InvalidEmbedder>, default)]
-   pub embedder: Option<String>,
+   #[deserr(error = DeserrJsonError<InvalidEmbedder>)]
+   pub embedder: String,
    #[deserr(default, error = DeserrJsonError<InvalidSimilarAttributesToRetrieve>)]
    pub attributes_to_retrieve: Option<BTreeSet<String>>,
    #[deserr(default, error = DeserrJsonError<InvalidSimilarRetrieveVectors>)]
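Test-style sketch of the consequence of this change (route and helper shapes assumed from the test utilities later in this diff): a /similar payload without the now-mandatory `embedder` field should be rejected at deserialization time rather than falling back to a default embedder.

let (response, code) = server
    .service
    .post("/indexes/movies/similar", json!({ "id": "287947" })) // no "embedder"
    .await;
assert_eq!(400, code, "{response}");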
@@ -780,28 +789,35 @@ fn prepare_search<'t>(
                search.query(q);
            }
        }
-       SearchKind::SemanticOnly { embedder_name, embedder } => {
+       SearchKind::SemanticOnly { embedder_name, embedder, quantized } => {
            let vector = match query.vector.clone() {
                Some(vector) => vector,
                None => {
                    let span = tracing::trace_span!(target: "search::vector", "embed_one");
                    let _entered = span.enter();

+                   let deadline = std::time::Instant::now() + std::time::Duration::from_secs(10);
+
                    embedder
-                       .embed_one(query.q.clone().unwrap())
+                       .embed_one(query.q.clone().unwrap(), Some(deadline))
                        .map_err(milli::vector::Error::from)
                        .map_err(milli::Error::from)?
                }
            };

-           search.semantic(embedder_name.clone(), embedder.clone(), Some(vector));
+           search.semantic(embedder_name.clone(), embedder.clone(), *quantized, Some(vector));
        }
-       SearchKind::Hybrid { embedder_name, embedder, semantic_ratio: _ } => {
+       SearchKind::Hybrid { embedder_name, embedder, quantized, semantic_ratio: _ } => {
            if let Some(q) = &query.q {
                search.query(q);
            }
            // will be embedded in hybrid search if necessary
-           search.semantic(embedder_name.clone(), embedder.clone(), query.vector.clone());
+           search.semantic(
+               embedder_name.clone(),
+               embedder.clone(),
+               *quantized,
+               query.vector.clone(),
+           );
        }
    }
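The `embed_one` change above introduces an absolute deadline rather than an open-ended wait: the caller fixes an `Instant` up front and the callee is expected to give up once it is reached. A minimal self-contained sketch of that pattern, with `do_work` standing in for the embedding call (names are illustrative):

use std::time::{Duration, Instant};

fn with_deadline<T>(budget: Duration, do_work: impl FnOnce(Instant) -> T) -> T {
    let deadline = Instant::now() + budget;
    do_work(deadline)
}

fn main() {
    let answer = with_deadline(Duration::from_secs(10), |deadline| {
        // a real callee would poll `Instant::now() > deadline` and abort
        assert!(Instant::now() <= deadline);
        42
    });
    assert_eq!(answer, 42);
}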
@@ -866,6 +882,10 @@ fn prepare_search<'t>(
        search.sort_criteria(sort);
    }

+   if let Some(ref locales) = query.locales {
+       search.locales(locales.iter().copied().map(Into::into).collect());
+   }

    Ok((search, is_finite_pagination, max_total_hits, offset))
}
@@ -917,6 +937,7 @@ pub fn perform_search(
        highlight_pre_tag,
        highlight_post_tag,
        crop_marker,
+       locales,
        // already used in prepare_search
        vector: _,
        hybrid: _,
@@ -941,6 +962,7 @@ pub fn perform_search(
        sort,
        show_ranking_score,
        show_ranking_score_details,
+       locales: locales.map(|l| l.iter().copied().map(Into::into).collect()),
    };

    let documents = make_hits(
@@ -969,39 +991,13 @@ pub fn perform_search(
        HitsInfo::OffsetLimit { limit, offset, estimated_total_hits: number_of_hits }
    };

-   let (facet_distribution, facet_stats) = match facets {
-       Some(ref fields) => {
-           let mut facet_distribution = index.facets_distribution(&rtxn);
-
-           let max_values_by_facet = index
-               .max_values_per_facet(&rtxn)
-               .map_err(milli::Error::from)?
-               .map(|x| x as usize)
-               .unwrap_or(DEFAULT_VALUES_PER_FACET);
-           facet_distribution.max_values_per_facet(max_values_by_facet);
-
-           let sort_facet_values_by =
-               index.sort_facet_values_by(&rtxn).map_err(milli::Error::from)?;
-
-           if fields.iter().all(|f| f != "*") {
-               let fields: Vec<_> =
-                   fields.iter().map(|n| (n, sort_facet_values_by.get(n))).collect();
-               facet_distribution.facets(fields);
-           }
-
-           let distribution = facet_distribution
-               .candidates(candidates)
-               .default_order_by(sort_facet_values_by.get("*"))
-               .execute()?;
-           let stats = facet_distribution.compute_stats()?;
-           (Some(distribution), Some(stats))
-       }
-       None => (None, None),
-   };
-
-   let facet_stats = facet_stats.map(|stats| {
-       stats.into_iter().map(|(k, (min, max))| (k, FacetStats { min, max })).collect()
-   });
+   let (facet_distribution, facet_stats) = facets
+       .map(move |facets| {
+           compute_facet_distribution_stats(&facets, index, &rtxn, candidates, Route::Search)
+       })
+       .transpose()?
+       .map(|ComputedFacets { distribution, stats }| (distribution, stats))
+       .unzip();

    let result = SearchResult {
        hits: documents,
@@ -1017,6 +1013,61 @@ pub fn perform_search(
    Ok(result)
}

+#[derive(Debug, Clone, Default, Serialize)]
+pub struct ComputedFacets {
+    pub distribution: BTreeMap<String, IndexMap<String, u64>>,
+    pub stats: BTreeMap<String, FacetStats>,
+}
+
+enum Route {
+    Search,
+    MultiSearch,
+}
+
+fn compute_facet_distribution_stats<S: AsRef<str>>(
+    facets: &[S],
+    index: &Index,
+    rtxn: &RoTxn,
+    candidates: roaring::RoaringBitmap,
+    route: Route,
+) -> Result<ComputedFacets, ResponseError> {
+    let mut facet_distribution = index.facets_distribution(rtxn);
+
+    let max_values_by_facet = index
+        .max_values_per_facet(rtxn)
+        .map_err(milli::Error::from)?
+        .map(|x| x as usize)
+        .unwrap_or(DEFAULT_VALUES_PER_FACET);
+
+    facet_distribution.max_values_per_facet(max_values_by_facet);
+
+    let sort_facet_values_by = index.sort_facet_values_by(rtxn).map_err(milli::Error::from)?;
+
+    // add specific facets if there is no placeholder
+    if facets.iter().all(|f| f.as_ref() != "*") {
+        let fields: Vec<_> =
+            facets.iter().map(|n| (n, sort_facet_values_by.get(n.as_ref()))).collect();
+        facet_distribution.facets(fields);
+    }
+
+    let distribution = facet_distribution
+        .candidates(candidates)
+        .default_order_by(sort_facet_values_by.get("*"))
+        .execute()
+        .map_err(|error| match (error, route) {
+            (
+                error @ milli::Error::UserError(milli::UserError::InvalidFacetsDistribution {
+                    ..
+                }),
+                Route::MultiSearch,
+            ) => ResponseError::from_msg(error.to_string(), Code::InvalidMultiSearchFacets),
+            (error, _) => error.into(),
+        })?;
+    let stats = facet_distribution.compute_stats()?;
+    let stats = stats.into_iter().map(|(k, (min, max))| (k, FacetStats { min, max })).collect();
+    Ok(ComputedFacets { distribution, stats })
+}
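A hypothetical call, mirroring how perform_search now delegates to this helper; `index`, `rtxn`, and `candidates` are assumed to be in scope and `"genre"`/`"rating"` are made-up facet names:

let ComputedFacets { distribution, stats } = compute_facet_distribution_stats(
    &["genre".to_string(), "rating".to_string()],
    &index,
    &rtxn,
    candidates,
    Route::Search,
)?;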
pub fn search_from_kind(
    search_kind: SearchKind,
    search: milli::Search<'_>,
@@ -1046,6 +1097,7 @@ struct AttributesFormat {
    sort: Option<Vec<String>>,
    show_ranking_score: bool,
    show_ranking_score_details: bool,
+   locales: Option<Vec<Language>>,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -1093,19 +1145,16 @@ struct HitMaker<'a> {
    show_ranking_score_details: bool,
    sort: Option<Vec<String>>,
    show_matches_position: bool,
+   locales: Option<Vec<Language>>,
}

impl<'a> HitMaker<'a> {
    pub fn tokenizer<'b>(
-       script_lang_map: &'b HashMap<milli::tokenizer::Script, Vec<milli::tokenizer::Language>>,
        dictionary: Option<&'b [&'b str]>,
        separators: Option<&'b [&'b str]>,
    ) -> milli::tokenizer::Tokenizer<'b> {
        let mut tokenizer_builder = TokenizerBuilder::default();
        tokenizer_builder.create_char_map(true);
-       if !script_lang_map.is_empty() {
-           tokenizer_builder.allow_list(script_lang_map);
-       }
-
        if let Some(separators) = separators {
            tokenizer_builder.separators(separators);
@@ -1148,8 +1197,13 @@ impl<'a> HitMaker<'a> {
        let vectors_is_hidden = match (&displayed_ids, vectors_fid) {
            // displayed_ids is a wildcard, so `_vectors` can be displayed regardless of its fid
            (None, _) => false,
-           // displayed_ids is a finite list, and `_vectors` cannot be part of it because it is not an existing field
-           (Some(_), None) => true,
+           // vectors has no fid, so check its explicit name
+           (Some(_), None) => {
+               // unwrap as otherwise we'd go to the first one
+               let displayed_names = index.displayed_fields(rtxn)?.unwrap();
+               !displayed_names
+                   .contains(&milli::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME)
+           }
            // displayed_ids is a finite list, so hide if `_vectors` is not part of it
            (Some(map), Some(vectors_fid)) => map.contains(&vectors_fid),
        };
@@ -1218,6 +1272,7 @@ impl<'a> HitMaker<'a> {
            show_ranking_score_details: format.show_ranking_score_details,
            show_matches_position: format.show_matches_position,
            sort: format.sort,
+           locales: format.locales,
        })
    }

@@ -1273,6 +1328,9 @@ impl<'a> HitMaker<'a> {
            document.insert("_vectors".into(), vectors.into());
        }

+       let localized_attributes =
+           self.index.localized_attributes_rules(self.rtxn)?.unwrap_or_default();
+
        let (matches_position, formatted) = format_fields(
            &displayed_document,
            &self.fields_ids_map,
@@ -1280,6 +1338,8 @@ impl<'a> HitMaker<'a> {
            &self.formatted_options,
            self.show_matches_position,
            &self.displayed_ids,
+           self.locales.as_deref(),
+           &localized_attributes,
        )?;

        if let Some(sort) = self.sort.as_ref() {
@@ -1312,8 +1372,6 @@ fn make_hits<'a>(
) -> Result<Vec<SearchHit>, MeilisearchHttpError> {
    let mut documents = Vec::new();

-   let script_lang_map = index.script_language(rtxn)?;
-
    let dictionary = index.dictionary(rtxn)?;
    let dictionary: Option<Vec<_>> =
        dictionary.as_ref().map(|x| x.iter().map(String::as_str).collect());
@@ -1321,8 +1379,7 @@ fn make_hits<'a>(
    let separators: Option<Vec<_>> =
        separators.as_ref().map(|x| x.iter().map(String::as_str).collect());

-   let tokenizer =
-       HitMaker::tokenizer(&script_lang_map, dictionary.as_deref(), separators.as_deref());
+   let tokenizer = HitMaker::tokenizer(dictionary.as_deref(), separators.as_deref());

    let formatter_builder = HitMaker::formatter_builder(matching_words, tokenizer);
@@ -1341,6 +1398,7 @@ pub fn perform_facet_search(
    facet_name: String,
    search_kind: SearchKind,
    features: RoFeatures,
+   locales: Option<Vec<Language>>,
) -> Result<FacetSearchResult, ResponseError> {
    let before_search = Instant::now();
    let rtxn = index.read_txn()?;
@@ -1349,6 +1407,20 @@ pub fn perform_facet_search(
        None => TimeBudget::default(),
    };

+   // In the faceted search context, we want to use the intersection between the locales provided by the user
+   // and the locales of the facet string.
+   // If the facet string is not localized, we **ignore** the locales provided by the user because the facet data has no locale.
+   // If the user does not provide locales, we use the locales of the facet string.
+   let localized_attributes = index.localized_attributes_rules(&rtxn)?.unwrap_or_default();
+   let localized_attributes_locales =
+       localized_attributes.into_iter().find(|attr| attr.match_str(&facet_name));
+   let locales = localized_attributes_locales.map(|attr| {
+       attr.locales
+           .into_iter()
+           .filter(|locale| locales.as_ref().map_or(true, |locales| locales.contains(locale)))
+           .collect()
+   });

    let (search, _, _, _) =
        prepare_search(index, &rtxn, &search_query, &search_kind, time_budget, features)?;
    let mut facet_search = SearchForFacetValues::new(
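The four comment lines above reduce to a single selection rule. A standalone sketch of it, with plain strings standing in for `Locale` values (names are illustrative):

fn effective_locales(
    facet_locales: Option<Vec<String>>, // from the matching localizedAttributes rule
    user_locales: Option<Vec<String>>,  // from the request
) -> Option<Vec<String>> {
    // facet not localized: the user-provided locales are ignored entirely
    let facet_locales = facet_locales?;
    Some(match user_locales {
        // both sides present: keep the intersection
        Some(user) => facet_locales.into_iter().filter(|l| user.contains(l)).collect(),
        // no user locales: fall back to the facet's own locales
        None => facet_locales,
    })
}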
let mut facet_search = SearchForFacetValues::new(
|
||||
@@ -1363,6 +1435,10 @@ pub fn perform_facet_search(
|
||||
facet_search.max_values(max_facets as usize);
|
||||
}
|
||||
|
||||
if let Some(locales) = locales {
|
||||
facet_search.locales(locales);
|
||||
}
|
||||
|
||||
Ok(FacetSearchResult {
|
||||
facet_hits: facet_search.execute()?,
|
||||
facet_query,
|
||||
@@ -1375,6 +1451,7 @@ pub fn perform_similar(
    query: SimilarQuery,
    embedder_name: String,
    embedder: Arc<Embedder>,
+   quantized: bool,
    retrieve_vectors: RetrieveVectors,
    features: RoFeatures,
) -> Result<SimilarResult, ResponseError> {
@@ -1403,8 +1480,16 @@ pub fn perform_similar(
        ));
    };

-   let mut similar =
-       milli::Similar::new(internal_id, offset, limit, index, &rtxn, embedder_name, embedder);
+   let mut similar = milli::Similar::new(
+       internal_id,
+       offset,
+       limit,
+       index,
+       &rtxn,
+       embedder_name,
+       embedder,
+       quantized,
+   );

    if let Some(ref filter) = query.filter {
        if let Some(facets) = parse_filter(filter, Code::InvalidSimilarFilter, features)? {
@@ -1443,6 +1528,7 @@ pub fn perform_similar(
        sort: None,
        show_ranking_score,
        show_ranking_score_details,
+       locales: None,
    };

    let hits = make_hits(
@@ -1624,6 +1710,7 @@ fn make_document(
    Ok(document)
}

+#[allow(clippy::too_many_arguments)]
fn format_fields(
    document: &Document,
    field_ids_map: &FieldsIdsMap,
@@ -1631,6 +1718,8 @@ fn format_fields(
    formatted_options: &BTreeMap<FieldId, FormatOptions>,
    compute_matches: bool,
    displayable_ids: &BTreeSet<FieldId>,
+   locales: Option<&[Language]>,
+   localized_attributes: &[LocalizedAttributesRule],
) -> Result<(Option<MatchesPosition>, Document), MeilisearchHttpError> {
    let mut matches_position = compute_matches.then(BTreeMap::new);
    let mut document = document.clone();
@@ -1663,7 +1752,22 @@ fn format_fields(
        .reduce(|acc, option| acc.merge(option));
    let mut infos = Vec::new();

-   *value = format_value(std::mem::take(value), builder, format, &mut infos, compute_matches);
+   // if no locales have been provided, we try to find the locales in the localized_attributes.
+   let locales = locales.or_else(|| {
+       localized_attributes
+           .iter()
+           .find(|rule| rule.match_str(key))
+           .map(LocalizedAttributesRule::locales)
+   });
+
+   *value = format_value(
+       std::mem::take(value),
+       builder,
+       format,
+       &mut infos,
+       compute_matches,
+       locales,
+   );

    if let Some(matches) = matches_position.as_mut() {
        if !infos.is_empty() {
@@ -1688,10 +1792,11 @@ fn format_value(
    format_options: Option<FormatOptions>,
    infos: &mut Vec<MatchBounds>,
    compute_matches: bool,
+   locales: Option<&[Language]>,
) -> Value {
    match value {
        Value::String(old_string) => {
-           let mut matcher = builder.build(&old_string);
+           let mut matcher = builder.build(&old_string, locales);
            if compute_matches {
                let matches = matcher.matches();
                infos.extend_from_slice(&matches[..]);
@@ -1718,6 +1823,7 @@ fn format_value(
                        }),
                        infos,
                        compute_matches,
+                       locales,
                    )
                })
                .collect(),
@@ -1737,6 +1843,7 @@ fn format_value(
                        }),
                        infos,
                        compute_matches,
+                       locales,
                    ),
                )
            })
@@ -1745,7 +1852,7 @@ fn format_value(
        Value::Number(number) => {
            let s = number.to_string();

-           let mut matcher = builder.build(&s);
+           let mut matcher = builder.build(&s, locales);
            if compute_matches {
                let matches = matcher.matches();
                infos.extend_from_slice(&matches[..]);
@@ -18,6 +18,7 @@
//! And should drop the Permit only once you have freed all the RAM consumed by the method.

use std::num::NonZeroUsize;
+use std::time::Duration;

use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
@@ -29,16 +30,31 @@ use crate::error::MeilisearchHttpError;
pub struct SearchQueue {
    sender: mpsc::Sender<oneshot::Sender<Permit>>,
    capacity: usize,
+   /// If we have waited longer than this to get a permit, we should abort the search request entirely.
+   /// The client has probably already closed the connection, but we have no way to find out.
+   time_to_abort: Duration,
}

/// You should only run search requests while holding this permit.
/// Once it's dropped, a new search request will be able to process.
+/// You should always try to drop the permit yourself by calling the `drop` async method on it.
#[derive(Debug)]
pub struct Permit {
    sender: mpsc::Sender<()>,
}

+impl Permit {
+   /// Drop the permit, giving one permit back to the search queue.
+   pub async fn drop(self) {
+       // if the channel is closed then the whole instance is down
+       let _ = self.sender.send(()).await;
+   }
+}
+
impl Drop for Permit {
+   /// The implicit drop implementation can still be called in multiple cases:
+   /// - We forgot to call the explicit one somewhere => this should be fixed on our side asap
+   /// - The future is cancelled while running and the permit is dropped with it
    fn drop(&mut self) {
        let sender = self.sender.clone();
        // if the channel is closed then the whole instance is down
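Sketch of the intended call pattern (a hypothetical caller, not part of this diff): prefer the explicit async `drop`, which returns the slot deterministically; the `Drop` impl above is only a safety net for cancelled futures.

async fn run_search(queue: &SearchQueue) -> Result<(), MeilisearchHttpError> {
    let permit = queue.try_get_search_permit().await?;
    // ... perform the search while holding the permit ...
    permit.drop().await; // hand the slot back before returning
    Ok(())
}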
@@ -53,7 +69,11 @@ impl SearchQueue {
        let (sender, receiver) = mpsc::channel(1);

        tokio::task::spawn(Self::run(capacity, paralellism, receiver));
-       Self { sender, capacity }
+       Self { sender, capacity, time_to_abort: Duration::from_secs(60) }
    }

+   pub fn with_time_to_abort(self, time_to_abort: Duration) -> Self {
+       Self { time_to_abort, ..self }
+   }

    /// This function is the main loop; it's in charge of scheduling which search request should execute first and
@@ -119,9 +139,23 @@ impl SearchQueue {
    /// Returns a search `Permit`.
    /// It should be dropped as soon as you've freed all the RAM associated with the search request being processed.
    pub async fn try_get_search_permit(&self) -> Result<Permit, MeilisearchHttpError> {
+       let now = std::time::Instant::now();
        let (sender, receiver) = oneshot::channel();
        self.sender.send(sender).await.map_err(|_| MeilisearchHttpError::SearchLimiterIsDown)?;
-       receiver.await.map_err(|_| MeilisearchHttpError::TooManySearchRequests(self.capacity))
+       let permit = receiver
+           .await
+           .map_err(|_| MeilisearchHttpError::TooManySearchRequests(self.capacity))?;
+
+       // If we've been waiting for more than one minute to get a search permit, it's better to simply
+       // abort the search request than to spend time processing something where the client
+       // most certainly exited or got a timeout a long time ago.
+       // We may find a better solution in https://github.com/actix/actix-web/issues/3462.
+       if now.elapsed() > self.time_to_abort {
+           permit.drop().await;
+           Err(MeilisearchHttpError::TooManySearchRequests(self.capacity))
+       } else {
+           Ok(permit)
+       }
    }

    /// Returns `Ok(())` if everything seems normal.
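Assuming the constructor keeps its `new(capacity, parallelism)` shape, the new builder method lets tests tighten the one-minute default:

let queue = SearchQueue::new(100, NonZeroUsize::new(4).unwrap())
    .with_time_to_abort(Duration::from_millis(100));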
@@ -5,63 +5,3 @@ mod payload;
mod tenant_token;

mod tenant_token_multi_search;

-use actix_web::http::StatusCode;
-
-use crate::common::{Server, Value};
-use crate::json;
-
-impl Server {
-    pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
-        self.service.api_key = Some(api_key.as_ref().to_string());
-    }
-
-    /// Fetch and use the default admin key for the next http requests.
-    pub async fn use_admin_key(&mut self, master_key: impl AsRef<str>) {
-        self.use_api_key(master_key);
-        let (response, code) = self.list_api_keys("").await;
-        assert_eq!(200, code, "{:?}", response);
-        let admin_key = &response["results"][1]["key"];
-        self.use_api_key(admin_key.as_str().unwrap());
-    }
-
-    pub async fn add_api_key(&self, content: Value) -> (Value, StatusCode) {
-        let url = "/keys";
-        self.service.post(url, content).await
-    }
-
-    pub async fn get_api_key(&self, key: impl AsRef<str>) -> (Value, StatusCode) {
-        let url = format!("/keys/{}", key.as_ref());
-        self.service.get(url).await
-    }
-
-    pub async fn patch_api_key(&self, key: impl AsRef<str>, content: Value) -> (Value, StatusCode) {
-        let url = format!("/keys/{}", key.as_ref());
-        self.service.patch(url, content).await
-    }
-
-    pub async fn list_api_keys(&self, params: &str) -> (Value, StatusCode) {
-        let url = format!("/keys{params}");
-        self.service.get(url).await
-    }
-
-    pub async fn delete_api_key(&self, key: impl AsRef<str>) -> (Value, StatusCode) {
-        let url = format!("/keys/{}", key.as_ref());
-        self.service.delete(url).await
-    }
-
-    pub async fn dummy_request(
-        &self,
-        method: impl AsRef<str>,
-        url: impl AsRef<str>,
-    ) -> (Value, StatusCode) {
-        match method.as_ref() {
-            "POST" => self.service.post(url, json!({})).await,
-            "PUT" => self.service.put(url, json!({})).await,
-            "PATCH" => self.service.patch(url, json!({})).await,
-            "GET" => self.service.get(url).await,
-            "DELETE" => self.service.delete(url).await,
-            _ => unreachable!(),
-        }
-    }
-}
@@ -6,7 +6,7 @@ use once_cell::sync::Lazy;
use time::{Duration, OffsetDateTime};

use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS};
-use crate::common::{Server, Value};
+use crate::common::{Server, Value, DOCUMENTS};
use crate::json;

fn generate_tenant_token(
@@ -22,36 +22,6 @@ fn generate_tenant_token(
    .unwrap()
}

-static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
-    json!([
-        {
-            "title": "Shazam!",
-            "id": "287947",
-            "color": ["green", "blue"]
-        },
-        {
-            "title": "Captain Marvel",
-            "id": "299537",
-            "color": ["yellow", "blue"]
-        },
-        {
-            "title": "Escape Room",
-            "id": "522681",
-            "color": ["yellow", "red"]
-        },
-        {
-            "title": "How to Train Your Dragon: The Hidden World",
-            "id": "166428",
-            "color": ["green", "red"]
-        },
-        {
-            "title": "Glass",
-            "id": "450465",
-            "color": ["blue", "red"]
-        }
-    ])
-});

static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
    json!({
        "message": null,
@@ -1,4 +1,5 @@
use std::fmt::Write;
+use std::marker::PhantomData;
use std::panic::{catch_unwind, resume_unwind, UnwindSafe};
use std::time::Duration;

@@ -9,19 +10,24 @@ use urlencoding::encode as urlencode;
use super::encoder::Encoder;
use super::service::Service;
-use super::Value;
+use super::{Owned, Shared};
use crate::json;

-pub struct Index<'a> {
+pub struct Index<'a, State = Owned> {
    pub uid: String,
    pub service: &'a Service,
-   pub encoder: Encoder,
+   pub(super) encoder: Encoder,
+   pub(super) marker: PhantomData<State>,
}

#[allow(dead_code)]
-impl Index<'_> {
-   pub async fn get(&self) -> (Value, StatusCode) {
-       let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
-       self.service.get(url).await
+impl<'a> Index<'a, Owned> {
+   pub fn to_shared(&self) -> Index<'a, Shared> {
+       Index {
+           uid: self.uid.clone(),
+           service: self.service,
+           encoder: self.encoder,
+           marker: PhantomData,
+       }
    }

    pub async fn load_test_set(&self) -> u64 {
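A hypothetical test showing the typestate split: an owned index keeps the full read/write surface, while its `to_shared()` view only exposes the read-only helpers (`server.index(...)` is assumed from the existing test utilities):

let server = Server::new().await;
let index = server.index("movies");
let (task, _code) = index.create(None).await;
index.wait_task(task.uid()).await.succeeded();

let shared = index.to_shared(); // Index<'_, Shared>: no mutating methods available
let (_stats, code) = shared.stats().await;
assert_eq!(200, code);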
@@ -57,11 +63,7 @@ impl Index<'_> {
    }

    pub async fn create(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
-       let body = json!({
-           "uid": self.uid,
-           "primaryKey": primary_key,
-       });
-       self.service.post_encoded("/indexes", body, self.encoder).await
+       self._create(primary_key).await
    }

    pub async fn update_raw(&self, body: Value) -> (Value, StatusCode) {
@@ -88,13 +90,7 @@ impl Index<'_> {
        documents: Value,
        primary_key: Option<&str>,
    ) -> (Value, StatusCode) {
-       let url = match primary_key {
-           Some(key) => {
-               format!("/indexes/{}/documents?primaryKey={}", urlencode(self.uid.as_ref()), key)
-           }
-           None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
-       };
-       self.service.post_encoded(url, documents, self.encoder).await
+       self._add_documents(documents, primary_key).await
    }

    pub async fn raw_add_documents(
@@ -136,80 +132,11 @@ impl Index<'_> {
        }
    }

-   pub async fn wait_task(&self, update_id: u64) -> Value {
-       // try several times to get status, or panic to not wait forever
-       let url = format!("/tasks/{}", update_id);
-       for _ in 0..100 {
-           let (response, status_code) = self.service.get(&url).await;
-           assert_eq!(200, status_code, "response: {}", response);
-
-           if response["status"] == "succeeded" || response["status"] == "failed" {
-               return response;
-           }
-
-           // wait 0.5 second.
-           sleep(Duration::from_millis(500)).await;
-       }
-       panic!("Timeout waiting for update id");
-   }
-
-   pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
-       let url = format!("/tasks/{}", update_id);
-       self.service.get(url).await
-   }
-
-   pub async fn list_tasks(&self) -> (Value, StatusCode) {
-       let url = format!("/tasks?indexUids={}", self.uid);
-       self.service.get(url).await
-   }
-
-   pub async fn filtered_tasks(
-       &self,
-       types: &[&str],
-       statuses: &[&str],
-       canceled_by: &[&str],
-   ) -> (Value, StatusCode) {
-       let mut url = format!("/tasks?indexUids={}", self.uid);
-       if !types.is_empty() {
-           let _ = write!(url, "&types={}", types.join(","));
-       }
-       if !statuses.is_empty() {
-           let _ = write!(url, "&statuses={}", statuses.join(","));
-       }
-       if !canceled_by.is_empty() {
-           let _ = write!(url, "&canceledBy={}", canceled_by.join(","));
-       }
-       self.service.get(url).await
-   }
-
-   pub async fn get_document(&self, id: u64, options: Option<Value>) -> (Value, StatusCode) {
-       let mut url = format!("/indexes/{}/documents/{}", urlencode(self.uid.as_ref()), id);
-       if let Some(options) = options {
-           write!(url, "{}", yaup::to_string(&options).unwrap()).unwrap();
-       }
-       self.service.get(url).await
-   }
-
-   pub async fn get_document_by_filter(&self, payload: Value) -> (Value, StatusCode) {
-       let url = format!("/indexes/{}/documents/fetch", urlencode(self.uid.as_ref()));
-       self.service.post(url, payload).await
-   }
-
-   pub async fn get_all_documents_raw(&self, options: &str) -> (Value, StatusCode) {
-       let url = format!("/indexes/{}/documents{}", urlencode(self.uid.as_ref()), options);
-       self.service.get(url).await
-   }
-
-   pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
-       let url = format!(
-           "/indexes/{}/documents{}",
-           urlencode(self.uid.as_ref()),
-           yaup::to_string(&options).unwrap()
-       );
-
-       self.service.get(url).await
-   }
-
    pub async fn delete_document(&self, id: u64) -> (Value, StatusCode) {
        let url = format!("/indexes/{}/documents/{}", urlencode(self.uid.as_ref()), id);
        self.service.delete(url).await
@@ -237,14 +164,8 @@ impl Index<'_> {
        self.service.post_encoded(url, body, self.encoder).await
    }

-   pub async fn settings(&self) -> (Value, StatusCode) {
-       let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
-       self.service.get(url).await
-   }
-
    pub async fn update_settings(&self, settings: Value) -> (Value, StatusCode) {
-       let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
-       self.service.patch_encoded(url, settings, self.encoder).await
+       self._update_settings(settings).await
    }

    pub async fn update_settings_displayed_attributes(
@@ -327,6 +248,146 @@ impl Index<'_> {
        self.service.delete(url).await
    }

+   pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
+       let url =
+           format!("/indexes/{}/settings/{}", urlencode(self.uid.as_ref()), "distinct-attribute");
+       self.service.put_encoded(url, value, self.encoder).await
+   }
}

+impl<'a> Index<'a, Shared> {
+   /// You cannot modify the content of a shared index, thus the delete_document_by_filter call
+   /// must fail. If the task successfully enqueues itself, we'll wait for it to finish,
+   /// and if it succeeds the function will panic.
+   pub async fn delete_document_by_filter_fail(&self, body: Value) -> (Value, StatusCode) {
+       let (mut task, code) = self._delete_document_by_filter(body).await;
+       if code.is_success() {
+           task = self.wait_task(task.uid()).await;
+           if task.is_success() {
+               panic!(
+                   "`delete_document_by_filter_fail` succeeded: {}",
+                   serde_json::to_string_pretty(&task).unwrap()
+               );
+           }
+       }
+       (task, code)
+   }
+}
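Sketch of how the failure helper pairs with the shared fixtures defined in `common/mod.rs` further down in this diff:

let index = shared_index_with_documents().await;
let (task, _code) = index
    .delete_document_by_filter_fail(json!({ "filter": "id EXISTS" }))
    .await;
// the helper already waited and panicked if the task had succeeded
assert_eq!(task["status"], "failed");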
+#[allow(dead_code)]
+impl<State> Index<'_, State> {
+   pub async fn get(&self) -> (Value, StatusCode) {
+       let url = format!("/indexes/{}", urlencode(self.uid.as_ref()));
+       self.service.get(url).await
+   }
+
+   /// add_documents is not allowed on a shared index, but we need to use it to initialize
+   /// a bunch of very common indexes in `common/mod.rs`.
+   pub(super) async fn _add_documents(
+       &self,
+       documents: Value,
+       primary_key: Option<&str>,
+   ) -> (Value, StatusCode) {
+       let url = match primary_key {
+           Some(key) => {
+               format!("/indexes/{}/documents?primaryKey={}", urlencode(self.uid.as_ref()), key)
+           }
+           None => format!("/indexes/{}/documents", urlencode(self.uid.as_ref())),
+       };
+       self.service.post_encoded(url, documents, self.encoder).await
+   }
+
+   pub(super) async fn _update_settings(&self, settings: Value) -> (Value, StatusCode) {
+       let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
+       self.service.patch_encoded(url, settings, self.encoder).await
+   }
+
+   pub(super) async fn _delete_document_by_filter(&self, body: Value) -> (Value, StatusCode) {
+       let url = format!("/indexes/{}/documents/delete", urlencode(self.uid.as_ref()));
+       self.service.post_encoded(url, body, self.encoder).await
+   }
+
+   pub(super) async fn _create(&self, primary_key: Option<&str>) -> (Value, StatusCode) {
+       let body = json!({
+           "uid": self.uid,
+           "primaryKey": primary_key,
+       });
+       self.service.post_encoded("/indexes", body, self.encoder).await
+   }
+
+   pub async fn wait_task(&self, update_id: u64) -> Value {
+       // try several times to get status, or panic to not wait forever
+       let url = format!("/tasks/{}", update_id);
+       for _ in 0..100 {
+           let (response, status_code) = self.service.get(&url).await;
+           assert_eq!(200, status_code, "response: {}", response);
+
+           if response["status"] == "succeeded" || response["status"] == "failed" {
+               return response;
+           }
+
+           // wait 0.5 second.
+           sleep(Duration::from_millis(500)).await;
+       }
+       panic!("Timeout waiting for update id");
+   }
+
+   pub async fn get_task(&self, update_id: u64) -> (Value, StatusCode) {
+       let url = format!("/tasks/{}", update_id);
+       self.service.get(url).await
+   }
+
+   pub async fn filtered_tasks(
+       &self,
+       types: &[&str],
+       statuses: &[&str],
+       canceled_by: &[&str],
+   ) -> (Value, StatusCode) {
+       let mut url = format!("/tasks?indexUids={}", self.uid);
+       if !types.is_empty() {
+           let _ = write!(url, "&types={}", types.join(","));
+       }
+       if !statuses.is_empty() {
+           let _ = write!(url, "&statuses={}", statuses.join(","));
+       }
+       if !canceled_by.is_empty() {
+           let _ = write!(url, "&canceledBy={}", canceled_by.join(","));
+       }
+       self.service.get(url).await
+   }
+
+   pub async fn get_document(&self, id: u64, options: Option<Value>) -> (Value, StatusCode) {
+       let mut url = format!("/indexes/{}/documents/{}", urlencode(self.uid.as_ref()), id);
+       if let Some(options) = options {
+           write!(url, "{}", yaup::to_string(&options).unwrap()).unwrap();
+       }
+       self.service.get(url).await
+   }
+
+   pub async fn get_document_by_filter(&self, payload: Value) -> (Value, StatusCode) {
+       let url = format!("/indexes/{}/documents/fetch", urlencode(self.uid.as_ref()));
+       self.service.post(url, payload).await
+   }
+
+   pub async fn get_all_documents_raw(&self, options: &str) -> (Value, StatusCode) {
+       let url = format!("/indexes/{}/documents{}", urlencode(self.uid.as_ref()), options);
+       self.service.get(url).await
+   }
+
+   pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
+       let url = format!(
+           "/indexes/{}/documents{}",
+           urlencode(self.uid.as_ref()),
+           yaup::to_string(&options).unwrap()
+       );
+
+       self.service.get(url).await
+   }
+
+   pub async fn settings(&self) -> (Value, StatusCode) {
+       let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
+       self.service.get(url).await
+   }
+
    pub async fn stats(&self) -> (Value, StatusCode) {
        let url = format!("/indexes/{}/stats", urlencode(self.uid.as_ref()));
        self.service.get(url).await
@@ -411,12 +472,6 @@ impl Index<'_> {
        self.service.post_encoded(url, query, self.encoder).await
    }

-   pub async fn update_distinct_attribute(&self, value: Value) -> (Value, StatusCode) {
-       let url =
-           format!("/indexes/{}/settings/{}", urlencode(self.uid.as_ref()), "distinct-attribute");
-       self.service.put_encoded(url, value, self.encoder).await
-   }
-
    pub async fn get_distinct_attribute(&self) -> (Value, StatusCode) {
        let url =
            format!("/indexes/{}/settings/{}", urlencode(self.uid.as_ref()), "distinct-attribute");
@@ -8,9 +8,16 @@ use std::fmt::{self, Display};
#[allow(unused)]
pub use index::GetAllDocumentsOptions;
use meili_snap::json_string;
+use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
#[allow(unused)]
pub use server::{default_settings, Server};
+use tokio::sync::OnceCell;

+use crate::common::index::Index;
+
+pub enum Shared {}
+pub enum Owned {}

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct Value(pub serde_json::Value);
@@ -27,10 +34,20 @@ impl Value {
        }
    }

+   /// Return `true` if the `status` field is set to `succeeded`.
+   /// Panic if the `status` field doesn't exist.
+   #[track_caller]
+   pub fn is_success(&self) -> bool {
+       if !self["status"].is_string() {
+           panic!("Called `is_success` on {}", serde_json::to_string_pretty(&self.0).unwrap());
+       }
+       self["status"] == serde_json::Value::String(String::from("succeeded"))
+   }
+
    // Panic if the json doesn't contain the `status` field set to "succeeded"
    #[track_caller]
    pub fn succeeded(&self) -> &Self {
-       if self["status"] != serde_json::Value::String(String::from("succeeded")) {
+       if !self.is_success() {
            panic!("Called succeeded on {}", serde_json::to_string_pretty(&self.0).unwrap());
        }
        self
@@ -80,7 +97,15 @@ impl Display for Value {
        write!(
            f,
            "{}",
-           json_string!(self, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]", ".processingTimeMs" => "[duration]" })
+           json_string!(self, {
+               ".uid" => "[uid]",
+               ".enqueuedAt" => "[date]",
+               ".startedAt" => "[date]",
+               ".finishedAt" => "[date]",
+               ".duration" => "[duration]",
+               ".processingTimeMs" => "[duration]",
+               ".details.embedders.*.url" => "[url]"
+           })
        )
    }
}
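Typical call chain with the new helpers, matching the fixture code below: enqueue, wait, then assert on the final task status.

let (response, _code) = index._add_documents(DOCUMENTS.clone(), None).await;
index.wait_task(response.uid()).await.succeeded(); // panics unless status is "succeeded"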
@@ -114,3 +139,253 @@ macro_rules! test_post_get_search {
            .map_err(|e| panic!("panic in post route: {:?}", e.downcast_ref::<&str>().unwrap()));
    };
}

+pub async fn shared_does_not_exists_index() -> &'static Index<'static, Shared> {
+   static INDEX: Lazy<Index<'static, Shared>> = Lazy::new(|| {
+       let server = Server::new_shared();
+       server._index("DOES_NOT_EXISTS").to_shared()
+   });
+   &INDEX
+}
+
+pub async fn shared_empty_index() -> &'static Index<'static, Shared> {
+   static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
+
+   INDEX
+       .get_or_init(|| async {
+           let server = Server::new_shared();
+           let index = server._index("EMPTY_INDEX").to_shared();
+           let (response, _code) = index._create(None).await;
+           index.wait_task(response.uid()).await.succeeded();
+           index
+       })
+       .await
+}
+pub static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+   json!([
+       {
+           "title": "Shazam!",
+           "id": "287947",
+           "color": ["green", "blue"],
+           "_vectors": { "manual": [1, 2, 3]},
+       },
+       {
+           "title": "Captain Marvel",
+           "id": "299537",
+           "color": ["yellow", "blue"],
+           "_vectors": { "manual": [1, 2, 54] },
+       },
+       {
+           "title": "Escape Room",
+           "id": "522681",
+           "color": ["yellow", "red"],
+           "_vectors": { "manual": [10, -23, 32] },
+       },
+       {
+           "title": "How to Train Your Dragon: The Hidden World",
+           "id": "166428",
+           "color": ["green", "red"],
+           "_vectors": { "manual": [-100, 231, 32] },
+       },
+       {
+           "title": "Gläss",
+           "id": "450465",
+           "color": ["blue", "red"],
+           "_vectors": { "manual": [-100, 340, 90] },
+       }
+   ])
+});
+
+pub async fn shared_index_with_documents() -> &'static Index<'static, Shared> {
+   static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
+   INDEX.get_or_init(|| async {
+       let server = Server::new_shared();
+       let index = server._index("SHARED_DOCUMENTS").to_shared();
+       let documents = DOCUMENTS.clone();
+       let (response, _code) = index._add_documents(documents, None).await;
+       index.wait_task(response.uid()).await.succeeded();
+       let (response, _code) = index
+           ._update_settings(
+               json!({"filterableAttributes": ["id", "title"], "sortableAttributes": ["id", "title"]}),
+           )
+           .await;
+       index.wait_task(response.uid()).await.succeeded();
+       index
+   }).await
+}
+pub static SCORE_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+   json!([
+       {
+           "title": "Batman the dark knight returns: Part 1",
+           "id": "A",
+       },
+       {
+           "title": "Batman the dark knight returns: Part 2",
+           "id": "B",
+       },
+       {
+           "title": "Batman Returns",
+           "id": "C",
+       },
+       {
+           "title": "Batman",
+           "id": "D",
+       },
+       {
+           "title": "Badman",
+           "id": "E",
+       }
+   ])
+});
+
+pub static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+   json!([
+       {
+           "id": 852,
+           "father": "jean",
+           "mother": "michelle",
+           "doggos": [
+               {
+                   "name": "bobby",
+                   "age": 2,
+               },
+               {
+                   "name": "buddy",
+                   "age": 4,
+               },
+           ],
+           "cattos": "pésti",
+           "_vectors": { "manual": [1, 2, 3]},
+       },
+       {
+           "id": 654,
+           "father": "pierre",
+           "mother": "sabine",
+           "doggos": [
+               {
+                   "name": "gros bill",
+                   "age": 8,
+               },
+           ],
+           "cattos": ["simba", "pestiféré"],
+           "_vectors": { "manual": [1, 2, 54] },
+       },
+       {
+           "id": 750,
+           "father": "romain",
+           "mother": "michelle",
+           "cattos": ["enigma"],
+           "_vectors": { "manual": [10, 23, 32] },
+       },
+       {
+           "id": 951,
+           "father": "jean-baptiste",
+           "mother": "sophie",
+           "doggos": [
+               {
+                   "name": "turbo",
+                   "age": 5,
+               },
+               {
+                   "name": "fast",
+                   "age": 6,
+               },
+           ],
+           "cattos": ["moumoute", "gomez"],
+           "_vectors": { "manual": [10, 23, 32] },
+       },
+   ])
+});
+
+pub async fn shared_index_with_nested_documents() -> &'static Index<'static, Shared> {
+   static INDEX: OnceCell<Index<'static, Shared>> = OnceCell::const_new();
+   INDEX.get_or_init(|| async {
+       let server = Server::new_shared();
+       let index = server._index("SHARED_NESTED_DOCUMENTS").to_shared();
+       let documents = NESTED_DOCUMENTS.clone();
+       let (response, _code) = index._add_documents(documents, None).await;
+       index.wait_task(response.uid()).await.succeeded();
+       let (response, _code) = index
+           ._update_settings(
+               json!({"filterableAttributes": ["father", "doggos"], "sortableAttributes": ["doggos"]}),
+           )
+           .await;
+       index.wait_task(response.uid()).await.succeeded();
+       index
+   }).await
+}
pub static FRUITS_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
|
||||
json!([
|
||||
{
|
||||
"name": "Exclusive sale: green apple",
|
||||
"id": "green-apple-boosted",
|
||||
"BOOST": true
|
||||
},
|
||||
{
|
||||
"name": "Pear",
|
||||
"id": "pear",
|
||||
},
|
||||
{
|
||||
"name": "Red apple gala",
|
||||
"id": "red-apple-gala",
|
||||
},
|
||||
{
|
||||
"name": "Exclusive sale: Red Tomato",
|
||||
"id": "red-tomatoes-boosted",
|
||||
"BOOST": true
|
||||
},
|
||||
{
|
||||
"name": "Exclusive sale: Red delicious apple",
|
||||
"id": "red-delicious-boosted",
|
||||
"BOOST": true,
|
||||
}
|
||||
])
|
||||
});
|
||||
|
||||
pub static VECTOR_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
    json!([
        {
            "id": "A",
            "description": "the dog barks at the cat",
            "_vectors": {
                // dimensions [canine, feline, young]
                "animal": [0.9, 0.8, 0.05],
                // dimensions [negative/positive, energy]
                "sentiment": [-0.1, 0.55]
            }
        },
        {
            "id": "B",
            "description": "the kitten scratched the beagle",
            "_vectors": {
                // dimensions [canine, feline, young]
                "animal": [0.8, 0.9, 0.5],
                // dimensions [negative/positive, energy]
                "sentiment": [-0.2, 0.65]
            }
        },
        {
            "id": "C",
            "description": "the dog had to stay alone today",
            "_vectors": {
                // dimensions [canine, feline, young]
                "animal": [0.85, 0.02, 0.1],
                // dimensions [negative/positive, energy]
                "sentiment": [-1.0, 0.1]
            }
        },
        {
            "id": "D",
            "description": "the little boy pets the puppy",
            "_vectors": {
                // dimensions [canine, feline, young]
                "animal": [0.8, 0.09, 0.8],
                // dimensions [negative/positive, energy]
                "sentiment": [0.8, 0.3]
            }
        },
    ])
});

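// Hedged sketch (assumption): the documents above presuppose two embedders,
// "animal" (3 dimensions) and "sentiment" (2 dimensions). A test would
// declare them as user-provided before indexing VECTOR_DOCUMENTS; the helper
// name below is hypothetical.
async fn declare_vector_embedders(index: &Index<'_>) {
    let (response, _code) = index
        ._update_settings(json!({
            "embedders": {
                "animal": { "source": "userProvided", "dimensions": 3 },
                "sentiment": { "source": "userProvided", "dimensions": 2 },
            }
        }))
        .await;
    // Settings updates are asynchronous tasks; wait until the task succeeds.
    index.wait_task(response.uid()).await.succeeded();
}
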
@@ -1,6 +1,8 @@
#![allow(dead_code)]

use std::marker::PhantomData;
use std::path::Path;
use std::str::FromStr;
use std::time::Duration;

use actix_http::body::MessageBody;
@@ -8,29 +10,35 @@ use actix_web::dev::ServiceResponse;
use actix_web::http::StatusCode;
use byte_unit::{Byte, Unit};
use clap::Parser;
use meilisearch::option::{IndexerOpts, MaxMemory, Opt};
use meilisearch::{analytics, create_app, setup_meilisearch, SubscriberForSecondLayer};
use meilisearch::option::{IndexerOpts, MaxMemory, MaxThreads, Opt};
use meilisearch::setup_meilisearch;
use once_cell::sync::Lazy;
use tempfile::TempDir;
use tokio::sync::OnceCell;
use tokio::time::sleep;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::Layer;
use uuid::Uuid;

use super::index::Index;
use super::service::Service;
use super::{Owned, Shared};
use crate::common::encoder::Encoder;
use crate::common::Value;
use crate::json;

pub struct Server {
pub struct Server<State = Owned> {
    pub service: Service,
    // hold ownership of the tempdir while we use the server instance.
    _dir: Option<TempDir>,
    _marker: PhantomData<State>,
}

pub static TEST_TEMP_DIR: Lazy<TempDir> = Lazy::new(|| TempDir::new().unwrap());

impl Server {
impl Server<Owned> {
    fn into_shared(self) -> Server<Shared> {
        Server { service: self.service, _dir: self._dir, _marker: PhantomData }
    }

    pub async fn new() -> Self {
        let dir = TempDir::new().unwrap();

@@ -45,7 +53,7 @@ impl Server {
        let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
        let service = Service { index_scheduler, auth, options, api_key: None };

        Server { service, _dir: Some(dir) }
        Server { service, _dir: Some(dir), _marker: PhantomData }
    }

    pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
@@ -60,7 +68,7 @@ impl Server {
        let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
        let service = Service { index_scheduler, auth, options, api_key: None };

        Server { service, _dir: Some(dir) }
        Server { service, _dir: Some(dir), _marker: PhantomData }
    }

    pub async fn new_auth() -> Self {
@@ -73,38 +81,35 @@ impl Server {
        let (index_scheduler, auth) = setup_meilisearch(&options)?;
        let service = Service { index_scheduler, auth, options, api_key: None };

        Ok(Server { service, _dir: None })
        Ok(Server { service, _dir: None, _marker: PhantomData })
    }

    pub async fn init_web_app(
        &self,
    ) -> impl actix_web::dev::Service<
        actix_http::Request,
        Response = ServiceResponse<impl MessageBody>,
        Error = actix_web::Error,
    > {
        let (_route_layer, route_layer_handle) =
            tracing_subscriber::reload::Layer::new(None.with_filter(
                tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),
            ));
        let (_stderr_layer, stderr_layer_handle) = tracing_subscriber::reload::Layer::new(
            (Box::new(
                tracing_subscriber::fmt::layer()
                    .with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE),
            )
                as Box<dyn tracing_subscriber::Layer<SubscriberForSecondLayer> + Send + Sync>)
                .with_filter(tracing_subscriber::filter::Targets::new()),
        );
    pub fn use_api_key(&mut self, api_key: impl AsRef<str>) {
        self.service.api_key = Some(api_key.as_ref().to_string());
    }

        actix_web::test::init_service(create_app(
            self.service.index_scheduler.clone().into(),
            self.service.auth.clone().into(),
            self.service.options.clone(),
            (route_layer_handle, stderr_layer_handle),
            analytics::MockAnalytics::new(&self.service.options),
            true,
        ))
        .await
    /// Fetch and use the default admin key for the next http requests.
    pub async fn use_admin_key(&mut self, master_key: impl AsRef<str>) {
        self.use_api_key(master_key);
        let (response, code) = self.list_api_keys("").await;
        assert_eq!(200, code, "{:?}", response);
        let admin_key = &response["results"][1]["key"];
        self.use_api_key(admin_key.as_str().unwrap());
    }

    pub async fn add_api_key(&self, content: Value) -> (Value, StatusCode) {
        let url = "/keys";
        self.service.post(url, content).await
    }

    pub async fn patch_api_key(&self, key: impl AsRef<str>, content: Value) -> (Value, StatusCode) {
        let url = format!("/keys/{}", key.as_ref());
        self.service.patch(url, content).await
    }

    pub async fn delete_api_key(&self, key: impl AsRef<str>) -> (Value, StatusCode) {
        let url = format!("/keys/{}", key.as_ref());
        self.service.delete(url).await
    }

    /// Returns a view to an index. There is no guarantee that the index exists.
@@ -117,15 +122,12 @@ impl Server {
    }

    pub fn index_with_encoder(&self, uid: impl AsRef<str>, encoder: Encoder) -> Index<'_> {
        Index { uid: uid.as_ref().to_string(), service: &self.service, encoder }
    }

    pub async fn multi_search(&self, queries: Value) -> (Value, StatusCode) {
        self.service.post("/multi-search", queries).await
    }

    pub async fn list_indexes_raw(&self, parameters: &str) -> (Value, StatusCode) {
        self.service.get(format!("/indexes{parameters}")).await
        Index {
            uid: uid.as_ref().to_string(),
            service: &self.service,
            encoder,
            marker: PhantomData,
        }
    }

    pub async fn list_indexes(
@@ -149,10 +151,6 @@ impl Server {
        }
    }

    pub async fn version(&self) -> (Value, StatusCode) {
        self.service.get("/version").await
    }

    pub async fn stats(&self) -> (Value, StatusCode) {
        self.service.get("/stats").await
    }
@@ -161,12 +159,174 @@ impl Server {
        self.service.get("/tasks").await
    }

    pub async fn set_features(&self, value: Value) -> (Value, StatusCode) {
        self.service.patch("/experimental-features", value).await
    }

    pub async fn get_metrics(&self) -> (Value, StatusCode) {
        self.service.get("/metrics").await
    }
}

impl Server<Shared> {
    fn init_new_shared_instance() -> Server<Shared> {
        let dir = TempDir::new().unwrap();

        if cfg!(windows) {
            std::env::set_var("TMP", TEST_TEMP_DIR.path());
        } else {
            std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
        }

        let options = default_settings(dir.path());

        let (index_scheduler, auth) = setup_meilisearch(&options).unwrap();
        let service = Service { index_scheduler, auth, api_key: None, options };

        Server { service, _dir: Some(dir), _marker: PhantomData }
    }

    pub fn new_shared() -> &'static Server<Shared> {
        static SERVER: Lazy<Server<Shared>> = Lazy::new(Server::init_new_shared_instance);
        &SERVER
    }

    pub async fn new_shared_with_admin_key() -> &'static Server<Shared> {
        static SERVER: OnceCell<Server<Shared>> = OnceCell::const_new();
        SERVER
            .get_or_init(|| async {
                let mut server = Server::new_auth().await;
                server.use_admin_key("MASTER_KEY").await;
                server.into_shared()
            })
            .await
    }

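// Illustrative sketch (hypothetical test): the intended pairing of the two
// flavors above is one process-wide shared server per test binary, with each
// test working on a uuid-named index so it cannot collide with other tests.
#[actix_rt::test]
async fn create_and_use_unique_index() {
    let server = Server::new_shared();
    // `unique_index` (defined further down) names the index with a fresh uuid.
    let index = server.unique_index();
    let (task, _code) = index.create(None).await;
    index.wait_task(task.uid()).await.succeeded();
}
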
    /// You shouldn't access random indexes on a shared instance, so this method
    /// must fail.
    pub async fn get_index_fail(&self, uid: impl AsRef<str>) -> (Value, StatusCode) {
        let url = format!("/indexes/{}", urlencoding::encode(uid.as_ref()));
        let (value, code) = self.service.get(url).await;
        if code.is_success() {
            panic!("`get_index_fail` succeeded with uid: {}", uid.as_ref());
        }
        (value, code)
    }

    pub async fn delete_index_fail(&self, uid: impl AsRef<str>) -> (Value, StatusCode) {
        let url = format!("/indexes/{}", urlencoding::encode(uid.as_ref()));
        let (value, code) = self.service.delete(url).await;
        if code.is_success() {
            panic!("`delete_index_fail` succeeded with uid: {}", uid.as_ref());
        }
        (value, code)
    }

    pub async fn update_raw_index_fail(
        &self,
        uid: impl AsRef<str>,
        body: Value,
    ) -> (Value, StatusCode) {
        let url = format!("/indexes/{}", urlencoding::encode(uid.as_ref()));
        let (value, code) = self.service.patch_encoded(url, body, Encoder::Plain).await;
        if code.is_success() {
            panic!("`update_raw_index_fail` succeeded with uid: {}", uid.as_ref());
        }
        (value, code)
    }

    /// Since this call updates the state of the instance, it must fail.
    /// If it doesn't fail, the test will panic to help you debug what
    /// is going on.
    pub async fn create_index_fail(&self, body: Value) -> (Value, StatusCode) {
        let (mut task, code) = self._create_index(body).await;
        if code.is_success() {
            task = self.wait_task(task.uid()).await;
            if task.is_success() {
                panic!(
                    "`create_index_fail` succeeded: {}",
                    serde_json::to_string_pretty(&task).unwrap()
                );
            }
        }
        (task, code)
    }
}

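// Hypothetical sketch of why the `*_fail` helpers exist: a test on the shared
// instance must not touch indexes it does not own, so negative-path tests go
// through helpers that panic if the call unexpectedly succeeds.
#[actix_rt::test]
async fn getting_an_unknown_index_fails() {
    let server = Server::new_shared();
    let (response, code) = server.get_index_fail("DOES_NOT_EXIST").await;
    // The helper already panicked if this was a success; 404 is the expected error.
    assert_eq!(code, 404, "{}", response);
}
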
impl<State> Server<State> {
    pub async fn init_web_app(
        &self,
    ) -> impl actix_web::dev::Service<
        actix_http::Request,
        Response = ServiceResponse<impl MessageBody>,
        Error = actix_web::Error,
    > {
        self.service.init_web_app().await
    }

    pub async fn list_api_keys(&self, params: &str) -> (Value, StatusCode) {
        let url = format!("/keys{params}");
        self.service.get(url).await
    }

    pub async fn dummy_request(
        &self,
        method: impl AsRef<str>,
        url: impl AsRef<str>,
    ) -> (Value, StatusCode) {
        match method.as_ref() {
            "POST" => self.service.post(url, json!({})).await,
            "PUT" => self.service.put(url, json!({})).await,
            "PATCH" => self.service.patch(url, json!({})).await,
            "GET" => self.service.get(url).await,
            "DELETE" => self.service.delete(url).await,
            _ => unreachable!(),
        }
    }

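// Illustrative sketch (assumption): `dummy_request` lets generic tests, such
// as auth coverage over a table of routes, hit any method/route pair with an
// empty JSON body. The helper below is hypothetical.
async fn probe_route(server: &Server<Shared>, method: &str, route: &str) {
    let (response, code) = server.dummy_request(method, route).await;
    // A 404 here would mean the probed route does not exist at all.
    assert_ne!(code, StatusCode::NOT_FOUND, "{} {}: {}", method, route, response);
}
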
    pub async fn get_api_key(&self, key: impl AsRef<str>) -> (Value, StatusCode) {
        let url = format!("/keys/{}", key.as_ref());
        self.service.get(url).await
    }

    pub(super) fn _index(&self, uid: impl AsRef<str>) -> Index<'_> {
        Index {
            uid: uid.as_ref().to_string(),
            service: &self.service,
            encoder: Encoder::Plain,
            marker: PhantomData,
        }
    }

    /// Returns a view to an index. There is no guarantee that the index exists.
    pub fn unique_index(&self) -> Index<'_> {
        let uuid = Uuid::new_v4();
        Index {
            uid: uuid.to_string(),
            service: &self.service,
            encoder: Encoder::Plain,
            marker: PhantomData,
        }
    }

    pub(super) async fn _create_index(&self, body: Value) -> (Value, StatusCode) {
        self.service.post("/indexes", body).await
    }

    pub async fn multi_search(&self, queries: Value) -> (Value, StatusCode) {
        self.service.post("/multi-search", queries).await
    }

    pub async fn list_indexes_raw(&self, parameters: &str) -> (Value, StatusCode) {
        self.service.get(format!("/indexes{parameters}")).await
    }

    pub async fn tasks_filter(&self, filter: &str) -> (Value, StatusCode) {
        self.service.get(format!("/tasks?{}", filter)).await
    }

    pub async fn get_dump_status(&self, uid: &str) -> (Value, StatusCode) {
        self.service.get(format!("/dumps/{}/status", uid)).await
    pub async fn version(&self) -> (Value, StatusCode) {
        self.service.get("/version").await
    }

    pub async fn create_dump(&self) -> (Value, StatusCode) {
@@ -214,14 +374,6 @@ impl Server {
    pub async fn get_features(&self) -> (Value, StatusCode) {
        self.service.get("/experimental-features").await
    }

    pub async fn set_features(&self, value: Value) -> (Value, StatusCode) {
        self.service.patch("/experimental-features", value).await
    }

    pub async fn get_metrics(&self) -> (Value, StatusCode) {
        self.service.get("/metrics").await
    }
}

pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
@@ -229,7 +381,6 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
        db_path: dir.as_ref().join("db"),
        dump_dir: dir.as_ref().join("dumps"),
        env: "development".to_owned(),
        #[cfg(feature = "analytics")]
        no_analytics: true,
        max_index_size: Byte::from_u64_with_unit(100, Unit::MiB).unwrap(),
        max_task_db_size: Byte::from_u64_with_unit(1, Unit::GiB).unwrap(),
@@ -239,7 +390,8 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
            // memory has to be unlimited because several meilisearch instances are running in the test context.
            max_indexing_memory: MaxMemory::unlimited(),
            skip_index_budget: true,
            ..Parser::parse_from(None as Option<&str>)
            // Having 2 threads makes the tests way faster
            max_indexing_threads: MaxThreads::from_str("2").unwrap(),
        },
        experimental_enable_metrics: false,
        ..Parser::parse_from(None as Option<&str>)

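// Hedged sketch (assumption): tests needing non-default options start from
// `default_settings` and override fields before booting a dedicated server.
// `Server::new_with_options` is assumed from the `Ok(Server { ... })`
// constructor above and returns a Result because `setup_meilisearch` can fail.
#[actix_rt::test]
async fn boots_with_overridden_options() {
    let dir = TempDir::new().unwrap();
    let mut options = default_settings(dir.path());
    options.experimental_enable_metrics = true;
    let server = Server::new_with_options(options).await.unwrap();
    let (response, code) = server.version().await;
    assert_eq!(code, 200, "{}", response);
}
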
@@ -1,11 +1,17 @@
use std::num::NonZeroUsize;
use std::sync::Arc;

use actix_web::body::MessageBody;
use actix_web::dev::ServiceResponse;
use actix_web::http::header::ContentType;
use actix_web::http::StatusCode;
use actix_web::test;
use actix_web::test::TestRequest;
use actix_web::web::Data;
use index_scheduler::IndexScheduler;
use meilisearch::{analytics, create_app, Opt, SubscriberForSecondLayer};
use meilisearch::analytics::Analytics;
use meilisearch::search_queue::SearchQueue;
use meilisearch::{create_app, Opt, SubscriberForSecondLayer};
use meilisearch_auth::AuthController;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::Layer;
@@ -106,7 +112,13 @@ impl Service {
        self.request(req).await
    }

    pub async fn request(&self, mut req: test::TestRequest) -> (Value, StatusCode) {
    pub async fn init_web_app(
        &self,
    ) -> impl actix_web::dev::Service<
        actix_http::Request,
        Response = ServiceResponse<impl MessageBody>,
        Error = actix_web::Error,
    > {
        let (_route_layer, route_layer_handle) =
            tracing_subscriber::reload::Layer::new(None.with_filter(
                tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),
@@ -119,16 +131,25 @@ impl Service {
                as Box<dyn tracing_subscriber::Layer<SubscriberForSecondLayer> + Send + Sync>)
                .with_filter(tracing_subscriber::filter::Targets::new()),
        );
        let search_queue = SearchQueue::new(
            self.options.experimental_search_queue_size,
            NonZeroUsize::new(1).unwrap(),
        );

        let app = test::init_service(create_app(
        actix_web::test::init_service(create_app(
            self.index_scheduler.clone().into(),
            self.auth.clone().into(),
            Data::new(search_queue),
            self.options.clone(),
            (route_layer_handle, stderr_layer_handle),
            analytics::MockAnalytics::new(&self.options),
            Data::new(Analytics::no_analytics()),
            true,
        ))
        .await;
        .await
    }

    pub async fn request(&self, mut req: test::TestRequest) -> (Value, StatusCode) {
        let app = self.init_web_app().await;

        if let Some(api_key) = &self.api_key {
            req = req.insert_header(("Authorization", ["Bearer ", api_key].concat()));