Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-09 14:15:43 +00:00)

Compare commits: 206 commits, v1.9.0-rc. ... document-d
| SHA1 |
|---|
| deee22b5da |
| fd8c90b858 |
| 4ceade43cd |
| e95e47d258 |
| e18b06ddda |
| b15e8aacb6 |
| 767f20e30d |
| 0d63d02ab2 |
| bf5d9f68fa |
| e9d6b4222b |
| 2f0567fad1 |
| 2099b4f0dd |
| 0d5bc4578e |
| 8f60ad0a23 |
| 9570139eeb |
| 9d6885793e |
| 98cd6a865c |
| 5f4530ce57 |
| 0ecaf861fa |
| 4d5005b01a |
| 952e742321 |
| ee9aa63044 |
| 43db4f4242 |
| 9feba5028d |
| 0a40a98bb6 |
| aac15f6719 |
| 53a359286c |
| 4aa7d386d8 |
| 84fabb9314 |
| cd46ebd6b5 |
| ef8d9a20f8 |
| 6afa578688 |
| 300bdfc2a7 |
| e7e74c0099 |
| 05cc2d1fac |
| 22b9c277d0 |
| 16bde973aa |
| 13d1d78a2d |
| b2b7a633a6 |
| 7be109cafe |
| 6ebefd1067 |
| d25ae36e22 |
| b64b4ab6ca |
| 427861b323 |
| d29cb75061 |
| 128e6c7502 |
| 3129f96603 |
| c701d89fdc |
| 3d9befd64f |
| ee14d5196c |
| d96372b9c4 |
| ea67816a21 |
| c885fcebcc |
| b6e1a1f2f5 |
| 277f4883f6 |
| 015d90a962 |
| 0df84bbba7 |
| e53de15b8e |
| 8c4921b9dd |
| f6a00f4a90 |
| ce08dc509b |
| 1daaed163a |
| 809e742253 |
| decdfe03bc |
| aae5c324d7 |
| a108d8f6f3 |
| 34cf576339 |
| eb292a7a62 |
| e28332a904 |
| a1dcde6b9a |
| 544e98ca99 |
| 1e4699b82c |
| 2c09c324f7 |
| 3d6b61d8d2 |
| 1374b661d1 |
| 7e3c306c54 |
| 2608a596a0 |
| e16edb2c35 |
| 5c758438fc |
| ab6cac2321 |
| 6fb36ed30e |
| dcdc83946f |
| 3c4c46377b |
| 7da21bb601 |
| 13161fd7d0 |
| b81e2951a9 |
| d75e0098c7 |
| 27496354e2 |
| 2e0ff56f3f |
| a74fb87d1e |
| 558b66e535 |
| cade18bd47 |
| 298c7b0c93 |
| 606e108420 |
| 7be17b7e4c |
| 1693332cab |
| ddd564665b |
| 2a38f5c757 |
| 133d33d72c |
| fb683fe88b |
| 4ae11bfd31 |
| 9736e16a88 |
| 6fa4da8ae7 |
| 19d7cdc20d |
| c229200820 |
| bad28cc9e2 |
| 534f696b29 |
| a04041c8f2 |
| b347b66619 |
| e580d6b98f |
| 8ba65e333b |
| 43875e6758 |
| d7844a6e45 |
| e9bf4c43a4 |
| a8a0854421 |
| 0a8f50695e |
| 09d9b63e1c |
| b9b938c902 |
| 6bf07d969e |
| e35ef31738 |
| 3f212a8202 |
| bc547dad6f |
| 3bc8f81abc |
| a89eea233b |
| 34fabed214 |
| fca9fe39b3 |
| f5cf01e7d1 |
| d1dd7e5d09 |
| d18c1f77d7 |
| d0b05ae691 |
| e9bf4eb100 |
| b368105272 |
| e0eff08095 |
| 304a9df52d |
| 39f60abd7d |
| 1991bd03da |
| ee39309aae |
| 0d31be1494 |
| 3493093c4f |
| 7cef2299cf |
| a838f39fce |
| 600e97d9dc |
| d1962b2b0f |
| 8b450b84f8 |
| 7add7d053c |
| 7559dfc814 |
| 6c6c4732a1 |
| 0502b17501 |
| 3976fe660e |
| 50f8218a5d |
| 19585f1a4f |
| 8ec6e175e5 |
| 57d066595b |
| 75b2e02cd2 |
| 40f05fe156 |
| 734d1c53ad |
| 52d0d35b39 |
| 5432776132 |
| 66470b27e6 |
| 0a9bd398c7 |
| 7967e93c16 |
| a6f3a01c6a |
| 4ca4a3f954 |
| e4a69c5ac3 |
| ff2e498267 |
| 531e3d7d6a |
| 63dded3961 |
| 2cdcb703d9 |
| 6607875f49 |
| ea61e5cbec |
| 31a793d226 |
| d85ab23b82 |
| b7349910d9 |
| 49fa41ce65 |
| 400cf3eb92 |
| 376b3a19a7 |
| d92c173fdc |
| b867829ef1 |
| 6b29676e7e |
| caad40964a |
| cc5dca8321 |
| 5d50850e12 |
| a73ccc78a6 |
| 9eb6f522ea |
| 04f6523f3c |
| 30d66abf8d |
| 84e498299b |
| 7a84697570 |
| 4148fbbe85 |
| 93f5defedc |
| 33241a6b12 |
| ff87b4db26 |
| ba9fadc8f1 |
| d29d4f88da |
| 17c5ceeb9d |
| c32d746069 |
| b9a0ff0dd6 |
| 75496af985 |
| 0e9eb9eedb |
| 3a78e988da |
| d9e5074189 |
| bc210bdc00 |
| 4bf83f701c |
| db3887929f |
| 9af103a88e |
| 99211eb375 |
.github/workflows/bench-manual.yml | 4 (vendored)

```diff
@@ -18,11 +18,9 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       - name: Run benchmarks - workload ${WORKLOAD_NAME} - branch ${{ github.ref }} - commit ${{ github.sha }}
         run: |
```
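The same toolchain swap repeats in every workflow hunk in this comparison, so it is worth seeing the pattern once in full. A composite sketch assembled from these hunks (the exact `with:` keys vary slightly per file; this shows the common case where `profile: minimal` survives):

```yaml
steps:
  - uses: actions/checkout@v3
  # The unmaintained actions-rs/toolchain@v1 is swapped for helix-editor/rust-toolchain@v1.
  # `toolchain: stable` and `override: true` are dropped in the new step, presumably
  # because the replacement action no longer needs them to select a stable toolchain.
  - uses: helix-editor/rust-toolchain@v1
    with:
      profile: minimal
```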
.github/workflows/bench-pr.yml | 4 (vendored)

```diff
@@ -35,11 +35,9 @@ jobs:
           fetch-depth: 0 # fetch full history to be able to get main commit sha
           ref: ${{ steps.comment-branch.outputs.head_ref }}
 
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       - name: Run benchmarks on PR ${{ github.event.issue.id }}
         run: |
```
.github/workflows/bench-push-indexing.yml | 4 (vendored)

```diff
@@ -12,11 +12,9 @@ jobs:
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       # Run benchmarks
       - name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
```
.github/workflows/benchmarks-manual.yml | 4 (vendored)

```diff
@@ -18,11 +18,9 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
      - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       # Set variables
       - name: Set current branch name
```
.github/workflows/benchmarks-pr.yml | 4 (vendored)

```diff
@@ -13,11 +13,9 @@ jobs:
     runs-on: benchmarks
     timeout-minutes: 4320 # 72h
     steps:
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       - name: Check for Command
         id: command
```

```diff
@@ -16,11 +16,9 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       # Set variables
       - name: Set current branch name
```

```diff
@@ -15,11 +15,9 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       # Set variables
       - name: Set current branch name
```

```diff
@@ -15,11 +15,9 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       # Set variables
       - name: Set current branch name
```

```diff
@@ -15,11 +15,9 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       # Set variables
       - name: Set current branch name
```
.github/workflows/flaky-tests.yml | 7 (vendored)

```diff
@@ -1,4 +1,6 @@
 name: Look for flaky tests
+env:
+  ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
 on:
   workflow_dispatch:
   schedule:
@@ -16,10 +18,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
+      - uses: helix-editor/rust-toolchain@v1
       - name: Install cargo-flaky
         run: cargo install cargo-flaky
       - name: Run cargo flaky in the dumps
```
.github/workflows/fuzzer-indexing.yml | 7 (vendored)

```diff
@@ -1,5 +1,6 @@
 name: Run the indexing fuzzer
-
+env:
+  ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
 on:
   push:
     branches:
@@ -12,11 +13,9 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
 
       # Run benchmarks
       - name: Run the fuzzer
```
.github/workflows/publish-apt-brew-pkg.yml | 7 (vendored)

```diff
@@ -15,6 +15,8 @@ jobs:
 
   debian:
     name: Publish debian packagge
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
     runs-on: ubuntu-latest
     needs: check-version
     container:
@@ -25,10 +27,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
+      - uses: helix-editor/rust-toolchain@v1
       - name: Install cargo-deb
         run: cargo install cargo-deb
       - uses: actions/checkout@v3
```
.github/workflows/publish-binaries.yml | 22 (vendored)

```diff
@@ -35,6 +35,8 @@ jobs:
   publish-linux:
     name: Publish binary for Linux
     runs-on: ubuntu-latest
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
     needs: check-version
     container:
       # Use ubuntu-18.04 to compile with glibc 2.27
@@ -45,10 +47,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
+      - uses: helix-editor/rust-toolchain@v1
       - name: Build
         run: cargo build --release --locked
       # No need to upload binaries for dry run (cron)
@@ -78,10 +77,7 @@ jobs:
             asset_name: meilisearch-windows-amd64.exe
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
+      - uses: helix-editor/rust-toolchain@v1
       - name: Build
         run: cargo build --release --locked
       # No need to upload binaries for dry run (cron)
@@ -107,12 +103,10 @@ jobs:
       - name: Checkout repository
        uses: actions/checkout@v3
       - name: Installing Rust toolchain
-        uses: actions-rs/toolchain@v1
+        uses: helix-editor/rust-toolchain@v1
         with:
-          toolchain: stable
           profile: minimal
           target: ${{ matrix.target }}
-          override: true
       - name: Cargo build
         uses: actions-rs/cargo@v1
         with:
@@ -132,6 +126,8 @@ jobs:
     name: Publish binary for aarch64
     runs-on: ubuntu-latest
     needs: check-version
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
     container:
       # Use ubuntu-18.04 to compile with glibc 2.27
       image: ubuntu:18.04
@@ -154,12 +150,10 @@ jobs:
           add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
           apt-get update -y && apt-get install -y docker-ce
       - name: Installing Rust toolchain
-        uses: actions-rs/toolchain@v1
+        uses: helix-editor/rust-toolchain@v1
         with:
-          toolchain: stable
           profile: minimal
           target: ${{ matrix.target }}
-          override: true
       - name: Configure target aarch64 GNU
         ## Environment variable is not passed using env:
         ## LD gold won't work with MUSL
```
.github/workflows/publish-docker-images.yml | 3 (vendored)

```diff
@@ -80,10 +80,11 @@ jobs:
             type=ref,event=tag
             type=raw,value=nightly,enable=${{ github.event_name != 'push' }}
             type=semver,pattern=v{{major}}.{{minor}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
+            type=semver,pattern=v{{major}},enable=${{ steps.check-tag-format.outputs.stable == 'true' }}
             type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }}
 
       - name: Build and push
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v6
         with:
           push: true
           platforms: linux/amd64,linux/arm64
```
.github/workflows/test-suite.yml | 43 (vendored)

```diff
@@ -21,6 +21,8 @@ jobs:
   test-linux:
     name: Tests on ubuntu-18.04
     runs-on: ubuntu-latest
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
     container:
       # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
       image: ubuntu:18.04
@@ -31,10 +33,7 @@ jobs:
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
       - name: Setup test with Rust stable
-        uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
+        uses: helix-editor/rust-toolchain@v1
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.1
       - name: Run cargo check without any default features
@@ -59,10 +58,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.1
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
+      - uses: helix-editor/rust-toolchain@v1
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -77,6 +73,8 @@ jobs:
   test-all-features:
     name: Tests almost all features
     runs-on: ubuntu-latest
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
     container:
       # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
       image: ubuntu:18.04
@@ -87,10 +85,7 @@ jobs:
         run: |
           apt-get update
           apt-get install --assume-yes build-essential curl
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
+      - uses: helix-editor/rust-toolchain@v1
       - name: Run cargo build with almost all features
         run: |
           cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda)"
@@ -100,6 +95,8 @@ jobs:
 
   test-disabled-tokenization:
     name: Test disabled tokenization
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
     runs-on: ubuntu-latest
     container:
       image: ubuntu:18.04
@@ -110,13 +107,10 @@ jobs:
         run: |
           apt-get update
           apt-get install --assume-yes build-essential curl
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
+      - uses: helix-editor/rust-toolchain@v1
       - name: Run cargo tree without default features and check lindera is not present
         run: |
-          if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -vqz lindera; then
+          if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then
             echo "lindera has been found in the sources and it shouldn't"
             exit 1
           fi
@@ -127,6 +121,8 @@ jobs:
   # We run tests in debug also, to make sure that the debug_assertions are hit
   test-debug:
     name: Run tests in debug
+    env:
+      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
     runs-on: ubuntu-latest
     container:
       # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
@@ -137,10 +133,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
+      - uses: helix-editor/rust-toolchain@v1
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.1
       - name: Run tests in debug
@@ -154,11 +147,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.75.0
-          override: true
           components: clippy
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.1
@@ -173,10 +164,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: nightly
+          toolchain: nightly-2024-07-09
           override: true
           components: rustfmt
       - name: Cache dependencies
```
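The `cargo tree` guard in the test-disabled-tokenization hunk above is the one behavioral fix in this file: the old `grep -vqz` inverted the match, so the failure branch ran precisely when lindera was absent. A standalone sketch of the corrected logic, assuming it runs at the root of a Rust workspace (plain `grep -q` used here for simplicity):

```sh
# Fail if the lindera crate appears in the normal dependency tree
# when default features are disabled.
if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -q lindera; then
  echo "lindera has been found in the sources and it shouldn't"
  exit 1
fi
```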
```diff
@@ -18,11 +18,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
+      - uses: helix-editor/rust-toolchain@v1
         with:
           profile: minimal
-          toolchain: stable
-          override: true
       - name: Install sd
         run: cargo install sd
       - name: Update Cargo.toml file
```
```diff
@@ -109,6 +109,12 @@ They are JSON files with the following structure (comments are not actually supported):
     "run_count": 3,
     // List of arguments to add to the Meilisearch command line.
     "extra_cli_args": ["--max-indexing-threads=1"],
+    // An expression that can be parsed as a comma-separated list of targets and levels
+    // as described in [tracing_subscriber's documentation](https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/targets/struct.Targets.html#examples).
+    // The expression is used to filter the spans that are measured for profiling purposes.
+    // Optional, defaults to "indexing::=trace" (for indexing workloads), common other values is
+    // "search::=trace"
+    "target": "indexing::=trace",
     // List of named assets that can be used in the commands.
     "assets": {
         // name of the asset.
```
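The `target` field added above drives which spans get measured during a bench run. A minimal sketch of a workload file using only the fields visible in this hunk (real workload files also carry a name, asset definitions, and a command list that are not shown here):

```json
{
    "run_count": 3,
    "extra_cli_args": ["--max-indexing-threads=1"],
    "target": "search::=trace",
    "assets": {}
}
```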
````diff
@@ -52,6 +52,16 @@ cargo test
 
 This command will be triggered to each PR as a requirement for merging it.
 
+#### Faster build
+
+You can set the `LINDERA_CACHE` environment variable to speed up your successive builds by up to 2 minutes.
+It'll store some built artifacts in the directory of your choice.
+
+We recommend using the standard `$HOME/.cache/lindera` directory:
+```sh
+export LINDERA_CACHE=$HOME/.cache/lindera
+```
+
 #### Snapshot-based tests
 
 We are using [insta](https://insta.rs) to perform snapshot-based testing.
@@ -63,7 +73,7 @@ Furthermore, we provide some macros on top of insta, notably a way to use snapsh
 
 To effectively debug snapshot-based hashes, we recommend you export the `MEILI_TEST_FULL_SNAPS` environment variable so that snapshot are fully created locally:
 
-```
+```sh
 export MEILI_TEST_FULL_SNAPS=true # add this to your .bashrc, .zshrc, ...
 ```
````
Cargo.lock | 1579 (generated)

File diff suppressed because it is too large.
```diff
@@ -1,7 +1,7 @@
 # Compile
-FROM rust:1.75.0-alpine3.18 AS compiler
+FROM rust:1.79.0-alpine3.20 AS compiler
 
-RUN apk add -q --update-cache --no-cache build-base openssl-dev
+RUN apk add -q --no-cache build-base openssl-dev
 
 WORKDIR /
 
@@ -20,13 +20,12 @@ RUN set -eux; \
     cargo build --release -p meilisearch -p meilitool
 
 # Run
-FROM alpine:3.16
+FROM alpine:3.20
 
 ENV MEILI_HTTP_ADDR 0.0.0.0:7700
 ENV MEILI_SERVER_PROVIDER docker
 
-RUN apk update --quiet \
-	&& apk add -q --no-cache libgcc tini curl
+RUN apk add -q --no-cache libgcc tini curl
 
 # add meilisearch and meilitool to the `/bin` so you can run it from anywhere
 # and it's easy to find.
```
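The two-stage build above compiles in a Rust Alpine image and ships only the binaries on a bare `alpine:3.20` base. A hypothetical local smoke test of the resulting image (the image tag is chosen here for illustration):

```sh
# Build the image from the repository root, then run it;
# the Dockerfile sets MEILI_HTTP_ADDR to 0.0.0.0:7700.
docker build -t meilisearch:local .
docker run --rm -p 7700:7700 meilisearch:local
```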
README.md | 30

```diff
@@ -1,9 +1,6 @@
 <p align="center">
-  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=logo#gh-light-mode-only" target="_blank">
-    <img src="assets/meilisearch-logo-light.svg?sanitize=true#gh-light-mode-only">
-  </a>
-  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=logo#gh-dark-mode-only" target="_blank">
-    <img src="assets/meilisearch-logo-dark.svg?sanitize=true#gh-dark-mode-only">
+  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=logo" target="_blank">
+    <img src="assets/meilisearch-logo-kawaii.png">
   </a>
 </p>
 
@@ -25,7 +22,7 @@
 
 <p align="center">⚡ A lightning-fast search engine that fits effortlessly into your apps, websites, and workflow 🔍</p>
 
-[Meilisearch](https://www.meilisearch.com) helps you shape a delightful search experience in a snap, offering features that work out of the box to speed up your workflow.
+[Meilisearch](https://www.meilisearch.com?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=intro) helps you shape a delightful search experience in a snap, offering features that work out of the box to speed up your workflow.
 
 <p align="center" name="demo">
   <a href="https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-gif#gh-light-mode-only" target="_blank">
@@ -36,11 +33,18 @@
   </a>
 </p>
 
-🔥 [**Try it!**](https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-link) 🔥
+## 🖥 Examples
 
+- [**Movies**](https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=organization) — An application to help you find streaming platforms to watch movies using [hybrid search](https://www.meilisearch.com/solutions/hybrid-search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos).
+- [**Ecommerce**](https://ecommerce.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Ecommerce website using disjunctive [facets](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos), range and rating filtering, and pagination.
+- [**Songs**](https://music.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Search through 47 million of songs.
+- [**SaaS**](https://saas.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Search for contacts, deals, and companies in this [multi-tenant](https://www.meilisearch.com/docs/learn/security/multitenancy_tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) CRM application.
+
+See the list of all our example apps in our [demos repository](https://github.com/meilisearch/demos).
+
 ## ✨ Features
-- **Hybrid search:** Combine the best of both [semantic](https://www.meilisearch.com/docs/learn/experimental/vector_search) & full-text search to get the most relevant results
-- **Search-as-you-type:** find & display results in less than 50 milliseconds to provide an intuitive experience
+- **Hybrid search:** Combine the best of both [semantic](https://www.meilisearch.com/docs/learn/experimental/vector_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) & full-text search to get the most relevant results
+- **Search-as-you-type:** Find & display results in less than 50 milliseconds to provide an intuitive experience
 - **[Typo tolerance](https://www.meilisearch.com/docs/learn/configuration/typo_tolerance?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** get relevant matches even when queries contain typos and misspellings
 - **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** enhance your users' search experience with custom filters and build a faceted search interface in a few lines of code
 - **[Sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** sort results based on price, date, or pretty much anything else your users need
@@ -59,7 +63,7 @@ You can consult Meilisearch's documentation at [meilisearch.com/docs](https://ww
 
 ## 🚀 Getting started
 
-For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://www.meilisearch.com/docs/learn/getting_started/quick_start?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) guide.
+For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [documentation](https://www.meilisearch.com/docs?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) guide.
 
 ## 🌍 Supercharge your Meilisearch experience
 
@@ -83,7 +87,7 @@ Finally, for more in-depth information, refer to our articles explaining fundame
 
 ## 📊 Telemetry
 
-Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) whenever you want.
+Meilisearch collects **anonymized** user data to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) whenever you want.
 
 To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Remember to include your `Instance UID` in the message, as this helps us quickly find and delete your data.
 
@@ -105,11 +109,11 @@ Thank you for your support!
 
 ## 👩‍💻 Contributing
 
-Meilisearch is, and will always be, open-source! If you want to contribute to the project, please take a look at [our contribution guidelines](CONTRIBUTING.md).
+Meilisearch is, and will always be, open-source! If you want to contribute to the project, please look at [our contribution guidelines](CONTRIBUTING.md).
 
 ## 📦 Versioning
 
-Meilisearch releases and their associated binaries are available [in this GitHub page](https://github.com/meilisearch/meilisearch/releases).
+Meilisearch releases and their associated binaries are available on the project's [releases page](https://github.com/meilisearch/meilisearch/releases).
 
 The binaries are versioned following [SemVer conventions](https://semver.org/). To know more, read our [versioning policy](https://github.com/meilisearch/engine-team/blob/main/resources/versioning-policy.md).
 
```
assets/meilisearch-logo-kawaii.png | BIN (new file, 98 KiB; binary not shown)
```diff
@@ -11,24 +11,24 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.79"
+anyhow = "1.0.86"
 csv = "1.3.0"
 milli = { path = "../milli" }
-mimalloc = { version = "0.1.39", default-features = false }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
+mimalloc = { version = "0.1.43", default-features = false }
+serde_json = { version = "1.0.120", features = ["preserve_order"] }
 
 [dev-dependencies]
 criterion = { version = "0.5.1", features = ["html_reports"] }
 rand = "0.8.5"
 rand_chacha = "0.3.1"
-roaring = "0.10.2"
+roaring = "0.10.6"
 
 [build-dependencies]
-anyhow = "1.0.79"
-bytes = "1.5.0"
+anyhow = "1.0.86"
+bytes = "1.6.0"
 convert_case = "0.6.0"
-flate2 = "1.0.28"
-reqwest = { version = "0.11.23", features = ["blocking", "rustls-tls"], default-features = false }
+flate2 = "1.0.30"
+reqwest = { version = "0.12.5", features = ["blocking", "rustls-tls"], default-features = false }
 
 [features]
 default = ["milli/all-tokenizations"]
```
```diff
@@ -11,8 +11,8 @@ license.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-time = { version = "0.3.34", features = ["parsing"] }
+time = { version = "0.3.36", features = ["parsing"] }
 
 [build-dependencies]
-anyhow = "1.0.80"
-vergen-git2 = "1.0.0-beta.2"
+anyhow = "1.0.86"
+vergen-git2 = "1.0.0"
```
```diff
@@ -11,22 +11,21 @@ readme.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.79"
-flate2 = "1.0.28"
-http = "0.2.11"
-meilisearch-auth = { path = "../meilisearch-auth" }
+anyhow = "1.0.86"
+flate2 = "1.0.30"
+http = "1.1.0"
 meilisearch-types = { path = "../meilisearch-types" }
 once_cell = "1.19.0"
-regex = "1.10.2"
-roaring = { version = "0.10.2", features = ["serde"] }
-serde = { version = "1.0.195", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
-tar = "0.4.40"
-tempfile = "3.9.0"
-thiserror = "1.0.56"
-time = { version = "0.3.31", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+regex = "1.10.5"
+roaring = { version = "0.10.6", features = ["serde"] }
+serde = { version = "1.0.204", features = ["derive"] }
+serde_json = { version = "1.0.120", features = ["preserve_order"] }
+tar = "0.4.41"
+tempfile = "3.10.1"
+thiserror = "1.0.61"
+time = { version = "0.3.36", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tracing = "0.1.40"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+uuid = { version = "1.10.0", features = ["serde", "v4"] }
 
 [dev-dependencies]
 big_s = "1.0.2"
```
```diff
@@ -425,7 +425,7 @@ pub(crate) mod test {
         let mut dump = v2::V2Reader::open(dir).unwrap().to_v3();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
 
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
```
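This `assert_display_snapshot!` to `assert_snapshot!` rename, repeated across the test hunks that follow, tracks the insta bump from 1.34 to 1.39 visible further down: newer insta deprecates the `Display`-specific macro because `assert_snapshot!` accepts any `Display` value directly. A minimal hypothetical example of the surviving form (not taken from the repo):

```rust
#[test]
fn inline_display_snapshot() {
    // assert_snapshot! renders the expression via Display and compares
    // it to the inline snapshot after the `@`.
    insta::assert_snapshot!(1 + 1, @"2");
}
```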
```diff
@@ -358,7 +358,7 @@ pub(crate) mod test {
         let mut dump = v3::V3Reader::open(dir).unwrap().to_v4();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
 
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
```
```diff
@@ -394,8 +394,8 @@ pub(crate) mod test {
         let mut dump = v4::V4Reader::open(dir).unwrap().to_v5();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
-        insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
+        insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
```
```diff
@@ -442,8 +442,8 @@ pub(crate) mod test {
         let mut dump = v5::V5Reader::open(dir).unwrap().to_v6();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
-        insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
+        insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
```
```diff
@@ -216,7 +216,7 @@ pub(crate) mod test {
         let mut dump = DumpReader::open(dump).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2024-05-16 15:51:34.151044 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2024-05-16 15:51:34.151044 +00:00:00");
         insta::assert_debug_snapshot!(dump.instance_uid().unwrap(), @"None");
 
         // tasks
@@ -337,7 +337,7 @@ pub(crate) mod test {
         let mut dump = DumpReader::open(dump).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-07-06 7:10:27.21958 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2023-07-06 7:10:27.21958 +00:00:00");
         insta::assert_debug_snapshot!(dump.instance_uid().unwrap(), @"None");
 
         // tasks
@@ -383,8 +383,8 @@ pub(crate) mod test {
         let mut dump = DumpReader::open(dump).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
-        insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
+        insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
@@ -463,8 +463,8 @@ pub(crate) mod test {
         let mut dump = DumpReader::open(dump).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
-        insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
+        insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
@@ -540,7 +540,7 @@ pub(crate) mod test {
         let mut dump = DumpReader::open(dump).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
         assert_eq!(dump.instance_uid().unwrap(), None);
 
         // tasks
@@ -633,7 +633,7 @@ pub(crate) mod test {
         let mut dump = DumpReader::open(dump).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
         assert_eq!(dump.instance_uid().unwrap(), None);
 
         // tasks
@@ -726,7 +726,7 @@ pub(crate) mod test {
         let mut dump = DumpReader::open(dump).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
         assert_eq!(dump.instance_uid().unwrap(), None);
 
         // tasks
```
```diff
@@ -780,7 +780,7 @@ expression: document
             1.3484878540039063
           ]
         ],
-        "userProvided": false
+        "regenerate": true
       }
     }
   }
```

```diff
@@ -779,7 +779,7 @@ expression: document
             1.04031240940094
           ]
         ],
-        "userProvided": false
+        "regenerate": true
       }
     }
   }
```
```diff
@@ -252,7 +252,7 @@ pub(crate) mod test {
         let mut dump = V2Reader::open(dir).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-09 20:27:59.904096267 +00:00:00");
 
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
@@ -349,7 +349,7 @@ pub(crate) mod test {
         let mut dump = V2Reader::open(dir).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
 
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
```

```diff
@@ -267,7 +267,7 @@ pub(crate) mod test {
         let mut dump = V3Reader::open(dir).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-07 11:39:03.709153554 +00:00:00");
 
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
```
```diff
@@ -152,6 +152,7 @@ impl Settings<Unchecked> {
 }
 
 #[derive(Debug, Clone, Deserialize)]
+#[allow(dead_code)] // otherwise rustc complains that the fields go unused
 #[cfg_attr(test, derive(serde::Serialize))]
 #[serde(deny_unknown_fields)]
 #[serde(rename_all = "camelCase")]
```

```diff
@@ -262,8 +262,8 @@ pub(crate) mod test {
         let mut dump = V4Reader::open(dir).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
-        insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-06 12:53:49.131989609 +00:00:00");
+        insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
```

```diff
@@ -182,6 +182,7 @@ impl Settings<Unchecked> {
     }
 }
 
+#[allow(dead_code)] // otherwise rustc complains that the fields go unused
 #[derive(Debug, Clone, Deserialize)]
 #[cfg_attr(test, derive(serde::Serialize))]
 #[serde(deny_unknown_fields)]
```
```diff
@@ -299,8 +299,8 @@ pub(crate) mod test {
         let mut dump = V5Reader::open(dir).unwrap();
 
         // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
-        insta::assert_display_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
+        insta::assert_snapshot!(dump.date().unwrap(), @"2022-10-04 15:55:10.344982459 +00:00:00");
+        insta::assert_snapshot!(dump.instance_uid().unwrap().unwrap(), @"9e15e977-f2ae-4761-943f-1eaf75fd736d");
 
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
```

```diff
@@ -200,6 +200,7 @@ impl std::ops::Deref for IndexUid {
     }
 }
 
+#[allow(dead_code)] // otherwise rustc complains that the fields go unused
 #[derive(Debug)]
 #[cfg_attr(test, derive(serde::Serialize))]
 #[cfg_attr(test, serde(rename_all = "camelCase"))]
```

```diff
@@ -281,7 +281,7 @@ pub(crate) mod test {
         let dump_path = dump.path();
 
         // ==== checking global file hierarchy (we want to be sure there isn't too many files or too few)
-        insta::assert_display_snapshot!(create_directory_hierarchy(dump_path), @r###"
+        insta::assert_snapshot!(create_directory_hierarchy(dump_path), @r###"
         .
         ├---- indexes/
         │ └---- doggos/
```
Binary file not shown.
```diff
@@ -11,10 +11,7 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-tempfile = "3.9.0"
-thiserror = "1.0.56"
+tempfile = "3.10.1"
+thiserror = "1.0.61"
 tracing = "0.1.40"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+uuid = { version = "1.10.0", features = ["serde", "v4"] }
-
-[dev-dependencies]
-faux = "0.1.10"
```
@@ -14,7 +14,7 @@ license.workspace = true
 [dependencies]
 nom = "7.1.3"
 nom_locate = "4.2.0"
-unescaper = "0.1.3"
+unescaper = "0.1.5"

 [dev-dependencies]
-insta = "1.34.0"
+insta = "1.39.0"
@@ -564,121 +564,121 @@ pub mod tests {

     #[test]
     fn parse_escaped() {
-        insta::assert_display_snapshot!(p(r"title = 'foo\\'"), @r#"{title} = {foo\}"#);
-        insta::assert_display_snapshot!(p(r"title = 'foo\\\\'"), @r#"{title} = {foo\\}"#);
-        insta::assert_display_snapshot!(p(r"title = 'foo\\\\\\'"), @r#"{title} = {foo\\\}"#);
-        insta::assert_display_snapshot!(p(r"title = 'foo\\\\\\\\'"), @r#"{title} = {foo\\\\}"#);
+        insta::assert_snapshot!(p(r"title = 'foo\\'"), @r#"{title} = {foo\}"#);
+        insta::assert_snapshot!(p(r"title = 'foo\\\\'"), @r#"{title} = {foo\\}"#);
+        insta::assert_snapshot!(p(r"title = 'foo\\\\\\'"), @r#"{title} = {foo\\\}"#);
+        insta::assert_snapshot!(p(r"title = 'foo\\\\\\\\'"), @r#"{title} = {foo\\\\}"#);
         // but it also works with other sequences
-        insta::assert_display_snapshot!(p(r#"title = 'foo\x20\n\t\"\'"'"#), @"{title} = {foo \n\t\"\'\"}");
+        insta::assert_snapshot!(p(r#"title = 'foo\x20\n\t\"\'"'"#), @"{title} = {foo \n\t\"\'\"}");
     }

     #[test]
     fn parse() {
         // Test equal
-        insta::assert_display_snapshot!(p("channel = Ponce"), @"{channel} = {Ponce}");
-        insta::assert_display_snapshot!(p("subscribers = 12"), @"{subscribers} = {12}");
-        insta::assert_display_snapshot!(p("channel = 'Mister Mv'"), @"{channel} = {Mister Mv}");
-        insta::assert_display_snapshot!(p("channel = \"Mister Mv\""), @"{channel} = {Mister Mv}");
-        insta::assert_display_snapshot!(p("'dog race' = Borzoi"), @"{dog race} = {Borzoi}");
-        insta::assert_display_snapshot!(p("\"dog race\" = Chusky"), @"{dog race} = {Chusky}");
-        insta::assert_display_snapshot!(p("\"dog race\" = \"Bernese Mountain\""), @"{dog race} = {Bernese Mountain}");
-        insta::assert_display_snapshot!(p("'dog race' = 'Bernese Mountain'"), @"{dog race} = {Bernese Mountain}");
-        insta::assert_display_snapshot!(p("\"dog race\" = 'Bernese Mountain'"), @"{dog race} = {Bernese Mountain}");
+        insta::assert_snapshot!(p("channel = Ponce"), @"{channel} = {Ponce}");
+        insta::assert_snapshot!(p("subscribers = 12"), @"{subscribers} = {12}");
+        insta::assert_snapshot!(p("channel = 'Mister Mv'"), @"{channel} = {Mister Mv}");
+        insta::assert_snapshot!(p("channel = \"Mister Mv\""), @"{channel} = {Mister Mv}");
+        insta::assert_snapshot!(p("'dog race' = Borzoi"), @"{dog race} = {Borzoi}");
+        insta::assert_snapshot!(p("\"dog race\" = Chusky"), @"{dog race} = {Chusky}");
+        insta::assert_snapshot!(p("\"dog race\" = \"Bernese Mountain\""), @"{dog race} = {Bernese Mountain}");
+        insta::assert_snapshot!(p("'dog race' = 'Bernese Mountain'"), @"{dog race} = {Bernese Mountain}");
+        insta::assert_snapshot!(p("\"dog race\" = 'Bernese Mountain'"), @"{dog race} = {Bernese Mountain}");

         // Test IN
-        insta::assert_display_snapshot!(p("colour IN[]"), @"{colour} IN[]");
-        insta::assert_display_snapshot!(p("colour IN[green]"), @"{colour} IN[{green}, ]");
-        insta::assert_display_snapshot!(p("colour IN[green,]"), @"{colour} IN[{green}, ]");
-        insta::assert_display_snapshot!(p("colour NOT IN[green,blue]"), @"NOT ({colour} IN[{green}, {blue}, ])");
-        insta::assert_display_snapshot!(p(" colour IN [ green , blue , ]"), @"{colour} IN[{green}, {blue}, ]");
+        insta::assert_snapshot!(p("colour IN[]"), @"{colour} IN[]");
+        insta::assert_snapshot!(p("colour IN[green]"), @"{colour} IN[{green}, ]");
+        insta::assert_snapshot!(p("colour IN[green,]"), @"{colour} IN[{green}, ]");
+        insta::assert_snapshot!(p("colour NOT IN[green,blue]"), @"NOT ({colour} IN[{green}, {blue}, ])");
+        insta::assert_snapshot!(p(" colour IN [ green , blue , ]"), @"{colour} IN[{green}, {blue}, ]");

         // Test IN + OR/AND/()
-        insta::assert_display_snapshot!(p(" colour IN [green, blue] AND color = green "), @"AND[{colour} IN[{green}, {blue}, ], {color} = {green}, ]");
-        insta::assert_display_snapshot!(p("NOT (colour IN [green, blue]) AND color = green "), @"AND[NOT ({colour} IN[{green}, {blue}, ]), {color} = {green}, ]");
-        insta::assert_display_snapshot!(p("x = 1 OR NOT (colour IN [green, blue] OR color = green) "), @"OR[{x} = {1}, NOT (OR[{colour} IN[{green}, {blue}, ], {color} = {green}, ]), ]");
+        insta::assert_snapshot!(p(" colour IN [green, blue] AND color = green "), @"AND[{colour} IN[{green}, {blue}, ], {color} = {green}, ]");
+        insta::assert_snapshot!(p("NOT (colour IN [green, blue]) AND color = green "), @"AND[NOT ({colour} IN[{green}, {blue}, ]), {color} = {green}, ]");
+        insta::assert_snapshot!(p("x = 1 OR NOT (colour IN [green, blue] OR color = green) "), @"OR[{x} = {1}, NOT (OR[{colour} IN[{green}, {blue}, ], {color} = {green}, ]), ]");

         // Test whitespace start/end
-        insta::assert_display_snapshot!(p(" colour = green "), @"{colour} = {green}");
-        insta::assert_display_snapshot!(p(" (colour = green OR colour = red) "), @"OR[{colour} = {green}, {colour} = {red}, ]");
-        insta::assert_display_snapshot!(p(" colour IN [green, blue] AND color = green "), @"AND[{colour} IN[{green}, {blue}, ], {color} = {green}, ]");
-        insta::assert_display_snapshot!(p(" colour NOT IN [green, blue] "), @"NOT ({colour} IN[{green}, {blue}, ])");
-        insta::assert_display_snapshot!(p(" colour IN [green, blue] "), @"{colour} IN[{green}, {blue}, ]");
+        insta::assert_snapshot!(p(" colour = green "), @"{colour} = {green}");
+        insta::assert_snapshot!(p(" (colour = green OR colour = red) "), @"OR[{colour} = {green}, {colour} = {red}, ]");
+        insta::assert_snapshot!(p(" colour IN [green, blue] AND color = green "), @"AND[{colour} IN[{green}, {blue}, ], {color} = {green}, ]");
+        insta::assert_snapshot!(p(" colour NOT IN [green, blue] "), @"NOT ({colour} IN[{green}, {blue}, ])");
+        insta::assert_snapshot!(p(" colour IN [green, blue] "), @"{colour} IN[{green}, {blue}, ]");

         // Test conditions
-        insta::assert_display_snapshot!(p("channel != ponce"), @"{channel} != {ponce}");
-        insta::assert_display_snapshot!(p("NOT channel = ponce"), @"NOT ({channel} = {ponce})");
-        insta::assert_display_snapshot!(p("subscribers < 1000"), @"{subscribers} < {1000}");
-        insta::assert_display_snapshot!(p("subscribers > 1000"), @"{subscribers} > {1000}");
-        insta::assert_display_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}");
-        insta::assert_display_snapshot!(p("subscribers >= 1000"), @"{subscribers} >= {1000}");
-        insta::assert_display_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}");
-        insta::assert_display_snapshot!(p("subscribers 100 TO 1000"), @"{subscribers} {100} TO {1000}");
+        insta::assert_snapshot!(p("channel != ponce"), @"{channel} != {ponce}");
+        insta::assert_snapshot!(p("NOT channel = ponce"), @"NOT ({channel} = {ponce})");
+        insta::assert_snapshot!(p("subscribers < 1000"), @"{subscribers} < {1000}");
+        insta::assert_snapshot!(p("subscribers > 1000"), @"{subscribers} > {1000}");
+        insta::assert_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}");
+        insta::assert_snapshot!(p("subscribers >= 1000"), @"{subscribers} >= {1000}");
+        insta::assert_snapshot!(p("subscribers <= 1000"), @"{subscribers} <= {1000}");
+        insta::assert_snapshot!(p("subscribers 100 TO 1000"), @"{subscribers} {100} TO {1000}");

         // Test NOT
-        insta::assert_display_snapshot!(p("NOT subscribers < 1000"), @"NOT ({subscribers} < {1000})");
-        insta::assert_display_snapshot!(p("NOT subscribers 100 TO 1000"), @"NOT ({subscribers} {100} TO {1000})");
+        insta::assert_snapshot!(p("NOT subscribers < 1000"), @"NOT ({subscribers} < {1000})");
+        insta::assert_snapshot!(p("NOT subscribers 100 TO 1000"), @"NOT ({subscribers} {100} TO {1000})");

         // Test NULL + NOT NULL
-        insta::assert_display_snapshot!(p("subscribers IS NULL"), @"{subscribers} IS NULL");
-        insta::assert_display_snapshot!(p("NOT subscribers IS NULL"), @"NOT ({subscribers} IS NULL)");
-        insta::assert_display_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");
-        insta::assert_display_snapshot!(p("NOT subscribers IS NOT NULL"), @"{subscribers} IS NULL");
-        insta::assert_display_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");
+        insta::assert_snapshot!(p("subscribers IS NULL"), @"{subscribers} IS NULL");
+        insta::assert_snapshot!(p("NOT subscribers IS NULL"), @"NOT ({subscribers} IS NULL)");
+        insta::assert_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");
+        insta::assert_snapshot!(p("NOT subscribers IS NOT NULL"), @"{subscribers} IS NULL");
+        insta::assert_snapshot!(p("subscribers IS NOT NULL"), @"NOT ({subscribers} IS NULL)");

         // Test EMPTY + NOT EMPTY
-        insta::assert_display_snapshot!(p("subscribers IS EMPTY"), @"{subscribers} IS EMPTY");
-        insta::assert_display_snapshot!(p("NOT subscribers IS EMPTY"), @"NOT ({subscribers} IS EMPTY)");
-        insta::assert_display_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");
-        insta::assert_display_snapshot!(p("NOT subscribers IS NOT EMPTY"), @"{subscribers} IS EMPTY");
-        insta::assert_display_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");
+        insta::assert_snapshot!(p("subscribers IS EMPTY"), @"{subscribers} IS EMPTY");
+        insta::assert_snapshot!(p("NOT subscribers IS EMPTY"), @"NOT ({subscribers} IS EMPTY)");
+        insta::assert_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");
+        insta::assert_snapshot!(p("NOT subscribers IS NOT EMPTY"), @"{subscribers} IS EMPTY");
+        insta::assert_snapshot!(p("subscribers IS NOT EMPTY"), @"NOT ({subscribers} IS EMPTY)");

         // Test EXISTS + NOT EXITS
-        insta::assert_display_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS");
-        insta::assert_display_snapshot!(p("NOT subscribers EXISTS"), @"NOT ({subscribers} EXISTS)");
-        insta::assert_display_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");
-        insta::assert_display_snapshot!(p("NOT subscribers NOT EXISTS"), @"{subscribers} EXISTS");
-        insta::assert_display_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");
+        insta::assert_snapshot!(p("subscribers EXISTS"), @"{subscribers} EXISTS");
+        insta::assert_snapshot!(p("NOT subscribers EXISTS"), @"NOT ({subscribers} EXISTS)");
+        insta::assert_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");
+        insta::assert_snapshot!(p("NOT subscribers NOT EXISTS"), @"{subscribers} EXISTS");
+        insta::assert_snapshot!(p("subscribers NOT EXISTS"), @"NOT ({subscribers} EXISTS)");

         // Test nested NOT
-        insta::assert_display_snapshot!(p("NOT NOT NOT NOT x = 5"), @"{x} = {5}");
-        insta::assert_display_snapshot!(p("NOT NOT (NOT NOT x = 5)"), @"{x} = {5}");
+        insta::assert_snapshot!(p("NOT NOT NOT NOT x = 5"), @"{x} = {5}");
+        insta::assert_snapshot!(p("NOT NOT (NOT NOT x = 5)"), @"{x} = {5}");

         // Test geo radius
-        insta::assert_display_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
-        insta::assert_display_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
-        insta::assert_display_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})");
+        insta::assert_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
+        insta::assert_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
+        insta::assert_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})");

         // Test geo bounding box
-        insta::assert_display_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
-        insta::assert_display_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))");
-        insta::assert_display_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
+        insta::assert_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
+        insta::assert_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))");
+        insta::assert_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");

         // Test OR + AND
-        insta::assert_display_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
-        insta::assert_display_snapshot!(p("channel = ponce OR 'dog race' != 'bernese mountain'"), @"OR[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
-        insta::assert_display_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000"), @"OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, ]");
-        insta::assert_display_snapshot!(
+        insta::assert_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
+        insta::assert_snapshot!(p("channel = ponce OR 'dog race' != 'bernese mountain'"), @"OR[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
+        insta::assert_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000"), @"OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, ]");
+        insta::assert_snapshot!(
             p("channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000 OR colour = red OR colour = blue AND size = 7"),
             @"OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, {colour} = {red}, AND[{colour} = {blue}, {size} = {7}, ], ]"
         );

         // Test parentheses
-        insta::assert_display_snapshot!(p("channel = ponce AND ( 'dog race' != 'bernese mountain' OR subscribers > 1000 )"), @"AND[{channel} = {ponce}, OR[{dog race} != {bernese mountain}, {subscribers} > {1000}, ], ]");
-        insta::assert_display_snapshot!(p("(channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000) AND _geoRadius(12, 13, 14)"), @"AND[OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, ], _geoRadius({12}, {13}, {14}), ]");
+        insta::assert_snapshot!(p("channel = ponce AND ( 'dog race' != 'bernese mountain' OR subscribers > 1000 )"), @"AND[{channel} = {ponce}, OR[{dog race} != {bernese mountain}, {subscribers} > {1000}, ], ]");
+        insta::assert_snapshot!(p("(channel = ponce AND 'dog race' != 'bernese mountain' OR subscribers > 1000) AND _geoRadius(12, 13, 14)"), @"AND[OR[AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ], {subscribers} > {1000}, ], _geoRadius({12}, {13}, {14}), ]");

         // Test recursion
         // This is the most that is allowed
-        insta::assert_display_snapshot!(
+        insta::assert_snapshot!(
             p("(((((((((((((((((((((((((((((((((((((((((((((((((x = 1)))))))))))))))))))))))))))))))))))))))))))))))))"),
             @"{x} = {1}"
         );
-        insta::assert_display_snapshot!(
+        insta::assert_snapshot!(
             p("NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT x = 1"),
             @"NOT ({x} = {1})"
         );

         // Confusing keywords
-        insta::assert_display_snapshot!(p(r#"NOT "OR" EXISTS AND "EXISTS" NOT EXISTS"#), @"AND[NOT ({OR} EXISTS), NOT ({EXISTS} EXISTS), ]");
+        insta::assert_snapshot!(p(r#"NOT "OR" EXISTS AND "EXISTS" NOT EXISTS"#), @"AND[NOT ({OR} EXISTS), NOT ({EXISTS} EXISTS), ]");
     }

     #[test]
@@ -689,182 +689,182 @@ pub mod tests {
            Fc::parse(s).unwrap_err().to_string()
        }

-        insta::assert_display_snapshot!(p("channel = Ponce = 12"), @r###"
+        insta::assert_snapshot!(p("channel = Ponce = 12"), @r###"
        Found unexpected characters at the end of the filter: `= 12`. You probably forgot an `OR` or an `AND` rule.
        17:21 channel = Ponce = 12
        "###);

-        insta::assert_display_snapshot!(p("channel = "), @r###"
+        insta::assert_snapshot!(p("channel = "), @r###"
        Was expecting a value but instead got nothing.
        14:14 channel =
        "###);

-        insta::assert_display_snapshot!(p("channel = 🐻"), @r###"
+        insta::assert_snapshot!(p("channel = 🐻"), @r###"
        Was expecting a value but instead got `🐻`.
        11:12 channel = 🐻
        "###);

-        insta::assert_display_snapshot!(p("channel = 🐻 AND followers < 100"), @r###"
+        insta::assert_snapshot!(p("channel = 🐻 AND followers < 100"), @r###"
        Was expecting a value but instead got `🐻`.
        11:12 channel = 🐻 AND followers < 100
        "###);

-        insta::assert_display_snapshot!(p("'OR'"), @r###"
+        insta::assert_snapshot!(p("'OR'"), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
        1:5 'OR'
        "###);

-        insta::assert_display_snapshot!(p("OR"), @r###"
+        insta::assert_snapshot!(p("OR"), @r###"
        Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
        1:3 OR
        "###);

-        insta::assert_display_snapshot!(p("channel Ponce"), @r###"
+        insta::assert_snapshot!(p("channel Ponce"), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
        1:14 channel Ponce
        "###);

-        insta::assert_display_snapshot!(p("channel = Ponce OR"), @r###"
+        insta::assert_snapshot!(p("channel = Ponce OR"), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
        19:19 channel = Ponce OR
        "###);

-        insta::assert_display_snapshot!(p("_geoRadius"), @r###"
+        insta::assert_snapshot!(p("_geoRadius"), @r###"
        The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
        1:11 _geoRadius
        "###);

-        insta::assert_display_snapshot!(p("_geoRadius = 12"), @r###"
+        insta::assert_snapshot!(p("_geoRadius = 12"), @r###"
        The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.
        1:16 _geoRadius = 12
        "###);

-        insta::assert_display_snapshot!(p("_geoBoundingBox"), @r###"
+        insta::assert_snapshot!(p("_geoBoundingBox"), @r###"
        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
        1:16 _geoBoundingBox
        "###);

-        insta::assert_display_snapshot!(p("_geoBoundingBox = 12"), @r###"
+        insta::assert_snapshot!(p("_geoBoundingBox = 12"), @r###"
        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
        1:21 _geoBoundingBox = 12
        "###);

-        insta::assert_display_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###"
+        insta::assert_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###"
        The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
        1:26 _geoBoundingBox(1.0, 1.0)
        "###);

-        insta::assert_display_snapshot!(p("_geoPoint(12, 13, 14)"), @r###"
+        insta::assert_snapshot!(p("_geoPoint(12, 13, 14)"), @r###"
        `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
        1:22 _geoPoint(12, 13, 14)
        "###);

-        insta::assert_display_snapshot!(p("position <= _geoPoint(12, 13, 14)"), @r###"
+        insta::assert_snapshot!(p("position <= _geoPoint(12, 13, 14)"), @r###"
        `_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
        13:34 position <= _geoPoint(12, 13, 14)
        "###);

-        insta::assert_display_snapshot!(p("_geoDistance(12, 13, 14)"), @r###"
+        insta::assert_snapshot!(p("_geoDistance(12, 13, 14)"), @r###"
        `_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
        1:25 _geoDistance(12, 13, 14)
        "###);

-        insta::assert_display_snapshot!(p("position <= _geoDistance(12, 13, 14)"), @r###"
+        insta::assert_snapshot!(p("position <= _geoDistance(12, 13, 14)"), @r###"
        `_geoDistance` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
        13:37 position <= _geoDistance(12, 13, 14)
        "###);

-        insta::assert_display_snapshot!(p("_geo(12, 13, 14)"), @r###"
+        insta::assert_snapshot!(p("_geo(12, 13, 14)"), @r###"
        `_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
        1:17 _geo(12, 13, 14)
        "###);

-        insta::assert_display_snapshot!(p("position <= _geo(12, 13, 14)"), @r###"
+        insta::assert_snapshot!(p("position <= _geo(12, 13, 14)"), @r###"
        `_geo` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` coordinates.
        13:29 position <= _geo(12, 13, 14)
        "###);

-        insta::assert_display_snapshot!(p("position <= _geoRadius(12, 13, 14)"), @r###"
+        insta::assert_snapshot!(p("position <= _geoRadius(12, 13, 14)"), @r###"
        The `_geoRadius` filter is an operation and can't be used as a value.
        13:35 position <= _geoRadius(12, 13, 14)
        "###);

-        insta::assert_display_snapshot!(p("channel = 'ponce"), @r###"
+        insta::assert_snapshot!(p("channel = 'ponce"), @r###"
        Expression `\'ponce` is missing the following closing delimiter: `'`.
        11:17 channel = 'ponce
        "###);

-        insta::assert_display_snapshot!(p("channel = \"ponce"), @r###"
+        insta::assert_snapshot!(p("channel = \"ponce"), @r###"
        Expression `\"ponce` is missing the following closing delimiter: `"`.
        11:17 channel = "ponce
        "###);

-        insta::assert_display_snapshot!(p("channel = mv OR (followers >= 1000"), @r###"
+        insta::assert_snapshot!(p("channel = mv OR (followers >= 1000"), @r###"
        Expression `(followers >= 1000` is missing the following closing delimiter: `)`.
        17:35 channel = mv OR (followers >= 1000
        "###);

-        insta::assert_display_snapshot!(p("channel = mv OR followers >= 1000)"), @r###"
+        insta::assert_snapshot!(p("channel = mv OR followers >= 1000)"), @r###"
        Found unexpected characters at the end of the filter: `)`. You probably forgot an `OR` or an `AND` rule.
        34:35 channel = mv OR followers >= 1000)
        "###);

-        insta::assert_display_snapshot!(p("colour NOT EXIST"), @r###"
+        insta::assert_snapshot!(p("colour NOT EXIST"), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
        1:17 colour NOT EXIST
        "###);

-        insta::assert_display_snapshot!(p("subscribers 100 TO1000"), @r###"
+        insta::assert_snapshot!(p("subscribers 100 TO1000"), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
        1:23 subscribers 100 TO1000
        "###);

-        insta::assert_display_snapshot!(p("channel = ponce ORdog != 'bernese mountain'"), @r###"
+        insta::assert_snapshot!(p("channel = ponce ORdog != 'bernese mountain'"), @r###"
        Found unexpected characters at the end of the filter: `ORdog != \'bernese mountain\'`. You probably forgot an `OR` or an `AND` rule.
        17:44 channel = ponce ORdog != 'bernese mountain'
        "###);

-        insta::assert_display_snapshot!(p("colour IN blue, green]"), @r###"
+        insta::assert_snapshot!(p("colour IN blue, green]"), @r###"
        Expected `[` after `IN` keyword.
        11:23 colour IN blue, green]
        "###);

-        insta::assert_display_snapshot!(p("colour IN [blue, green, 'blue' > 2]"), @r###"
+        insta::assert_snapshot!(p("colour IN [blue, green, 'blue' > 2]"), @r###"
        Expected only comma-separated field names inside `IN[..]` but instead found `> 2]`.
        32:36 colour IN [blue, green, 'blue' > 2]
        "###);

-        insta::assert_display_snapshot!(p("colour IN [blue, green, AND]"), @r###"
+        insta::assert_snapshot!(p("colour IN [blue, green, AND]"), @r###"
        Expected only comma-separated field names inside `IN[..]` but instead found `AND]`.
        25:29 colour IN [blue, green, AND]
        "###);

-        insta::assert_display_snapshot!(p("colour IN [blue, green"), @r###"
+        insta::assert_snapshot!(p("colour IN [blue, green"), @r###"
        Expected matching `]` after the list of field names given to `IN[`
        23:23 colour IN [blue, green
        "###);

-        insta::assert_display_snapshot!(p("colour IN ['blue, green"), @r###"
+        insta::assert_snapshot!(p("colour IN ['blue, green"), @r###"
        Expression `\'blue, green` is missing the following closing delimiter: `'`.
        12:24 colour IN ['blue, green
        "###);

-        insta::assert_display_snapshot!(p("x = EXISTS"), @r###"
+        insta::assert_snapshot!(p("x = EXISTS"), @r###"
        Was expecting a value but instead got `EXISTS`, which is a reserved keyword. To use `EXISTS` as a field name or a value, surround it by quotes.
        5:11 x = EXISTS
        "###);

-        insta::assert_display_snapshot!(p("AND = 8"), @r###"
+        insta::assert_snapshot!(p("AND = 8"), @r###"
        Was expecting a value but instead got `AND`, which is a reserved keyword. To use `AND` as a field name or a value, surround it by quotes.
        1:4 AND = 8
        "###);

-        insta::assert_display_snapshot!(p("((((((((((((((((((((((((((((((((((((((((((((((((((x = 1))))))))))))))))))))))))))))))))))))))))))))))))))"), @r###"
+        insta::assert_snapshot!(p("((((((((((((((((((((((((((((((((((((((((((((((((((x = 1))))))))))))))))))))))))))))))))))))))))))))))))))"), @r###"
        The filter exceeded the maximum depth limit. Try rewriting the filter so that it contains fewer nested conditions.
        51:106 ((((((((((((((((((((((((((((((((((((((((((((((((((x = 1))))))))))))))))))))))))))))))))))))))))))))))))))
        "###);

-        insta::assert_display_snapshot!(
+        insta::assert_snapshot!(
            p("NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT NOT x = 1"),
            @r###"
        The filter exceeded the maximum depth limit. Try rewriting the filter so that it contains fewer nested conditions.
@@ -872,40 +872,40 @@ pub mod tests {
        "###
        );

-        insta::assert_display_snapshot!(p(r#"NOT OR EXISTS AND EXISTS NOT EXISTS"#), @r###"
+        insta::assert_snapshot!(p(r#"NOT OR EXISTS AND EXISTS NOT EXISTS"#), @r###"
        Was expecting a value but instead got `OR`, which is a reserved keyword. To use `OR` as a field name or a value, surround it by quotes.
        5:7 NOT OR EXISTS AND EXISTS NOT EXISTS
        "###);

-        insta::assert_display_snapshot!(p(r#"value NULL"#), @r###"
+        insta::assert_snapshot!(p(r#"value NULL"#), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NULL`.
        1:11 value NULL
        "###);
-        insta::assert_display_snapshot!(p(r#"value NOT NULL"#), @r###"
+        insta::assert_snapshot!(p(r#"value NOT NULL"#), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT NULL`.
        1:15 value NOT NULL
        "###);
-        insta::assert_display_snapshot!(p(r#"value EMPTY"#), @r###"
+        insta::assert_snapshot!(p(r#"value EMPTY"#), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value EMPTY`.
        1:12 value EMPTY
        "###);
-        insta::assert_display_snapshot!(p(r#"value NOT EMPTY"#), @r###"
+        insta::assert_snapshot!(p(r#"value NOT EMPTY"#), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value NOT EMPTY`.
        1:16 value NOT EMPTY
        "###);
-        insta::assert_display_snapshot!(p(r#"value IS"#), @r###"
+        insta::assert_snapshot!(p(r#"value IS"#), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS`.
        1:9 value IS
        "###);
-        insta::assert_display_snapshot!(p(r#"value IS NOT"#), @r###"
+        insta::assert_snapshot!(p(r#"value IS NOT"#), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT`.
        1:13 value IS NOT
        "###);
-        insta::assert_display_snapshot!(p(r#"value IS EXISTS"#), @r###"
+        insta::assert_snapshot!(p(r#"value IS EXISTS"#), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS EXISTS`.
        1:16 value IS EXISTS
        "###);
-        insta::assert_display_snapshot!(p(r#"value IS NOT EXISTS"#), @r###"
+        insta::assert_snapshot!(p(r#"value IS NOT EXISTS"#), @r###"
        Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `value IS NOT EXISTS`.
        1:20 value IS NOT EXISTS
        "###);
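All of the error tests above go through a small `p` helper; from the context line `Fc::parse(s).unwrap_err().to_string()` it can be reconstructed roughly as below. The crate path and the `Fc` alias for `FilterCondition` are assumptions drawn from the hunk context, not shown verbatim in this diff:

```rust
use filter_parser::FilterCondition as Fc;

// Parse the filter and render the error exactly as the inline snapshots
// assert it: the message first, then a `start:end input` position line.
fn p(s: &str) -> String {
    Fc::parse(s).unwrap_err().to_string()
}
```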
@@ -12,9 +12,9 @@ license.workspace = true

 [dependencies]
 arbitrary = { version = "1.3.2", features = ["derive"] }
-clap = { version = "4.4.17", features = ["derive"] }
-fastrand = "2.0.1"
+clap = { version = "4.5.9", features = ["derive"] }
+fastrand = "2.1.0"
 milli = { path = "../milli" }
-serde = { version = "1.0.195", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
-tempfile = "3.9.0"
+serde = { version = "1.0.204", features = ["derive"] }
+serde_json = { version = "1.0.120", features = ["preserve_order"] }
+tempfile = "3.10.1"
@@ -110,7 +110,7 @@ fn main() {

         // after executing a batch we check if the database is corrupted
         let res = index.search(&wtxn).execute().unwrap();
-        index.documents(&wtxn, res.documents_ids).unwrap();
+        index.compressed_documents(&wtxn, res.documents_ids).unwrap();
         progression.fetch_add(1, Ordering::Relaxed);
     }
     wtxn.abort();
@@ -11,36 +11,38 @@ edition.workspace = true
 license.workspace = true

 [dependencies]
-anyhow = "1.0.79"
+anyhow = "1.0.86"
 bincode = "1.3.3"
 csv = "1.3.0"
-derive_builder = "0.12.0"
+derive_builder = "0.20.0"
 dump = { path = "../dump" }
-enum-iterator = "1.5.0"
+enum-iterator = "2.1.0"
 file-store = { path = "../file-store" }
-flate2 = "1.0.28"
+flate2 = "1.0.30"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-page_size = "0.5.0"
-rayon = "1.8.1"
-roaring = { version = "0.10.2", features = ["serde"] }
-serde = { version = "1.0.195", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
+page_size = "0.6.0"
+rayon = "1.10.0"
+roaring = { version = "0.10.6", features = ["serde"] }
+serde = { version = "1.0.204", features = ["derive"] }
+serde_json = { version = "1.0.120", features = ["preserve_order"] }
 synchronoise = "1.0.1"
-tempfile = "3.9.0"
-thiserror = "1.0.56"
-time = { version = "0.3.31", features = [
+tempfile = "3.10.1"
+thiserror = "1.0.61"
+time = { version = "0.3.36", features = [
     "serde-well-known",
     "formatting",
     "parsing",
     "macros",
 ] }
 tracing = "0.1.40"
-ureq = "2.9.7"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+ureq = "2.10.0"
+uuid = { version = "1.10.0", features = ["serde", "v4"] }

 [dev-dependencies]
+arroy = "0.4.0"
 big_s = "1.0.2"
 crossbeam = "0.8.4"
-insta = { version = "1.34.0", features = ["json", "redactions"] }
+insta = { version = "1.39.0", features = ["json", "redactions"] }
+maplit = "1.0.2"
 meili-snap = { path = "../meili-snap" }
@@ -908,15 +908,22 @@ impl IndexScheduler {
        let mut index_dumper = dump.create_index(uid, &metadata)?;

        let fields_ids_map = index.fields_ids_map(&rtxn)?;
+        let dictionary = index.document_decompression_dictionary(&rtxn)?;
        let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
+        let embedding_configs = index.embedding_configs(&rtxn)?;
+        let mut buffer = Vec::new();

        // 3.1. Dump the documents
-        for ret in index.all_documents(&rtxn)? {
+        for ret in index.all_compressed_documents(&rtxn)? {
            if self.must_stop_processing.get() {
                return Err(Error::AbortedTask);
            }

-            let (id, doc) = ret?;
+            let (id, compressed) = ret?;
+            let doc = compressed.decompress_with_optional_dictionary(
+                &mut buffer,
+                dictionary.as_ref(),
+            )?;

            let mut document = milli::obkv_to_json(&all_fields, &fields_ids_map, doc)?;
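This dump hunk is the visible edge of the branch's larger change: documents are now stored compressed, optionally against a shared dictionary kept in the index, so every reader has to decompress before building JSON. A sketch of the read loop under that assumption; the compressed-document API used here exists only on this branch, not in released milli:

```rust
use milli::heed::RoTxn;
use milli::{Index, Result};

// Decompress every document, reusing one scratch buffer across iterations.
fn read_all_documents(index: &Index, rtxn: &RoTxn) -> Result<()> {
    let dictionary = index.document_decompression_dictionary(rtxn)?;
    let mut buffer = Vec::new(); // reused to avoid per-document allocations
    for ret in index.all_compressed_documents(rtxn)? {
        let (_docid, compressed) = ret?;
        let _doc = compressed
            .decompress_with_optional_dictionary(&mut buffer, dictionary.as_ref())?;
        // `_doc` is the usual obkv document view from here on.
    }
    Ok(())
}
```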
@@ -951,16 +958,21 @@ impl IndexScheduler {
            };

            for (embedder_name, embeddings) in embeddings {
-                // don't change the entry if it already exists, because it was user-provided
-                vectors.entry(embedder_name).or_insert_with(|| {
-                    let embeddings = ExplicitVectors {
-                        embeddings: VectorOrArrayOfVectors::from_array_of_vectors(
-                            embeddings,
-                        ),
-                        user_provided: false,
-                    };
-                    serde_json::to_value(embeddings).unwrap()
-                });
+                let user_provided = embedding_configs
+                    .iter()
+                    .find(|conf| conf.name == embedder_name)
+                    .is_some_and(|conf| conf.user_provided.contains(id));
+
+                let embeddings = ExplicitVectors {
+                    embeddings: Some(
+                        VectorOrArrayOfVectors::from_array_of_vectors(embeddings),
+                    ),
+                    regenerate: !user_provided,
+                };
+                vectors.insert(
+                    embedder_name,
+                    serde_json::to_value(embeddings).unwrap(),
+                );
            }
        }
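The rewritten loop changes what the dump records per embedder: instead of a blanket `user_provided: false`, it looks the document up in the index's embedding configs and writes `regenerate` as the inverse of whether its vectors were user-provided, and it now always overwrites the entry rather than keeping an existing one. Illustrative only; the field names follow the hunk above and the snapshot diffs below, while the concrete values are made up:

```rust
use serde_json::json;

fn main() {
    // Old dump entry vs. new dump entry for one embedder:
    // `userProvided: true` becomes `regenerate: false`, and vice versa.
    let old = json!({ "embeddings": "[vector]", "userProvided": true });
    let new = json!({ "embeddings": "[vector]", "regenerate": false });
    assert_eq!(old["userProvided"], true);
    assert_eq!(new["regenerate"], false);
}
```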
File diff suppressed because it is too large
@@ -6,10 +6,6 @@ expression: doc
   "doggo": "Intel",
   "breed": "beagle",
   "_vectors": {
-    "A_fakerest": {
-      "embeddings": "[vector]",
-      "userProvided": true
-    },
     "noise": [
       0.1,
       0.2,
@@ -6,10 +6,6 @@ expression: doc
   "doggo": "kefir",
   "breed": "patou",
   "_vectors": {
-    "A_fakerest": {
-      "embeddings": "[vector]",
-      "userProvided": true
-    },
     "noise": [
       0.1,
       0.2,
File diff suppressed because one or more lines are too long
@@ -11,6 +11,6 @@ edition.workspace = true
 license.workspace = true

 [dependencies]
-insta = { version = "^1.34.0", features = ["json", "redactions"] }
+insta = { version = "^1.39.0", features = ["json", "redactions"] }
 md5 = "0.7.0"
 once_cell = "1.19"
@@ -11,16 +11,16 @@ edition.workspace = true
 license.workspace = true

 [dependencies]
-base64 = "0.21.7"
-enum-iterator = "1.5.0"
+base64 = "0.22.1"
+enum-iterator = "2.1.0"
 hmac = "0.12.1"
 maplit = "1.0.2"
 meilisearch-types = { path = "../meilisearch-types" }
 rand = "0.8.5"
-roaring = { version = "0.10.2", features = ["serde"] }
-serde = { version = "1.0.195", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
+roaring = { version = "0.10.6", features = ["serde"] }
+serde = { version = "1.0.204", features = ["derive"] }
+serde_json = { version = "1.0.120", features = ["preserve_order"] }
 sha2 = "0.10.8"
-thiserror = "1.0.56"
-time = { version = "0.3.31", features = ["serde-well-known", "formatting", "parsing", "macros"] }
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+thiserror = "1.0.61"
+time = { version = "0.3.36", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+uuid = { version = "1.10.0", features = ["serde", "v4"] }
@@ -188,6 +188,12 @@ impl AuthFilter {
        self.allow_index_creation && self.is_index_authorized(index)
    }

+    #[inline]
+    /// Return true if a tenant token was used to generate the search rules.
+    pub fn is_tenant_token(&self) -> bool {
+        self.search_rules.is_some()
+    }
+
    pub fn with_allowed_indexes(allowed_indexes: HashSet<IndexUidPattern>) -> Self {
        Self {
            search_rules: None,
@@ -205,6 +211,7 @@ impl AuthFilter {
            .unwrap_or(true)
    }

+    /// Check if the index is authorized by the API key and the tenant token.
    pub fn is_index_authorized(&self, index: &str) -> bool {
        self.key_authorized_indexes.is_index_authorized(index)
            && self
@@ -214,6 +221,44 @@ impl AuthFilter {
|
|||||||
.unwrap_or(true)
|
.unwrap_or(true)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Only check if the index is authorized by the API key
|
||||||
|
pub fn api_key_is_index_authorized(&self, index: &str) -> bool {
|
||||||
|
self.key_authorized_indexes.is_index_authorized(index)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Only check if the index is authorized by the tenant token
|
||||||
|
pub fn tenant_token_is_index_authorized(&self, index: &str) -> bool {
|
||||||
|
self.search_rules
|
||||||
|
.as_ref()
|
||||||
|
.map(|search_rules| search_rules.is_index_authorized(index))
|
||||||
|
.unwrap_or(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the list of authorized indexes by the tenant token if any
|
||||||
|
pub fn tenant_token_list_index_authorized(&self) -> Vec<String> {
|
||||||
|
match self.search_rules {
|
||||||
|
Some(ref search_rules) => {
|
||||||
|
let mut indexes: Vec<_> = match search_rules {
|
||||||
|
SearchRules::Set(set) => set.iter().map(|s| s.to_string()).collect(),
|
||||||
|
SearchRules::Map(map) => map.keys().map(|s| s.to_string()).collect(),
|
||||||
|
};
|
||||||
|
indexes.sort_unstable();
|
||||||
|
indexes
|
||||||
|
}
|
||||||
|
None => Vec::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Return the list of authorized indexes by the api key if any
|
||||||
|
pub fn api_key_list_index_authorized(&self) -> Vec<String> {
|
||||||
|
let mut indexes: Vec<_> = match self.key_authorized_indexes {
|
||||||
|
SearchRules::Set(ref set) => set.iter().map(|s| s.to_string()).collect(),
|
||||||
|
SearchRules::Map(ref map) => map.keys().map(|s| s.to_string()).collect(),
|
||||||
|
};
|
||||||
|
indexes.sort_unstable();
|
||||||
|
indexes
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
|
pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
|
||||||
if !self.is_index_authorized(index) {
|
if !self.is_index_authorized(index) {
|
||||||
return None;
|
return None;
|
||||||
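
The four helpers above split index authorization into its tenant-token half and its API-key half; the `ActionPolicy` hunk further down combines them in exactly this order. A minimal sketch of that combination, assuming `meilisearch_auth::AuthFilter` is in scope (the helper name and error strings are illustrative, not from the diff):

    use meilisearch_auth::AuthFilter;

    // Hypothetical helper: report which side of the authorization rejected the index.
    fn explain_index_access(auth_filter: &AuthFilter, index: &str) -> Result<(), String> {
        if !auth_filter.tenant_token_is_index_authorized(index) {
            // The tenant token's own rules are public, so its allowed list can be shared.
            return Err(format!(
                "tenant token cannot access `{index}`, allowed: {:?}",
                auth_filter.tenant_token_list_index_authorized()
            ));
        }
        if !auth_filter.api_key_is_index_authorized(index) {
            if auth_filter.is_tenant_token() {
                // The API key's index list stays private when a tenant token is in play.
                return Err(format!("the API key behind this tenant token cannot access `{index}`"));
            }
            return Err(format!(
                "API key cannot access `{index}`, allowed: {:?}",
                auth_filter.api_key_list_index_authorized()
            ));
        }
        Ok(())
    }
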
@@ -11,36 +11,36 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-actix-web = { version = "4.6.0", default-features = false }
-anyhow = "1.0.79"
+actix-web = { version = "4.8.0", default-features = false }
+anyhow = "1.0.86"
 convert_case = "0.6.0"
 csv = "1.3.0"
-deserr = { version = "0.6.1", features = ["actix-web"] }
-either = { version = "1.9.0", features = ["serde"] }
-enum-iterator = "1.5.0"
+deserr = { version = "0.6.2", features = ["actix-web"] }
+either = { version = "1.13.0", features = ["serde"] }
+enum-iterator = "2.1.0"
 file-store = { path = "../file-store" }
-flate2 = "1.0.28"
+flate2 = "1.0.30"
 fst = "0.4.7"
-memmap2 = "0.7.1"
+memmap2 = "0.9.4"
 milli = { path = "../milli" }
-roaring = { version = "0.10.2", features = ["serde"] }
-serde = { version = "1.0.195", features = ["derive"] }
+roaring = { version = "0.10.6", features = ["serde"] }
+serde = { version = "1.0.204", features = ["derive"] }
 serde-cs = "0.2.4"
-serde_json = "1.0.111"
-tar = "0.4.40"
-tempfile = "3.9.0"
-thiserror = "1.0.56"
-time = { version = "0.3.31", features = [
+serde_json = "1.0.120"
+tar = "0.4.41"
+tempfile = "3.10.1"
+thiserror = "1.0.61"
+time = { version = "0.3.36", features = [
     "serde-well-known",
     "formatting",
     "parsing",
     "macros",
 ] }
-tokio = "1.35"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+tokio = "1.38"
+uuid = { version = "1.10.0", features = ["serde", "v4"] }
 
 [dev-dependencies]
-insta = "1.34.0"
+insta = "1.39.0"
 meili-snap = { path = "../meili-snap" }
 
 [features]
@@ -54,6 +54,8 @@ chinese-pinyin = ["milli/chinese-pinyin"]
 hebrew = ["milli/hebrew"]
 # japanese specialized tokenization
 japanese = ["milli/japanese"]
+# korean specialized tokenization
+korean = ["milli/korean"]
 # thai specialized tokenization
 thai = ["milli/thai"]
 # allow greek specialized tokenization
@@ -222,6 +222,7 @@ InvalidApiKeyUid , InvalidRequest , BAD_REQUEST ;
 InvalidContentType                     , InvalidRequest      , UNSUPPORTED_MEDIA_TYPE ;
 InvalidDocumentCsvDelimiter            , InvalidRequest      , BAD_REQUEST ;
 InvalidDocumentFields                  , InvalidRequest      , BAD_REQUEST ;
+InvalidDocumentRetrieveVectors         , InvalidRequest      , BAD_REQUEST ;
 MissingDocumentFilter                  , InvalidRequest      , BAD_REQUEST ;
 InvalidDocumentFilter                  , InvalidRequest      , BAD_REQUEST ;
 InvalidDocumentGeoField                , InvalidRequest      , BAD_REQUEST ;
@@ -240,9 +241,11 @@ InvalidSearchAttributesToSearchOn , InvalidRequest , BAD_REQUEST ;
 InvalidSearchAttributesToCrop          , InvalidRequest      , BAD_REQUEST ;
 InvalidSearchAttributesToHighlight     , InvalidRequest      , BAD_REQUEST ;
 InvalidSimilarAttributesToRetrieve     , InvalidRequest      , BAD_REQUEST ;
+InvalidSimilarRetrieveVectors          , InvalidRequest      , BAD_REQUEST ;
 InvalidSearchAttributesToRetrieve      , InvalidRequest      , BAD_REQUEST ;
 InvalidSearchRankingScoreThreshold     , InvalidRequest      , BAD_REQUEST ;
 InvalidSimilarRankingScoreThreshold    , InvalidRequest      , BAD_REQUEST ;
+InvalidSearchRetrieveVectors           , InvalidRequest      , BAD_REQUEST ;
 InvalidSearchCropLength                , InvalidRequest      , BAD_REQUEST ;
 InvalidSearchCropMarker                , InvalidRequest      , BAD_REQUEST ;
 InvalidSearchFacets                    , InvalidRequest      , BAD_REQUEST ;
@@ -270,13 +273,14 @@ InvalidSimilarShowRankingScore , InvalidRequest , BAD_REQUEST ;
 InvalidSearchShowRankingScoreDetails   , InvalidRequest      , BAD_REQUEST ;
 InvalidSimilarShowRankingScoreDetails  , InvalidRequest      , BAD_REQUEST ;
 InvalidSearchSort                      , InvalidRequest      , BAD_REQUEST ;
+InvalidSearchDistinct                  , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsDisplayedAttributes     , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsDistinctAttribute       , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsProximityPrecision      , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsFaceting                , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsFilterableAttributes    , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsPagination              , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsSearchCutoffMs          , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsEmbedders               , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsRankingRules            , InvalidRequest      , BAD_REQUEST ;
 InvalidSettingsSearchableAttributes    , InvalidRequest      , BAD_REQUEST ;
@@ -381,6 +385,7 @@ impl ErrorCode for milli::Error {
                     Code::IndexPrimaryKeyMultipleCandidatesFound
                 }
                 UserError::PrimaryKeyCannotBeChanged(_) => Code::IndexPrimaryKeyAlreadyExists,
+                UserError::InvalidDistinctAttribute { .. } => Code::InvalidSearchDistinct,
                 UserError::SortRankingRuleMissing => Code::InvalidSearchSort,
                 UserError::InvalidFacetsDistribution { .. } => Code::InvalidSearchFacets,
                 UserError::InvalidSortableAttribute { .. } => Code::InvalidSearchSort,
@@ -393,7 +398,8 @@ impl ErrorCode for milli::Error {
                 UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
                 UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
                 UserError::InvalidVectorDimensions { .. } => Code::InvalidVectorDimensions,
-                UserError::InvalidVectorsMapType { .. } => Code::InvalidVectorsType,
+                UserError::InvalidVectorsMapType { .. }
+                | UserError::InvalidVectorsEmbedderConf { .. } => Code::InvalidVectorsType,
                 UserError::TooManyVectors(_, _) => Code::TooManyVectors,
                 UserError::SortError(_) => Code::InvalidSearchSort,
                 UserError::InvalidMinTypoWordLenSetting(_, _) => {
@@ -12,7 +12,7 @@ pub mod star_or;
 pub mod task_view;
 pub mod tasks;
 pub mod versioning;
-pub use milli::{heed, Index};
+pub use milli::{heed, zstd, Index};
 use uuid::Uuid;
 pub use versioning::VERSION_FILE_NAME;
 pub use {milli, serde_cs};
@@ -8,6 +8,7 @@ use std::str::FromStr;
 
 use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
 use fst::IntoStreamer;
+use milli::index::IndexEmbeddingConfig;
 use milli::proximity::ProximityPrecision;
 use milli::update::Setting;
 use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
@@ -672,7 +673,7 @@ pub fn settings(
     let embedders: BTreeMap<_, _> = index
         .embedding_configs(rtxn)?
        .into_iter()
-        .map(|(name, config)| (name, Setting::Set(config.into())))
+        .map(|IndexEmbeddingConfig { name, config, .. }| (name, Setting::Set(config.into())))
         .collect();
     let embedders = if embedders.is_empty() { Setting::NotSet } else { Setting::Set(embedders) };
 
@@ -14,130 +14,123 @@ default-run = "meilisearch"
 
 [dependencies]
 actix-cors = "0.7.0"
-actix-http = { version = "3.7.0", default-features = false, features = [
+actix-http = { version = "3.8.0", default-features = false, features = [
     "compress-brotli",
     "compress-gzip",
     "rustls-0_21",
 ] }
 actix-utils = "3.0.1"
-actix-web = { version = "4.6.0", default-features = false, features = [
+actix-web = { version = "4.8.0", default-features = false, features = [
     "macros",
     "compress-brotli",
     "compress-gzip",
     "cookies",
     "rustls-0_21",
 ] }
-actix-web-static-files = { version = "4.0.1", optional = true }
-anyhow = { version = "1.0.79", features = ["backtrace"] }
-async-stream = "0.3.5"
-async-trait = "0.1.77"
-bstr = "1.9.0"
-byte-unit = { version = "4.0.19", default-features = false, features = [
+anyhow = { version = "1.0.86", features = ["backtrace"] }
+async-trait = "0.1.81"
+bstr = "1.9.1"
+byte-unit = { version = "5.1.4", default-features = false, features = [
     "std",
+    "byte",
     "serde",
 ] }
-bytes = "1.5.0"
-clap = { version = "4.4.17", features = ["derive", "env"] }
-crossbeam-channel = "0.5.11"
-deserr = { version = "0.6.1", features = ["actix-web"] }
+bytes = "1.6.0"
+clap = { version = "4.5.9", features = ["derive", "env"] }
+crossbeam-channel = "0.5.13"
+deserr = { version = "0.6.2", features = ["actix-web"] }
 dump = { path = "../dump" }
-either = "1.9.0"
+either = "1.13.0"
 file-store = { path = "../file-store" }
-flate2 = "1.0.28"
+flate2 = "1.0.30"
 fst = "0.4.7"
 futures = "0.3.30"
 futures-util = "0.3.30"
-http = "0.2.11"
 index-scheduler = { path = "../index-scheduler" }
-indexmap = { version = "2.1.0", features = ["serde"] }
-is-terminal = "0.4.10"
-itertools = "0.11.0"
-jsonwebtoken = "9.2.0"
-lazy_static = "1.4.0"
+indexmap = { version = "2.2.6", features = ["serde"] }
+is-terminal = "0.4.12"
+itertools = "0.13.0"
+jsonwebtoken = "9.3.0"
+lazy_static = "1.5.0"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-mimalloc = { version = "0.1.39", default-features = false }
+mimalloc = { version = "0.1.43", default-features = false }
 mime = "0.3.17"
 num_cpus = "1.16.0"
-obkv = "0.2.1"
+obkv = "0.2.2"
 once_cell = "1.19.0"
-ordered-float = "4.2.0"
-parking_lot = "0.12.1"
+ordered-float = "4.2.1"
+parking_lot = "0.12.3"
 permissive-json-pointer = { path = "../permissive-json-pointer" }
-pin-project-lite = "0.2.13"
+pin-project-lite = "0.2.14"
 platform-dirs = "0.3.0"
-prometheus = { version = "0.13.3", features = ["process"] }
+prometheus = { version = "0.13.4", features = ["process"] }
 rand = "0.8.5"
-rayon = "1.8.0"
-regex = "1.10.2"
-reqwest = { version = "0.11.23", features = [
+rayon = "1.10.0"
+regex = "1.10.5"
+reqwest = { version = "0.12.5", features = [
     "rustls-tls",
     "json",
 ], default-features = false }
 rustls = "0.21.12"
-rustls-pemfile = "1.0.2"
-segment = { version = "0.2.3", optional = true }
-serde = { version = "1.0.195", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
+rustls-pemfile = "1.0.4"
+segment = { version = "0.2.4", optional = true }
+serde = { version = "1.0.204", features = ["derive"] }
+serde_json = { version = "1.0.120", features = ["preserve_order"] }
 sha2 = "0.10.8"
-siphasher = "1.0.0"
+siphasher = "1.0.1"
 slice-group-by = "0.3.1"
-static-files = { version = "0.2.3", optional = true }
-sysinfo = "0.30.5"
-tar = "0.4.40"
-tempfile = "3.9.0"
-thiserror = "1.0.56"
-time = { version = "0.3.31", features = [
+static-files = { version = "0.2.4", optional = true }
+sysinfo = "0.30.13"
+tar = "0.4.41"
+tempfile = "3.10.1"
+thiserror = "1.0.61"
+time = { version = "0.3.36", features = [
     "serde-well-known",
     "formatting",
     "parsing",
     "macros",
 ] }
-tokio = { version = "1.35.1", features = ["full"] }
-tokio-stream = "0.1.14"
-toml = "0.8.8"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
-walkdir = "2.4.0"
-yaup = "0.2.1"
+tokio = { version = "1.38.0", features = ["full"] }
+toml = "0.8.14"
+uuid = { version = "1.10.0", features = ["serde", "v4"] }
 serde_urlencoded = "0.7.1"
 termcolor = "1.4.1"
-url = { version = "2.5.0", features = ["serde"] }
+url = { version = "2.5.2", features = ["serde"] }
 tracing = "0.1.40"
 tracing-subscriber = { version = "0.3.18", features = ["json"] }
 tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
-tracing-actix-web = "0.7.10"
+tracing-actix-web = "0.7.11"
 build-info = { version = "1.7.0", path = "../build-info" }
 
 [dev-dependencies]
-actix-rt = "2.9.0"
-assert-json-diff = "2.0.2"
+actix-rt = "2.10.0"
 brotli = "6.0.0"
-insta = "1.34.0"
+insta = "1.39.0"
 manifest-dir-macros = "0.1.18"
 maplit = "1.0.2"
 meili-snap = { path = "../meili-snap" }
 temp-env = "0.3.6"
 urlencoding = "2.1.3"
-yaup = "0.2.1"
+yaup = "0.3.1"
 
 [build-dependencies]
-anyhow = { version = "1.0.79", optional = true }
-cargo_toml = { version = "0.18.0", optional = true }
+anyhow = { version = "1.0.86", optional = true }
+cargo_toml = { version = "0.20.3", optional = true }
 hex = { version = "0.4.3", optional = true }
-reqwest = { version = "0.11.23", features = [
+reqwest = { version = "0.12.5", features = [
     "blocking",
     "rustls-tls",
 ], default-features = false, optional = true }
 sha-1 = { version = "0.10.1", optional = true }
-static-files = { version = "0.2.3", optional = true }
-tempfile = { version = "3.9.0", optional = true }
-zip = { version = "0.6.6", optional = true }
+static-files = { version = "0.2.4", optional = true }
+tempfile = { version = "3.10.1", optional = true }
+zip = { version = "2.1.3", default-features = false, features = ["deflate"], optional = true }
 
 [features]
 default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]
 analytics = ["segment"]
 mini-dashboard = [
-    "actix-web-static-files",
     "static-files",
     "anyhow",
     "cargo_toml",
@@ -151,6 +144,7 @@ chinese = ["meilisearch-types/chinese"]
 chinese-pinyin = ["meilisearch-types/chinese-pinyin"]
 hebrew = ["meilisearch-types/hebrew"]
 japanese = ["meilisearch-types/japanese"]
+korean = ["meilisearch-types/korean"]
 thai = ["meilisearch-types/thai"]
 greek = ["meilisearch-types/greek"]
 khmer = ["meilisearch-types/khmer"]
@@ -158,5 +152,5 @@ vietnamese = ["meilisearch-types/vietnamese"]
 swedish-recomposition = ["meilisearch-types/swedish-recomposition"]
 
 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.13/build.zip"
-sha1 = "e20cc9b390003c6c844f4b8bcc5c5013191a77ff"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.14/build.zip"
+sha1 = "592d1b5a3459d621d0aae1dded8fe3154f5c38fe"
@@ -74,8 +74,8 @@ pub enum DocumentDeletionKind {
 
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum DocumentFetchKind {
-    PerDocumentId,
-    Normal { with_filter: bool, limit: usize, offset: usize },
+    PerDocumentId { retrieve_vectors: bool },
+    Normal { with_filter: bool, limit: usize, offset: usize, retrieve_vectors: bool },
 }
 
 pub trait Analytics: Sync + Send {
@@ -5,10 +5,9 @@ use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::time::{Duration, Instant};
 
-use actix_web::http::header::USER_AGENT;
+use actix_web::http::header::{CONTENT_TYPE, USER_AGENT};
 use actix_web::HttpRequest;
 use byte_unit::Byte;
-use http::header::CONTENT_TYPE;
 use index_scheduler::IndexScheduler;
 use meilisearch_auth::{AuthController, AuthFilter};
 use meilisearch_types::InstanceUid;
@@ -597,6 +596,9 @@ pub struct SearchAggregator {
     // every time a request has a filter, this field must be incremented by one
     sort_total_number_of_criteria: usize,
 
+    // distinct
+    distinct: bool,
+
     // filter
     filter_with_geo_radius: bool,
     filter_with_geo_bounding_box: bool,
@@ -622,6 +624,7 @@ pub struct SearchAggregator {
     // Whether a non-default embedder was specified
     embedder: bool,
     hybrid: bool,
+    retrieve_vectors: bool,
 
     // every time a search is done, we increment the counter linked to the used settings
     matching_strategy: HashMap<String, usize>,
@@ -662,6 +665,7 @@ impl SearchAggregator {
             page,
             hits_per_page,
             attributes_to_retrieve: _,
+            retrieve_vectors,
             attributes_to_crop: _,
             crop_length,
             attributes_to_highlight: _,
@@ -670,6 +674,7 @@ impl SearchAggregator {
             show_ranking_score_details,
             filter,
             sort,
+            distinct,
             facets: _,
             highlight_pre_tag,
             highlight_post_tag,
@@ -692,6 +697,8 @@ impl SearchAggregator {
             ret.sort_sum_of_criteria_terms = sort.len();
         }
 
+        ret.distinct = distinct.is_some();
+
         if let Some(ref filter) = filter {
             static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
             ret.filter_total_number_of_criteria = 1;
@@ -728,6 +735,7 @@ impl SearchAggregator {
         if let Some(ref vector) = vector {
             ret.max_vector_size = vector.len();
         }
+        ret.retrieve_vectors |= retrieve_vectors;
 
         if query.is_finite_pagination() {
             let limit = hits_per_page.unwrap_or_else(DEFAULT_SEARCH_LIMIT);
@@ -795,6 +803,7 @@ impl SearchAggregator {
             sort_with_geo_point,
             sort_sum_of_criteria_terms,
             sort_total_number_of_criteria,
+            distinct,
             filter_with_geo_radius,
             filter_with_geo_bounding_box,
             filter_sum_of_criteria_terms,
@@ -803,6 +812,7 @@ impl SearchAggregator {
             attributes_to_search_on_total_number_of_uses,
             max_terms_number,
             max_vector_size,
+            retrieve_vectors,
             matching_strategy,
             max_limit,
             max_offset,
@@ -851,6 +861,9 @@ impl SearchAggregator {
         self.sort_total_number_of_criteria =
             self.sort_total_number_of_criteria.saturating_add(sort_total_number_of_criteria);
 
+        // distinct
+        self.distinct |= distinct;
+
         // filter
         self.filter_with_geo_radius |= filter_with_geo_radius;
         self.filter_with_geo_bounding_box |= filter_with_geo_bounding_box;
@@ -873,6 +886,7 @@ impl SearchAggregator {
 
         // vector
         self.max_vector_size = self.max_vector_size.max(max_vector_size);
+        self.retrieve_vectors |= retrieve_vectors;
         self.semantic_ratio |= semantic_ratio;
         self.hybrid |= hybrid;
         self.embedder |= embedder;
@@ -921,6 +935,7 @@ impl SearchAggregator {
             sort_with_geo_point,
             sort_sum_of_criteria_terms,
             sort_total_number_of_criteria,
+            distinct,
             filter_with_geo_radius,
             filter_with_geo_bounding_box,
             filter_sum_of_criteria_terms,
@@ -929,6 +944,7 @@ impl SearchAggregator {
             attributes_to_search_on_total_number_of_uses,
             max_terms_number,
             max_vector_size,
+            retrieve_vectors,
             matching_strategy,
             max_limit,
             max_offset,
@@ -977,6 +993,7 @@ impl SearchAggregator {
                 "with_geoPoint": sort_with_geo_point,
                 "avg_criteria_number": format!("{:.2}", sort_sum_of_criteria_terms as f64 / sort_total_number_of_criteria as f64),
             },
+            "distinct": distinct,
             "filter": {
                 "with_geoRadius": filter_with_geo_radius,
                 "with_geoBoundingBox": filter_with_geo_bounding_box,
@@ -991,6 +1008,7 @@ impl SearchAggregator {
             },
             "vector": {
                 "max_vector_size": max_vector_size,
+                "retrieve_vectors": retrieve_vectors,
             },
             "hybrid": {
                 "enabled": hybrid,
@@ -1079,6 +1097,7 @@ impl MultiSearchAggregator {
             page: _,
             hits_per_page: _,
             attributes_to_retrieve: _,
+            retrieve_vectors: _,
             attributes_to_crop: _,
             crop_length: _,
             attributes_to_highlight: _,
@@ -1087,6 +1106,7 @@ impl MultiSearchAggregator {
             show_matches_position: _,
             filter: _,
             sort: _,
+            distinct: _,
             facets: _,
             highlight_pre_tag: _,
             highlight_post_tag: _,
@@ -1534,6 +1554,9 @@ pub struct DocumentsFetchAggregator {
     // if a filter was used
     per_filter: bool,
 
+    #[serde(rename = "vector.retrieve_vectors")]
+    retrieve_vectors: bool,
+
     // pagination
     #[serde(rename = "pagination.max_limit")]
     max_limit: usize,
@@ -1543,18 +1566,21 @@ pub struct DocumentsFetchAggregator {
 
 impl DocumentsFetchAggregator {
     pub fn from_query(query: &DocumentFetchKind, request: &HttpRequest) -> Self {
-        let (limit, offset) = match query {
-            DocumentFetchKind::PerDocumentId => (1, 0),
-            DocumentFetchKind::Normal { limit, offset, .. } => (*limit, *offset),
+        let (limit, offset, retrieve_vectors) = match query {
+            DocumentFetchKind::PerDocumentId { retrieve_vectors } => (1, 0, *retrieve_vectors),
+            DocumentFetchKind::Normal { limit, offset, retrieve_vectors, .. } => {
+                (*limit, *offset, *retrieve_vectors)
+            }
         };
         Self {
             timestamp: Some(OffsetDateTime::now_utc()),
             user_agents: extract_user_agents(request).into_iter().collect(),
             total_received: 1,
-            per_document_id: matches!(query, DocumentFetchKind::PerDocumentId),
+            per_document_id: matches!(query, DocumentFetchKind::PerDocumentId { .. }),
             per_filter: matches!(query, DocumentFetchKind::Normal { with_filter, .. } if *with_filter),
             max_limit: limit,
             max_offset: offset,
+            retrieve_vectors,
         }
     }
 
@@ -1568,6 +1594,7 @@ impl DocumentsFetchAggregator {
             per_filter,
             max_limit,
             max_offset,
+            retrieve_vectors,
         } = other;
 
         if self.timestamp.is_none() {
@@ -1583,6 +1610,8 @@ impl DocumentsFetchAggregator {
 
         self.max_limit = self.max_limit.max(max_limit);
         self.max_offset = self.max_offset.max(max_offset);
+
+        self.retrieve_vectors |= retrieve_vectors;
     }
 
     pub fn into_event(self, user: &User, event_name: &str) -> Option<Track> {
@@ -1623,6 +1652,7 @@ pub struct SimilarAggregator {
 
     // Whether a non-default embedder was specified
     embedder: bool,
+    retrieve_vectors: bool,
 
     // pagination
     max_limit: usize,
@@ -1646,6 +1676,7 @@ impl SimilarAggregator {
             offset,
             limit,
             attributes_to_retrieve: _,
+            retrieve_vectors,
             show_ranking_score,
             show_ranking_score_details,
             filter,
@@ -1690,6 +1721,7 @@ impl SimilarAggregator {
         ret.ranking_score_threshold = ranking_score_threshold.is_some();
 
         ret.embedder = embedder.is_some();
+        ret.retrieve_vectors = *retrieve_vectors;
 
         ret
     }
@@ -1722,6 +1754,7 @@ impl SimilarAggregator {
             show_ranking_score_details,
             embedder,
             ranking_score_threshold,
+            retrieve_vectors,
         } = other;
 
         if self.timestamp.is_none() {
@@ -1751,6 +1784,7 @@ impl SimilarAggregator {
         }
 
         self.embedder |= embedder;
+        self.retrieve_vectors |= retrieve_vectors;
 
         // pagination
         self.max_limit = self.max_limit.max(max_limit);
@@ -1785,6 +1819,7 @@ impl SimilarAggregator {
             show_ranking_score_details,
             embedder,
             ranking_score_threshold,
+            retrieve_vectors,
         } = self;
 
         if total_received == 0 {
@@ -1811,6 +1846,9 @@ impl SimilarAggregator {
                 "avg_criteria_number": format!("{:.2}", filter_sum_of_criteria_terms as f64 / filter_total_number_of_criteria as f64),
                 "most_used_syntax": used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
             },
+            "vector": {
+                "retrieve_vectors": retrieve_vectors,
+            },
             "hybrid": {
                 "embedder": embedder,
             },
@@ -1,6 +1,6 @@
 use actix_web as aweb;
 use aweb::error::{JsonPayloadError, QueryPayloadError};
-use byte_unit::Byte;
+use byte_unit::{Byte, UnitType};
 use meilisearch_types::document_formats::{DocumentFormatError, PayloadType};
 use meilisearch_types::error::{Code, ErrorCode, ResponseError};
 use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError};
@@ -33,7 +33,7 @@ pub enum MeilisearchHttpError {
     TooManySearchRequests(usize),
     #[error("Internal error: Search limiter is down.")]
     SearchLimiterIsDown,
-    #[error("The provided payload reached the size limit. The maximum accepted payload size is {}.", Byte::from_bytes(*.0 as u64).get_appropriate_unit(true))]
+    #[error("The provided payload reached the size limit. The maximum accepted payload size is {}.", Byte::from_u64(*.0 as u64).get_appropriate_unit(UnitType::Binary))]
     PayloadTooLarge(usize),
     #[error("Two indexes must be given for each swap. The list `[{}]` contains {} indexes.",
         .0.iter().map(|uid| format!("\"{uid}\"")).collect::<Vec<_>>().join(", "), .0.len()
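
The rewritten `#[error(...)]` attribute above reflects the byte-unit 4 to 5 migration that runs through this compare: `Byte::from_bytes` became `Byte::from_u64`, and `get_appropriate_unit` now takes a `UnitType` instead of a bool. A small standalone sketch of the new API (the sample value and printed output are illustrative):

    use byte_unit::{Byte, UnitType};

    fn main() {
        // 100 * 2^20 bytes, rendered in a binary unit; AdjustedByte implements
        // Display, so the usual precision syntax applies.
        let adjusted = Byte::from_u64(100 * 1024 * 1024).get_appropriate_unit(UnitType::Binary);
        println!("The maximum accepted payload size is {adjusted:.2}."); // e.g. 100.00 MiB
    }
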
@@ -98,14 +98,29 @@ impl From<MeilisearchHttpError> for aweb::Error {
 
 impl From<aweb::error::PayloadError> for MeilisearchHttpError {
     fn from(error: aweb::error::PayloadError) -> Self {
-        MeilisearchHttpError::Payload(PayloadError::Payload(error))
+        match error {
+            aweb::error::PayloadError::Incomplete(_) => MeilisearchHttpError::Payload(
+                PayloadError::Payload(ActixPayloadError::IncompleteError),
+            ),
+            _ => MeilisearchHttpError::Payload(PayloadError::Payload(
+                ActixPayloadError::OtherError(error),
+            )),
+        }
     }
 }
 
+#[derive(Debug, thiserror::Error)]
+pub enum ActixPayloadError {
+    #[error("The provided payload is incomplete and cannot be parsed")]
+    IncompleteError,
+    #[error(transparent)]
+    OtherError(aweb::error::PayloadError),
+}
+
 #[derive(Debug, thiserror::Error)]
 pub enum PayloadError {
     #[error(transparent)]
-    Payload(aweb::error::PayloadError),
+    Payload(ActixPayloadError),
     #[error(transparent)]
     Json(JsonPayloadError),
     #[error(transparent)]
@@ -122,13 +137,15 @@ impl ErrorCode for PayloadError {
     fn error_code(&self) -> Code {
         match self {
             PayloadError::Payload(e) => match e {
-                aweb::error::PayloadError::Incomplete(_) => Code::Internal,
-                aweb::error::PayloadError::EncodingCorrupted => Code::Internal,
-                aweb::error::PayloadError::Overflow => Code::PayloadTooLarge,
-                aweb::error::PayloadError::UnknownLength => Code::Internal,
-                aweb::error::PayloadError::Http2Payload(_) => Code::Internal,
-                aweb::error::PayloadError::Io(_) => Code::Internal,
-                _ => todo!(),
+                ActixPayloadError::IncompleteError => Code::BadRequest,
+                ActixPayloadError::OtherError(error) => match error {
+                    aweb::error::PayloadError::EncodingCorrupted => Code::Internal,
+                    aweb::error::PayloadError::Overflow => Code::PayloadTooLarge,
+                    aweb::error::PayloadError::UnknownLength => Code::Internal,
+                    aweb::error::PayloadError::Http2Payload(_) => Code::Internal,
+                    aweb::error::PayloadError::Io(_) => Code::Internal,
+                    _ => todo!(),
+                },
             },
             PayloadError::Json(err) => match err {
                 JsonPayloadError::Overflow { .. } => Code::PayloadTooLarge,
@@ -12,6 +12,8 @@ use futures::Future;
 use meilisearch_auth::{AuthController, AuthFilter};
 use meilisearch_types::error::{Code, ResponseError};
 
+use self::policies::AuthError;
+
 pub struct GuardedData<P, D> {
     data: D,
     filters: AuthFilter,
@@ -35,12 +37,12 @@ impl<P, D> GuardedData<P, D> {
         let missing_master_key = auth.get_master_key().is_none();
 
         match Self::authenticate(auth, token, index).await? {
-            Some(filters) => match data {
+            Ok(filters) => match data {
                 Some(data) => Ok(Self { data, filters, _marker: PhantomData }),
                 None => Err(AuthenticationError::IrretrievableState.into()),
             },
-            None if missing_master_key => Err(AuthenticationError::MissingMasterKey.into()),
-            None => Err(AuthenticationError::InvalidToken.into()),
+            Err(_) if missing_master_key => Err(AuthenticationError::MissingMasterKey.into()),
+            Err(e) => Err(ResponseError::from_msg(e.to_string(), Code::InvalidApiKey)),
         }
     }
 
@@ -51,12 +53,12 @@ impl<P, D> GuardedData<P, D> {
         let missing_master_key = auth.get_master_key().is_none();
 
         match Self::authenticate(auth, String::new(), None).await? {
-            Some(filters) => match data {
+            Ok(filters) => match data {
                 Some(data) => Ok(Self { data, filters, _marker: PhantomData }),
                 None => Err(AuthenticationError::IrretrievableState.into()),
             },
-            None if missing_master_key => Err(AuthenticationError::MissingMasterKey.into()),
-            None => Err(AuthenticationError::MissingAuthorizationHeader.into()),
+            Err(_) if missing_master_key => Err(AuthenticationError::MissingMasterKey.into()),
+            Err(_) => Err(AuthenticationError::MissingAuthorizationHeader.into()),
         }
     }
 
@@ -64,7 +66,7 @@ impl<P, D> GuardedData<P, D> {
         auth: Data<AuthController>,
         token: String,
         index: Option<String>,
-    ) -> Result<Option<AuthFilter>, ResponseError>
+    ) -> Result<Result<AuthFilter, AuthError>, ResponseError>
     where
         P: Policy + 'static,
     {
@@ -127,13 +129,14 @@ pub trait Policy {
         auth: Data<AuthController>,
         token: &str,
         index: Option<&str>,
-    ) -> Option<AuthFilter>;
+    ) -> Result<AuthFilter, policies::AuthError>;
 }
 
 pub mod policies {
     use actix_web::web::Data;
     use jsonwebtoken::{decode, Algorithm, DecodingKey, Validation};
     use meilisearch_auth::{AuthController, AuthFilter, SearchRules};
+    use meilisearch_types::error::{Code, ErrorCode};
     // reexport actions in policies in order to be used in routes configuration.
     pub use meilisearch_types::keys::{actions, Action};
     use serde::{Deserialize, Serialize};
@@ -144,11 +147,53 @@ pub mod policies {
 
     enum TenantTokenOutcome {
         NotATenantToken,
-        Invalid,
-        Expired,
         Valid(Uuid, SearchRules),
     }
 
+    #[derive(thiserror::Error, Debug)]
+    pub enum AuthError {
+        #[error("Tenant token expired. Was valid up to `{exp}` and we're now `{now}`.")]
+        ExpiredTenantToken { exp: i64, now: i64 },
+        #[error("The provided API key is invalid.")]
+        InvalidApiKey,
+        #[error("The provided tenant token cannot acces the index `{index}`, allowed indexes are {allowed:?}.")]
+        TenantTokenAccessingnUnauthorizedIndex { index: String, allowed: Vec<String> },
+        #[error(
+            "The API key used to generate this tenant token cannot acces the index `{index}`."
+        )]
+        TenantTokenApiKeyAccessingnUnauthorizedIndex { index: String },
+        #[error(
+            "The API key cannot acces the index `{index}`, authorized indexes are {allowed:?}."
+        )]
+        ApiKeyAccessingnUnauthorizedIndex { index: String, allowed: Vec<String> },
+        #[error("The provided tenant token is invalid.")]
+        InvalidTenantToken,
+        #[error("Could not decode tenant token, {0}.")]
+        CouldNotDecodeTenantToken(jsonwebtoken::errors::Error),
+        #[error("Invalid action `{0}`.")]
+        InternalInvalidAction(u8),
+    }
+
+    impl From<jsonwebtoken::errors::Error> for AuthError {
+        fn from(error: jsonwebtoken::errors::Error) -> Self {
+            use jsonwebtoken::errors::ErrorKind;
+
+            match error.kind() {
+                ErrorKind::InvalidToken => AuthError::InvalidTenantToken,
+                _ => AuthError::CouldNotDecodeTenantToken(error),
+            }
+        }
+    }
+
+    impl ErrorCode for AuthError {
+        fn error_code(&self) -> Code {
+            match self {
+                AuthError::InternalInvalidAction(_) => Code::Internal,
+                _ => Code::InvalidApiKey,
+            }
+        }
+    }
+
     fn tenant_token_validation() -> Validation {
         let mut validation = Validation::default();
         validation.validate_exp = false;
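
The `AuthError` enum above replaces the old `Option`-shaped flow: helpers that used to collapse every failure into `None` now return `Result<_, AuthError>` so `?` carries a precise cause up to the response. A self-contained sketch of the pattern with illustrative names (none of these identifiers are from the diff):

    // Sketch only: a token is modeled as a plain integer to keep the example runnable.
    #[derive(Debug, thiserror::Error)]
    enum DemoAuthError {
        #[error("The provided tenant token is invalid.")]
        InvalidTenantToken,
        #[error("Could not decode tenant token, {0}.")]
        CouldNotDecodeTenantToken(String),
    }

    // Before: `fn extract_key_id(token: &str) -> Option<u64>` with `.ok()?` everywhere.
    fn extract_key_id(token: &str) -> Result<u64, DemoAuthError> {
        token.parse::<u64>().map_err(|e| DemoAuthError::CouldNotDecodeTenantToken(e.to_string()))
    }

    fn authenticate(token: &str) -> Result<u64, DemoAuthError> {
        let uid = extract_key_id(token)?; // was: `let Some(uid) = ... else { return None };`
        if uid == 0 {
            return Err(DemoAuthError::InvalidTenantToken);
        }
        Ok(uid)
    }
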
@@ -158,15 +203,15 @@ pub mod policies {
     }
 
     /// Extracts the key id used to sign the payload, without performing any validation.
-    fn extract_key_id(token: &str) -> Option<Uuid> {
+    fn extract_key_id(token: &str) -> Result<Uuid, AuthError> {
         let mut validation = tenant_token_validation();
         validation.insecure_disable_signature_validation();
         let dummy_key = DecodingKey::from_secret(b"secret");
-        let token_data = decode::<Claims>(token, &dummy_key, &validation).ok()?;
+        let token_data = decode::<Claims>(token, &dummy_key, &validation)?;
 
         // get token fields without validating it.
         let Claims { api_key_uid, .. } = token_data.claims;
-        Some(api_key_uid)
+        Ok(api_key_uid)
     }
 
     fn is_keys_action(action: u8) -> bool {
@@ -187,76 +232,102 @@ pub mod policies {
         auth: Data<AuthController>,
         token: &str,
         index: Option<&str>,
-    ) -> Option<AuthFilter> {
+    ) -> Result<AuthFilter, AuthError> {
         // authenticate if token is the master key.
         // Without a master key, all routes are accessible except the key-related routes.
         if auth.get_master_key().map_or_else(|| !is_keys_action(A), |mk| mk == token) {
-            return Some(AuthFilter::default());
+            return Ok(AuthFilter::default());
         }
 
         let (key_uuid, search_rules) =
             match ActionPolicy::<A>::authenticate_tenant_token(&auth, token) {
-                TenantTokenOutcome::Valid(key_uuid, search_rules) => {
+                Ok(TenantTokenOutcome::Valid(key_uuid, search_rules)) => {
                     (key_uuid, Some(search_rules))
                 }
-                TenantTokenOutcome::Expired => return None,
-                TenantTokenOutcome::Invalid => return None,
-                TenantTokenOutcome::NotATenantToken => {
-                    (auth.get_optional_uid_from_encoded_key(token.as_bytes()).ok()??, None)
-                }
+                Ok(TenantTokenOutcome::NotATenantToken)
+                | Err(AuthError::InvalidTenantToken) => (
+                    auth.get_optional_uid_from_encoded_key(token.as_bytes())
+                        .map_err(|_e| AuthError::InvalidApiKey)?
+                        .ok_or(AuthError::InvalidApiKey)?,
+                    None,
+                ),
+                Err(e) => return Err(e),
             };
 
         // check that the indexes are allowed
-        let action = Action::from_repr(A)?;
-        let auth_filter = auth.get_key_filters(key_uuid, search_rules).ok()?;
-        if auth.is_key_authorized(key_uuid, action, index).unwrap_or(false)
-            && index.map(|index| auth_filter.is_index_authorized(index)).unwrap_or(true)
-        {
-            return Some(auth_filter);
+        let action = Action::from_repr(A).ok_or(AuthError::InternalInvalidAction(A))?;
+        let auth_filter = auth
+            .get_key_filters(key_uuid, search_rules)
+            .map_err(|_e| AuthError::InvalidApiKey)?;
+
+        // First check if the index is authorized in the tenant token, this is a public
+        // information, we can return a nice error message.
+        if let Some(index) = index {
+            if !auth_filter.tenant_token_is_index_authorized(index) {
+                return Err(AuthError::TenantTokenAccessingnUnauthorizedIndex {
+                    index: index.to_string(),
+                    allowed: auth_filter.tenant_token_list_index_authorized(),
+                });
+            }
+            if !auth_filter.api_key_is_index_authorized(index) {
+                if auth_filter.is_tenant_token() {
+                    // If the error comes from a tenant token we cannot share the list
+                    // of authorized indexes in the API key. This is not public information.
+                    return Err(AuthError::TenantTokenApiKeyAccessingnUnauthorizedIndex {
+                        index: index.to_string(),
+                    });
+                } else {
+                    // Otherwise we can share the list
+                    // of authorized indexes in the API key.
+                    return Err(AuthError::ApiKeyAccessingnUnauthorizedIndex {
+                        index: index.to_string(),
+                        allowed: auth_filter.api_key_list_index_authorized(),
+                    });
+                }
+            }
+        }
+        if auth.is_key_authorized(key_uuid, action, index).unwrap_or(false) {
+            return Ok(auth_filter);
         }
 
-        None
+        Err(AuthError::InvalidApiKey)
     }
 }
 
 impl<const A: u8> ActionPolicy<A> {
-    fn authenticate_tenant_token(auth: &AuthController, token: &str) -> TenantTokenOutcome {
+    fn authenticate_tenant_token(
+        auth: &AuthController,
+        token: &str,
+    ) -> Result<TenantTokenOutcome, AuthError> {
         // Only search action can be accessed by a tenant token.
         if A != actions::SEARCH {
-            return TenantTokenOutcome::NotATenantToken;
+            return Ok(TenantTokenOutcome::NotATenantToken);
        }
 
-        let uid = if let Some(uid) = extract_key_id(token) {
-            uid
-        } else {
-            return TenantTokenOutcome::NotATenantToken;
-        };
+        let uid = extract_key_id(token)?;
 
         // Check if tenant token is valid.
         let key = if let Some(key) = auth.generate_key(uid) {
             key
         } else {
-            return TenantTokenOutcome::Invalid;
+            return Err(AuthError::InvalidTenantToken);
        };
 
-        let data = if let Ok(data) = decode::<Claims>(
+        let data = decode::<Claims>(
             token,
             &DecodingKey::from_secret(key.as_bytes()),
             &tenant_token_validation(),
-        ) {
-            data
-        } else {
-            return TenantTokenOutcome::Invalid;
-        };
+        )?;
 
         // Check if token is expired.
         if let Some(exp) = data.claims.exp {
-            if OffsetDateTime::now_utc().unix_timestamp() > exp {
-                return TenantTokenOutcome::Expired;
+            let now = OffsetDateTime::now_utc().unix_timestamp();
+            if now > exp {
+                return Err(AuthError::ExpiredTenantToken { exp, now });
            }
         }
 
-        TenantTokenOutcome::Valid(uid, data.claims.search_rules)
+        Ok(TenantTokenOutcome::Valid(uid, data.claims.search_rules))
     }
 }
 
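The reworked expiry check captures `now` once so it can be reported inside `ExpiredTenantToken`. A minimal standalone version of that logic, assuming the `time` crate (the function name is illustrative):

    use time::OffsetDateTime;

    /// Returns Err((exp, now)) when the token's optional `exp` claim is in the past.
    fn check_tenant_token_exp(exp: Option<i64>) -> Result<(), (i64, i64)> {
        if let Some(exp) = exp {
            let now = OffsetDateTime::now_utc().unix_timestamp();
            if now > exp {
                return Err((exp, now));
            }
        }
        Ok(())
    }
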
@@ -15,6 +15,7 @@ use std::fs::File;
 use std::io::{BufReader, BufWriter};
 use std::num::NonZeroUsize;
 use std::path::Path;
+use std::str::FromStr;
 use std::sync::Arc;
 use std::thread::{self, available_parallelism};
 use std::time::Duration;
@@ -23,13 +24,13 @@ use actix_cors::Cors;
 use actix_http::body::MessageBody;
 use actix_web::dev::{ServiceFactory, ServiceResponse};
 use actix_web::error::JsonPayloadError;
+use actix_web::http::header::{CONTENT_TYPE, USER_AGENT};
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest};
 use analytics::Analytics;
 use anyhow::bail;
 use error::PayloadError;
 use extractors::payload::PayloadConfig;
-use http::header::CONTENT_TYPE;
 use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
 use meilisearch_auth::AuthController;
 use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
@@ -167,7 +168,7 @@ impl tracing_actix_web::RootSpanBuilder for AwebTracingLogger {
         let conn_info = request.connection_info();
         let headers = request.headers();
         let user_agent = headers
-            .get(http::header::USER_AGENT)
+            .get(USER_AGENT)
             .map(|value| String::from_utf8_lossy(value.as_bytes()).into_owned())
             .unwrap_or_default();
         info_span!("HTTP request", method = %request.method(), host = conn_info.host(), route = %request.path(), query_parameters = %request.query_string(), %user_agent, status_code = Empty, error = Empty)
@@ -300,15 +301,15 @@ fn open_or_create_database_unchecked(
|
|||||||
dumps_path: opt.dump_dir.clone(),
|
dumps_path: opt.dump_dir.clone(),
|
||||||
webhook_url: opt.task_webhook_url.as_ref().map(|url| url.to_string()),
|
webhook_url: opt.task_webhook_url.as_ref().map(|url| url.to_string()),
|
||||||
webhook_authorization_header: opt.task_webhook_authorization_header.clone(),
|
webhook_authorization_header: opt.task_webhook_authorization_header.clone(),
|
||||||
task_db_size: opt.max_task_db_size.get_bytes() as usize,
|
task_db_size: opt.max_task_db_size.as_u64() as usize,
|
||||||
index_base_map_size: opt.max_index_size.get_bytes() as usize,
|
index_base_map_size: opt.max_index_size.as_u64() as usize,
|
||||||
enable_mdb_writemap: opt.experimental_reduce_indexing_memory_usage,
|
enable_mdb_writemap: opt.experimental_reduce_indexing_memory_usage,
|
||||||
indexer_config: (&opt.indexer_options).try_into()?,
|
indexer_config: (&opt.indexer_options).try_into()?,
|
||||||
autobatching_enabled: true,
|
autobatching_enabled: true,
|
||||||
cleanup_enabled: !opt.experimental_replication_parameters,
|
cleanup_enabled: !opt.experimental_replication_parameters,
|
||||||
max_number_of_tasks: 1_000_000,
|
max_number_of_tasks: 1_000_000,
|
||||||
max_number_of_batched_tasks: opt.experimental_max_number_of_batched_tasks,
|
max_number_of_batched_tasks: opt.experimental_max_number_of_batched_tasks,
|
||||||
index_growth_amount: byte_unit::Byte::from_str("10GiB").unwrap().get_bytes() as usize,
|
index_growth_amount: byte_unit::Byte::from_str("10GiB").unwrap().as_u64() as usize,
|
||||||
index_count: DEFAULT_INDEX_COUNT,
|
index_count: DEFAULT_INDEX_COUNT,
|
||||||
instance_features,
|
instance_features,
|
||||||
})?)
|
})?)
|
||||||
@@ -476,7 +477,7 @@ pub fn configure_data(
|
|||||||
opt.experimental_search_queue_size,
|
opt.experimental_search_queue_size,
|
||||||
available_parallelism().unwrap_or(NonZeroUsize::new(2).unwrap()),
|
available_parallelism().unwrap_or(NonZeroUsize::new(2).unwrap()),
|
||||||
);
|
);
|
||||||
let http_payload_size_limit = opt.http_payload_size_limit.get_bytes() as usize;
|
let http_payload_size_limit = opt.http_payload_size_limit.as_u64() as usize;
|
||||||
config
|
config
|
||||||
.app_data(index_scheduler)
|
.app_data(index_scheduler)
|
||||||
.app_data(auth)
|
.app_data(auth)
|
||||||
|
@@ -9,7 +9,7 @@ use std::str::FromStr;
 use std::sync::Arc;
 use std::{env, fmt, fs};

-use byte_unit::{Byte, ByteError};
+use byte_unit::{Byte, ParseError, UnitType};
 use clap::Parser;
 use meilisearch_types::features::InstanceTogglableFeatures;
 use meilisearch_types::milli::update::IndexerConfig;
@@ -674,7 +674,7 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {

         Ok(Self {
             log_every_n: Some(DEFAULT_LOG_EVERY_N),
-            max_memory: other.max_indexing_memory.map(|b| b.get_bytes() as usize),
+            max_memory: other.max_indexing_memory.map(|b| b.as_u64() as usize),
             thread_pool: Some(thread_pool),
             max_positions_per_attributes: None,
             skip_index_budget: other.skip_index_budget,
@@ -688,23 +688,25 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
 pub struct MaxMemory(Option<Byte>);

 impl FromStr for MaxMemory {
-    type Err = ByteError;
+    type Err = ParseError;

-    fn from_str(s: &str) -> Result<MaxMemory, ByteError> {
+    fn from_str(s: &str) -> Result<MaxMemory, Self::Err> {
         Byte::from_str(s).map(Some).map(MaxMemory)
     }
 }

 impl Default for MaxMemory {
     fn default() -> MaxMemory {
-        MaxMemory(total_memory_bytes().map(|bytes| bytes * 2 / 3).map(Byte::from_bytes))
+        MaxMemory(total_memory_bytes().map(|bytes| bytes * 2 / 3).map(Byte::from_u64))
     }
 }

 impl fmt::Display for MaxMemory {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self.0 {
-            Some(memory) => write!(f, "{}", memory.get_appropriate_unit(true)),
+            Some(memory) => {
+                write!(f, "{}", memory.get_appropriate_unit(UnitType::Binary))
+            }
             None => f.write_str("unknown"),
         }
     }
@@ -844,11 +846,11 @@ fn default_env() -> String {
 }

 fn default_max_index_size() -> Byte {
-    Byte::from_bytes(INDEX_SIZE)
+    Byte::from_u64(INDEX_SIZE)
 }

 fn default_max_task_db_size() -> Byte {
-    Byte::from_bytes(TASK_DB_SIZE)
+    Byte::from_u64(TASK_DB_SIZE)
 }

 fn default_http_payload_size_limit() -> Byte {
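Note: the two files above migrate from byte_unit 4.x to 5.x, where `Byte::from_bytes`/`get_bytes` became `Byte::from_u64`/`as_u64`, `ByteError` became `ParseError`, and `get_appropriate_unit` takes a `UnitType` instead of a bool. A minimal before/after sketch (the size value is illustrative):

    use byte_unit::{Byte, UnitType};

    fn main() {
        let size = Byte::from_u64(10 * 1024 * 1024); // 4.x: Byte::from_bytes(...)
        let raw: u64 = size.as_u64();                // 4.x: size.get_bytes()
        // 4.x: size.get_appropriate_unit(true) -- `true` meant binary units
        let pretty = size.get_appropriate_unit(UnitType::Binary);
        println!("{raw} bytes ~ {pretty}");
    }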
@@ -16,6 +16,7 @@ use meilisearch_types::error::{Code, ResponseError};
 use meilisearch_types::heed::RoTxn;
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::milli::update::IndexDocumentsMethod;
+use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors;
 use meilisearch_types::milli::DocumentId;
 use meilisearch_types::star_or::OptionStarOrList;
 use meilisearch_types::tasks::KindWithContent;
@@ -39,7 +40,7 @@ use crate::extractors::sequential_extractor::SeqHandler;
 use crate::routes::{
     get_task_id, is_dry_run, PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT,
 };
-use crate::search::parse_filter;
+use crate::search::{parse_filter, RetrieveVectors};
 use crate::Opt;

 static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
@@ -94,6 +95,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 pub struct GetDocument {
     #[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
     fields: OptionStarOrList<String>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidDocumentRetrieveVectors>)]
+    retrieve_vectors: Param<bool>,
 }

 pub async fn get_document(
@@ -107,13 +110,20 @@ pub async fn get_document(
     debug!(parameters = ?params, "Get document");
     let index_uid = IndexUid::try_from(index_uid)?;

-    analytics.get_fetch_documents(&DocumentFetchKind::PerDocumentId, &req);
-
-    let GetDocument { fields } = params.into_inner();
+    let GetDocument { fields, retrieve_vectors: param_retrieve_vectors } = params.into_inner();
     let attributes_to_retrieve = fields.merge_star_and_none();

+    let features = index_scheduler.features();
+    let retrieve_vectors = RetrieveVectors::new(param_retrieve_vectors.0, features)?;
+
+    analytics.get_fetch_documents(
+        &DocumentFetchKind::PerDocumentId { retrieve_vectors: param_retrieve_vectors.0 },
+        &req,
+    );
+
     let index = index_scheduler.index(&index_uid)?;
-    let document = retrieve_document(&index, &document_id, attributes_to_retrieve)?;
+    let document =
+        retrieve_document(&index, &document_id, attributes_to_retrieve, retrieve_vectors)?;
     debug!(returns = ?document, "Get document");
     Ok(HttpResponse::Ok().json(document))
 }
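Note: `GET /indexes/{index_uid}/documents/{document_id}` now accepts a `retrieveVectors` query parameter. When it is `true` (and the vector store feature is enabled), the returned document carries a `_vectors` object keyed by embedder name, built from `ExplicitVectors { embeddings, regenerate }`. A sketch of the expected shape — the field values and the "default" embedder name are illustrative, not taken from this diff:

    use serde_json::json;

    // Approximate response document for `?retrieveVectors=true`.
    let document = json!({
        "id": 1,
        "title": "Shazam!",
        "_vectors": {
            "default": { "embeddings": [[0.1, 0.2, 0.3]], "regenerate": false }
        }
    });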
@@ -153,6 +163,8 @@ pub struct BrowseQueryGet {
     limit: Param<usize>,
     #[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
     fields: OptionStarOrList<String>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidDocumentRetrieveVectors>)]
+    retrieve_vectors: Param<bool>,
     #[deserr(default, error = DeserrQueryParamError<InvalidDocumentFilter>)]
     filter: Option<String>,
 }
@@ -166,6 +178,8 @@ pub struct BrowseQuery {
     limit: usize,
     #[deserr(default, error = DeserrJsonError<InvalidDocumentFields>)]
     fields: Option<Vec<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidDocumentRetrieveVectors>)]
+    retrieve_vectors: bool,
     #[deserr(default, error = DeserrJsonError<InvalidDocumentFilter>)]
     filter: Option<Value>,
 }
@@ -185,6 +199,7 @@ pub async fn documents_by_query_post(
             with_filter: body.filter.is_some(),
             limit: body.limit,
             offset: body.offset,
+            retrieve_vectors: body.retrieve_vectors,
         },
         &req,
     );
@@ -201,7 +216,7 @@ pub async fn get_documents(
 ) -> Result<HttpResponse, ResponseError> {
     debug!(parameters = ?params, "Get documents GET");

-    let BrowseQueryGet { limit, offset, fields, filter } = params.into_inner();
+    let BrowseQueryGet { limit, offset, fields, retrieve_vectors, filter } = params.into_inner();

     let filter = match filter {
         Some(f) => match serde_json::from_str(&f) {
@@ -215,6 +230,7 @@ pub async fn get_documents(
         offset: offset.0,
         limit: limit.0,
         fields: fields.merge_star_and_none(),
+        retrieve_vectors: retrieve_vectors.0,
         filter,
     };

@@ -223,6 +239,7 @@ pub async fn get_documents(
             with_filter: query.filter.is_some(),
             limit: query.limit,
             offset: query.offset,
+            retrieve_vectors: query.retrieve_vectors,
         },
         &req,
     );
@@ -236,10 +253,14 @@ fn documents_by_query(
     query: BrowseQuery,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
-    let BrowseQuery { offset, limit, fields, filter } = query;
+    let BrowseQuery { offset, limit, fields, retrieve_vectors, filter } = query;

+    let features = index_scheduler.features();
+    let retrieve_vectors = RetrieveVectors::new(retrieve_vectors, features)?;
+
     let index = index_scheduler.index(&index_uid)?;
-    let (total, documents) = retrieve_documents(&index, offset, limit, filter, fields)?;
+    let (total, documents) =
+        retrieve_documents(&index, offset, limit, filter, fields, retrieve_vectors)?;

     let ret = PaginationView::new(offset, limit, total as usize, documents);

@@ -579,14 +600,54 @@ fn some_documents<'a, 't: 'a>(
     index: &'a Index,
     rtxn: &'t RoTxn,
     doc_ids: impl IntoIterator<Item = DocumentId> + 'a,
+    retrieve_vectors: RetrieveVectors,
 ) -> Result<impl Iterator<Item = Result<Document, ResponseError>> + 'a, ResponseError> {
     let fields_ids_map = index.fields_ids_map(rtxn)?;
+    let dictionary = index.document_decompression_dictionary(rtxn)?;
     let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
+    let embedding_configs = index.embedding_configs(rtxn)?;
+    let mut buffer = Vec::new();

-    Ok(index.iter_documents(rtxn, doc_ids)?.map(move |ret| {
-        ret.map_err(ResponseError::from).and_then(|(_key, document)| -> Result<_, ResponseError> {
-            Ok(milli::obkv_to_json(&all_fields, &fields_ids_map, document)?)
-        })
+    Ok(index.iter_compressed_documents(rtxn, doc_ids)?.map(move |ret| {
+        ret.map_err(ResponseError::from).and_then(
+            |(key, compressed_document)| -> Result<_, ResponseError> {
+                let document = compressed_document
+                    .decompress_with_optional_dictionary(&mut buffer, dictionary.as_ref())?;
+                let mut document = milli::obkv_to_json(&all_fields, &fields_ids_map, document)?;
+                match retrieve_vectors {
+                    RetrieveVectors::Ignore => {}
+                    RetrieveVectors::Hide => {
+                        document.remove("_vectors");
+                    }
+                    RetrieveVectors::Retrieve => {
+                        // Clippy is simply wrong
+                        #[allow(clippy::manual_unwrap_or_default)]
+                        let mut vectors = match document.remove("_vectors") {
+                            Some(Value::Object(map)) => map,
+                            _ => Default::default(),
+                        };
+                        for (name, vector) in index.embeddings(rtxn, key)? {
+                            let user_provided = embedding_configs
+                                .iter()
+                                .find(|conf| conf.name == name)
+                                .is_some_and(|conf| conf.user_provided.contains(key));
+                            let embeddings = ExplicitVectors {
+                                embeddings: Some(vector.into()),
+                                regenerate: !user_provided,
+                            };
+                            vectors.insert(
+                                name,
+                                serde_json::to_value(embeddings)
+                                    .map_err(MeilisearchHttpError::from)?,
+                            );
+                        }
+                        document.insert("_vectors".into(), vectors.into());
+                    }
+                }

+                Ok(document)
+            },
+        )
     }))
 }

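Note: in `some_documents` above, stored embeddings are merged back into the `_vectors` field, with `regenerate` set to the negation of "user provided" so that only embedder-generated vectors are recomputed on document update. A standalone sketch of that merge over plain JSON values (types simplified; this is not the actual milli API):

    use serde_json::{json, Map, Value};

    fn merge_vectors(
        mut document: Map<String, Value>,
        stored: Vec<(String, Value, bool)>, // (embedder name, embeddings, user_provided)
    ) -> Map<String, Value> {
        // Keep whatever `_vectors` content the document already had.
        let mut vectors = match document.remove("_vectors") {
            Some(Value::Object(map)) => map,
            _ => Map::new(),
        };
        for (name, embeddings, user_provided) in stored {
            vectors.insert(
                name,
                json!({ "embeddings": embeddings, "regenerate": !user_provided }),
            );
        }
        document.insert("_vectors".into(), Value::Object(vectors));
        document
    }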
@@ -596,6 +657,7 @@ fn retrieve_documents<S: AsRef<str>>(
     limit: usize,
     filter: Option<Value>,
     attributes_to_retrieve: Option<Vec<S>>,
+    retrieve_vectors: RetrieveVectors,
 ) -> Result<(u64, Vec<Document>), ResponseError> {
     let rtxn = index.read_txn()?;
     let filter = &filter;
@@ -620,53 +682,57 @@ fn retrieve_documents<S: AsRef<str>>(
     let (it, number_of_documents) = {
         let number_of_documents = candidates.len();
         (
-            some_documents(index, &rtxn, candidates.into_iter().skip(offset).take(limit))?,
+            some_documents(
+                index,
+                &rtxn,
+                candidates.into_iter().skip(offset).take(limit),
+                retrieve_vectors,
+            )?,
             number_of_documents,
         )
     };

-    let documents: Result<Vec<_>, ResponseError> = it
+    let documents: Vec<_> = it
         .map(|document| {
             Ok(match &attributes_to_retrieve {
                 Some(attributes_to_retrieve) => permissive_json_pointer::select_values(
                     &document?,
-                    attributes_to_retrieve.iter().map(|s| s.as_ref()),
+                    attributes_to_retrieve.iter().map(|s| s.as_ref()).chain(
+                        (retrieve_vectors == RetrieveVectors::Retrieve).then_some("_vectors"),
+                    ),
                 ),
                 None => document?,
             })
         })
-        .collect();
+        .collect::<Result<_, ResponseError>>()?;

-    Ok((number_of_documents, documents?))
+    Ok((number_of_documents, documents))
 }

 fn retrieve_document<S: AsRef<str>>(
     index: &Index,
     doc_id: &str,
     attributes_to_retrieve: Option<Vec<S>>,
+    retrieve_vectors: RetrieveVectors,
 ) -> Result<Document, ResponseError> {
     let txn = index.read_txn()?;

-    let fields_ids_map = index.fields_ids_map(&txn)?;
-    let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();

     let internal_id = index
         .external_documents_ids()
         .get(&txn, doc_id)?
         .ok_or_else(|| MeilisearchHttpError::DocumentNotFound(doc_id.to_string()))?;

-    let document = index
-        .documents(&txn, std::iter::once(internal_id))?
-        .into_iter()
+    let document = some_documents(index, &txn, Some(internal_id), retrieve_vectors)?
         .next()
-        .map(|(_, d)| d)
-        .ok_or_else(|| MeilisearchHttpError::DocumentNotFound(doc_id.to_string()))?;
+        .ok_or_else(|| MeilisearchHttpError::DocumentNotFound(doc_id.to_string()))??;

-    let document = meilisearch_types::milli::obkv_to_json(&all_fields, &fields_ids_map, document)?;
     let document = match &attributes_to_retrieve {
         Some(attributes_to_retrieve) => permissive_json_pointer::select_values(
             &document,
-            attributes_to_retrieve.iter().map(|s| s.as_ref()),
+            attributes_to_retrieve
+                .iter()
+                .map(|s| s.as_ref())
+                .chain((retrieve_vectors == RetrieveVectors::Retrieve).then_some("_vectors")),
         ),
         None => document,
     };
@@ -115,6 +115,7 @@ impl From<FacetSearchQuery> for SearchQuery {
             page: None,
             hits_per_page: None,
             attributes_to_retrieve: None,
+            retrieve_vectors: false,
             attributes_to_crop: None,
             crop_length: DEFAULT_CROP_LENGTH(),
             attributes_to_highlight: None,
@@ -123,6 +124,7 @@ impl From<FacetSearchQuery> for SearchQuery {
             show_ranking_score_details: false,
             filter,
             sort: None,
+            distinct: None,
             facets: None,
             highlight_pre_tag: DEFAULT_HIGHLIGHT_PRE_TAG(),
             highlight_post_tag: DEFAULT_HIGHLIGHT_POST_TAG(),
@@ -20,9 +20,9 @@ use crate::extractors::sequential_extractor::SeqHandler;
 use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
 use crate::search::{
     add_search_rules, perform_search, HybridQuery, MatchingStrategy, RankingScoreThreshold,
-    SearchKind, SearchQuery, SemanticRatio, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
-    DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
-    DEFAULT_SEARCH_OFFSET, DEFAULT_SEMANTIC_RATIO,
+    RetrieveVectors, SearchKind, SearchQuery, SemanticRatio, DEFAULT_CROP_LENGTH,
+    DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
+    DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET, DEFAULT_SEMANTIC_RATIO,
 };
 use crate::search_queue::SearchQueue;

@@ -51,6 +51,8 @@ pub struct SearchQueryGet {
     hits_per_page: Option<Param<usize>>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToRetrieve>)]
     attributes_to_retrieve: Option<CS<String>>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchRetrieveVectors>)]
+    retrieve_vectors: Param<bool>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToCrop>)]
     attributes_to_crop: Option<CS<String>>,
     #[deserr(default = Param(DEFAULT_CROP_LENGTH()), error = DeserrQueryParamError<InvalidSearchCropLength>)]
@@ -61,6 +63,8 @@ pub struct SearchQueryGet {
     filter: Option<String>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchSort>)]
     sort: Option<String>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidSearchDistinct>)]
+    distinct: Option<String>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchShowMatchesPosition>)]
     show_matches_position: Param<bool>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchShowRankingScore>)]
@@ -153,11 +157,13 @@ impl From<SearchQueryGet> for SearchQuery {
             page: other.page.as_deref().copied(),
             hits_per_page: other.hits_per_page.as_deref().copied(),
             attributes_to_retrieve: other.attributes_to_retrieve.map(|o| o.into_iter().collect()),
+            retrieve_vectors: other.retrieve_vectors.0,
             attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
             crop_length: other.crop_length.0,
             attributes_to_highlight: other.attributes_to_highlight.map(|o| o.into_iter().collect()),
             filter,
             sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
+            distinct: other.distinct,
             show_matches_position: other.show_matches_position.0,
             show_ranking_score: other.show_ranking_score.0,
             show_ranking_score_details: other.show_ranking_score_details.0,
@@ -222,10 +228,12 @@ pub async fn search_with_url_query(
     let features = index_scheduler.features();

     let search_kind = search_kind(&query, index_scheduler.get_ref(), &index, features)?;
+    let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)?;
     let _permit = search_queue.try_get_search_permit().await?;
-    let search_result =
-        tokio::task::spawn_blocking(move || perform_search(&index, query, search_kind)).await?;
+    let search_result = tokio::task::spawn_blocking(move || {
+        perform_search(&index, query, search_kind, retrieve_vector)
+    })
+    .await?;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
     }
@@ -262,10 +270,13 @@ pub async fn search_with_post(
     let features = index_scheduler.features();

     let search_kind = search_kind(&query, index_scheduler.get_ref(), &index, features)?;
+    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?;

     let _permit = search_queue.try_get_search_permit().await?;
-    let search_result =
-        tokio::task::spawn_blocking(move || perform_search(&index, query, search_kind)).await?;
+    let search_result = tokio::task::spawn_blocking(move || {
+        perform_search(&index, query, search_kind, retrieve_vectors)
+    })
+    .await?;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
         if search_result.degraded {
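Note: in both handlers above, `perform_search` is CPU-bound and therefore runs under `tokio::task::spawn_blocking`; awaiting the join handle yields a nested result, and the `?` after `.await` only unwraps the join layer. A minimal generic sketch of that double-`Result` shape (standalone, not Meilisearch types):

    use tokio::task;

    async fn run() -> Result<(), Box<dyn std::error::Error>> {
        // Outer layer: JoinError if the blocking task panicked or was cancelled.
        // Inner layer: the closure's own Result, inspected afterwards.
        let inner: Result<u32, String> =
            task::spawn_blocking(|| Ok::<u32, String>(42)).await?;
        if let Ok(ref value) = inner {
            println!("got {value}");
        }
        Ok(())
    }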
@@ -287,11 +298,10 @@ pub fn search_kind(
     features: RoFeatures,
 ) -> Result<SearchKind, ResponseError> {
     if query.vector.is_some() {
-        features.check_vector("Passing `vector` as a query parameter")?;
+        features.check_vector("Passing `vector` as a parameter")?;
     }

     if query.hybrid.is_some() {
-        features.check_vector("Passing `hybrid` as a query parameter")?;
+        features.check_vector("Passing `hybrid` as a parameter")?;
     }

     // regardless of anything, always do a keyword search when we don't have a vector and the query is whitespace or missing
@@ -4,11 +4,7 @@ use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use index_scheduler::IndexScheduler;
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
-use meilisearch_types::error::deserr_codes::{
-    InvalidEmbedder, InvalidSimilarAttributesToRetrieve, InvalidSimilarFilter, InvalidSimilarId,
-    InvalidSimilarLimit, InvalidSimilarOffset, InvalidSimilarRankingScoreThreshold,
-    InvalidSimilarShowRankingScore, InvalidSimilarShowRankingScoreDetails,
-};
+use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::{ErrorCode as _, ResponseError};
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::keys::actions;
@@ -21,8 +17,8 @@ use crate::analytics::{Analytics, SimilarAggregator};
 use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::search::{
-    add_search_rules, perform_similar, RankingScoreThresholdSimilar, SearchKind, SimilarQuery,
-    SimilarResult, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
+    add_search_rules, perform_similar, RankingScoreThresholdSimilar, RetrieveVectors, SearchKind,
+    SimilarQuery, SimilarResult, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
 };

 pub fn configure(cfg: &mut web::ServiceConfig) {
@@ -97,6 +93,8 @@ async fn similar(

     features.check_vector("Using the similar API")?;

+    let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?;

     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
         add_search_rules(&mut query.filter, search_rules);
@@ -107,8 +105,10 @@ async fn similar(
     let (embedder_name, embedder) =
         SearchKind::embedder(&index_scheduler, &index, query.embedder.as_deref(), None)?;

-    tokio::task::spawn_blocking(move || perform_similar(&index, query, embedder_name, embedder))
-        .await?
+    tokio::task::spawn_blocking(move || {
+        perform_similar(&index, query, embedder_name, embedder, retrieve_vectors)
+    })
+    .await?
 }

 #[derive(Debug, deserr::Deserr)]
@@ -122,6 +122,8 @@ pub struct SimilarQueryGet {
     limit: Param<usize>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSimilarAttributesToRetrieve>)]
     attributes_to_retrieve: Option<CS<String>>,
+    #[deserr(default, error = DeserrQueryParamError<InvalidSimilarRetrieveVectors>)]
+    retrieve_vectors: Param<bool>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSimilarFilter>)]
     filter: Option<String>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSimilarShowRankingScore>)]
@@ -156,6 +158,7 @@ impl TryFrom<SimilarQueryGet> for SimilarQuery {
             offset,
             limit,
             attributes_to_retrieve,
+            retrieve_vectors,
             filter,
             show_ranking_score,
             show_ranking_score_details,
@@ -180,6 +183,7 @@ impl TryFrom<SimilarQueryGet> for SimilarQuery {
             filter,
             embedder,
             attributes_to_retrieve: attributes_to_retrieve.map(|o| o.into_iter().collect()),
+            retrieve_vectors: retrieve_vectors.0,
             show_ranking_score: show_ranking_score.0,
             show_ranking_score_details: show_ranking_score_details.0,
             ranking_score_threshold: ranking_score_threshold.map(|x| x.0),
@@ -15,7 +15,7 @@ use crate::extractors::authentication::{AuthenticationError, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::routes::indexes::search::search_kind;
 use crate::search::{
-    add_search_rules, perform_search, SearchQueryWithIndex, SearchResultWithIndex,
+    add_search_rules, perform_search, RetrieveVectors, SearchQueryWithIndex, SearchResultWithIndex,
 };
 use crate::search_queue::SearchQueue;

@@ -83,11 +83,14 @@ pub async fn multi_search_with_post(

             let search_kind = search_kind(&query, index_scheduler.get_ref(), &index, features)
                 .with_index(query_index)?;
+            let retrieve_vector =
+                RetrieveVectors::new(query.retrieve_vectors, features).with_index(query_index)?;

-            let search_result =
-                tokio::task::spawn_blocking(move || perform_search(&index, query, search_kind))
-                    .await
-                    .with_index(query_index)?;
+            let search_result = tokio::task::spawn_blocking(move || {
+                perform_search(&index, query, search_kind, retrieve_vector)
+            })
+            .await
+            .with_index(query_index)?;

             search_results.push(SearchResultWithIndex {
                 index_uid: index_uid.into_inner(),
@@ -15,6 +15,7 @@ use meilisearch_types::error::{Code, ResponseError};
 use meilisearch_types::heed::RoTxn;
 use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy};
+use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors;
 use meilisearch_types::milli::vector::Embedder;
 use meilisearch_types::milli::{FacetValueHit, OrderBy, SearchForFacetValues, TimeBudget};
 use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
@@ -59,6 +60,8 @@ pub struct SearchQuery {
     pub hits_per_page: Option<usize>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToRetrieve>)]
     pub attributes_to_retrieve: Option<BTreeSet<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchRetrieveVectors>)]
+    pub retrieve_vectors: bool,
     #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToCrop>)]
     pub attributes_to_crop: Option<Vec<String>>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
@@ -75,6 +78,8 @@ pub struct SearchQuery {
     pub filter: Option<Value>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchSort>)]
     pub sort: Option<Vec<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchDistinct>)]
+    pub distinct: Option<String>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchFacets>)]
     pub facets: Option<Vec<String>>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
@@ -141,6 +146,7 @@ impl fmt::Debug for SearchQuery {
             page,
             hits_per_page,
             attributes_to_retrieve,
+            retrieve_vectors,
             attributes_to_crop,
             crop_length,
             attributes_to_highlight,
@@ -149,6 +155,7 @@ impl fmt::Debug for SearchQuery {
             show_ranking_score_details,
             filter,
             sort,
+            distinct,
             facets,
             highlight_pre_tag,
             highlight_post_tag,
@@ -173,6 +180,9 @@ impl fmt::Debug for SearchQuery {
         if let Some(q) = q {
             debug.field("q", &q);
         }
+        if *retrieve_vectors {
+            debug.field("retrieve_vectors", &retrieve_vectors);
+        }
         if let Some(v) = vector {
             if v.len() < 10 {
                 debug.field("vector", &v);
@@ -195,6 +205,9 @@ impl fmt::Debug for SearchQuery {
         if let Some(sort) = sort {
             debug.field("sort", &sort);
         }
+        if let Some(distinct) = distinct {
+            debug.field("distinct", &distinct);
+        }
         if let Some(facets) = facets {
             debug.field("facets", &facets);
         }
@@ -370,6 +383,8 @@ pub struct SearchQueryWithIndex {
     pub hits_per_page: Option<usize>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToRetrieve>)]
     pub attributes_to_retrieve: Option<BTreeSet<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchRetrieveVectors>)]
+    pub retrieve_vectors: bool,
     #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToCrop>)]
     pub attributes_to_crop: Option<Vec<String>>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
@@ -386,6 +401,8 @@ pub struct SearchQueryWithIndex {
     pub filter: Option<Value>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchSort>)]
     pub sort: Option<Vec<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSearchDistinct>)]
+    pub distinct: Option<String>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchFacets>)]
     pub facets: Option<Vec<String>>,
     #[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
@@ -413,6 +430,7 @@ impl SearchQueryWithIndex {
             page,
             hits_per_page,
             attributes_to_retrieve,
+            retrieve_vectors,
             attributes_to_crop,
             crop_length,
             attributes_to_highlight,
@@ -421,6 +439,7 @@ impl SearchQueryWithIndex {
             show_matches_position,
             filter,
             sort,
+            distinct,
             facets,
             highlight_pre_tag,
             highlight_post_tag,
@@ -440,6 +459,7 @@ impl SearchQueryWithIndex {
             page,
             hits_per_page,
             attributes_to_retrieve,
+            retrieve_vectors,
             attributes_to_crop,
             crop_length,
             attributes_to_highlight,
@@ -448,6 +468,7 @@ impl SearchQueryWithIndex {
             show_matches_position,
             filter,
             sort,
+            distinct,
             facets,
             highlight_pre_tag,
             highlight_post_tag,
@@ -478,6 +499,8 @@ pub struct SimilarQuery {
     pub embedder: Option<String>,
     #[deserr(default, error = DeserrJsonError<InvalidSimilarAttributesToRetrieve>)]
     pub attributes_to_retrieve: Option<BTreeSet<String>>,
+    #[deserr(default, error = DeserrJsonError<InvalidSimilarRetrieveVectors>)]
+    pub retrieve_vectors: bool,
     #[deserr(default, error = DeserrJsonError<InvalidSimilarShowRankingScore>, default)]
     pub show_ranking_score: bool,
     #[deserr(default, error = DeserrJsonError<InvalidSimilarShowRankingScoreDetails>, default)]
@@ -716,6 +739,10 @@ fn prepare_search<'t>(
         search.ranking_score_threshold(ranking_score_threshold.0);
     }

+    if let Some(distinct) = &query.distinct {
+        search.distinct(distinct.clone());
+    }
+
     match search_kind {
         SearchKind::KeywordOnly => {
             if let Some(q) = &query.q {
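Note: `prepare_search` now forwards the new top-level `distinct` search option to milli. A hypothetical request body using it — "product_id" is a made-up attribute name, and the attribute has to be configured appropriately on the index for the search to succeed:

    use serde_json::json;

    // POST /indexes/{index_uid}/search
    let payload = json!({
        "q": "sneakers",
        "distinct": "product_id"
    });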
@@ -725,10 +752,15 @@ fn prepare_search<'t>(
         SearchKind::SemanticOnly { embedder_name, embedder } => {
             let vector = match query.vector.clone() {
                 Some(vector) => vector,
-                None => embedder
-                    .embed_one(query.q.clone().unwrap())
-                    .map_err(milli::vector::Error::from)
-                    .map_err(milli::Error::from)?,
+                None => {
+                    let span = tracing::trace_span!(target: "search::vector", "embed_one");
+                    let _entered = span.enter();
+
+                    embedder
+                        .embed_one(query.q.clone().unwrap())
+                        .map_err(milli::vector::Error::from)
+                        .map_err(milli::Error::from)?
+                }
             };

             search.semantic(embedder_name.clone(), embedder.clone(), Some(vector));
@@ -810,6 +842,7 @@ pub fn perform_search(
     index: &Index,
     query: SearchQuery,
     search_kind: SearchKind,
+    retrieve_vectors: RetrieveVectors,
 ) -> Result<SearchResult, MeilisearchHttpError> {
     let before_search = Instant::now();
     let rtxn = index.read_txn()?;
@@ -847,6 +880,8 @@ pub fn perform_search(
         page,
         hits_per_page,
         attributes_to_retrieve,
+        // use the enum passed as parameter
+        retrieve_vectors: _,
         attributes_to_crop,
         crop_length,
         attributes_to_highlight,
@@ -866,10 +901,12 @@ pub fn perform_search(
         matching_strategy: _,
         attributes_to_search_on: _,
         filter: _,
+        distinct: _,
     } = query;

     let format = AttributesFormat {
         attributes_to_retrieve,
+        retrieve_vectors,
         attributes_to_highlight,
         attributes_to_crop,
         crop_length,
@@ -953,6 +990,7 @@ pub fn perform_search(

 struct AttributesFormat {
     attributes_to_retrieve: Option<BTreeSet<String>>,
+    retrieve_vectors: RetrieveVectors,
     attributes_to_highlight: Option<HashSet<String>>,
     attributes_to_crop: Option<Vec<String>>,
     crop_length: usize,
@@ -965,6 +1003,36 @@ struct AttributesFormat {
     show_ranking_score_details: bool,
 }

+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum RetrieveVectors {
+    /// Do not touch the `_vectors` field
+    ///
+    /// this is the behavior when the vectorStore feature is disabled
+    Ignore,
+    /// Remove the `_vectors` field
+    ///
+    /// this is the behavior when the vectorStore feature is enabled, and `retrieveVectors` is `false`
+    Hide,
+    /// Retrieve vectors from the DB and merge them into the `_vectors` field
+    ///
+    /// this is the behavior when the vectorStore feature is enabled, and `retrieveVectors` is `true`
+    Retrieve,
+}
+
+impl RetrieveVectors {
+    pub fn new(
+        retrieve_vector: bool,
+        features: index_scheduler::RoFeatures,
+    ) -> Result<Self, index_scheduler::Error> {
+        match (retrieve_vector, features.check_vector("Passing `retrieveVectors` as a parameter")) {
+            (true, Ok(())) => Ok(Self::Retrieve),
+            (true, Err(error)) => Err(error),
+            (false, Ok(())) => Ok(Self::Hide),
+            (false, Err(_)) => Ok(Self::Ignore),
+        }
+    }
+}
+
 fn make_hits(
     index: &Index,
     rtxn: &RoTxn<'_>,
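Note: `RetrieveVectors::new` above encodes the following decision table; the feature check only errors when the vector store experimental feature is disabled and the caller explicitly asked for vectors:

    retrieveVectors | vector store feature | result
    ----------------+----------------------+------------------------------
    true            | enabled              | Ok(RetrieveVectors::Retrieve)
    true            | disabled             | Err(feature-gate error)
    false           | enabled              | Ok(RetrieveVectors::Hide)
    false           | disabled             | Ok(RetrieveVectors::Ignore)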
@@ -974,10 +1042,32 @@ fn make_hits(
|
|||||||
document_scores: Vec<Vec<ScoreDetails>>,
|
document_scores: Vec<Vec<ScoreDetails>>,
|
||||||
) -> Result<Vec<SearchHit>, MeilisearchHttpError> {
|
) -> Result<Vec<SearchHit>, MeilisearchHttpError> {
|
||||||
let fields_ids_map = index.fields_ids_map(rtxn).unwrap();
|
let fields_ids_map = index.fields_ids_map(rtxn).unwrap();
|
||||||
let displayed_ids = index
|
let displayed_ids =
|
||||||
.displayed_fields_ids(rtxn)?
|
index.displayed_fields_ids(rtxn)?.map(|fields| fields.into_iter().collect::<BTreeSet<_>>());
|
||||||
.map(|fields| fields.into_iter().collect::<BTreeSet<_>>())
|
|
||||||
.unwrap_or_else(|| fields_ids_map.iter().map(|(id, _)| id).collect());
|
let vectors_fid = fields_ids_map.id(milli::vector::parsed_vectors::RESERVED_VECTORS_FIELD_NAME);
|
||||||
|
|
||||||
|
let vectors_is_hidden = match (&displayed_ids, vectors_fid) {
|
||||||
|
// displayed_ids is a wildcard, so `_vectors` can be displayed regardless of its fid
|
||||||
|
(None, _) => false,
|
||||||
|
// displayed_ids is a finite list, and `_vectors` cannot be part of it because it is not an existing field
|
||||||
|
(Some(_), None) => true,
|
||||||
|
// displayed_ids is a finit list, so hide if `_vectors` is not part of it
|
||||||
|
(Some(map), Some(vectors_fid)) => map.contains(&vectors_fid),
|
||||||
|
};
|
||||||
|
|
||||||
|
let retrieve_vectors = if let RetrieveVectors::Retrieve = format.retrieve_vectors {
|
||||||
|
if vectors_is_hidden {
|
||||||
|
RetrieveVectors::Hide
|
||||||
|
} else {
|
||||||
|
RetrieveVectors::Retrieve
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
format.retrieve_vectors
|
||||||
|
};
|
||||||
|
|
||||||
|
let displayed_ids =
|
||||||
|
displayed_ids.unwrap_or_else(|| fields_ids_map.iter().map(|(id, _)| id).collect());
|
||||||
let fids = |attrs: &BTreeSet<String>| {
|
let fids = |attrs: &BTreeSet<String>| {
|
||||||
let mut ids = BTreeSet::new();
|
let mut ids = BTreeSet::new();
|
||||||
for attr in attrs {
|
for attr in attrs {
|
||||||
@@ -1000,6 +1090,7 @@ fn make_hits(
|
|||||||
.intersection(&displayed_ids)
|
.intersection(&displayed_ids)
|
||||||
.cloned()
|
.cloned()
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
let attr_to_highlight = format.attributes_to_highlight.unwrap_or_default();
|
let attr_to_highlight = format.attributes_to_highlight.unwrap_or_default();
|
||||||
let attr_to_crop = format.attributes_to_crop.unwrap_or_default();
|
let attr_to_crop = format.attributes_to_crop.unwrap_or_default();
|
||||||
let formatted_options = compute_formatted_options(
|
let formatted_options = compute_formatted_options(
|
||||||
@@ -1032,19 +1123,57 @@ fn make_hits(
     formatter_builder.crop_marker(format.crop_marker);
     formatter_builder.highlight_prefix(format.highlight_pre_tag);
     formatter_builder.highlight_suffix(format.highlight_post_tag);
+    let decompression_dictionary = index.document_decompression_dictionary(rtxn)?;
+    let mut buffer = Vec::new();
     let mut documents = Vec::new();
-    let documents_iter = index.documents(rtxn, documents_ids)?;
-    for ((_id, obkv), score) in documents_iter.into_iter().zip(document_scores.into_iter()) {
+    let embedding_configs = index.embedding_configs(rtxn)?;
+    let documents_iter = index.compressed_documents(rtxn, documents_ids)?;
+    for ((id, compressed), score) in documents_iter.into_iter().zip(document_scores.into_iter()) {
+        let obkv = compressed
+            .decompress_with_optional_dictionary(&mut buffer, decompression_dictionary.as_ref())
+            // TODO use a better error?
+            .map_err(|e| MeilisearchHttpError::HeedError(e.into()))?;
         // First generate a document with all the displayed fields
         let displayed_document = make_document(&displayed_ids, &fields_ids_map, obkv)?;
 
+        let add_vectors_fid =
+            vectors_fid.filter(|_fid| retrieve_vectors == RetrieveVectors::Retrieve);
+
         // select the attributes to retrieve
         let attributes_to_retrieve = to_retrieve_ids
             .iter()
+            // skip the vectors_fid if RetrieveVectors::Hide
+            .filter(|fid| match vectors_fid {
+                Some(vectors_fid) => {
+                    !(retrieve_vectors == RetrieveVectors::Hide && **fid == vectors_fid)
+                }
+                None => true,
+            })
+            // need to retrieve the existing `_vectors` field if the `RetrieveVectors::Retrieve`
+            .chain(add_vectors_fid.iter())
             .map(|&fid| fields_ids_map.name(fid).expect("Missing field name"));
         let mut document =
             permissive_json_pointer::select_values(&displayed_document, attributes_to_retrieve);
+
+        if retrieve_vectors == RetrieveVectors::Retrieve {
+            // Clippy is wrong
+            #[allow(clippy::manual_unwrap_or_default)]
+            let mut vectors = match document.remove("_vectors") {
+                Some(Value::Object(map)) => map,
+                _ => Default::default(),
+            };
+            for (name, vector) in index.embeddings(rtxn, id)? {
+                let user_provided = embedding_configs
+                    .iter()
+                    .find(|conf| conf.name == name)
+                    .is_some_and(|conf| conf.user_provided.contains(id));
+                let embeddings =
+                    ExplicitVectors { embeddings: Some(vector.into()), regenerate: !user_provided };
+                vectors.insert(name, serde_json::to_value(embeddings)?);
+            }
+            document.insert("_vectors".into(), vectors.into());
+        }
 
         let (matches_position, formatted) = format_fields(
             &displayed_document,
             &fields_ids_map,
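Two things happen in this hunk: stored documents now go through a shared decompression dictionary (hoisted out of the loop together with a scratch buffer), and `_vectors` is rebuilt per hit when vector retrieval is requested. For the latter, any `_vectors` object already in the document is pulled out, one entry per embedder is added with a `regenerate` flag (true unless the vector was user-provided), and the map is reinserted. A minimal sketch of that merge using plain `serde_json`; `inject_vectors` and its `embeddings` argument are illustrative stand-ins for the index lookups above:

```rust
use serde_json::{json, Map, Value};

// (embedder name, vector, was it user-provided?) — stand-ins for what
// `index.embeddings(rtxn, id)?` and the embedder configs yield.
fn inject_vectors(document: &mut Map<String, Value>, embeddings: Vec<(String, Value, bool)>) {
    // Keep whatever `_vectors` object the document already carries.
    let mut vectors = match document.remove("_vectors") {
        Some(Value::Object(map)) => map,
        _ => Map::new(),
    };
    for (name, vector, user_provided) in embeddings {
        // Vectors the engine computed itself are flagged for regeneration.
        vectors.insert(name, json!({ "embeddings": vector, "regenerate": !user_provided }));
    }
    document.insert("_vectors".into(), Value::Object(vectors));
}

fn main() {
    let mut doc = Map::new();
    inject_vectors(&mut doc, vec![("manual".into(), json!([0, 0, 0]), true)]);
    assert_eq!(doc["_vectors"]["manual"]["regenerate"], json!(false));
}
```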
@@ -1114,6 +1243,7 @@ pub fn perform_similar(
     query: SimilarQuery,
     embedder_name: String,
     embedder: Arc<Embedder>,
+    retrieve_vectors: RetrieveVectors,
 ) -> Result<SimilarResult, ResponseError> {
     let before_search = Instant::now();
     let rtxn = index.read_txn()?;
@@ -1125,6 +1255,7 @@ pub fn perform_similar(
         filter: _,
         embedder: _,
         attributes_to_retrieve,
+        retrieve_vectors: _,
         show_ranking_score,
         show_ranking_score_details,
         ranking_score_threshold,
@@ -1171,6 +1302,7 @@ pub fn perform_similar(
 
     let format = AttributesFormat {
         attributes_to_retrieve,
+        retrieve_vectors,
         attributes_to_highlight: None,
         attributes_to_crop: None,
         crop_length: DEFAULT_CROP_LENGTH(),
@@ -1212,13 +1344,23 @@ fn insert_geo_distance(sorts: &[String], document: &mut Document) {
         // TODO: TAMO: milli encountered an internal error, what do we want to do?
         let base = [capture_group[1].parse().unwrap(), capture_group[2].parse().unwrap()];
         let geo_point = &document.get("_geo").unwrap_or(&json!(null));
-        if let Some((lat, lng)) = geo_point["lat"].as_f64().zip(geo_point["lng"].as_f64()) {
+        if let Some((lat, lng)) =
+            extract_geo_value(&geo_point["lat"]).zip(extract_geo_value(&geo_point["lng"]))
+        {
             let distance = milli::distance_between_two_points(&base, &[lat, lng]);
             document.insert("_geoDistance".to_string(), json!(distance.round() as usize));
         }
     }
 }
 
+fn extract_geo_value(value: &Value) -> Option<f64> {
+    match value {
+        Value::Number(n) => n.as_f64(),
+        Value::String(s) => s.parse().ok(),
+        _ => None,
+    }
+}
+
 fn compute_formatted_options(
     attr_to_highlight: &HashSet<String>,
     attr_to_crop: &[String],
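With `extract_geo_value` in place, `_geoDistance` is computed whether the stored coordinates arrive as JSON numbers or as numeric strings. Assuming the helper above is in scope, a quick usage sketch:

```rust
// Both forms now resolve to Some(50.0):
assert_eq!(extract_geo_value(&serde_json::json!(50)), Some(50.0));
assert_eq!(extract_geo_value(&serde_json::json!("50")), Some(50.0));
// Anything else (null, arrays, objects) is ignored:
assert_eq!(extract_geo_value(&serde_json::json!(null)), None);
```

The tests added in the next hunk exercise exactly these mixed number/string cases.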
@@ -1592,4 +1734,54 @@ mod test {
         insert_geo_distance(sorters, &mut document);
         assert_eq!(document.get("_geoDistance"), None);
     }
+
+    #[test]
+    fn test_insert_geo_distance_with_coords_as_string() {
+        let value: Document = serde_json::from_str(
+            r#"{
+              "_geo": {
+                "lat": "50",
+                "lng": 3
+              }
+            }"#,
+        )
+        .unwrap();
+
+        let sorters = &["_geoPoint(50,3):desc".to_string()];
+        let mut document = value.clone();
+        insert_geo_distance(sorters, &mut document);
+        assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+
+        let value: Document = serde_json::from_str(
+            r#"{
+              "_geo": {
+                "lat": "50",
+                "lng": "3"
+              },
+              "id": "1"
+            }"#,
+        )
+        .unwrap();
+
+        let sorters = &["_geoPoint(50,3):desc".to_string()];
+        let mut document = value.clone();
+        insert_geo_distance(sorters, &mut document);
+        assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+
+        let value: Document = serde_json::from_str(
+            r#"{
+              "_geo": {
+                "lat": 50,
+                "lng": "3"
+              },
+              "id": "1"
+            }"#,
+        )
+        .unwrap();
+
+        let sorters = &["_geoPoint(50,3):desc".to_string()];
+        let mut document = value.clone();
+        insert_geo_distance(sorters, &mut document);
+        assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+    }
 }
@@ -40,8 +40,9 @@ pub struct Permit {
 
 impl Drop for Permit {
     fn drop(&mut self) {
+        let sender = self.sender.clone();
         // if the channel is closed then the whole instance is down
-        let _ = futures::executor::block_on(self.sender.send(()));
+        std::mem::drop(tokio::spawn(async move { sender.send(()).await }));
     }
 }
 
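Worth a note: the old `Drop` implementation blocked the current thread with `futures::executor::block_on`, which can stall or deadlock a tokio worker. The new version clones the sender and hands the send to a detached task instead. A minimal sketch of the pattern, assuming a `tokio::sync::mpsc` channel (field names are illustrative):

```rust
use tokio::sync::mpsc;

pub struct Permit {
    sender: mpsc::Sender<()>,
}

impl Drop for Permit {
    fn drop(&mut self) {
        // Clone so the spawned task owns a sender after `self` is gone.
        let sender = self.sender.clone();
        // Fire-and-forget: if the receiver side is closed, the whole
        // instance is shutting down anyway, so the error is ignored.
        // NB: `tokio::spawn` panics outside a runtime context, which holds
        // here because permits only live on runtime threads.
        drop(tokio::spawn(async move {
            let _ = sender.send(()).await;
        }));
    }
}
```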
@@ -78,7 +78,7 @@ pub static ALL_ACTIONS: Lazy<HashSet<&'static str>> = Lazy::new(|| {
 });
 
 static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
-    json!({"message": "The provided API key is invalid.",
+    json!({"message": null,
         "code": "invalid_api_key",
         "type": "auth",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key"

@@ -119,7 +119,8 @@ async fn error_access_expired_key() {
     thread::sleep(time::Duration::new(1, 0));
 
     for (method, route) in AUTHORIZATIONS.keys() {
-        let (response, code) = server.dummy_request(method, route).await;
+        let (mut response, code) = server.dummy_request(method, route).await;
+        response["message"] = serde_json::json!(null);
 
         assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
         assert_eq!(403, code, "{:?}", &response);

@@ -149,7 +150,8 @@ async fn error_access_unauthorized_index() {
         // filter `products` index routes
         .filter(|(_, route)| route.starts_with("/indexes/products"))
     {
-        let (response, code) = server.dummy_request(method, route).await;
+        let (mut response, code) = server.dummy_request(method, route).await;
+        response["message"] = serde_json::json!(null);
 
         assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
         assert_eq!(403, code, "{:?}", &response);

@@ -176,7 +178,8 @@ async fn error_access_unauthorized_action() {
 
         let key = response["key"].as_str().unwrap();
         server.use_api_key(key);
-        let (response, code) = server.dummy_request(method, route).await;
+        let (mut response, code) = server.dummy_request(method, route).await;
+        response["message"] = serde_json::json!(null);
 
         assert_eq!(response, INVALID_RESPONSE.clone(), "on route: {:?} - {:?}", method, route);
         assert_eq!(403, code, "{:?}", &response);

@@ -280,7 +283,7 @@ async fn access_authorized_no_index_restriction() {
                 route,
                 action
             );
-            assert_ne!(code, 403);
+            assert_ne!(code, 403, "on route: {:?} - {:?} with action: {:?}", method, route, action);
         }
     }
 }
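A pattern recurring through these auth-test hunks: rather than asserting the exact error wording (which now varies by route), the tests null out `message` and compare the remaining payload against one fixed template. In miniature:

```rust
fn main() {
    let expected = serde_json::json!({ "message": null, "code": "invalid_api_key" });
    let mut response = serde_json::json!({
        "message": "The provided API key is invalid.",
        "code": "invalid_api_key",
    });
    // Normalize the field whose wording is allowed to vary, then compare.
    response["message"] = serde_json::json!(null);
    assert_eq!(response, expected);
}
```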
@@ -1,7 +1,10 @@
+use actix_web::http::StatusCode;
+use actix_web::test;
+use jsonwebtoken::{EncodingKey, Header};
 use meili_snap::*;
 use uuid::Uuid;
 
-use crate::common::Server;
+use crate::common::{Server, Value};
 use crate::json;
 
 #[actix_rt::test]

@@ -436,3 +439,262 @@ async fn patch_api_keys_unknown_field() {
   }
 "###);
 }
+
+async fn send_request_with_custom_auth(
+    app: impl actix_web::dev::Service<
+        actix_http::Request,
+        Response = actix_web::dev::ServiceResponse<impl actix_web::body::MessageBody>,
+        Error = actix_web::Error,
+    >,
+    url: &str,
+    auth: &str,
+) -> (Value, StatusCode) {
+    let req = test::TestRequest::get().uri(url).insert_header(("Authorization", auth)).to_request();
+    let res = test::call_service(&app, req).await;
+    let status_code = res.status();
+    let body = test::read_body(res).await;
+    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
+
+    (response, status_code)
+}
+
+#[actix_rt::test]
+async fn invalid_auth_format() {
+    let server = Server::new_auth().await;
+    let app = server.init_web_app().await;
+
+    let req = test::TestRequest::get().uri("/indexes/dog/documents").to_request();
+    let res = test::call_service(&app, req).await;
+    let status_code = res.status();
+    let body = test::read_body(res).await;
+    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
+    snapshot!(status_code, @"401 Unauthorized");
+    snapshot!(response, @r###"
+    {
+      "message": "The Authorization header is missing. It must use the bearer authorization method.",
+      "code": "missing_authorization_header",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
+    }
+    "###);
+
+    let req = test::TestRequest::get().uri("/indexes/dog/documents").to_request();
+    let res = test::call_service(&app, req).await;
+    let status_code = res.status();
+    let body = test::read_body(res).await;
+    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
+    snapshot!(status_code, @"401 Unauthorized");
+    snapshot!(response, @r###"
+    {
+      "message": "The Authorization header is missing. It must use the bearer authorization method.",
+      "code": "missing_authorization_header",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#missing_authorization_header"
+    }
+    "###);
+
+    let (response, status_code) =
+        send_request_with_custom_auth(&app, "/indexes/dog/documents", "Bearer").await;
+    snapshot!(status_code, @"403 Forbidden");
+    snapshot!(response, @r###"
+    {
+      "message": "The provided API key is invalid.",
+      "code": "invalid_api_key",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
+    }
+    "###);
+}
+
+#[actix_rt::test]
+async fn invalid_api_key() {
+    let server = Server::new_auth().await;
+    let app = server.init_web_app().await;
+
+    let (response, status_code) =
+        send_request_with_custom_auth(&app, "/indexes/dog/search", "Bearer kefir").await;
+    snapshot!(status_code, @"403 Forbidden");
+    snapshot!(response, @r###"
+    {
+      "message": "The provided API key is invalid.",
+      "code": "invalid_api_key",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
+    }
+    "###);
+
+    let uuid = Uuid::nil();
+    let key = json!({ "actions": ["search"], "indexes": ["dog"], "expiresAt": null, "uid": uuid.to_string() });
+    let req = test::TestRequest::post()
+        .uri("/keys")
+        .insert_header(("Authorization", "Bearer MASTER_KEY"))
+        .set_json(&key)
+        .to_request();
+    let res = test::call_service(&app, req).await;
+    let body = test::read_body(res).await;
+    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
+    snapshot!(json_string!(response, { ".createdAt" => "[date]", ".updatedAt" => "[date]" }), @r###"
+    {
+      "name": null,
+      "description": null,
+      "key": "aeb94973e0b6e912d94165430bbe87dee91a7c4f891ce19050c3910ec96977e9",
+      "uid": "00000000-0000-0000-0000-000000000000",
+      "actions": [
+        "search"
+      ],
+      "indexes": [
+        "dog"
+      ],
+      "expiresAt": null,
+      "createdAt": "[date]",
+      "updatedAt": "[date]"
+    }
+    "###);
+    let key = response["key"].as_str().unwrap();
+
+    let (response, status_code) =
+        send_request_with_custom_auth(&app, "/indexes/doggo/search", &format!("Bearer {key}"))
+            .await;
+    snapshot!(status_code, @"403 Forbidden");
+    snapshot!(response, @r###"
+    {
+      "message": "The API key cannot acces the index `doggo`, authorized indexes are [\"dog\"].",
+      "code": "invalid_api_key",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
+    }
+    "###);
+}
+
+#[actix_rt::test]
+async fn invalid_tenant_token() {
+    let server = Server::new_auth().await;
+    let app = server.init_web_app().await;
+
+    // The tenant token won't be recognized at all if we're not on a search route
+    let claims = json!({ "tamo": "kefir" });
+    let jwt = jsonwebtoken::encode(&Header::default(), &claims, &EncodingKey::from_secret(b"tamo"))
+        .unwrap();
+    let (response, status_code) =
+        send_request_with_custom_auth(&app, "/indexes/dog/documents", &format!("Bearer {jwt}"))
+            .await;
+    snapshot!(status_code, @"403 Forbidden");
+    snapshot!(response, @r###"
+    {
+      "message": "The provided API key is invalid.",
+      "code": "invalid_api_key",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
+    }
+    "###);
+
+    let claims = json!({ "tamo": "kefir" });
+    let jwt = jsonwebtoken::encode(&Header::default(), &claims, &EncodingKey::from_secret(b"tamo"))
+        .unwrap();
+    let (response, status_code) =
+        send_request_with_custom_auth(&app, "/indexes/dog/search", &format!("Bearer {jwt}")).await;
+    snapshot!(status_code, @"403 Forbidden");
+    snapshot!(response, @r###"
+    {
+      "message": "Could not decode tenant token, JSON error: missing field `searchRules` at line 1 column 16.",
+      "code": "invalid_api_key",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
+    }
+    "###);
+
+    // The error messages are not ideal but that's expected since we cannot _yet_ use deserr
+    let claims = json!({ "searchRules": "kefir" });
+    let jwt = jsonwebtoken::encode(&Header::default(), &claims, &EncodingKey::from_secret(b"tamo"))
+        .unwrap();
+    let (response, status_code) =
+        send_request_with_custom_auth(&app, "/indexes/dog/search", &format!("Bearer {jwt}")).await;
+    snapshot!(status_code, @"403 Forbidden");
+    snapshot!(response, @r###"
+    {
+      "message": "Could not decode tenant token, JSON error: data did not match any variant of untagged enum SearchRules at line 1 column 23.",
+      "code": "invalid_api_key",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
+    }
+    "###);
+
+    let uuid = Uuid::nil();
+    let claims = json!({ "searchRules": ["kefir"], "apiKeyUid": uuid.to_string() });
+    let jwt = jsonwebtoken::encode(&Header::default(), &claims, &EncodingKey::from_secret(b"tamo"))
+        .unwrap();
+    let (response, status_code) =
+        send_request_with_custom_auth(&app, "/indexes/dog/search", &format!("Bearer {jwt}")).await;
+    snapshot!(status_code, @"403 Forbidden");
+    snapshot!(response, @r###"
+    {
+      "message": "Could not decode tenant token, InvalidSignature.",
+      "code": "invalid_api_key",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
+    }
+    "###);
+
+    // ~~ For the next tests we first need a valid API key
+    let key = json!({ "actions": ["search"], "indexes": ["dog"], "expiresAt": null, "uid": uuid.to_string() });
+    let req = test::TestRequest::post()
+        .uri("/keys")
+        .insert_header(("Authorization", "Bearer MASTER_KEY"))
+        .set_json(&key)
+        .to_request();
+    let res = test::call_service(&app, req).await;
+    let body = test::read_body(res).await;
+    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
+    snapshot!(json_string!(response, { ".createdAt" => "[date]", ".updatedAt" => "[date]" }), @r###"
+    {
+      "name": null,
+      "description": null,
+      "key": "aeb94973e0b6e912d94165430bbe87dee91a7c4f891ce19050c3910ec96977e9",
+      "uid": "00000000-0000-0000-0000-000000000000",
+      "actions": [
+        "search"
+      ],
+      "indexes": [
+        "dog"
+      ],
+      "expiresAt": null,
+      "createdAt": "[date]",
+      "updatedAt": "[date]"
+    }
+    "###);
+    let key = response["key"].as_str().unwrap();
+
+    let claims = json!({ "searchRules": ["doggo", "catto"], "apiKeyUid": uuid.to_string() });
+    let jwt = jsonwebtoken::encode(
+        &Header::default(),
+        &claims,
+        &EncodingKey::from_secret(key.as_bytes()),
+    )
+    .unwrap();
+    // Try to access an index that is not authorized by the tenant token
+    let (response, status_code) =
+        send_request_with_custom_auth(&app, "/indexes/dog/search", &format!("Bearer {jwt}")).await;
+    snapshot!(status_code, @"403 Forbidden");
+    snapshot!(response, @r###"
+    {
+      "message": "The provided tenant token cannot acces the index `dog`, allowed indexes are [\"catto\", \"doggo\"].",
+      "code": "invalid_api_key",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
+    }
+    "###);
+
+    // Try to access an index that *is* authorized by the tenant token but not by the api key used to generate the tt
+    let (response, status_code) =
+        send_request_with_custom_auth(&app, "/indexes/doggo/search", &format!("Bearer {jwt}"))
+            .await;
+    snapshot!(status_code, @"403 Forbidden");
+    snapshot!(response, @r###"
+    {
+      "message": "The API key used to generate this tenant token cannot acces the index `doggo`.",
+      "code": "invalid_api_key",
+      "type": "auth",
+      "link": "https://docs.meilisearch.com/errors#invalid_api_key"
+    }
+    "###);
+}
@@ -53,7 +53,8 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
 });
 
 static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
-    json!({"message": "The provided API key is invalid.",
+    json!({
+        "message": null,
         "code": "invalid_api_key",
         "type": "auth",
         "link": "https://docs.meilisearch.com/errors#invalid_api_key"

@@ -191,7 +192,9 @@ macro_rules! compute_forbidden_search {
         server.use_api_key(&web_token);
         let index = server.index("sales");
         index
-            .search(json!({}), |response, code| {
+            .search(json!({}), |mut response, code| {
+                // We don't assert anything on the message since it may change between cases
+                response["message"] = serde_json::json!(null);
                 assert_eq!(
                     response,
                     INVALID_RESPONSE.clone(),

@@ -495,7 +498,8 @@ async fn error_access_forbidden_routes() {
 
     for ((method, route), actions) in AUTHORIZATIONS.iter() {
         if !actions.contains("search") {
-            let (response, code) = server.dummy_request(method, route).await;
+            let (mut response, code) = server.dummy_request(method, route).await;
+            response["message"] = serde_json::json!(null);
             assert_eq!(response, INVALID_RESPONSE.clone());
             assert_eq!(code, 403);
         }

@@ -529,14 +533,16 @@ async fn error_access_expired_parent_key() {
     server.use_api_key(&web_token);
 
     // test search request while parent_key is not expired
-    let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
+    let (mut response, code) = server.dummy_request("POST", "/indexes/products/search").await;
+    response["message"] = serde_json::json!(null);
     assert_ne!(response, INVALID_RESPONSE.clone());
     assert_ne!(code, 403);
 
     // wait until the key is expired.
     thread::sleep(time::Duration::new(1, 0));
 
-    let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
+    let (mut response, code) = server.dummy_request("POST", "/indexes/products/search").await;
+    response["message"] = serde_json::json!(null);
     assert_eq!(response, INVALID_RESPONSE.clone());
     assert_eq!(code, 403);
 }

@@ -585,7 +591,8 @@ async fn error_access_modified_token() {
     .join(".");
 
     server.use_api_key(&altered_token);
-    let (response, code) = server.dummy_request("POST", "/indexes/products/search").await;
+    let (mut response, code) = server.dummy_request("POST", "/indexes/products/search").await;
+    response["message"] = serde_json::json!(null);
     assert_eq!(response, INVALID_RESPONSE.clone());
     assert_eq!(code, 403);
 }
@@ -109,9 +109,11 @@ static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
 
 fn invalid_response(query_index: Option<usize>) -> Value {
     let message = if let Some(query_index) = query_index {
-        format!("Inside `.queries[{query_index}]`: The provided API key is invalid.")
+        json!(format!("Inside `.queries[{query_index}]`: The provided API key is invalid."))
     } else {
-        "The provided API key is invalid.".to_string()
+        // if it's anything else we simply return null and will tests all the
+        // error messages somewhere else
+        json!(null)
     };
     json!({"message": message,
         "code": "invalid_api_key",

@@ -414,7 +416,10 @@ macro_rules! compute_forbidden_single_search {
        for (tenant_token, failed_query_index) in $tenant_tokens.iter().zip(failed_query_indexes.into_iter()) {
            let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
            server.use_api_key(&web_token);
-           let (response, code) = server.multi_search(json!({"queries" : [{"indexUid": "sales"}]})).await;
+           let (mut response, code) = server.multi_search(json!({"queries" : [{"indexUid": "sales"}]})).await;
+           if failed_query_index.is_none() && !response["message"].is_null() {
+               response["message"] = serde_json::json!(null);
+           }
            assert_eq!(
                response,
                invalid_response(failed_query_index),

@@ -469,10 +474,13 @@ macro_rules! compute_forbidden_multiple_search {
        for (tenant_token, failed_query_index) in $tenant_tokens.iter().zip(failed_query_indexes.into_iter()) {
            let web_token = generate_tenant_token(&uid, &key, tenant_token.clone());
            server.use_api_key(&web_token);
-           let (response, code) = server.multi_search(json!({"queries" : [
+           let (mut response, code) = server.multi_search(json!({"queries" : [
                {"indexUid": "sales"},
                {"indexUid": "products"},
            ]})).await;
+           if failed_query_index.is_none() && !response["message"].is_null() {
+               response["message"] = serde_json::json!(null);
+           }
            assert_eq!(
                response,
                invalid_response(failed_query_index),

@@ -1073,18 +1081,20 @@ async fn error_access_expired_parent_key() {
     server.use_api_key(&web_token);
 
     // test search request while parent_key is not expired
-    let (response, code) = server
+    let (mut response, code) = server
         .multi_search(json!({"queries" : [{"indexUid": "sales"}, {"indexUid": "products"}]}))
         .await;
+    response["message"] = serde_json::json!(null);
     assert_ne!(response, invalid_response(None));
     assert_ne!(code, 403);
 
     // wait until the key is expired.
     thread::sleep(time::Duration::new(1, 0));
 
-    let (response, code) = server
+    let (mut response, code) = server
        .multi_search(json!({"queries" : [{"indexUid": "sales"}, {"indexUid": "products"}]}))
        .await;
+    response["message"] = serde_json::json!(null);
     assert_eq!(response, invalid_response(None));
     assert_eq!(code, 403);
 }

@@ -1134,8 +1144,9 @@ async fn error_access_modified_token() {
     .join(".");
 
     server.use_api_key(&altered_token);
-    let (response, code) =
+    let (mut response, code) =
         server.multi_search(json!({"queries" : [{"indexUid": "products"}]})).await;
+    response["message"] = serde_json::json!(null);
     assert_eq!(response, invalid_response(None));
     assert_eq!(code, 403);
 }
@@ -182,14 +182,10 @@ impl Index<'_> {
         self.service.get(url).await
     }
 
-    pub async fn get_document(
-        &self,
-        id: u64,
-        options: Option<GetDocumentOptions>,
-    ) -> (Value, StatusCode) {
+    pub async fn get_document(&self, id: u64, options: Option<Value>) -> (Value, StatusCode) {
         let mut url = format!("/indexes/{}/documents/{}", urlencode(self.uid.as_ref()), id);
-        if let Some(fields) = options.and_then(|o| o.fields) {
-            let _ = write!(url, "?fields={}", fields.join(","));
+        if let Some(options) = options {
+            write!(url, "{}", yaup::to_string(&options).unwrap()).unwrap();
         }
         self.service.get(url).await
     }

@@ -205,18 +201,11 @@ impl Index<'_> {
     }
 
     pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
-        let mut url = format!("/indexes/{}/documents?", urlencode(self.uid.as_ref()));
-        if let Some(limit) = options.limit {
-            let _ = write!(url, "limit={}&", limit);
-        }
-        if let Some(offset) = options.offset {
-            let _ = write!(url, "offset={}&", offset);
-        }
-
-        if let Some(attributes_to_retrieve) = options.attributes_to_retrieve {
-            let _ = write!(url, "fields={}&", attributes_to_retrieve.join(","));
-        }
-
+        let url = format!(
+            "/indexes/{}/documents{}",
+            urlencode(self.uid.as_ref()),
+            yaup::to_string(&options).unwrap()
+        );
         self.service.get(url).await
     }

@@ -376,7 +365,7 @@ impl Index<'_> {
     }
 
     pub async fn search_get(&self, query: &str) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/search?{}", urlencode(self.uid.as_ref()), query);
+        let url = format!("/indexes/{}/search{}", urlencode(self.uid.as_ref()), query);
         self.service.get(url).await
     }

@@ -413,7 +402,7 @@ impl Index<'_> {
     }
 
     pub async fn similar_get(&self, query: &str) -> (Value, StatusCode) {
-        let url = format!("/indexes/{}/similar?{}", urlencode(self.uid.as_ref()), query);
+        let url = format!("/indexes/{}/similar{}", urlencode(self.uid.as_ref()), query);
         self.service.get(url).await
     }

@@ -435,13 +424,14 @@ impl Index<'_> {
     }
 }
 
-pub struct GetDocumentOptions {
-    pub fields: Option<Vec<&'static str>>,
-}
-
-#[derive(Debug, Default)]
+#[derive(Debug, Default, serde::Serialize)]
+#[serde(rename_all = "camelCase")]
 pub struct GetAllDocumentsOptions {
+    #[serde(skip_serializing_if = "Option::is_none")]
     pub limit: Option<usize>,
+    #[serde(skip_serializing_if = "Option::is_none")]
     pub offset: Option<usize>,
-    pub attributes_to_retrieve: Option<Vec<&'static str>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub fields: Option<Vec<&'static str>>,
+    pub retrieve_vectors: bool,
 }
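With `GetAllDocumentsOptions` deriving `Serialize`, the test helpers can build query strings generically through `yaup` instead of hand-writing a `write!` call per field; that is also why the `?` disappears from the `format!` templates (recent `yaup` versions appear to prepend it to non-empty output). A sketch under those assumptions:

```rust
#[derive(serde::Serialize)]
#[serde(rename_all = "camelCase")]
struct GetAllDocumentsOptions {
    #[serde(skip_serializing_if = "Option::is_none")]
    limit: Option<usize>,
    retrieve_vectors: bool,
}

fn main() {
    let options = GetAllDocumentsOptions { limit: Some(20), retrieve_vectors: true };
    // yaup serializes serde structs into URL query strings.
    let url = format!("/indexes/test/documents{}", yaup::to_string(&options).unwrap());
    assert_eq!(url, "/indexes/test/documents?limit=20&retrieveVectors=true");
}
```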
@@ -6,7 +6,7 @@ pub mod service;
 use std::fmt::{self, Display};
 
 #[allow(unused)]
-pub use index::{GetAllDocumentsOptions, GetDocumentOptions};
+pub use index::GetAllDocumentsOptions;
 use meili_snap::json_string;
 use serde::{Deserialize, Serialize};
 #[allow(unused)]

@@ -42,6 +42,12 @@ impl std::ops::Deref for Value {
     }
 }
 
+impl std::ops::DerefMut for Value {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
 impl PartialEq<serde_json::Value> for Value {
     fn eq(&self, other: &serde_json::Value) -> bool {
         &self.0 == other

@@ -65,7 +71,7 @@ impl Display for Value {
         write!(
             f,
             "{}",
-            json_string!(self, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" })
+            json_string!(self, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]", ".processingTimeMs" => "[duration]" })
         )
     }
 }
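The new `DerefMut` impl is what makes `response["message"] = json!(null)` possible in the updated tests: index-assignment on the `Value` newtype reaches `serde_json::Value`'s `IndexMut` through auto-deref. In miniature:

```rust
use std::ops::{Deref, DerefMut};

// Newtype over serde_json::Value, as in the test helpers.
struct Value(serde_json::Value);

impl Deref for Value {
    type Target = serde_json::Value;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for Value {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

fn main() {
    let mut response = Value(serde_json::json!({ "message": "The provided API key is invalid." }));
    // IndexMut on serde_json::Value is reached through DerefMut.
    response["message"] = serde_json::Value::Null;
    assert!(response["message"].is_null());
}
```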
@@ -6,7 +6,7 @@ use std::time::Duration;
 use actix_http::body::MessageBody;
 use actix_web::dev::ServiceResponse;
 use actix_web::http::StatusCode;
-use byte_unit::{Byte, ByteUnit};
+use byte_unit::{Byte, Unit};
 use clap::Parser;
 use meilisearch::option::{IndexerOpts, MaxMemory, Opt};
 use meilisearch::{analytics, create_app, setup_meilisearch, SubscriberForSecondLayer};

@@ -231,9 +231,9 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
         env: "development".to_owned(),
         #[cfg(feature = "analytics")]
         no_analytics: true,
-        max_index_size: Byte::from_unit(100.0, ByteUnit::MiB).unwrap(),
-        max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(),
-        http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(),
+        max_index_size: Byte::from_u64_with_unit(100, Unit::MiB).unwrap(),
+        max_task_db_size: Byte::from_u64_with_unit(1, Unit::GiB).unwrap(),
+        http_payload_size_limit: Byte::from_u64_with_unit(10, Unit::MiB).unwrap(),
         snapshot_dir: ".".into(),
         indexer_options: IndexerOpts {
             // memory has to be unlimited because several meilisearch are running in test context.
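These changes track the `byte_unit` 5.x API, which renamed `ByteUnit` to `Unit` and replaced the float-based `Byte::from_unit` with integer constructors. The same upgrade changes how sizes render (`10 MiB` rather than `10.00 MiB`), which is why the payload-size error snapshot later in this compare is updated too. A sketch, assuming byte_unit 5.x:

```rust
use byte_unit::{Byte, Unit};

fn main() {
    // `from_u64_with_unit` returns an Option because the conversion can overflow.
    let limit = Byte::from_u64_with_unit(10, Unit::MiB).unwrap();
    assert_eq!(limit.as_u64(), 10 * 1024 * 1024);
}
```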
@@ -183,6 +183,58 @@ async fn add_single_document_gzip_encoded() {
   }
 "###);
 }
+#[actix_rt::test]
+async fn add_single_document_gzip_encoded_with_incomplete_error() {
+    let document = json!("kefir");
+
+    // this is a what is expected and should work
+    let server = Server::new().await;
+    let app = server.init_web_app().await;
+    // post
+    let document = serde_json::to_string(&document).unwrap();
+    let req = test::TestRequest::post()
+        .uri("/indexes/dog/documents")
+        .set_payload(document.to_string())
+        .insert_header(("content-type", "application/json"))
+        .insert_header(("content-encoding", "gzip"))
+        .to_request();
+    let res = test::call_service(&app, req).await;
+    let status_code = res.status();
+    let body = test::read_body(res).await;
+    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
+    snapshot!(status_code, @"400 Bad Request");
+    snapshot!(json_string!(response),
+        @r###"
+    {
+      "message": "The provided payload is incomplete and cannot be parsed",
+      "code": "bad_request",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#bad_request"
+    }
+    "###);
+
+    // put
+    let req = test::TestRequest::put()
+        .uri("/indexes/dog/documents")
+        .set_payload(document.to_string())
+        .insert_header(("content-type", "application/json"))
+        .insert_header(("content-encoding", "gzip"))
+        .to_request();
+    let res = test::call_service(&app, req).await;
+    let status_code = res.status();
+    let body = test::read_body(res).await;
+    let response: Value = serde_json::from_slice(&body).unwrap_or_default();
+    snapshot!(status_code, @"400 Bad Request");
+    snapshot!(json_string!(response),
+        @r###"
+    {
+      "message": "The provided payload is incomplete and cannot be parsed",
+      "code": "bad_request",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#bad_request"
+    }
+    "###);
+}
 
 /// Here we try document request with every encoding
 #[actix_rt::test]
@@ -1040,6 +1092,52 @@ async fn document_addition_with_primary_key() {
 "###);
 }
 
+#[actix_rt::test]
+async fn document_addition_with_huge_int_primary_key() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let documents = json!([
+        {
+            "primary": 14630868576586246730u64,
+            "content": "foo",
+        }
+    ]);
+    let (response, code) = index.add_documents(documents, Some("primary")).await;
+    snapshot!(code, @"202 Accepted");
+
+    let response = index.wait_task(response.uid()).await;
+    snapshot!(response,
+        @r###"
+    {
+      "uid": 0,
+      "indexUid": "test",
+      "status": "succeeded",
+      "type": "documentAdditionOrUpdate",
+      "canceledBy": null,
+      "details": {
+        "receivedDocuments": 1,
+        "indexedDocuments": 1
+      },
+      "error": null,
+      "duration": "[duration]",
+      "enqueuedAt": "[date]",
+      "startedAt": "[date]",
+      "finishedAt": "[date]"
+    }
+    "###);
+
+    let (response, code) = index.get_document(14630868576586246730u64, None).await;
+    snapshot!(code, @"200 OK");
+    snapshot!(json_string!(response),
+        @r###"
+    {
+      "primary": 14630868576586246730,
+      "content": "foo"
+    }
+    "###);
+}
+
 #[actix_rt::test]
 async fn replace_document() {
     let server = Server::new().await;
@@ -2176,7 +2274,7 @@ async fn error_add_documents_payload_size() {
     snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
         @r###"
     {
-      "message": "The provided payload reached the size limit. The maximum accepted payload size is 10.00 MiB.",
+      "message": "The provided payload reached the size limit. The maximum accepted payload size is 10 MiB.",
       "code": "payload_too_large",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#payload_too_large"
@@ -719,7 +719,7 @@ async fn fetch_document_by_filter() {
 
     let (response, code) = index.get_document_by_filter(json!(null)).await;
     snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
+    snapshot!(response, @r###"
     {
       "message": "Invalid value type: expected an object, but found null",
       "code": "bad_request",

@@ -730,7 +730,7 @@ async fn fetch_document_by_filter() {
 
     let (response, code) = index.get_document_by_filter(json!({ "offset": "doggo" })).await;
     snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
+    snapshot!(response, @r###"
     {
       "message": "Invalid value type at `.offset`: expected a positive integer, but found a string: `\"doggo\"`",
       "code": "invalid_document_offset",

@@ -741,7 +741,7 @@ async fn fetch_document_by_filter() {
 
     let (response, code) = index.get_document_by_filter(json!({ "limit": "doggo" })).await;
     snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
+    snapshot!(response, @r###"
     {
       "message": "Invalid value type at `.limit`: expected a positive integer, but found a string: `\"doggo\"`",
       "code": "invalid_document_limit",

@@ -752,7 +752,7 @@ async fn fetch_document_by_filter() {
 
     let (response, code) = index.get_document_by_filter(json!({ "fields": "doggo" })).await;
     snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
+    snapshot!(response, @r###"
     {
       "message": "Invalid value type at `.fields`: expected an array, but found a string: `\"doggo\"`",
       "code": "invalid_document_fields",

@@ -763,7 +763,7 @@ async fn fetch_document_by_filter() {
 
     let (response, code) = index.get_document_by_filter(json!({ "filter": true })).await;
     snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
+    snapshot!(response, @r###"
     {
       "message": "Invalid syntax for the filter parameter: `expected String, Array, found: true`.",
       "code": "invalid_document_filter",

@@ -774,7 +774,7 @@ async fn fetch_document_by_filter() {
 
     let (response, code) = index.get_document_by_filter(json!({ "filter": "cool doggo" })).await;
     snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
+    snapshot!(response, @r###"
     {
       "message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` at `cool doggo`.\n1:11 cool doggo",
       "code": "invalid_document_filter",

@@ -786,7 +786,7 @@ async fn fetch_document_by_filter() {
     let (response, code) =
         index.get_document_by_filter(json!({ "filter": "doggo = bernese" })).await;
     snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
+    snapshot!(response, @r###"
     {
       "message": "Attribute `doggo` is not filterable. Available filterable attributes are: `color`.\n1:6 doggo = bernese",
       "code": "invalid_document_filter",

@@ -795,3 +795,70 @@ async fn fetch_document_by_filter() {
   }
 "###);
 }
+
+#[actix_rt::test]
+async fn retrieve_vectors() {
+    let server = Server::new().await;
+    let index = server.index("doggo");
+
+    // GET ALL DOCUMENTS BY QUERY
+    let (response, _code) = index.get_all_documents_raw("?retrieveVectors=tamo").await;
+    snapshot!(response, @r###"
+    {
+      "message": "Invalid value in parameter `retrieveVectors`: could not parse `tamo` as a boolean, expected either `true` or `false`",
+      "code": "invalid_document_retrieve_vectors",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_document_retrieve_vectors"
+    }
+    "###);
+    let (response, _code) = index.get_all_documents_raw("?retrieveVectors=true").await;
+    snapshot!(response, @r###"
+    {
+      "message": "Passing `retrieveVectors` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
+      "code": "feature_not_enabled",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
+    }
+    "###);
+
+    // FETCH ALL DOCUMENTS BY POST
+    let (response, _code) =
+        index.get_document_by_filter(json!({ "retrieveVectors": "tamo" })).await;
+    snapshot!(response, @r###"
+    {
+      "message": "Invalid value type at `.retrieveVectors`: expected a boolean, but found a string: `\"tamo\"`",
+      "code": "invalid_document_retrieve_vectors",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_document_retrieve_vectors"
+    }
+    "###);
+    let (response, _code) = index.get_document_by_filter(json!({ "retrieveVectors": true })).await;
+    snapshot!(response, @r###"
+    {
+      "message": "Passing `retrieveVectors` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
+      "code": "feature_not_enabled",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
+    }
+    "###);
+
+    // GET A SINGLE DOCUMENT
+    let (response, _code) = index.get_document(0, Some(json!({"retrieveVectors": "tamo"}))).await;
+    snapshot!(response, @r###"
+    {
+      "message": "Invalid value in parameter `retrieveVectors`: could not parse `tamo` as a boolean, expected either `true` or `false`",
+      "code": "invalid_document_retrieve_vectors",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_document_retrieve_vectors"
+    }
+    "###);
+    let (response, _code) = index.get_document(0, Some(json!({"retrieveVectors": true}))).await;
+    snapshot!(response, @r###"
+    {
+      "message": "Passing `retrieveVectors` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
+      "code": "feature_not_enabled",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
+    }
+    "###);
+}
@@ -1,10 +1,10 @@
+use actix_web::http::header::ACCEPT_ENCODING;
 use actix_web::test;
-use http::header::ACCEPT_ENCODING;
 use meili_snap::*;
 use urlencoding::encode as urlencode;
 
 use crate::common::encoder::Encoder;
-use crate::common::{GetAllDocumentsOptions, GetDocumentOptions, Server, Value};
+use crate::common::{GetAllDocumentsOptions, Server, Value};
 use crate::json;
 
 // TODO: partial test since we are testing error, amd error is not yet fully implemented in

@@ -59,8 +59,7 @@ async fn get_document() {
         })
     );
 
-    let (response, code) =
-        index.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["id"]) })).await;
+    let (response, code) = index.get_document(0, Some(json!({ "fields": ["id"] }))).await;
     assert_eq!(code, 200);
     assert_eq!(
         response,

@@ -69,9 +68,8 @@ async fn get_document() {
         })
     );
 
-    let (response, code) = index
-        .get_document(0, Some(GetDocumentOptions { fields: Some(vec!["nested.content"]) }))
-        .await;
+    let (response, code) =
+        index.get_document(0, Some(json!({ "fields": ["nested.content"] }))).await;
     assert_eq!(code, 200);
     assert_eq!(
         response,
@@ -211,7 +209,7 @@ async fn test_get_all_documents_attributes_to_retrieve() {
 
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {
-            attributes_to_retrieve: Some(vec!["name"]),
+            fields: Some(vec!["name"]),
             ..Default::default()
         })
         .await;

@@ -225,9 +223,19 @@ async fn test_get_all_documents_attributes_to_retrieve() {
     assert_eq!(response["limit"], json!(20));
     assert_eq!(response["total"], json!(77));
 
+    let (response, code) = index.get_all_documents_raw("?fields=").await;
+    assert_eq!(code, 200);
+    assert_eq!(response["results"].as_array().unwrap().len(), 20);
+    for results in response["results"].as_array().unwrap() {
+        assert_eq!(results.as_object().unwrap().keys().count(), 0);
+    }
+    assert_eq!(response["offset"], json!(0));
+    assert_eq!(response["limit"], json!(20));
+    assert_eq!(response["total"], json!(77));
+
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {
-            attributes_to_retrieve: Some(vec![]),
+            fields: Some(vec!["wrong"]),
             ..Default::default()
         })
         .await;

@@ -242,22 +250,7 @@ async fn test_get_all_documents_attributes_to_retrieve() {
 
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {
-            attributes_to_retrieve: Some(vec!["wrong"]),
-            ..Default::default()
-        })
-        .await;
-    assert_eq!(code, 200);
-    assert_eq!(response["results"].as_array().unwrap().len(), 20);
-    for results in response["results"].as_array().unwrap() {
-        assert_eq!(results.as_object().unwrap().keys().count(), 0);
-    }
-    assert_eq!(response["offset"], json!(0));
-    assert_eq!(response["limit"], json!(20));
-    assert_eq!(response["total"], json!(77));
-
-    let (response, code) = index
-        .get_all_documents(GetAllDocumentsOptions {
-            attributes_to_retrieve: Some(vec!["name", "tags"]),
+            fields: Some(vec!["name", "tags"]),
             ..Default::default()
         })
         .await;

@@ -270,10 +263,7 @@ async fn test_get_all_documents_attributes_to_retrieve() {
     }
 
     let (response, code) = index
-        .get_all_documents(GetAllDocumentsOptions {
-            attributes_to_retrieve: Some(vec!["*"]),
-            ..Default::default()
-        })
+        .get_all_documents(GetAllDocumentsOptions { fields: Some(vec!["*"]), ..Default::default() })
         .await;
     assert_eq!(code, 200);
     assert_eq!(response["results"].as_array().unwrap().len(), 20);

@@ -283,7 +273,7 @@ async fn test_get_all_documents_attributes_to_retrieve() {
 
     let (response, code) = index
         .get_all_documents(GetAllDocumentsOptions {
-            attributes_to_retrieve: Some(vec!["*", "wrong"]),
+            fields: Some(vec!["*", "wrong"]),
             ..Default::default()
         })
         .await;
@@ -316,12 +306,10 @@ async fn get_document_s_nested_attributes_to_retrieve() {
     assert_eq!(code, 202);
     index.wait_task(1).await;
 
-    let (response, code) =
-        index.get_document(0, Some(GetDocumentOptions { fields: Some(vec!["content"]) })).await;
+    let (response, code) = index.get_document(0, Some(json!({ "fields": ["content"] }))).await;
     assert_eq!(code, 200);
     assert_eq!(response, json!({}));
-    let (response, code) =
-        index.get_document(1, Some(GetDocumentOptions { fields: Some(vec!["content"]) })).await;
+    let (response, code) = index.get_document(1, Some(json!({ "fields": ["content"] }))).await;
     assert_eq!(code, 200);
     assert_eq!(
         response,
@@ -333,9 +321,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
         })
     );
 
-    let (response, code) = index
-        .get_document(0, Some(GetDocumentOptions { fields: Some(vec!["content.truc"]) }))
-        .await;
+    let (response, code) = index.get_document(0, Some(json!({ "fields": ["content.truc"] }))).await;
     assert_eq!(code, 200);
     assert_eq!(
         response,
@@ -343,9 +329,7 @@ async fn get_document_s_nested_attributes_to_retrieve() {
             "content.truc": "foobar",
         })
     );
-    let (response, code) = index
-        .get_document(1, Some(GetDocumentOptions { fields: Some(vec!["content.truc"]) }))
-        .await;
+    let (response, code) = index.get_document(1, Some(json!({ "fields": ["content.truc"] }))).await;
     assert_eq!(code, 200);
     assert_eq!(
         response,
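
Editorial note on the hunks above: the typed `GetDocumentOptions` struct is retired in favour of passing the raw JSON query object that the HTTP route receives. A minimal sketch of the new call shape, using only `serde_json` (illustration, not part of the diff):

    use serde_json::json;

    fn main() {
        // `fields` selects the attributes to return; dotted paths such as
        // `content.truc` reach into nested documents.
        let query = json!({ "fields": ["content.truc"] });
        assert_eq!(query["fields"][0], "content.truc");
    }
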
@@ -540,3 +524,207 @@ async fn get_document_by_filter() {
     }
     "###);
 }
+
+#[actix_rt::test]
+async fn get_document_with_vectors() {
+    let server = Server::new().await;
+    let index = server.index("doggo");
+    let (value, code) = server.set_features(json!({"vectorStore": true})).await;
+    snapshot!(code, @"200 OK");
+    snapshot!(value, @r###"
+    {
+      "vectorStore": true,
+      "metrics": false,
+      "logsRoute": false
+    }
+    "###);
+
+    let (response, code) = index
+        .update_settings(json!({
+          "embedders": {
+              "manual": {
+                  "source": "userProvided",
+                  "dimensions": 3,
+              }
+          },
+        }))
+        .await;
+    snapshot!(code, @"202 Accepted");
+    server.wait_task(response.uid()).await;
+
+    let documents = json!([
+      {"id": 0, "name": "kefir", "_vectors": { "manual": [0, 0, 0] }},
+      {"id": 1, "name": "echo", "_vectors": { "manual": null }},
+    ]);
+    let (value, code) = index.add_documents(documents, None).await;
+    snapshot!(code, @"202 Accepted");
+    index.wait_task(value.uid()).await;
+
+    // by default you shouldn't see the `_vectors` object
+    let (documents, _code) = index.get_all_documents(Default::default()).await;
+    snapshot!(json_string!(documents), @r###"
+    {
+      "results": [
+        {
+          "id": 0,
+          "name": "kefir"
+        },
+        {
+          "id": 1,
+          "name": "echo"
+        }
+      ],
+      "offset": 0,
+      "limit": 20,
+      "total": 2
+    }
+    "###);
+    let (documents, _code) = index.get_document(0, None).await;
+    snapshot!(json_string!(documents), @r###"
+    {
+      "id": 0,
+      "name": "kefir"
+    }
+    "###);
+
+    // if we try to retrieve the vectors with the `fields` parameter they
+    // still shouldn't be displayed
+    let (documents, _code) = index
+        .get_all_documents(GetAllDocumentsOptions {
+            fields: Some(vec!["name", "_vectors"]),
+            ..Default::default()
+        })
+        .await;
+    snapshot!(json_string!(documents), @r###"
+    {
+      "results": [
+        {
+          "name": "kefir"
+        },
+        {
+          "name": "echo"
+        }
+      ],
+      "offset": 0,
+      "limit": 20,
+      "total": 2
+    }
+    "###);
+    let (documents, _code) =
+        index.get_document(0, Some(json!({"fields": ["name", "_vectors"]}))).await;
+    snapshot!(json_string!(documents), @r###"
+    {
+      "name": "kefir"
+    }
+    "###);
+
+    // If we specify the retrieve vectors boolean and nothing else we should get the vectors
+    let (documents, _code) = index
+        .get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
+        .await;
+    snapshot!(json_string!(documents), @r###"
+    {
+      "results": [
+        {
+          "id": 0,
+          "name": "kefir",
+          "_vectors": {
+            "manual": {
+              "embeddings": [
+                [
+                  0.0,
+                  0.0,
+                  0.0
+                ]
+              ],
+              "regenerate": false
+            }
+          }
+        },
+        {
+          "id": 1,
+          "name": "echo",
+          "_vectors": {}
+        }
+      ],
+      "offset": 0,
+      "limit": 20,
+      "total": 2
+    }
+    "###);
+    let (documents, _code) = index.get_document(0, Some(json!({"retrieveVectors": true}))).await;
+    snapshot!(json_string!(documents), @r###"
+    {
+      "id": 0,
+      "name": "kefir",
+      "_vectors": {
+        "manual": {
+          "embeddings": [
+            [
+              0.0,
+              0.0,
+              0.0
+            ]
+          ],
+          "regenerate": false
+        }
+      }
+    }
+    "###);
+
+    // If we specify the retrieve vectors boolean and exclude vectors from the `fields` we should still get the vectors
+    let (documents, _code) = index
+        .get_all_documents(GetAllDocumentsOptions {
+            retrieve_vectors: true,
+            fields: Some(vec!["name"]),
+            ..Default::default()
+        })
+        .await;
+    snapshot!(json_string!(documents), @r###"
+    {
+      "results": [
+        {
+          "name": "kefir",
+          "_vectors": {
+            "manual": {
+              "embeddings": [
+                [
+                  0.0,
+                  0.0,
+                  0.0
+                ]
+              ],
+              "regenerate": false
+            }
+          }
+        },
+        {
+          "name": "echo",
+          "_vectors": {}
+        }
+      ],
+      "offset": 0,
+      "limit": 20,
+      "total": 2
+    }
+    "###);
+    let (documents, _code) =
+        index.get_document(0, Some(json!({"retrieveVectors": true, "fields": ["name"]}))).await;
+    snapshot!(json_string!(documents), @r###"
+    {
+      "name": "kefir",
+      "_vectors": {
+        "manual": {
+          "embeddings": [
+            [
+              0.0,
+              0.0,
+              0.0
+            ]
+          ],
+          "regenerate": false
+        }
+      }
+    }
+    "###);
+}
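
Editorial note: the test added above pins down the new `retrieveVectors` contract. A minimal sketch of the document shape it asserts, using only `serde_json` (illustration, not part of the diff): with `retrieveVectors: true`, each embedder under `_vectors` is an object holding the raw embeddings plus a `regenerate` flag rather than a bare float array, and documents whose vectors were cleared (here `echo`) come back with an empty `_vectors` object.

    use serde_json::json;

    fn main() {
        let kefir = json!({
            "id": 0,
            "name": "kefir",
            "_vectors": {
                "manual": { "embeddings": [[0.0, 0.0, 0.0]], "regenerate": false }
            }
        });
        // `regenerate: false` marks the embeddings as user-provided.
        assert_eq!(kefir["_vectors"]["manual"]["regenerate"], false);
    }
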
@@ -1938,3 +1938,210 @@ async fn import_dump_v6_containing_experimental_features() {
         })
         .await;
 }
+
+// In this test we must generate the dump ourselves to ensure the
+// `user provided` vectors are well set
+#[actix_rt::test]
+#[cfg_attr(target_os = "windows", ignore)]
+async fn generate_and_import_dump_containing_vectors() {
+    let temp = tempfile::tempdir().unwrap();
+    let mut opt = default_settings(temp.path());
+    let server = Server::new_with_options(opt.clone()).await.unwrap();
+    let (code, _) = server.set_features(json!({"vectorStore": true})).await;
+    snapshot!(code, @r###"
+    {
+      "vectorStore": true,
+      "metrics": false,
+      "logsRoute": false
+    }
+    "###);
+    let index = server.index("pets");
+    let (response, code) = index
+        .update_settings(json!(
+            {
+              "embedders": {
+                "doggo_embedder": {
+                  "source": "huggingFace",
+                  "model": "sentence-transformers/all-MiniLM-L6-v2",
+                  "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
+                  "documentTemplate": "{{doc.doggo}}",
+                }
+              }
+            }
+        ))
+        .await;
+    snapshot!(code, @"202 Accepted");
+    let response = index.wait_task(response.uid()).await;
+    snapshot!(response);
+    let (response, code) = index
+        .add_documents(
+            json!([
+                {"id": 0, "doggo": "kefir", "_vectors": { "doggo_embedder": vec![0; 384] }},
+                {"id": 1, "doggo": "echo", "_vectors": { "doggo_embedder": { "regenerate": false, "embeddings": vec![1; 384] }}},
+                {"id": 2, "doggo": "intel", "_vectors": { "doggo_embedder": { "regenerate": true, "embeddings": vec![2; 384] }}},
+                {"id": 3, "doggo": "bill", "_vectors": { "doggo_embedder": { "regenerate": true }}},
+                {"id": 4, "doggo": "max" },
+            ]),
+            None,
+        )
+        .await;
+    snapshot!(code, @"202 Accepted");
+    let response = index.wait_task(response.uid()).await;
+    snapshot!(response);
+
+    let (response, code) = server.create_dump().await;
+    snapshot!(code, @"202 Accepted");
+    let response = index.wait_task(response.uid()).await;
+    snapshot!(response["status"], @r###""succeeded""###);
+
+    // ========= We made a dump, now we should clear the DB and try to import our dump
+    drop(server);
+    tokio::fs::remove_dir_all(&opt.db_path).await.unwrap();
+    let dump_name = format!("{}.dump", response["details"]["dumpUid"].as_str().unwrap());
+    let dump_path = opt.dump_dir.join(dump_name);
+    assert!(dump_path.exists(), "path: `{}`", dump_path.display());
+
+    opt.import_dump = Some(dump_path);
+    // NOTE: We shouldn't have to change the database path but I lost one hour
+    // because of a « bad path » error and that fixed it.
+    opt.db_path = temp.path().join("data.ms");
+
+    let mut server = Server::new_auth_with_options(opt, temp).await;
+    server.use_api_key("MASTER_KEY");
+
+    let (indexes, code) = server.list_indexes(None, None).await;
+    assert_eq!(code, 200, "{indexes}");
+
+    snapshot!(indexes["results"].as_array().unwrap().len(), @"1");
+    snapshot!(indexes["results"][0]["uid"], @r###""pets""###);
+    snapshot!(indexes["results"][0]["primaryKey"], @r###""id""###);
+
+    let (response, code) = server.get_features().await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "vectorStore": true,
+      "metrics": false,
+      "logsRoute": false
+    }
+    "###);
+
+    let index = server.index("pets");
+
+    let (response, code) = index.settings().await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+    {
+      "displayedAttributes": [
+        "*"
+      ],
+      "searchableAttributes": [
+        "*"
+      ],
+      "filterableAttributes": [],
+      "sortableAttributes": [],
+      "rankingRules": [
+        "words",
+        "typo",
+        "proximity",
+        "attribute",
+        "sort",
+        "exactness"
+      ],
+      "stopWords": [],
+      "nonSeparatorTokens": [],
+      "separatorTokens": [],
+      "dictionary": [],
+      "synonyms": {},
+      "distinctAttribute": null,
+      "proximityPrecision": "byWord",
+      "typoTolerance": {
+        "enabled": true,
+        "minWordSizeForTypos": {
+          "oneTypo": 5,
+          "twoTypos": 9
+        },
+        "disableOnWords": [],
+        "disableOnAttributes": []
+      },
+      "faceting": {
+        "maxValuesPerFacet": 100,
+        "sortFacetValuesBy": {
+          "*": "alpha"
+        }
+      },
+      "pagination": {
+        "maxTotalHits": 1000
+      },
+      "embedders": {
+        "doggo_embedder": {
+          "source": "huggingFace",
+          "model": "sentence-transformers/all-MiniLM-L6-v2",
+          "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
+          "documentTemplate": "{{doc.doggo}}"
+        }
+      },
+      "searchCutoffMs": null
+    }
+    "###);
+
+    index
+        .search(json!({"retrieveVectors": true}), |response, code| {
+            snapshot!(code, @"200 OK");
+            snapshot!(json_string!(response["hits"], { "[]._vectors.doggo_embedder.embeddings" => "[vector]" }), @r###"
+            [
+              {
+                "id": 0,
+                "doggo": "kefir",
+                "_vectors": {
+                  "doggo_embedder": {
+                    "embeddings": "[vector]",
+                    "regenerate": false
+                  }
+                }
+              },
+              {
+                "id": 1,
+                "doggo": "echo",
+                "_vectors": {
+                  "doggo_embedder": {
+                    "embeddings": "[vector]",
+                    "regenerate": false
+                  }
+                }
+              },
+              {
+                "id": 2,
+                "doggo": "intel",
+                "_vectors": {
+                  "doggo_embedder": {
+                    "embeddings": "[vector]",
+                    "regenerate": true
+                  }
+                }
+              },
+              {
+                "id": 3,
+                "doggo": "bill",
+                "_vectors": {
+                  "doggo_embedder": {
+                    "embeddings": "[vector]",
+                    "regenerate": true
+                  }
+                }
+              },
+              {
+                "id": 4,
+                "doggo": "max",
+                "_vectors": {
+                  "doggo_embedder": {
+                    "embeddings": "[vector]",
+                    "regenerate": true
+                  }
+                }
+              }
+            ]
+            "###);
+        })
+        .await;
+}
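
Editorial note: the dump round-trip above derives the dump file name from the `dumpUid` reported by the finished dump task. A small sketch of that path construction (the example uid below is hypothetical):

    use std::path::{Path, PathBuf};

    fn dump_path(dump_dir: &Path, dump_uid: &str) -> PathBuf {
        // `<dump_dir>/<dumpUid>.dump`, as asserted by the test above.
        dump_dir.join(format!("{dump_uid}.dump"))
    }

    fn main() {
        let p = dump_path(Path::new("/dumps"), "20240101-120000000");
        assert_eq!(p, PathBuf::from("/dumps/20240101-120000000.dump"));
    }
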
@@ -0,0 +1,25 @@
+---
+source: meilisearch/tests/dumps/mod.rs
+---
+{
+  "uid": 0,
+  "indexUid": "pets",
+  "status": "succeeded",
+  "type": "settingsUpdate",
+  "canceledBy": null,
+  "details": {
+    "embedders": {
+      "doggo_embedder": {
+        "source": "huggingFace",
+        "model": "sentence-transformers/all-MiniLM-L6-v2",
+        "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
+        "documentTemplate": "{{doc.doggo}}"
+      }
+    }
+  },
+  "error": null,
+  "duration": "[duration]",
+  "enqueuedAt": "[date]",
+  "startedAt": "[date]",
+  "finishedAt": "[date]"
+}
@@ -0,0 +1,19 @@
+---
+source: meilisearch/tests/dumps/mod.rs
+---
+{
+  "uid": 1,
+  "indexUid": "pets",
+  "status": "succeeded",
+  "type": "documentAdditionOrUpdate",
+  "canceledBy": null,
+  "details": {
+    "receivedDocuments": 5,
+    "indexedDocuments": 5
+  },
+  "error": null,
+  "duration": "[duration]",
+  "enqueuedAt": "[date]",
+  "startedAt": "[date]",
+  "finishedAt": "[date]"
+}
@@ -1,6 +1,5 @@
-use actix_web::http::header::ContentType;
+use actix_web::http::header::{ContentType, ACCEPT_ENCODING};
 use actix_web::test;
-use http::header::ACCEPT_ENCODING;
 use meili_snap::{json_string, snapshot};
 use meilisearch::Opt;
 
@@ -13,6 +13,7 @@ mod snapshot;
 mod stats;
 mod swap_indexes;
 mod tasks;
+mod vector;
 
 // Tests are isolated by features in different modules to allow better readability, test
 // targetability, and improved incremental compilation times.
@@ -107,6 +107,39 @@ static DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     ])
 });
 
+static NESTED_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
+    json!([
+        {
+            "id": 1,
+            "description": "Leather Jacket",
+            "brand": "Lee Jeans",
+            "product_id": "123456",
+            "color": { "main": "Brown", "pattern": "stripped" },
+        },
+        {
+            "id": 2,
+            "description": "Leather Jacket",
+            "brand": "Lee Jeans",
+            "product_id": "123456",
+            "color": { "main": "Black", "pattern": "stripped" },
+        },
+        {
+            "id": 3,
+            "description": "Leather Jacket",
+            "brand": "Lee Jeans",
+            "product_id": "123456",
+            "color": { "main": "Blue", "pattern": "used" },
+        },
+        {
+            "id": 4,
+            "description": "T-Shirt",
+            "brand": "Nike",
+            "product_id": "789012",
+            "color": { "main": "Blue", "pattern": "stripped" },
+        }
+    ])
+});
+
 static DOCUMENT_PRIMARY_KEY: &str = "id";
 static DOCUMENT_DISTINCT_KEY: &str = "product_id";
 
@@ -239,3 +272,35 @@ async fn distinct_search_with_pagination_no_ranking() {
     snapshot!(response["totalPages"], @"2");
     snapshot!(response["totalHits"], @"6");
 }
+
+#[actix_rt::test]
+async fn distinct_at_search_time() {
+    let server = Server::new().await;
+    let index = server.index("tamo");
+
+    let documents = NESTED_DOCUMENTS.clone();
+    index.add_documents(documents, Some(DOCUMENT_PRIMARY_KEY)).await;
+    let (task, _) = index.update_settings_filterable_attributes(json!(["color.main"])).await;
+    let task = index.wait_task(task.uid()).await;
+    snapshot!(task, name: "succeed");
+
+    fn get_hits(response: &Value) -> Vec<String> {
+        let hits_array = response["hits"]
+            .as_array()
+            .unwrap_or_else(|| panic!("{}", &serde_json::to_string_pretty(&response).unwrap()));
+        hits_array
+            .iter()
+            .map(|h| h[DOCUMENT_PRIMARY_KEY].as_number().unwrap().to_string())
+            .collect::<Vec<_>>()
+    }
+
+    let (response, code) =
+        index.search_post(json!({"page": 1, "hitsPerPage": 3, "distinct": "color.main"})).await;
+    let hits = get_hits(&response);
+    snapshot!(code, @"200 OK");
+    snapshot!(hits.len(), @"3");
+    snapshot!(format!("{:?}", hits), @r###"["1", "2", "3"]"###);
+    snapshot!(response["page"], @"1");
+    snapshot!(response["totalPages"], @"1");
+    snapshot!(response["totalHits"], @"3");
+}
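
Editorial note: the test added above exercises per-request `distinct`. A minimal sketch of the request body it sends, using only `serde_json` (illustration only): `distinct` names an attribute, possibly a dotted nested path, and that attribute must be declared filterable for the search to succeed.

    use serde_json::json;

    fn main() {
        let body = json!({
            "page": 1,
            "hitsPerPage": 3,
            // must appear in `filterableAttributes`, e.g. set via
            // `update_settings_filterable_attributes(json!(["color.main"]))`
            "distinct": "color.main"
        });
        println!("{body}");
    }
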
@@ -71,7 +71,7 @@ async fn search_bad_offset() {
     }
     "###);
 
-    let (response, code) = index.search_get("offset=doggo").await;
+    let (response, code) = index.search_get("?offset=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -99,7 +99,7 @@ async fn search_bad_limit() {
     }
     "###);
 
-    let (response, code) = index.search_get("limit=doggo").await;
+    let (response, code) = index.search_get("?limit=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -127,7 +127,7 @@ async fn search_bad_page() {
     }
     "###);
 
-    let (response, code) = index.search_get("page=doggo").await;
+    let (response, code) = index.search_get("?page=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -155,7 +155,7 @@ async fn search_bad_hits_per_page() {
     }
     "###);
 
-    let (response, code) = index.search_get("hitsPerPage=doggo").await;
+    let (response, code) = index.search_get("?hitsPerPage=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -167,6 +167,74 @@ async fn search_bad_hits_per_page() {
     "###);
 }
 
+#[actix_rt::test]
+async fn search_bad_attributes_to_retrieve() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let (response, code) = index.search_post(json!({"attributesToRetrieve": "doggo"})).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid value type at `.attributesToRetrieve`: expected an array, but found a string: `\"doggo\"`",
+      "code": "invalid_search_attributes_to_retrieve",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_retrieve"
+    }
+    "###);
+    // Can't make the `attributes_to_retrieve` fail with a get search since it'll accept anything as an array of strings.
+}
+
+#[actix_rt::test]
+async fn search_bad_retrieve_vectors() {
+    let server = Server::new().await;
+    let index = server.index("test");
+
+    let (response, code) = index.search_post(json!({"retrieveVectors": "doggo"})).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid value type at `.retrieveVectors`: expected a boolean, but found a string: `\"doggo\"`",
+      "code": "invalid_search_retrieve_vectors",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_retrieve_vectors"
+    }
+    "###);
+
+    let (response, code) = index.search_post(json!({"retrieveVectors": [true]})).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid value type at `.retrieveVectors`: expected a boolean, but found an array: `[true]`",
+      "code": "invalid_search_retrieve_vectors",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_retrieve_vectors"
+    }
+    "###);
+
+    let (response, code) = index.search_get("?retrieveVectors=").await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid value in parameter `retrieveVectors`: could not parse `` as a boolean, expected either `true` or `false`",
+      "code": "invalid_search_retrieve_vectors",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_retrieve_vectors"
+    }
+    "###);
+
+    let (response, code) = index.search_get("?retrieveVectors=doggo").await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(json_string!(response), @r###"
+    {
+      "message": "Invalid value in parameter `retrieveVectors`: could not parse `doggo` as a boolean, expected either `true` or `false`",
+      "code": "invalid_search_retrieve_vectors",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_retrieve_vectors"
+    }
+    "###);
+}
+
 #[actix_rt::test]
 async fn search_bad_attributes_to_crop() {
     let server = Server::new().await;
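
Editorial note: the GET cases above assert a strict boolean parse for `retrieveVectors`. A sketch matching the asserted error message (a hypothetical helper, not the meilisearch implementation):

    fn parse_retrieve_vectors(raw: &str) -> Result<bool, String> {
        // Only the literal strings `true` and `false` are accepted.
        match raw {
            "true" => Ok(true),
            "false" => Ok(false),
            other => Err(format!(
                "could not parse `{other}` as a boolean, expected either `true` or `false`"
            )),
        }
    }

    fn main() {
        assert_eq!(parse_retrieve_vectors("true"), Ok(true));
        assert!(parse_retrieve_vectors("").is_err());
        assert!(parse_retrieve_vectors("doggo").is_err());
    }
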
@@ -201,7 +269,7 @@ async fn search_bad_crop_length() {
     }
     "###);
 
-    let (response, code) = index.search_get("cropLength=doggo").await;
+    let (response, code) = index.search_get("?cropLength=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -291,7 +359,7 @@ async fn search_bad_show_matches_position() {
     }
     "###);
 
-    let (response, code) = index.search_get("showMatchesPosition=doggo").await;
+    let (response, code) = index.search_get("?showMatchesPosition=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -374,7 +442,7 @@ async fn search_non_filterable_facets() {
     }
     "###);
 
-    let (response, code) = index.search_get("facets=doggo").await;
+    let (response, code) = index.search_get("?facets=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -404,7 +472,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
     }
     "###);
 
-    let (response, code) = index.search_get("facets=doggo").await;
+    let (response, code) = index.search_get("?facets=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -434,7 +502,7 @@ async fn search_non_filterable_facets_no_filterable() {
     }
     "###);
 
-    let (response, code) = index.search_get("facets=doggo").await;
+    let (response, code) = index.search_get("?facets=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -464,7 +532,7 @@ async fn search_non_filterable_facets_multiple_facets() {
     }
     "###);
 
-    let (response, code) = index.search_get("facets=doggo,neko").await;
+    let (response, code) = index.search_get("?facets=doggo,neko").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -557,7 +625,7 @@ async fn search_bad_matching_strategy() {
     }
     "###);
 
-    let (response, code) = index.search_get("matchingStrategy=doggo").await;
+    let (response, code) = index.search_get("?matchingStrategy=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
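
Editorial note: every hunk above adds a leading `?` to the argument of `search_get`, which suggests the test helper now appends its argument to the route verbatim. A sketch of that assumption (`search_url` is a hypothetical name, not a helper from the codebase):

    fn search_url(index_uid: &str, query: &str) -> String {
        // "?offset=doggo" -> "/indexes/test/search?offset=doggo"
        format!("/indexes/{index_uid}/search{query}")
    }

    fn main() {
        assert_eq!(search_url("test", "?offset=doggo"), "/indexes/test/search?offset=doggo");
    }
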
@@ -1072,3 +1140,66 @@ async fn search_on_unknown_field_plus_joker() {
     )
     .await;
 }
+
+#[actix_rt::test]
+async fn distinct_at_search_time() {
+    let server = Server::new().await;
+    let index = server.index("tamo");
+    let (task, _) = index.create(None).await;
+    let task = index.wait_task(task.uid()).await;
+    snapshot!(task, name: "task-succeed");
+
+    let (response, code) =
+        index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(response, @r###"
+    {
+      "message": "Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
+      "code": "invalid_search_distinct",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
+    }
+    "###);
+
+    let (task, _) = index.update_settings_filterable_attributes(json!(["color", "machin"])).await;
+    index.wait_task(task.uid()).await;
+
+    let (response, code) =
+        index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(response, @r###"
+    {
+      "message": "Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, machin`.",
+      "code": "invalid_search_distinct",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
+    }
+    "###);
+
+    let (task, _) = index.update_settings_displayed_attributes(json!(["color"])).await;
+    index.wait_task(task.uid()).await;
+
+    let (response, code) =
+        index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(response, @r###"
+    {
+      "message": "Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, <..hidden-attributes>`.",
+      "code": "invalid_search_distinct",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
+    }
+    "###);
+
+    let (response, code) =
+        index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": true})).await;
+    snapshot!(code, @"400 Bad Request");
+    snapshot!(response, @r###"
+    {
+      "message": "Invalid value type at `.distinct`: expected a string, but found a boolean: `true`",
+      "code": "invalid_search_distinct",
+      "type": "invalid_request",
+      "link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
+    }
+    "###);
+}
@@ -150,7 +150,8 @@ async fn bug_4640() {
         "_geo": {
           "lat": "45.4777599",
           "lng": "9.1967508"
-        }
+        },
+        "_geoDistance": 0
       },
       {
         "id": 1,
@@ -124,32 +124,61 @@ async fn simple_search() {
 
     let (response, code) = index
         .search_post(
-            json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.2}}),
+            json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.2}, "retrieveVectors": true}),
         )
         .await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]}},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]}},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]}}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}}},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}}},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}}}]"###);
     snapshot!(response["semanticHitCount"], @"0");
 
     let (response, code) = index
         .search_post(
-            json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5}, "showRankingScore": true}),
+            json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.5}, "showRankingScore": true, "retrieveVectors": true}),
         )
         .await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":0.9848484848484848},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":0.9472135901451112}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.9848484848484848},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9472135901451112}]"###);
     snapshot!(response["semanticHitCount"], @"2");
 
     let (response, code) = index
         .search_post(
-            json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.8}, "showRankingScore": true}),
+            json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.8}, "showRankingScore": true, "retrieveVectors": true}),
        )
        .await;
    snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":0.9472135901451112}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9472135901451112}]"###);
     snapshot!(response["semanticHitCount"], @"3");
 }
+
+#[actix_rt::test]
+async fn limit_offset() {
+    let server = Server::new().await;
+    let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+
+    let (response, code) = index
+        .search_post(
+            json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.2}, "retrieveVectors": true, "offset": 1, "limit": 1}),
+        )
+        .await;
+    snapshot!(code, @"200 OK");
+    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}}}]"###);
+    snapshot!(response["semanticHitCount"], @"0");
+    assert_eq!(response["hits"].as_array().unwrap().len(), 1);
+
+    let server = Server::new().await;
+    let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
+
+    let (response, code) = index
+        .search_post(
+            json!({"q": "Captain", "vector": [1.0, 1.0], "hybrid": {"semanticRatio": 0.9}, "retrieveVectors": true, "offset": 1, "limit": 1}),
+        )
+        .await;
+    snapshot!(code, @"200 OK");
+    snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}}}]"###);
+    snapshot!(response["semanticHitCount"], @"1");
+    assert_eq!(response["hits"].as_array().unwrap().len(), 1);
+}
+
 #[actix_rt::test]
 async fn simple_search_hf() {
     let server = Server::new().await;
@@ -204,10 +233,10 @@ async fn distribution_shift() {
     let server = Server::new().await;
     let index = index_with_documents_user_provided(&server, &SIMPLE_SEARCH_DOCUMENTS_VEC).await;
 
-    let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"semanticRatio": 1.0}});
+    let search = json!({"q": "Captain", "vector": [1.0, 1.0], "showRankingScore": true, "hybrid": {"semanticRatio": 1.0}, "retrieveVectors": true});
     let (response, code) = index.search_post(search.clone()).await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":0.9472135901451112}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9472135901451112}]"###);
 
     let (response, code) = index
         .update_settings(json!({
@@ -228,7 +257,7 @@ async fn distribution_shift() {
 
     let (response, code) = index.search_post(search).await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_rankingScore":0.19161224365234375},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":1.1920928955078125e-7},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":1.1920928955078125e-7}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.19161224365234375},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":1.1920928955078125e-7},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":1.1920928955078125e-7}]"###);
 }
 
 #[actix_rt::test]
@@ -239,20 +268,23 @@ async fn highlighter() {
     let (response, code) = index
         .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
         "hybrid": {"semanticRatio": 0.2},
-        "attributesToHighlight": [
-            "desc"
+        "retrieveVectors": true,
+        "attributesToHighlight": [
+            "desc",
+            "_vectors",
         ],
         "highlightPreTag": "**BEGIN**",
-        "highlightPostTag": "**END**"
+        "highlightPostTag": "**END**",
         }))
         .await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_formatted":{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":["2.0","3.0"]}}},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_formatted":{"title":"Shazam!","desc":"a **BEGIN**Captain**END** **BEGIN**Marvel**END** ersatz","id":"1","_vectors":{"default":["1.0","3.0"]}}},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_formatted":{"title":"Captain Planet","desc":"He's not part of the **BEGIN**Marvel**END** Cinematic Universe","id":"2","_vectors":{"default":["1.0","2.0"]}}}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_formatted":{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3"}},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_formatted":{"title":"Shazam!","desc":"a **BEGIN**Captain**END** **BEGIN**Marvel**END** ersatz","id":"1"}},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_formatted":{"title":"Captain Planet","desc":"He's not part of the **BEGIN**Marvel**END** Cinematic Universe","id":"2"}}]"###);
     snapshot!(response["semanticHitCount"], @"0");
 
     let (response, code) = index
         .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
         "hybrid": {"semanticRatio": 0.8},
+        "retrieveVectors": true,
         "showRankingScore": true,
         "attributesToHighlight": [
             "desc"
@@ -262,13 +294,14 @@ async fn highlighter() {
         }))
         .await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_formatted":{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":["2.0","3.0"]}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_formatted":{"title":"Captain Planet","desc":"He's not part of the **BEGIN**Marvel**END** Cinematic Universe","id":"2","_vectors":{"default":["1.0","2.0"]}},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_formatted":{"title":"Shazam!","desc":"a **BEGIN**Captain**END** **BEGIN**Marvel**END** ersatz","id":"1","_vectors":{"default":["1.0","3.0"]}},"_rankingScore":0.9472135901451112}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_formatted":{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3"},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_formatted":{"title":"Captain Planet","desc":"He's not part of the **BEGIN**Marvel**END** Cinematic Universe","id":"2"},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_formatted":{"title":"Shazam!","desc":"a **BEGIN**Captain**END** **BEGIN**Marvel**END** ersatz","id":"1"},"_rankingScore":0.9472135901451112}]"###);
     snapshot!(response["semanticHitCount"], @"3");
 
     // no highlighting on full semantic
     let (response, code) = index
         .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
         "hybrid": {"semanticRatio": 1.0},
+        "retrieveVectors": true,
         "showRankingScore": true,
         "attributesToHighlight": [
             "desc"
@@ -278,7 +311,7 @@ async fn highlighter() {
         }))
        .await;
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_formatted":{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":["2.0","3.0"]}},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_formatted":{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":["1.0","2.0"]}},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_formatted":{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":["1.0","3.0"]}},"_rankingScore":0.9472135901451112}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_formatted":{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3"},"_rankingScore":0.990290343761444},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_formatted":{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2"},"_rankingScore":0.974341630935669},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_formatted":{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1"},"_rankingScore":0.9472135901451112}]"###);
     snapshot!(response["semanticHitCount"], @"3");
 }
 
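
Editorial note: the highlighter snapshots above also pin down where vectors live in a hit: `_vectors` stays at the hit's top level, while `_formatted` no longer carries a `_vectors` entry even when it is listed in `attributesToHighlight`. A minimal sketch of that shape, using only `serde_json` (illustration only):

    use serde_json::json;

    fn main() {
        let hit = json!({
            "title": "Captain Marvel",
            "_vectors": { "default": { "embeddings": [[2.0, 3.0]], "regenerate": false } },
            "_formatted": { "title": "Captain Marvel", "desc": "a Shazam ersatz", "id": "3" }
        });
        // Vectors are never part of the formatted/highlighted view.
        assert!(hit["_formatted"].get("_vectors").is_none());
    }
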
@@ -361,12 +394,12 @@ async fn single_document() {
 
     let (response, code) = index
         .search_post(
-            json!({"vector": [1.0, 3.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true}),
+            json!({"vector": [1.0, 3.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}),
         )
         .await;
 
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"][0], @r###"{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":1.0}"###);
+    snapshot!(response["hits"][0], @r###"{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":1.0}"###);
     snapshot!(response["semanticHitCount"], @"1");
 }
 
@@ -377,25 +410,25 @@ async fn query_combination() {
 
     // search without query and vector, but with hybrid => still placeholder
     let (response, code) = index
-        .search_post(json!({"hybrid": {"semanticRatio": 1.0}, "showRankingScore": true}))
+        .search_post(json!({"hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}))
         .await;
 
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":1.0},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":1.0},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_rankingScore":1.0}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":1.0},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":1.0},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":1.0}]"###);
     snapshot!(response["semanticHitCount"], @"null");
 
     // same with a different semantic ratio
     let (response, code) = index
-        .search_post(json!({"hybrid": {"semanticRatio": 0.76}, "showRankingScore": true}))
+        .search_post(json!({"hybrid": {"semanticRatio": 0.76}, "showRankingScore": true, "retrieveVectors": true}))
         .await;
 
     snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":1.0},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":1.0},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_rankingScore":1.0}]"###);
+    snapshot!(response["hits"], @r###"[{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":1.0},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":1.0},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":1.0}]"###);
     snapshot!(response["semanticHitCount"], @"null");
 
     // wrong vector dimensions
     let (response, code) = index
-        .search_post(json!({"vector": [1.0, 0.0, 1.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true}))
+        .search_post(json!({"vector": [1.0, 0.0, 1.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}))
         .await;
 
     snapshot!(code, @"400 Bad Request");
@@ -410,34 +443,34 @@ async fn query_combination() {
|
|||||||
|
|
||||||
// full vector
|
// full vector
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.search_post(json!({"vector": [1.0, 0.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true}))
|
.search_post(json!({"vector": [1.0, 0.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}))
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
snapshot!(code, @"200 OK");
|
snapshot!(code, @"200 OK");
|
||||||
snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_rankingScore":0.7773500680923462},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":0.7236068248748779},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":0.6581138968467712}]"###);
|
snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.7773500680923462},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.7236068248748779},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.6581138968467712}]"###);
|
||||||
snapshot!(response["semanticHitCount"], @"3");
|
snapshot!(response["semanticHitCount"], @"3");
|
||||||
|
|
||||||
// full keyword, without a query
|
// full keyword, without a query
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.search_post(json!({"vector": [1.0, 0.0], "hybrid": {"semanticRatio": 0.0}, "showRankingScore": true}))
|
.search_post(json!({"vector": [1.0, 0.0], "hybrid": {"semanticRatio": 0.0}, "showRankingScore": true, "retrieveVectors": true}))
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
snapshot!(code, @"200 OK");
|
snapshot!(code, @"200 OK");
|
||||||
snapshot!(response["hits"], @r###"[{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":1.0},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":1.0},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_rankingScore":1.0}]"###);
|
snapshot!(response["hits"], @r###"[{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":1.0},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":1.0},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":1.0}]"###);
|
||||||
snapshot!(response["semanticHitCount"], @"null");
|
snapshot!(response["semanticHitCount"], @"null");
|
||||||
|
|
||||||
// query + vector, full keyword => keyword
|
// query + vector, full keyword => keyword
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.search_post(json!({"q": "Captain", "vector": [1.0, 0.0], "hybrid": {"semanticRatio": 0.0}, "showRankingScore": true}))
|
.search_post(json!({"q": "Captain", "vector": [1.0, 0.0], "hybrid": {"semanticRatio": 0.0}, "showRankingScore": true, "retrieveVectors": true}))
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
snapshot!(code, @"200 OK");
|
snapshot!(code, @"200 OK");
|
||||||
snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":0.9848484848484848},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_rankingScore":0.9848484848484848},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":0.9242424242424242}]"###);
|
snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.9848484848484848},{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":{"embeddings":[[2.0,3.0]],"regenerate":false}},"_rankingScore":0.9848484848484848},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":{"embeddings":[[1.0,3.0]],"regenerate":false}},"_rankingScore":0.9242424242424242}]"###);
|
||||||
snapshot!(response["semanticHitCount"], @"null");
|
snapshot!(response["semanticHitCount"], @"null");
|
||||||
|
|
||||||
// query + vector, no hybrid keyword =>
|
// query + vector, no hybrid keyword =>
|
||||||
let (response, code) = index
|
let (response, code) = index
|
||||||
.search_post(json!({"q": "Captain", "vector": [1.0, 0.0], "showRankingScore": true}))
|
.search_post(json!({"q": "Captain", "vector": [1.0, 0.0], "showRankingScore": true, "retrieveVectors": true}))
|
||||||
.await;
|
.await;
|
||||||
|
|
||||||
snapshot!(code, @"400 Bad Request");
|
snapshot!(code, @"400 Bad Request");
|
||||||
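The hybrid-search assertions above pin down how semanticHitCount behaves at the extremes of semanticRatio. The small runnable sketch below collects the observed pairs in one place; the interpretation in the comments is a hedged reading of these snapshots, not wording taken from the test suite.

fn main() {
    // (semanticRatio, semanticHitCount) as asserted by the snapshots above.
    let observed: &[(f64, Option<u64>, &str)] = &[
        (1.0, Some(3), "full vector search: every hit counts as semantic"),
        (0.0, None, "full keyword search: the field stays null"),
    ];
    for (ratio, count, note) in observed {
        println!("semanticRatio {ratio} => semanticHitCount {count:?} ({note})");
    }
}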
@@ -453,7 +486,7 @@ async fn query_combination() {

 // full vector, without a vector => error
 let (response, code) = index
 .search_post(
-json!({"q": "Captain", "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true}),
+json!({"q": "Captain", "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true, "retrieveVectors": true}),
 )
 .await;

@@ -470,11 +503,93 @@ async fn query_combination() {

 // hybrid without a vector => full keyword
 let (response, code) = index
 .search_post(
-json!({"q": "Planet", "hybrid": {"semanticRatio": 0.99}, "showRankingScore": true}),
+json!({"q": "Planet", "hybrid": {"semanticRatio": 0.99}, "showRankingScore": true, "retrieveVectors": true}),
 )
 .await;

 snapshot!(code, @"200 OK");
-snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_rankingScore":0.9242424242424242}]"###);
+snapshot!(response["hits"], @r###"[{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":{"embeddings":[[1.0,2.0]],"regenerate":false}},"_rankingScore":0.9242424242424242}]"###);
 snapshot!(response["semanticHitCount"], @"0");
 }

+#[actix_rt::test]
+async fn retrieve_vectors() {
+let server = Server::new().await;
+let index = index_with_documents_hf(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
+
+let (response, code) = index
+.search_post(
+json!({"q": "Captain", "hybrid": {"semanticRatio": 0.2}, "retrieveVectors": true}),
+)
+.await;
+snapshot!(code, @"200 OK");
+insta::assert_json_snapshot!(response["hits"], {"[]._vectors.default.embeddings" => "[vectors]"}, @r###"
+[
+{
+"title": "Captain Planet",
+"desc": "He's not part of the Marvel Cinematic Universe",
+"id": "2",
+"_vectors": {
+"default": {
+"embeddings": "[vectors]",
+"regenerate": true
+}
+}
+},
+{
+"title": "Captain Marvel",
+"desc": "a Shazam ersatz",
+"id": "3",
+"_vectors": {
+"default": {
+"embeddings": "[vectors]",
+"regenerate": true
+}
+}
+},
+{
+"title": "Shazam!",
+"desc": "a Captain Marvel ersatz",
+"id": "1",
+"_vectors": {
+"default": {
+"embeddings": "[vectors]",
+"regenerate": true
+}
+}
+}
+]
+"###);
+
+// remove `_vectors` from displayed attributes
+let (response, code) =
+index.update_settings(json!({ "displayedAttributes": ["id", "title", "desc"]} )).await;
+assert_eq!(202, code, "{:?}", response);
+index.wait_task(response.uid()).await;
+
+let (response, code) = index
+.search_post(
+json!({"q": "Captain", "hybrid": {"semanticRatio": 0.2}, "retrieveVectors": true}),
+)
+.await;
+snapshot!(code, @"200 OK");
+insta::assert_json_snapshot!(response["hits"], {"[]._vectors.default.embeddings" => "[vectors]"}, @r###"
+[
+{
+"title": "Captain Planet",
+"desc": "He's not part of the Marvel Cinematic Universe",
+"id": "2"
+},
+{
+"title": "Captain Marvel",
+"desc": "a Shazam ersatz",
+"id": "3"
+},
+{
+"title": "Shazam!",
+"desc": "a Captain Marvel ersatz",
+"id": "1"
+}
+]
+"###);
+}
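The new retrieve_vectors test keeps its inline snapshot stable by redacting the embedding arrays, which vary from run to run with the hf embedder. A minimal standalone sketch of the same redaction pattern, assuming the insta crate with its json and redactions features plus serde_json:

use serde_json::json;

#[test]
fn redacted_vectors_snapshot() {
    // One hit shaped like the responses above; the selector replaces the
    // model-dependent embedding values with a fixed "[vectors]" placeholder
    // so the stored snapshot stays stable across runs.
    let hit = json!({
        "id": "2",
        "_vectors": { "default": { "embeddings": [[0.1, 0.2]], "regenerate": true } }
    });
    insta::assert_json_snapshot!(hit, {
        "._vectors.default.embeddings" => "[vectors]"
    });
}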
@@ -301,7 +301,7 @@ async fn negative_special_cases_search() {

 index.add_documents(documents, None).await;
 index.wait_task(0).await;

-index.update_settings(json!({"synonyms": { "escape": ["glass"] }})).await;
+index.update_settings(json!({"synonyms": { "escape": ["gläss"] }})).await;
 index.wait_task(1).await;

 // There is a synonym for escape -> glass but we don't want "escape", only the derivates: glass

@@ -1290,21 +1290,38 @@ async fn experimental_feature_vector_store() {

 index.add_documents(json!(documents), None).await;
 index.wait_task(0).await;

-let (response, code) = index
-.search_post(json!({
+index
+.search(json!({
 "vector": [1.0, 2.0, 3.0],
 "showRankingScore": true
-}))
+}), |response, code|{
+meili_snap::snapshot!(code, @"400 Bad Request");
+meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+{
+"message": "Passing `vector` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
+"code": "feature_not_enabled",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
+}
+"###);
+})
+.await;
+index
+.search(json!({
+"retrieveVectors": true,
+"showRankingScore": true
+}), |response, code|{
+meili_snap::snapshot!(code, @"400 Bad Request");
+meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+{
+"message": "Passing `retrieveVectors` as a parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
+"code": "feature_not_enabled",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
+}
+"###);
+})
 .await;
-meili_snap::snapshot!(code, @"400 Bad Request");
-meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
-{
-"message": "Passing `vector` as a query parameter requires enabling the `vector store` experimental feature. See https://github.com/meilisearch/product/discussions/677",
-"code": "feature_not_enabled",
-"type": "invalid_request",
-"link": "https://docs.meilisearch.com/errors#feature_not_enabled"
-}
-"###);

 let (response, code) = server.set_features(json!({"vectorStore": true})).await;
 meili_snap::snapshot!(code, @"200 OK");
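This hunk rewrites the feature-gate checks from a bare search_post call into a search helper that receives the assertions as a closure. The sketch below shows the shape of that pattern in self-contained form; the struct, stubbed HTTP call, and u16 status type are illustrative stand-ins (assuming tokio and serde_json), not the actual test harness.

use serde_json::{json, Value};

struct Index;

impl Index {
    // stand-in for the real HTTP round-trip; returns (body, status code)
    async fn search_post(&self, _query: Value) -> (Value, u16) {
        (json!({"hits": []}), 200)
    }

    // run the query once, then hand the result to the caller's assertions
    async fn search(&self, query: Value, test: impl Fn(Value, u16)) {
        let (response, code) = self.search_post(query).await;
        test(response, code);
    }
}

#[tokio::main]
async fn main() {
    let index = Index;
    index
        .search(json!({"q": "Captain"}), |response, code| {
            assert_eq!(code, 200);
            assert!(response["hits"].is_array());
        })
        .await;
}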
@@ -1337,6 +1354,7 @@ async fn experimental_feature_vector_store() {

 .search_post(json!({
 "vector": [1.0, 2.0, 3.0],
 "showRankingScore": true,
+"retrieveVectors": true,
 }))
 .await;

@@ -1348,11 +1366,16 @@ async fn experimental_feature_vector_store() {

 "title": "Shazam!",
 "id": "287947",
 "_vectors": {
-"manual": [
-1.0,
-2.0,
-3.0
-]
+"manual": {
+"embeddings": [
+[
+1.0,
+2.0,
+3.0
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 1.0
 },

@@ -1360,11 +1383,16 @@ async fn experimental_feature_vector_store() {

 "title": "Captain Marvel",
 "id": "299537",
 "_vectors": {
-"manual": [
-1.0,
-2.0,
-54.0
-]
+"manual": {
+"embeddings": [
+[
+1.0,
+2.0,
+54.0
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.9129111766815186
 },

@@ -1372,11 +1400,16 @@ async fn experimental_feature_vector_store() {

 "title": "Gläss",
 "id": "450465",
 "_vectors": {
-"manual": [
--100.0,
-340.0,
-90.0
-]
+"manual": {
+"embeddings": [
+[
+-100.0,
+340.0,
+90.0
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.8106412887573242
 },

@@ -1384,11 +1417,16 @@ async fn experimental_feature_vector_store() {

 "title": "How to Train Your Dragon: The Hidden World",
 "id": "166428",
 "_vectors": {
-"manual": [
--100.0,
-231.0,
-32.0
-]
+"manual": {
+"embeddings": [
+[
+-100.0,
+231.0,
+32.0
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.7412010431289673
 },

@@ -1396,11 +1434,16 @@ async fn experimental_feature_vector_store() {

 "title": "Escape Room",
 "id": "522681",
 "_vectors": {
-"manual": [
-10.0,
--23.0,
-32.0
-]
+"manual": {
+"embeddings": [
+[
+10.0,
+-23.0,
+32.0
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.6972063183784485
 }
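Every _vectors hunk above follows the same migration: the bare per-embedder array becomes an object whose embeddings field holds a list of vectors and whose regenerate flag records whether the embedding should be recomputed. A small sketch of the two shapes side by side, using only field names taken from the snapshots:

use serde_json::json;

fn main() {
    let old = json!({ "manual": [1.0, 2.0, 3.0] });
    let new = json!({
        "manual": {
            "embeddings": [[1.0, 2.0, 3.0]],
            "regenerate": false
        }
    });
    // the single old vector becomes the first entry of `embeddings`
    assert_eq!(old["manual"], new["manual"]["embeddings"][0]);
}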
@@ -0,0 +1,20 @@
+---
+source: meilisearch/tests/search/distinct.rs
+---
+{
+"uid": 1,
+"indexUid": "tamo",
+"status": "succeeded",
+"type": "settingsUpdate",
+"canceledBy": null,
+"details": {
+"filterableAttributes": [
+"color.main"
+]
+},
+"error": null,
+"duration": "[duration]",
+"enqueuedAt": "[date]",
+"startedAt": "[date]",
+"finishedAt": "[date]"
+}

@@ -0,0 +1,18 @@
+---
+source: meilisearch/tests/search/errors.rs
+---
+{
+"uid": 0,
+"indexUid": "tamo",
+"status": "succeeded",
+"type": "indexCreation",
+"canceledBy": null,
+"details": {
+"primaryKey": null
+},
+"error": null,
+"duration": "[duration]",
+"enqueuedAt": "[date]",
+"startedAt": "[date]",
+"finishedAt": "[date]"
+}
@@ -241,7 +241,7 @@ async fn similar_bad_offset() {

 }
 "###);

-let (response, code) = index.similar_get("id=287947&offset=doggo").await;
+let (response, code) = index.similar_get("?id=287947&offset=doggo").await;
 snapshot!(code, @"400 Bad Request");
 snapshot!(json_string!(response), @r###"
 {

@@ -283,7 +283,7 @@ async fn similar_bad_limit() {

 }
 "###);

-let (response, code) = index.similar_get("id=287946&limit=doggo").await;
+let (response, code) = index.similar_get("?id=287946&limit=doggo").await;
 snapshot!(code, @"400 Bad Request");
 snapshot!(json_string!(response), @r###"
 {

@@ -756,3 +756,54 @@ async fn filter_reserved_geo_point_string() {

 })
 .await;
 }
+
+#[actix_rt::test]
+async fn similar_bad_retrieve_vectors() {
+let server = Server::new().await;
+server.set_features(json!({"vectorStore": true})).await;
+let index = server.index("test");
+
+let (response, code) = index.similar_post(json!({"retrieveVectors": "doggo"})).await;
+snapshot!(code, @"400 Bad Request");
+snapshot!(json_string!(response), @r###"
+{
+"message": "Invalid value type at `.retrieveVectors`: expected a boolean, but found a string: `\"doggo\"`",
+"code": "invalid_similar_retrieve_vectors",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#invalid_similar_retrieve_vectors"
+}
+"###);
+
+let (response, code) = index.similar_post(json!({"retrieveVectors": [true]})).await;
+snapshot!(code, @"400 Bad Request");
+snapshot!(json_string!(response), @r###"
+{
+"message": "Invalid value type at `.retrieveVectors`: expected a boolean, but found an array: `[true]`",
+"code": "invalid_similar_retrieve_vectors",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#invalid_similar_retrieve_vectors"
+}
+"###);
+
+let (response, code) = index.similar_get("?retrieveVectors=").await;
+snapshot!(code, @"400 Bad Request");
+snapshot!(json_string!(response), @r###"
+{
+"message": "Invalid value in parameter `retrieveVectors`: could not parse `` as a boolean, expected either `true` or `false`",
+"code": "invalid_similar_retrieve_vectors",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#invalid_similar_retrieve_vectors"
+}
+"###);
+
+let (response, code) = index.similar_get("?retrieveVectors=doggo").await;
+snapshot!(code, @"400 Bad Request");
+snapshot!(json_string!(response), @r###"
+{
+"message": "Invalid value in parameter `retrieveVectors`: could not parse `doggo` as a boolean, expected either `true` or `false`",
+"code": "invalid_similar_retrieve_vectors",
+"type": "invalid_request",
+"link": "https://docs.meilisearch.com/errors#invalid_similar_retrieve_vectors"
+}
+"###);
+}
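The error messages in the new similar_bad_retrieve_vectors test spell out the parsing rule for boolean query parameters: only the literal strings true and false are accepted. A sketch of that rule (the function name is ours; the message text is copied from the snapshots above):

fn parse_bool_param(raw: &str) -> Result<bool, String> {
    match raw {
        "true" => Ok(true),
        "false" => Ok(false),
        other => Err(format!(
            "could not parse `{other}` as a boolean, expected either `true` or `false`"
        )),
    }
}

fn main() {
    assert_eq!(parse_bool_param("true"), Ok(true));
    assert!(parse_bool_param("").is_err());      // `?retrieveVectors=`
    assert!(parse_bool_param("doggo").is_err()); // `?retrieveVectors=doggo`
}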
@@ -78,7 +78,7 @@ async fn basic() {

 index.wait_task(value.uid()).await;

 index
-.similar(json!({"id": 143}), |response, code| {
+.similar(json!({"id": 143, "retrieveVectors": true}), |response, code| {
 snapshot!(code, @"200 OK");
 snapshot!(json_string!(response["hits"]), @r###"
 [

@@ -87,11 +87,16 @@ async fn basic() {

 "release_year": 2019,
 "id": "522681",
 "_vectors": {
-"manual": [
-0.1,
-0.6,
-0.8
-]
+"manual": {
+"embeddings": [
+[
+0.10000000149011612,
+0.6000000238418579,
+0.800000011920929
+]
+],
+"regenerate": false
+}
 }
 },
 {

@@ -99,11 +104,16 @@ async fn basic() {

 "release_year": 2019,
 "id": "299537",
 "_vectors": {
-"manual": [
-0.6,
-0.8,
--0.2
-]
+"manual": {
+"embeddings": [
+[
+0.6000000238418579,
+0.800000011920929,
+-0.20000000298023224
+]
+],
+"regenerate": false
+}
 }
 },
 {

@@ -111,11 +121,16 @@ async fn basic() {

 "release_year": 2019,
 "id": "166428",
 "_vectors": {
-"manual": [
-0.7,
-0.7,
--0.4
-]
+"manual": {
+"embeddings": [
+[
+0.699999988079071,
+0.699999988079071,
+-0.4000000059604645
+]
+],
+"regenerate": false
+}
 }
 },
 {

@@ -123,11 +138,16 @@ async fn basic() {

 "release_year": 2019,
 "id": "287947",
 "_vectors": {
-"manual": [
-0.8,
-0.4,
--0.5
-]
+"manual": {
+"embeddings": [
+[
+0.800000011920929,
+0.4000000059604645,
+-0.5
+]
+],
+"regenerate": false
+}
 }
 }
 ]

@@ -136,7 +156,7 @@ async fn basic() {

 .await;

 index
-.similar(json!({"id": "299537"}), |response, code| {
+.similar(json!({"id": "299537", "retrieveVectors": true}), |response, code| {
 snapshot!(code, @"200 OK");
 snapshot!(json_string!(response["hits"]), @r###"
 [

@@ -145,11 +165,16 @@ async fn basic() {

 "release_year": 2019,
 "id": "166428",
 "_vectors": {
-"manual": [
-0.7,
-0.7,
--0.4
-]
+"manual": {
+"embeddings": [
+[
+0.699999988079071,
+0.699999988079071,
+-0.4000000059604645
+]
+],
+"regenerate": false
+}
 }
 },
 {

@@ -157,11 +182,16 @@ async fn basic() {

 "release_year": 2019,
 "id": "287947",
 "_vectors": {
-"manual": [
-0.8,
-0.4,
--0.5
-]
+"manual": {
+"embeddings": [
+[
+0.800000011920929,
+0.4000000059604645,
+-0.5
+]
+],
+"regenerate": false
+}
 }
 },
 {

@@ -169,11 +199,16 @@ async fn basic() {

 "release_year": 2019,
 "id": "522681",
 "_vectors": {
-"manual": [
-0.1,
-0.6,
-0.8
-]
+"manual": {
+"embeddings": [
+[
+0.10000000149011612,
+0.6000000238418579,
+0.800000011920929
+]
+],
+"regenerate": false
+}
 }
 },
 {

@@ -181,11 +216,16 @@ async fn basic() {

 "release_year": 1930,
 "id": "143",
 "_vectors": {
-"manual": [
--0.5,
-0.3,
-0.85
-]
+"manual": {
+"embeddings": [
+[
+-0.5,
+0.30000001192092896,
+0.8500000238418579
+]
+],
+"regenerate": false
+}
 }
 }
 ]
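The expected values in these snapshots change from 0.1 to 0.10000000149011612 and so on. That pattern is consistent with embeddings being stored as f32 and widened to f64 when serialized to JSON, which exposes the f32 rounding; a quick check of that assumption:

fn main() {
    // 0.1 is not exactly representable; the nearest f32, printed as f64,
    // is exactly the value the new snapshots expect.
    let stored: f32 = 0.1;
    assert_eq!((stored as f64).to_string(), "0.10000000149011612");

    let stored: f32 = -0.2;
    assert_eq!((stored as f64).to_string(), "-0.20000000298023224");
}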
@@ -228,7 +268,7 @@ async fn ranking_score_threshold() {

 index
 .similar(
-json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0}),
+json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0, "retrieveVectors": true}),
 |response, code| {
 snapshot!(code, @"200 OK");
 meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"4");

@@ -239,11 +279,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "522681",
 "_vectors": {
-"manual": [
-0.1,
-0.6,
-0.8
-]
+"manual": {
+"embeddings": [
+[
+0.10000000149011612,
+0.6000000238418579,
+0.800000011920929
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.890957772731781
 },

@@ -252,11 +297,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "299537",
 "_vectors": {
-"manual": [
-0.6,
-0.8,
--0.2
-]
+"manual": {
+"embeddings": [
+[
+0.6000000238418579,
+0.800000011920929,
+-0.20000000298023224
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.39060014486312866
 },

@@ -265,11 +315,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "166428",
 "_vectors": {
-"manual": [
-0.7,
-0.7,
--0.4
-]
+"manual": {
+"embeddings": [
+[
+0.699999988079071,
+0.699999988079071,
+-0.4000000059604645
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.2819308042526245
 },

@@ -278,11 +333,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "287947",
 "_vectors": {
-"manual": [
-0.8,
-0.4,
--0.5
-]
+"manual": {
+"embeddings": [
+[
+0.800000011920929,
+0.4000000059604645,
+-0.5
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.1662663221359253
 }

@@ -294,7 +354,7 @@ async fn ranking_score_threshold() {

 index
 .similar(
-json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.2}),
+json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.2, "retrieveVectors": true}),
 |response, code| {
 snapshot!(code, @"200 OK");
 meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"3");

@@ -305,11 +365,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "522681",
 "_vectors": {
-"manual": [
-0.1,
-0.6,
-0.8
-]
+"manual": {
+"embeddings": [
+[
+0.10000000149011612,
+0.6000000238418579,
+0.800000011920929
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.890957772731781
 },

@@ -318,11 +383,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "299537",
 "_vectors": {
-"manual": [
-0.6,
-0.8,
--0.2
-]
+"manual": {
+"embeddings": [
+[
+0.6000000238418579,
+0.800000011920929,
+-0.20000000298023224
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.39060014486312866
 },

@@ -331,11 +401,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "166428",
 "_vectors": {
-"manual": [
-0.7,
-0.7,
--0.4
-]
+"manual": {
+"embeddings": [
+[
+0.699999988079071,
+0.699999988079071,
+-0.4000000059604645
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.2819308042526245
 }

@@ -347,7 +422,7 @@ async fn ranking_score_threshold() {

 index
 .similar(
-json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.3}),
+json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.3, "retrieveVectors": true}),
 |response, code| {
 snapshot!(code, @"200 OK");
 meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"2");

@@ -358,11 +433,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "522681",
 "_vectors": {
-"manual": [
-0.1,
-0.6,
-0.8
-]
+"manual": {
+"embeddings": [
+[
+0.10000000149011612,
+0.6000000238418579,
+0.800000011920929
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.890957772731781
 },

@@ -371,11 +451,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "299537",
 "_vectors": {
-"manual": [
-0.6,
-0.8,
--0.2
-]
+"manual": {
+"embeddings": [
+[
+0.6000000238418579,
+0.800000011920929,
+-0.20000000298023224
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.39060014486312866
 }

@@ -387,7 +472,7 @@ async fn ranking_score_threshold() {

 index
 .similar(
-json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.6}),
+json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.6, "retrieveVectors": true}),
 |response, code| {
 snapshot!(code, @"200 OK");
 meili_snap::snapshot!(meili_snap::json_string!(response["estimatedTotalHits"]), @"1");

@@ -398,11 +483,16 @@ async fn ranking_score_threshold() {

 "release_year": 2019,
 "id": "522681",
 "_vectors": {
-"manual": [
-0.1,
-0.6,
-0.8
-]
+"manual": {
+"embeddings": [
+[
+0.10000000149011612,
+0.6000000238418579,
+0.800000011920929
+]
+],
+"regenerate": false
+}
 },
 "_rankingScore": 0.890957772731781
 }

@@ -414,7 +504,7 @@ async fn ranking_score_threshold() {

 index
 .similar(
-json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.9}),
+json!({"id": 143, "showRankingScore": true, "rankingScoreThreshold": 0.9, "retrieveVectors": true}),
 |response, code| {
 snapshot!(code, @"200 OK");
 snapshot!(json_string!(response["hits"]), @"[]");
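The ranking_score_threshold hunks sweep one query across thresholds 0, 0.2, 0.3, 0.6, and 0.9 against the four hit scores shown above. The sketch below reproduces the asserted estimatedTotalHits counts; it uses an inclusive cutoff, though with these particular scores a strict one would give the same counts, so the diff alone does not distinguish them.

fn main() {
    let scores = [0.890957772731781, 0.39060014486312866, 0.2819308042526245, 0.1662663221359253];
    for (threshold, expected) in [(0.0, 4), (0.2, 3), (0.3, 2), (0.6, 1), (0.9, 0)] {
        // keep only the hits whose ranking score clears the threshold
        let kept = scores.iter().filter(|&&s| s >= threshold).count();
        assert_eq!(kept, expected, "threshold {threshold}");
    }
}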
@@ -456,71 +546,97 @@ async fn filter() {

 index.wait_task(value.uid()).await;

 index
-.similar(json!({"id": 522681, "filter": "release_year = 2019"}), |response, code| {
-snapshot!(code, @"200 OK");
-snapshot!(json_string!(response["hits"]), @r###"
-[
-{
-"title": "Captain Marvel",
-"release_year": 2019,
-"id": "299537",
-"_vectors": {
-"manual": [
-0.6,
-0.8,
--0.2
-]
-}
-},
-{
-"title": "How to Train Your Dragon: The Hidden World",
-"release_year": 2019,
-"id": "166428",
-"_vectors": {
-"manual": [
-0.7,
-0.7,
--0.4
-]
-}
-},
-{
-"title": "Shazam!",
-"release_year": 2019,
-"id": "287947",
-"_vectors": {
-"manual": [
-0.8,
-0.4,
--0.5
-]
-}
-}
-]
-"###);
-})
+.similar(
+json!({"id": 522681, "filter": "release_year = 2019", "retrieveVectors": true}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]), @r###"
+[
+{
+"title": "Captain Marvel",
+"release_year": 2019,
+"id": "299537",
+"_vectors": {
+"manual": {
+"embeddings": [
+[
+0.6000000238418579,
+0.800000011920929,
+-0.20000000298023224
+]
+],
+"regenerate": false
+}
+}
+},
+{
+"title": "How to Train Your Dragon: The Hidden World",
+"release_year": 2019,
+"id": "166428",
+"_vectors": {
+"manual": {
+"embeddings": [
+[
+0.699999988079071,
+0.699999988079071,
+-0.4000000059604645
+]
+],
+"regenerate": false
+}
+}
+},
+{
+"title": "Shazam!",
+"release_year": 2019,
+"id": "287947",
+"_vectors": {
+"manual": {
+"embeddings": [
+[
+0.800000011920929,
+0.4000000059604645,
+-0.5
+]
+],
+"regenerate": false
+}
+}
+}
+]
+"###);
+},
+)
 .await;

 index
-.similar(json!({"id": 522681, "filter": "release_year < 2000"}), |response, code| {
-snapshot!(code, @"200 OK");
-snapshot!(json_string!(response["hits"]), @r###"
-[
-{
-"title": "All Quiet on the Western Front",
-"release_year": 1930,
-"id": "143",
-"_vectors": {
-"manual": [
--0.5,
-0.3,
-0.85
-]
-}
-}
-]
-"###);
-})
+.similar(
+json!({"id": 522681, "filter": "release_year < 2000", "retrieveVectors": true}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]), @r###"
+[
+{
+"title": "All Quiet on the Western Front",
+"release_year": 1930,
+"id": "143",
+"_vectors": {
+"manual": {
+"embeddings": [
+[
+-0.5,
+0.30000001192092896,
+0.8500000238418579
+]
+],
+"regenerate": false
+}
+}
+}
+]
+"###);
+},
+)
 .await;
 }

@@ -557,7 +673,7 @@ async fn limit_and_offset() {

 index.wait_task(value.uid()).await;

 index
-.similar(json!({"id": 143, "limit": 1}), |response, code| {
+.similar(json!({"id": 143, "limit": 1, "retrieveVectors": true}), |response, code| {
 snapshot!(code, @"200 OK");
 snapshot!(json_string!(response["hits"]), @r###"
 [

@@ -566,11 +682,16 @@ async fn limit_and_offset() {

 "release_year": 2019,
 "id": "522681",
 "_vectors": {
-"manual": [
-0.1,
-0.6,
-0.8
-]
+"manual": {
+"embeddings": [
+[
+0.10000000149011612,
+0.6000000238418579,
+0.800000011920929
+]
+],
+"regenerate": false
+}
 }
 }
 ]

@@ -579,24 +700,32 @@ async fn limit_and_offset() {

 .await;

 index
-.similar(json!({"id": 143, "limit": 1, "offset": 1}), |response, code| {
-snapshot!(code, @"200 OK");
-snapshot!(json_string!(response["hits"]), @r###"
-[
-{
-"title": "Captain Marvel",
-"release_year": 2019,
-"id": "299537",
-"_vectors": {
-"manual": [
-0.6,
-0.8,
--0.2
-]
-}
-}
-]
-"###);
-})
+.similar(
+json!({"id": 143, "limit": 1, "offset": 1, "retrieveVectors": true}),
+|response, code| {
+snapshot!(code, @"200 OK");
+snapshot!(json_string!(response["hits"]), @r###"
+[
+{
+"title": "Captain Marvel",
+"release_year": 2019,
+"id": "299537",
+"_vectors": {
+"manual": {
+"embeddings": [
+[
+0.6000000238418579,
+0.800000011920929,
+-0.20000000298023224
+]
+],
+"regenerate": false
+}
+}
+}
+]
+"###);
+},
+)
 .await;
 }
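For reference, the limit_and_offset assertions above reduce to plain skip/take pagination over the ranked ids shown in the basic() snapshots; a minimal sketch:

fn main() {
    // ranking for `id: 143` as shown in the basic() snapshots above
    let ranked = ["522681", "299537", "166428", "287947"];
    let page = |limit: usize, offset: usize| -> Vec<&str> {
        ranked.iter().skip(offset).take(limit).copied().collect()
    };
    assert_eq!(page(1, 0), ["522681"]); // limit: 1
    assert_eq!(page(1, 1), ["299537"]); // limit: 1, offset: 1
}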
Some files were not shown because too many files have changed in this diff.