Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-07-19 13:00:46 +00:00)

Compare commits: reduce-pre… to post-updat… (115 commits)
Commits (SHA1):
dfca4b6219 | |||
8d9eb2a7c4 | |||
44586e089d | |||
c8b7822d0d | |||
446b9c142c | |||
e755e25847 | |||
81419935f2 | |||
51acd7a381 | |||
3ec5b9d488 | |||
55adbac2dd | |||
fd7fbfa9eb | |||
3a93f88ba6 | |||
7c1c4f9c26 | |||
1f5412003d | |||
5da92a3d53 | |||
c4a8b84dc0 | |||
ffe3faeca7 | |||
0f07cfed14 | |||
326a728434 | |||
e4733dcd42 | |||
a500fa053c | |||
61db56f785 | |||
235556d699 | |||
a3a1065c16 | |||
b025f1bcf1 | |||
707d106a24 | |||
97d6726291 | |||
82fa571ef7 | |||
5d453e6049 | |||
9e7d7beb4a | |||
a225ab2637 | |||
94b43001db | |||
796a325972 | |||
1db550ec7f | |||
c3c5a928e4 | |||
c4787760d3 | |||
7ca2a8eb6f | |||
c2ff4dd3b2 | |||
fce0fa9c57 | |||
a10efedd2f | |||
55ec96d31a | |||
4249630791 | |||
418fa47963 | |||
0656a0d515 | |||
19f4c1ac98 | |||
a0bfcf8872 | |||
64477aac60 | |||
4d90e3d2ec | |||
4ab547c6fa | |||
e36a8c50b9 | |||
249da5846c | |||
ee15d4fe77 | |||
f0f6c3000f | |||
08ff135ad6 | |||
f729864466 | |||
94ea263bef | |||
85efa6f493 | |||
0e475cb5e6 | |||
62de70b73c | |||
7707fb18dd | |||
ba6d755120 | |||
5607802fe1 | |||
a8afd5dbcb | |||
55f620a986 | |||
be6abb952d | |||
2f07afa97e | |||
bf3a29b60d | |||
3acf036526 | |||
eefefc482b | |||
43c8a206b4 | |||
a8c407fa36 | |||
18bc56f1fa | |||
38b3e03dde | |||
6b1c262b74 | |||
0f654e45c9 | |||
d71c6f3483 | |||
8b4166410c | |||
9d3037aa1a | |||
5414887bff | |||
03a0550b63 | |||
2800e42243 | |||
5759afac41 | |||
868c902935 | |||
e019ad7692 | |||
1f67f373d1 | |||
2c0bd35923 | |||
b3aaa64de5 | |||
7b3072ad28 | |||
db26c1e5bf | |||
9aee12c906 | |||
debd2b21b8 | |||
39aca661dd | |||
5b51e8a083 | |||
3928fb36b3 | |||
2ddc1d2258 | |||
7c267a8a0e | |||
d39d915a7e | |||
3160ddf9df | |||
d286e63f15 | |||
9ee6254eec | |||
e2c824a7cd | |||
0dd65caffe | |||
4397b7d170 | |||
15db203b7d | |||
041f635214 | |||
f9807ba32e | |||
8c8cc59a6c | |||
f540a69ac3 | |||
537bf27e7c | |||
cf31a65a88 | |||
0f7d71041f | |||
91d221ebe7 | |||
9162e8ba04 | |||
2118cc092e | |||
c7564d500f |
4  .github/ISSUE_TEMPLATE/sprint_issue.md (vendored)

@@ -22,6 +22,10 @@ Related product discussion:

<!---If necessary, create a list with technical/product steps-->

### Are you modifying a database?

- [ ] If not, add the `no db change` label to your PR, and you're good to merge.
- [ ] If yes, add the `db change` label to your PR. You'll receive a message explaining you what to do.

### Reminders when modifying the API

- [ ] Update the openAPI file with utoipa:
39  .github/workflows/bench-manual.yml (vendored)

@@ -1,28 +1,27 @@
name: Bench (manual)

on:
workflow_dispatch:
inputs:
workload:
description: 'The path to the workloads to execute (workloads/...)'
required: true
default: 'workloads/movies.json'
workflow_dispatch:
inputs:
workload:
description: "The path to the workloads to execute (workloads/...)"
required: true
default: "workloads/movies.json"

env:
WORKLOAD_NAME: ${{ github.event.inputs.workload }}
WORKLOAD_NAME: ${{ github.event.inputs.workload }}

jobs:
benchmarks:
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
with:
profile: minimal

- name: Run benchmarks - workload ${WORKLOAD_NAME} - branch ${{ github.ref }} - commit ${{ github.sha }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Manual [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- ${WORKLOAD_NAME}
benchmarks:
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

- name: Run benchmarks - workload ${WORKLOAD_NAME} - branch ${{ github.ref }} - commit ${{ github.sha }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Manual [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- ${WORKLOAD_NAME}
136  .github/workflows/bench-pr.yml (vendored)

@@ -1,82 +1,82 @@
name: Bench (PR)
on:
issue_comment:
types: [created]
issue_comment:
types: [created]

permissions:
issues: write
issues: write

env:
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}

jobs:
run-benchmarks-on-comment:
if: startsWith(github.event.comment.body, '/bench')
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- name: Check permissions
id: permission
env:
PR_AUTHOR: ${{github.event.issue.user.login }}
COMMENT_AUTHOR: ${{github.event.comment.user.login }}
REPOSITORY: ${{github.repository}}
PR_ID: ${{github.event.issue.number}}
run: |
PR_REPOSITORY=$(gh api /repos/"$REPOSITORY"/pulls/"$PR_ID" --jq .head.repo.full_name)
if $(gh api /repos/"$REPOSITORY"/collaborators/"$PR_AUTHOR"/permission --jq .user.permissions.push)
then
echo "::notice title=Authentication success::PR author authenticated"
else
echo "::error title=Authentication error::PR author doesn't have push permission on this repository"
exit 1
fi
if $(gh api /repos/"$REPOSITORY"/collaborators/"$COMMENT_AUTHOR"/permission --jq .user.permissions.push)
then
echo "::notice title=Authentication success::Comment author authenticated"
else
echo "::error title=Authentication error::Comment author doesn't have push permission on this repository"
exit 1
fi
if [ "$PR_REPOSITORY" = "$REPOSITORY" ]
then
echo "::notice title=Authentication success::PR started from main repository"
else
echo "::error title=Authentication error::PR started from a fork"
exit 1
fi
run-benchmarks-on-comment:
if: startsWith(github.event.comment.body, '/bench')
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- name: Check permissions
id: permission
env:
PR_AUTHOR: ${{github.event.issue.user.login }}
COMMENT_AUTHOR: ${{github.event.comment.user.login }}
REPOSITORY: ${{github.repository}}
PR_ID: ${{github.event.issue.number}}
run: |
PR_REPOSITORY=$(gh api /repos/"$REPOSITORY"/pulls/"$PR_ID" --jq .head.repo.full_name)
if $(gh api /repos/"$REPOSITORY"/collaborators/"$PR_AUTHOR"/permission --jq .user.permissions.push)
then
echo "::notice title=Authentication success::PR author authenticated"
else
echo "::error title=Authentication error::PR author doesn't have push permission on this repository"
exit 1
fi
if $(gh api /repos/"$REPOSITORY"/collaborators/"$COMMENT_AUTHOR"/permission --jq .user.permissions.push)
then
echo "::notice title=Authentication success::Comment author authenticated"
else
echo "::error title=Authentication error::Comment author doesn't have push permission on this repository"
exit 1
fi
if [ "$PR_REPOSITORY" = "$REPOSITORY" ]
then
echo "::notice title=Authentication success::PR started from main repository"
else
echo "::error title=Authentication error::PR started from a fork"
exit 1
fi

- name: Check for Command
id: command
uses: xt0rted/slash-command-action@v2
with:
command: bench
reaction-type: "rocket"
repo-token: ${{ env.GH_TOKEN }}
- name: Check for Command
id: command
uses: xt0rted/slash-command-action@v2
with:
command: bench
reaction-type: "rocket"
repo-token: ${{ env.GH_TOKEN }}

- uses: xt0rted/pull-request-comment-branch@v3
id: comment-branch
with:
repo_token: ${{ env.GH_TOKEN }}
- uses: xt0rted/pull-request-comment-branch@v3
id: comment-branch
with:
repo_token: ${{ env.GH_TOKEN }}

- uses: actions/checkout@v3
if: success()
with:
fetch-depth: 0 # fetch full history to be able to get main commit sha
ref: ${{ steps.comment-branch.outputs.head_ref }}
- uses: actions/checkout@v3
if: success()
with:
fetch-depth: 0 # fetch full history to be able to get main commit sha
ref: ${{ steps.comment-branch.outputs.head_ref }}

- uses: dtolnay/rust-toolchain@1.81
with:
profile: minimal
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

- name: Run benchmarks on PR ${{ github.event.issue.id }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" \
--dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" \
--reason "[Comment](${{ github.event.comment.html_url }}) on [#${{ github.event.issue.number }}](${{ github.event.issue.html_url }})" \
-- ${{ steps.command.outputs.command-arguments }} > benchlinks.txt
- name: Run benchmarks on PR ${{ github.event.issue.id }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" \
--dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" \
--reason "[Comment](${{ github.event.comment.html_url }}) on [#${{ github.event.issue.number }}](${{ github.event.issue.html_url }})" \
-- ${{ steps.command.outputs.command-arguments }} > benchlinks.txt

- name: Send comment in PR
run: |
gh pr comment ${{github.event.issue.number}} --body-file benchlinks.txt
- name: Send comment in PR
run: |
gh pr comment ${{github.event.issue.number}} --body-file benchlinks.txt
33  .github/workflows/bench-push-indexing.yml (vendored)

@@ -1,23 +1,22 @@
name: Indexing bench (push)

on:
push:
branches:
- main
push:
branches:
- main

jobs:
benchmarks:
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
with:
profile: minimal

# Run benchmarks
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Push on `main` [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- workloads/*.json
benchmarks:
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

# Run benchmarks
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Push on `main` [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- workloads/*.json
8  .github/workflows/benchmarks-manual.yml (vendored)

@@ -4,9 +4,9 @@ on:
workflow_dispatch:
inputs:
dataset_name:
description: 'The name of the dataset used to benchmark (search_songs, search_wiki, search_geo or indexing)'
description: "The name of the dataset used to benchmark (search_songs, search_wiki, search_geo or indexing)"
required: false
default: 'search_songs'
default: "search_songs"

env:
BENCH_NAME: ${{ github.event.inputs.dataset_name }}

@@ -18,7 +18,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

@@ -67,7 +67,7 @@ jobs:
out_dir: critcmp_results

# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'
2  .github/workflows/benchmarks-pr.yml (vendored)

@@ -44,7 +44,7 @@ jobs:
exit 1
fi

- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

@@ -16,7 +16,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

@@ -69,7 +69,7 @@ jobs:
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug

# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'

@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

@@ -68,7 +68,7 @@ jobs:
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug

# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'

@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

@@ -68,7 +68,7 @@ jobs:
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug

# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'

@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

@@ -68,7 +68,7 @@ jobs:
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug

# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'
2  .github/workflows/check-valid-milestone.yml (vendored)

@@ -17,7 +17,7 @@ jobs:
uses: actions/checkout@v3

- name: Validate PR milestone
uses: actions/github-script@v6
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
57  .github/workflows/db-change-comments.yml (vendored, new file)

@@ -0,0 +1,57 @@
name: Comment when db change labels are added

on:
pull_request:
types: [labeled]

env:
MESSAGE: |
### Hello, I'm a bot 🤖

You are receiving this message because you declared that this PR make changes to the Meilisearch database.
Depending on the nature of the change, additional actions might be required on your part. The following sections detail the additional actions depending on the nature of the change, please copy the relevant section in the description of your PR, and make sure to perform the required actions.

Thank you for contributing to Meilisearch :heart:

## This PR makes forward-compatible changes

*Forward-compatible changes are changes to the database such that databases created in an older version of Meilisearch are still valid in the new version of Meilisearch. They usually represent additive changes, like adding a new optional attribute or setting.*

- [ ] Detail the change to the DB format and why they are forward compatible
- [ ] Forward-compatibility: A database created before this PR and using the features touched by this PR was able to be opened by a Meilisearch produced by the code of this PR.

## This PR makes breaking changes

*Breaking changes are changes to the database such that databases created in an older version of Meilisearch need changes to remain valid in the new version of Meilisearch. This typically happens when the way to store the data changed (change of database, new required key, etc). This can also happen due to breaking changes in the API of an experimental feature. ⚠️ This kind of changes are more difficult to achieve safely, so proceed with caution and test dumpless upgrade right before merging the PR.*

- [ ] Detail the changes to the DB format,
- [ ] which are compatible, and why
- [ ] which are not compatible, why, and how they will be fixed up in the upgrade
- [ ] /!\ Ensure all the read operations still work!
- If the change happened in milli, you may need to check the version of the database before doing any read operation
- If the change happened in the index-scheduler, make sure the new code can immediately read the old database
- If the change happened in the meilisearch-auth database, reach out to the team; we don't know yet how to handle these changes
- [ ] Write the code to go from the old database to the new one
- If the change happened in milli, the upgrade function should be written and called [here](https://github.com/meilisearch/meilisearch/blob/3fd86e8d76d7d468b0095d679adb09211ca3b6c0/crates/milli/src/update/upgrade/mod.rs#L24-L47)
- If the change happened in the index-scheduler, we've never done it yet, but the right place to do it should be [here](https://github.com/meilisearch/meilisearch/blob/3fd86e8d76d7d468b0095d679adb09211ca3b6c0/crates/index-scheduler/src/scheduler/process_upgrade/mod.rs#L13)
- [ ] Write an integration test [here](https://github.com/meilisearch/meilisearch/blob/main/crates/meilisearch/tests/upgrade/mod.rs) ensuring you can read the old database, upgrade to the new database, and read the new database as expected

jobs:
add-comment:
runs-on: ubuntu-latest
if: github.event.label.name == 'db change'
steps:
- name: Add comment
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const message = process.env.MESSAGE;
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: message
})
28  .github/workflows/db-change-missing.yml (vendored, new file)

@@ -0,0 +1,28 @@
name: Check db change labels

on:
pull_request:
types: [opened, synchronize, reopened, labeled, unlabeled]

env:
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}

jobs:
check-labels:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Check db change labels
id: check_labels
run: |
URL=/repos/meilisearch/meilisearch/pulls/${{ github.event.pull_request.number }}/labels
echo ${{ github.event.pull_request.number }}
echo $URL
LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/meilisearch/meilisearch/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then
echo "::error::Pull request must contain either the 'db change' or 'no db change' label."
exit 1
else
echo "The label is set"
fi
2  .github/workflows/flaky-tests.yml (vendored)

@@ -17,7 +17,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Install cargo-flaky
run: cargo install cargo-flaky
- name: Run cargo flaky in the dumps
2  .github/workflows/fuzzer-indexing.yml (vendored)

@@ -12,7 +12,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
42  .github/workflows/milestone-workflow.yml (vendored)

@@ -5,6 +5,7 @@ name: Milestone's workflow
# For each Milestone created (not opened!), and if the release is NOT a patch release (only the patch changed)
# - the roadmap issue is created, see https://github.com/meilisearch/engine-team/blob/main/issue-templates/roadmap-issue.md
# - the changelog issue is created, see https://github.com/meilisearch/engine-team/blob/main/issue-templates/changelog-issue.md
# - update the ruleset to add the current release version to the list of allowed versions and be able to use the merge queue.

# For each Milestone closed
# - the `release_version` label is created

@@ -21,10 +22,9 @@ env:
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}

jobs:

# -----------------
# MILESTONE CREATED
# -----------------
# -----------------
# MILESTONE CREATED
# -----------------

get-release-version:
if: github.event.action == 'created'

@@ -148,9 +148,37 @@ jobs:
--body-file $ISSUE_TEMPLATE \
--milestone $MILESTONE_VERSION

# ----------------
# MILESTONE CLOSED
# ----------------
update-ruleset:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install jq
run: |
sudo apt-get update
sudo apt-get install -y jq
- name: Update ruleset
env:
# gh api repos/meilisearch/meilisearch/rulesets --jq '.[] | {name: .name, id: .id}'
RULESET_ID: 4253297
BRANCH_NAME: ${{ github.event.inputs.branch_name }}
run: |
# Get current ruleset conditions
CONDITIONS=$(gh api repos/meilisearch/meilisearch/rulesets/$RULESET_ID --jq '{ conditions: .conditions }')

# Update the conditions by appending the milestone version
UPDATED_CONDITIONS=$(echo $CONDITIONS | jq '.conditions.ref_name.include += ["refs/heads/release-'$MILESTONE_VERSION'"]')

# Update the ruleset from stdin (-)
echo $UPDATED_CONDITIONS |
gh api repos/meilisearch/meilisearch/rulesets/$RULESET_ID \
--method PUT \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--input -

# ----------------
# MILESTONE CLOSED
# ----------------

create-release-label:
if: github.event.action == 'closed'
2  .github/workflows/publish-apt-brew-pkg.yml (vendored)

@@ -25,7 +25,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Install cargo-deb
run: cargo install cargo-deb
- uses: actions/checkout@v3
8  .github/workflows/publish-binaries.yml (vendored)

@@ -45,7 +45,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Build
run: cargo build --release --locked
# No need to upload binaries for dry run (cron)

@@ -75,7 +75,7 @@ jobs:
asset_name: meilisearch-windows-amd64.exe
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Build
run: cargo build --release --locked
# No need to upload binaries for dry run (cron)

@@ -101,7 +101,7 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v3
- name: Installing Rust toolchain
uses: dtolnay/rust-toolchain@1.81
uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
target: ${{ matrix.target }}

@@ -148,7 +148,7 @@ jobs:
add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
apt-get update -y && apt-get install -y docker-ce
- name: Installing Rust toolchain
uses: dtolnay/rust-toolchain@1.81
uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
target: ${{ matrix.target }}
28  .github/workflows/sdks-tests.yml (vendored)

@@ -22,7 +22,7 @@ jobs:
outputs:
docker-image: ${{ steps.define-image.outputs.docker-image }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Define the Docker image we need to use
id: define-image
run: |

@@ -46,7 +46,7 @@ jobs:
MEILISEARCH_VERSION: ${{ needs.define-docker-image.outputs.docker-image }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-dotnet
- name: Setup .NET Core

@@ -75,7 +75,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-dart
- uses: dart-lang/setup-dart@v1

@@ -103,7 +103,7 @@ jobs:
uses: actions/setup-go@v5
with:
go-version: stable
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-go
- name: Get dependencies

@@ -129,7 +129,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-java
- name: Set up Java

@@ -156,7 +156,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-js
- name: Setup node

@@ -191,7 +191,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-php
- name: Install PHP

@@ -220,7 +220,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-python
- name: Set up Python

@@ -245,7 +245,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-ruby
- name: Set up Ruby 3

@@ -270,7 +270,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-rust
- name: Build

@@ -291,7 +291,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-swift
- name: Run tests

@@ -314,7 +314,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-js-plugins
- name: Setup node

@@ -345,7 +345,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-rails
- name: Set up Ruby 3

@@ -369,7 +369,7 @@ jobs:
ports:
- '7700:7700'
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
with:
repository: meilisearch/meilisearch-symfony
- name: Install PHP
28  .github/workflows/test-suite.yml (vendored)

@@ -21,15 +21,15 @@ jobs:
# Use ubuntu-22.04 to compile with glibc 2.35
image: ubuntu:22.04
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Install needed dependencies
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- name: Setup test with Rust stable
uses: dtolnay/rust-toolchain@1.81
uses: dtolnay/rust-toolchain@1.85
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.7
uses: Swatinem/rust-cache@v2.7.8
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
with:

@@ -51,8 +51,8 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.7
- uses: dtolnay/rust-toolchain@1.81
uses: Swatinem/rust-cache@v2.7.8
- uses: dtolnay/rust-toolchain@1.85
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
with:

@@ -77,7 +77,7 @@ jobs:
run: |
apt-get update
apt-get install --assume-yes build-essential curl
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Run cargo build with almost all features
run: |
cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"

@@ -91,7 +91,7 @@ jobs:
env:
MEILI_TEST_OLLAMA_SERVER: "http://localhost:11434"
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v3
- name: Install Ollama
run: |
curl -fsSL https://ollama.com/install.sh | sudo -E sh

@@ -129,7 +129,7 @@ jobs:
run: |
apt-get update
apt-get install --assume-yes build-essential curl
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Run cargo tree without default features and check lindera is not present
run: |
if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then

@@ -153,9 +153,9 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.7
uses: Swatinem/rust-cache@v2.7.8
- name: Run tests in debug
uses: actions-rs/cargo@v1
with:

@@ -167,12 +167,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
components: clippy
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.7
uses: Swatinem/rust-cache@v2.7.8
- name: Run cargo clippy
uses: actions-rs/cargo@v1
with:

@@ -184,14 +184,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
toolchain: nightly-2024-07-09
override: true
components: rustfmt
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.7
uses: Swatinem/rust-cache@v2.7.8
- name: Run cargo fmt
# Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
# Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate

@@ -4,7 +4,7 @@ on:
workflow_dispatch:
inputs:
new_version:
description: 'The new version (vX.Y.Z)'
description: "The new version (vX.Y.Z)"
required: true

env:

@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
- name: Install sd
18  Cargo.lock (generated)

@@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4

[[package]]
name = "actix-codec"

@@ -758,9 +758,9 @@ dependencies = [

[[package]]
name = "bytemuck_derive"
version = "1.6.0"
version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4da9a32f3fed317401fa3c862968128267c3106685286e15d5aaa3d7389c2f60"
checksum = "7ecc273b49b3205b83d648f0690daa588925572cc5063745bfe547fe7ec8e1a1"
dependencies = [
"proc-macro2",
"quote",

@@ -1255,9 +1255,9 @@ dependencies = [

[[package]]
name = "crossbeam-channel"
version = "0.5.14"
version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471"
checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
dependencies = [
"crossbeam-utils",
]

@@ -5801,9 +5801,9 @@ dependencies = [

[[package]]
name = "tokio"
version = "1.42.0"
version = "1.43.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551"
checksum = "492a604e2fd7f814268a378409e6c92b5525d747d10db9a229723f55a417958c"
dependencies = [
"backtrace",
"bytes",

@@ -5819,9 +5819,9 @@ dependencies = [

[[package]]
name = "tokio-macros"
version = "2.4.0"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8"
dependencies = [
"proc-macro2",
"quote",

@@ -1,5 +1,5 @@
# Compile
FROM rust:1.81.0-alpine3.20 AS compiler
FROM rust:1.85-alpine3.20 AS compiler

RUN apk add -q --no-cache build-base openssl-dev

@@ -23,6 +23,12 @@
<a href="https://github.com/meilisearch/meilisearch/queue"><img alt="Merge Queues enabled" src="https://img.shields.io/badge/Merge_Queues-enabled-%2357cf60?logo=github"></a>
</p>

<p align="center" name="ph-banner">
<a href="https://www.producthunt.com/posts/meilisearch-ai">
<img src="assets/ph-banner.png" alt="Meilisearch AI-powered search general availability announcement on ProductHunt">
</a>
</p>

<p align="center">⚡ A lightning-fast search engine that fits effortlessly into your apps, websites, and workflow 🔍</p>

[Meilisearch](https://www.meilisearch.com?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=intro) helps you shape a delightful search experience in a snap, offering features that work out of the box to speed up your workflow.

BIN  assets/ph-banner.png (new file)
Binary file not shown. (After: 578 KiB)
@@ -305,6 +305,7 @@ pub(crate) mod test {
localized_attributes: Setting::NotSet,
facet_search: Setting::NotSet,
prefix_search: Setting::NotSet,
execute_after_update: Setting::NotSet,
_kind: std::marker::PhantomData,
};
settings.check()

@@ -397,6 +397,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
search_cutoff_ms: v6::Setting::NotSet,
facet_search: v6::Setting::NotSet,
prefix_search: v6::Setting::NotSet,
execute_after_update: v6::Setting::NotSet,
_kind: std::marker::PhantomData,
}
}

@@ -108,7 +108,7 @@ where
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);

impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: Display,

@@ -99,7 +99,7 @@ impl Task {
/// Return true when a task is finished.
/// A task is finished when its last state is either `Succeeded` or `Failed`.
pub fn is_finished(&self) -> bool {
self.events.last().map_or(false, |event| {
self.events.last().is_some_and(|event| {
matches!(event, TaskEvent::Succeded { .. } | TaskEvent::Failed { .. })
})
}

@@ -108,7 +108,7 @@ where
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);

impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: Display,

@@ -114,7 +114,7 @@ impl Task {
/// Return true when a task is finished.
/// A task is finished when its last state is either `Succeeded` or `Failed`.
pub fn is_finished(&self) -> bool {
self.events.last().map_or(false, |event| {
self.events.last().is_some_and(|event| {
matches!(event, TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. })
})
}
@@ -35,7 +35,7 @@ impl<E> NomErrorExt<E> for nom::Err<E> {
pub fn cut_with_err<'a, O>(
mut parser: impl FnMut(Span<'a>) -> IResult<'a, O>,
mut with: impl FnMut(Error<'a>) -> Error<'a>,
) -> impl FnMut(Span<'a>) -> IResult<O> {
) -> impl FnMut(Span<'a>) -> IResult<'a, O> {
move |input| match parser.parse(input) {
Err(nom::Err::Error(e)) => Err(nom::Err::Failure(with(e))),
rest => rest,

@@ -121,7 +121,7 @@ impl<'a> ParseError<Span<'a>> for Error<'a> {
}
}

impl<'a> Display for Error<'a> {
impl Display for Error<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let input = self.context.fragment();
// When printing our error message we want to escape all `\n` to be sure we keep our format with the

@@ -80,7 +80,7 @@ pub struct Token<'a> {
value: Option<String>,
}

impl<'a> PartialEq for Token<'a> {
impl PartialEq for Token<'_> {
fn eq(&self, other: &Self) -> bool {
self.span.fragment() == other.span.fragment()
}

@@ -226,7 +226,7 @@ impl<'a> FilterCondition<'a> {
}
}

pub fn parse(input: &'a str) -> Result<Option<Self>, Error> {
pub fn parse(input: &'a str) -> Result<Option<Self>, Error<'a>> {
if input.trim().is_empty() {
return Ok(None);
}

@@ -527,7 +527,7 @@ pub fn parse_filter(input: Span) -> IResult<FilterCondition> {
terminated(|input| parse_expression(input, 0), eof)(input)
}

impl<'a> std::fmt::Display for FilterCondition<'a> {
impl std::fmt::Display for FilterCondition<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
FilterCondition::Not(filter) => {

@@ -576,7 +576,8 @@ impl<'a> std::fmt::Display for FilterCondition<'a> {
}
}
}
impl<'a> std::fmt::Display for Condition<'a> {

impl std::fmt::Display for Condition<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Condition::GreaterThan(token) => write!(f, "> {token}"),

@@ -594,7 +595,8 @@ impl<'a> std::fmt::Display for Condition<'a> {
}
}
}
impl<'a> std::fmt::Display for Token<'a> {

impl std::fmt::Display for Token<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{{{}}}", self.value())
}

@@ -52,7 +52,7 @@ fn quoted_by(quote: char, input: Span) -> IResult<Token> {
}

// word = (alphanumeric | _ | - | .)+ except for reserved keywords
pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<Token<'a>> {
pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<'a, Token<'a>> {
let (input, word): (_, Token<'a>) =
take_while1(is_value_component)(input).map(|(s, t)| (s, t.into()))?;
if is_keyword(word.value()) {
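The filter-parser hunks above repeatedly rewrite `impl<'a> Display for Error<'a>` as `impl Display for Error<'_>`, using the anonymous lifetime when the impl body never needs to name it. A minimal, self-contained sketch of the equivalence, using a hypothetical `Tag` type rather than the crate's own types:

```rust
use std::fmt;

struct Tag<'a>(&'a str);

// Before: the named lifetime is declared even though nothing in the body refers to it.
// impl<'a> fmt::Display for Tag<'a> { ... }

// After: the anonymous lifetime '_ expresses the same impl without the unused name.
impl fmt::Display for Tag<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{{{}}}", self.0)
    }
}

fn main() {
    println!("{}", Tag("channel")); // prints {channel}
}
```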
@@ -47,7 +47,7 @@ uuid = { version = "1.11.0", features = ["serde", "v4"] }

[dev-dependencies]
big_s = "1.0.2"
crossbeam-channel = "0.5.14"
crossbeam-channel = "0.5.15"
# fixed version due to format breakages in v1.40
insta = { version = "=1.39.0", features = ["json", "redactions"] }
maplit = "1.0.2"
@@ -625,8 +625,8 @@ impl IndexScheduler {
task_id: Option<TaskId>,
dry_run: bool,
) -> Result<Task> {
// if the task doesn't delete anything and 50% of the task queue is full, we must refuse to enqueue the incomming task
if !matches!(&kind, KindWithContent::TaskDeletion { tasks, .. } if !tasks.is_empty())
// if the task doesn't delete or cancel anything and 40% of the task queue is full, we must refuse to enqueue the incoming task
if !matches!(&kind, KindWithContent::TaskDeletion { tasks, .. } | KindWithContent::TaskCancelation { tasks, .. } if !tasks.is_empty())
&& (self.env.non_free_pages_size()? * 100) / self.env.info().map_size as u64 > 40
{
return Err(Error::NoSpaceLeftInTaskQueue);

@@ -696,7 +696,7 @@ impl IndexScheduler {
written: usize,
}

impl<'a, 'b> Read for TaskReader<'a, 'b> {
impl Read for TaskReader<'_, '_> {
fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
if self.buffer.is_empty() {
match self.tasks.next() {
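A simplified, standalone sketch of the enqueue guard above: unless the incoming task is a non-empty `TaskDeletion` or `TaskCancelation`, registration is refused once more than 40% of the task database's mapped size is in use. The numbers below are made up purely for illustration:

```rust
// Same arithmetic as the guard above: percentage of the LMDB map already used.
fn queue_is_too_full(non_free_pages_size: u64, map_size: u64) -> bool {
    (non_free_pages_size * 100) / map_size > 40
}

fn main() {
    let map_size = 10 * 1024 * 1024 * 1024u64; // hypothetical 10 GiB task database
    let used = 4_500_000_000u64; // hypothetical ~4.2 GiB of non-free pages
    // About 41% used, so plain write tasks would be refused at this point.
    assert!(queue_is_too_full(used, map_size));
}
```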
@@ -292,8 +292,6 @@ impl Queue {
return Ok(task);
}

// Get rid of the mutability.
let task = task;
self.tasks.register(wtxn, &task)?;

Ok(task)

@@ -315,7 +315,7 @@ impl Queue {
if let Some(batch_uids) = batch_uids {
let mut batch_tasks = RoaringBitmap::new();
for batch_uid in batch_uids {
if processing_batch.as_ref().map_or(false, |batch| batch.uid == *batch_uid) {
if processing_batch.as_ref().is_some_and(|batch| batch.uid == *batch_uid) {
batch_tasks |= &**processing_tasks;
} else {
batch_tasks |= self.tasks_in_batch(rtxn, *batch_uid)?;

@@ -364,7 +364,7 @@ fn test_task_queue_is_full() {
// we won't be able to test this error in an integration test thus as a best effort test I still ensure the error return the expected error code
snapshot!(format!("{:?}", result.error_code()), @"NoSpaceLeftOnDevice");

// Even the task deletion that doesn't delete anything shouldn't be accepted
// Even the task deletion and cancelation that don't delete anything should be refused
let result = index_scheduler
.register(
KindWithContent::TaskDeletion { query: S("test"), tasks: RoaringBitmap::new() },

@@ -373,10 +373,39 @@ fn test_task_queue_is_full() {
)
.unwrap_err();
snapshot!(result, @"Meilisearch cannot receive write operations because the limit of the task database has been reached. Please delete tasks to continue performing write operations.");
let result = index_scheduler
.register(
KindWithContent::TaskCancelation { query: S("test"), tasks: RoaringBitmap::new() },
None,
false,
)
.unwrap_err();
snapshot!(result, @"Meilisearch cannot receive write operations because the limit of the task database has been reached. Please delete tasks to continue performing write operations.");

// we won't be able to test this error in an integration test thus as a best effort test I still ensure the error return the expected error code
snapshot!(format!("{:?}", result.error_code()), @"NoSpaceLeftOnDevice");

// But a task deletion that delete something should works
// But a task cancelation that cancel something should work
index_scheduler
.register(
KindWithContent::TaskCancelation { query: S("test"), tasks: (0..100).collect() },
None,
false,
)
.unwrap();
handle.advance_one_successful_batch();

// But we should still be forbidden from enqueuing new tasks
let result = index_scheduler
.register(
KindWithContent::IndexCreation { index_uid: S("doggo"), primary_key: None },
None,
false,
)
.unwrap_err();
snapshot!(result, @"Meilisearch cannot receive write operations because the limit of the task database has been reached. Please delete tasks to continue performing write operations.");

// And a task deletion that delete something should works
index_scheduler
.register(
KindWithContent::TaskDeletion { query: S("test"), tasks: (0..100).collect() },
@@ -219,7 +219,7 @@ impl BatchKind {
primary_key.is_some() &&
// 2.1.1 If the task we're trying to accumulate have a pk it must be equal to our primary key
// 2.1.2 If the task don't have a primary-key -> we can continue
kind.primary_key().map_or(true, |pk| pk == primary_key)
kind.primary_key().is_none_or(|pk| pk == primary_key)
) ||
// 2.2 If we don't have a primary-key ->
(
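Several hunks above swap `map_or(false, …)` for `is_some_and(…)` and `map_or(true, …)` for `is_none_or(…)`. A small sketch of the equivalence on a plain `Option` (both combinators are stable in the 1.85 toolchain this change set targets):

```rust
fn main() {
    let pk: Option<&str> = Some("id");

    // `is_some_and`: true only if a value is present and the predicate holds.
    assert_eq!(pk.map_or(false, |p| p == "id"), pk.is_some_and(|p| p == "id"));

    // `is_none_or`: true if the value is absent, or present and the predicate holds.
    assert_eq!(pk.map_or(true, |p| p == "id"), pk.is_none_or(|p| p == "id"));
}
```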
@@ -1,4 +1,5 @@
use std::fmt;
use std::io::ErrorKind;

use meilisearch_types::heed::RoTxn;
use meilisearch_types::milli::update::IndexDocumentsMethod;

@@ -535,7 +536,11 @@ impl IndexScheduler {
.and_then(|task| task.ok_or(Error::CorruptedTaskQueue))?;

if let Some(uuid) = task.content_uuid() {
let content_size = self.queue.file_store.compute_size(uuid)?;
let content_size = match self.queue.file_store.compute_size(uuid) {
Ok(content_size) => content_size,
Err(file_store::Error::IoError(err)) if err.kind() == ErrorKind::NotFound => 0,
Err(otherwise) => return Err(otherwise.into()),
};
total_size = total_size.saturating_add(content_size);
}

@@ -41,7 +41,7 @@ impl IndexScheduler {
progress.update_progress(SnapshotCreationProgress::SnapshotTheIndexScheduler);
let dst = temp_snapshot_dir.path().join("tasks");
fs::create_dir_all(&dst)?;
self.env.copy_to_path(dst.join("data.mdb"), CompactionOption::Enabled)?;
self.env.copy_to_path(dst.join("data.mdb"), CompactionOption::Disabled)?;

// 2.2 Create a read transaction on the index-scheduler
let rtxn = self.env.read_txn()?;

@@ -80,7 +80,7 @@ impl IndexScheduler {
let dst = temp_snapshot_dir.path().join("indexes").join(uuid.to_string());
fs::create_dir_all(&dst)?;
index
.copy_to_path(dst.join("data.mdb"), CompactionOption::Enabled)
.copy_to_path(dst.join("data.mdb"), CompactionOption::Disabled)
.map_err(|e| Error::from_milli(e, Some(name.to_string())))?;
}

@@ -90,7 +90,7 @@ impl IndexScheduler {
progress.update_progress(SnapshotCreationProgress::SnapshotTheApiKeys);
let dst = temp_snapshot_dir.path().join("auth");
fs::create_dir_all(&dst)?;
self.scheduler.auth_env.copy_to_path(dst.join("data.mdb"), CompactionOption::Enabled)?;
self.scheduler.auth_env.copy_to_path(dst.join("data.mdb"), CompactionOption::Disabled)?;

// 5. Copy and tarball the flat snapshot
progress.update_progress(SnapshotCreationProgress::CreateTheTarball);
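The `compute_size` change above stops propagating a missing content file and counts it as zero bytes instead. A minimal sketch of that error-kind match, using a hypothetical `content_size` helper over plain `std::io` rather than the file-store type:

```rust
use std::io::ErrorKind;
use std::path::Path;

// Hypothetical helper: a missing file is treated as an empty payload,
// while any other I/O error is still propagated to the caller.
fn content_size(path: &Path) -> std::io::Result<u64> {
    match std::fs::metadata(path) {
        Ok(meta) => Ok(meta.len()),
        Err(err) if err.kind() == ErrorKind::NotFound => Ok(0),
        Err(err) => Err(err),
    }
}

fn main() -> std::io::Result<()> {
    let size = content_size(Path::new("does-not-exist.bin"))?;
    assert_eq!(size, 0);
    Ok(())
}
```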
@@ -39,7 +39,7 @@ time = { version = "0.3.37", features = [
"parsing",
"macros",
] }
tokio = "1.42"
tokio = "1.43"
utoipa = { version = "5.3.1", features = ["macros"] }
uuid = { version = "1.11.0", features = ["serde", "v4"] }
@@ -312,6 +312,7 @@ InvalidSettingsDisplayedAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDistinctAttribute , InvalidRequest , BAD_REQUEST ;
InvalidSettingsProximityPrecision , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFacetSearch , InvalidRequest , BAD_REQUEST ;
InvalidSettingsexecuteAfterUpdate , InvalidRequest , BAD_REQUEST ;
InvalidSettingsPrefixSearch , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ;

@@ -454,7 +455,10 @@ impl ErrorCode for milli::Error {
}
UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
UserError::InvalidVectorDimensions { .. } => Code::InvalidVectorDimensions,
UserError::InvalidVectorDimensions { .. }
| UserError::InvalidIndexingVectorDimensions { .. } => {
Code::InvalidVectorDimensions
}
UserError::InvalidVectorsMapType { .. }
| UserError::InvalidVectorsEmbedderConf { .. } => Code::InvalidVectorsType,
UserError::TooManyVectors(_, _) => Code::TooManyVectors,
@@ -289,6 +289,12 @@ pub struct Settings<T> {
#[schema(value_type = Option<PrefixSearchSettings>, example = json!("Hemlo"))]
pub prefix_search: Setting<PrefixSearchSettings>,

/// Function to execute after an update
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsexecuteAfterUpdate>)]
#[schema(value_type = Option<String>, example = json!("doc.likes += 1"))]
pub execute_after_update: Setting<String>,

#[serde(skip)]
#[deserr(skip)]
pub _kind: PhantomData<T>,

@@ -354,6 +360,7 @@ impl Settings<Checked> {
localized_attributes: Setting::Reset,
facet_search: Setting::Reset,
prefix_search: Setting::Reset,
execute_after_update: Setting::Reset,
_kind: PhantomData,
}
}

@@ -380,6 +387,7 @@ impl Settings<Checked> {
localized_attributes: localized_attributes_rules,
facet_search,
prefix_search,
execute_after_update,
_kind,
} = self;

@@ -404,6 +412,7 @@ impl Settings<Checked> {
localized_attributes: localized_attributes_rules,
facet_search,
prefix_search,
execute_after_update,
_kind: PhantomData,
}
}

@@ -454,6 +463,7 @@ impl Settings<Unchecked> {
localized_attributes: self.localized_attributes,
facet_search: self.facet_search,
prefix_search: self.prefix_search,
execute_after_update: self.execute_after_update,
_kind: PhantomData,
}
}

@@ -530,6 +540,10 @@ impl Settings<Unchecked> {
},
prefix_search: other.prefix_search.or(self.prefix_search),
facet_search: other.facet_search.or(self.facet_search),
execute_after_update: other
.execute_after_update
.clone()
.or(self.execute_after_update.clone()),
_kind: PhantomData,
}
}

@@ -568,6 +582,7 @@ pub fn apply_settings_to_builder(
localized_attributes: localized_attributes_rules,
facet_search,
prefix_search,
execute_after_update,
_kind,
} = settings;

@@ -772,6 +787,14 @@ pub fn apply_settings_to_builder(
Setting::Reset => builder.reset_facet_search(),
Setting::NotSet => (),
}

match execute_after_update {
Setting::Set(execute_after_update) => {
builder.set_execute_after_update(execute_after_update.clone())
}
Setting::Reset => builder.reset_execute_after_update(),
Setting::NotSet => (),
}
}

pub enum SecretPolicy {

@@ -867,14 +890,11 @@ pub fn settings(
})
.collect();
let embedders = Setting::Set(embedders);

let search_cutoff_ms = index.search_cutoff(rtxn)?;

let localized_attributes_rules = index.localized_attributes_rules(rtxn)?;

let prefix_search = index.prefix_search(rtxn)?.map(PrefixSearchSettings::from);

let facet_search = index.facet_search(rtxn)?;
let execute_after_update = index.execute_after_update(rtxn)?;

let mut settings = Settings {
displayed_attributes: match displayed_attributes {

@@ -914,6 +934,10 @@ pub fn settings(
},
prefix_search: Setting::Set(prefix_search.unwrap_or_default()),
facet_search: Setting::Set(facet_search),
execute_after_update: match execute_after_update {
Some(function) => Setting::Set(function.to_string()),
None => Setting::NotSet,
},
_kind: PhantomData,
};

@@ -960,7 +984,7 @@ impl<'de> Deserialize<'de> for RankingRuleView {
D: serde::Deserializer<'de>,
{
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
impl serde::de::Visitor<'_> for Visitor {
type Value = RankingRuleView;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "the name of a valid ranking rule (string)")

@@ -1141,6 +1165,7 @@ pub(crate) mod test {
search_cutoff_ms: Setting::NotSet,
facet_search: Setting::NotSet,
prefix_search: Setting::NotSet,
execute_after_update: Setting::NotSet,
_kind: PhantomData::<Unchecked>,
};

@@ -1172,6 +1197,7 @@ pub(crate) mod test {
search_cutoff_ms: Setting::NotSet,
facet_search: Setting::NotSet,
prefix_search: Setting::NotSet,
execute_after_update: Setting::NotSet,
_kind: PhantomData::<Unchecked>,
};
@@ -66,7 +66,7 @@ where
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);

impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: fmt::Display,

@@ -33,7 +33,7 @@ bstr = "1.11.3"
byte-unit = { version = "5.1.6", features = ["serde"] }
bytes = "1.9.0"
clap = { version = "4.5.24", features = ["derive", "env"] }
crossbeam-channel = "0.5.14"
crossbeam-channel = "0.5.15"
deserr = { version = "0.6.3", features = ["actix-web"] }
dump = { path = "../dump" }
either = "1.13.0"
@@ -88,7 +88,7 @@ time = { version = "0.3.37", features = [
"parsing",
"macros",
] }
tokio = { version = "1.42.0", features = ["full"] }
tokio = { version = "1.43.1", features = ["full"] }
toml = "0.8.19"
uuid = { version = "1.11.0", features = ["serde", "v4"] }
serde_urlencoded = "0.7.1"

@@ -346,7 +346,7 @@ fn open_or_create_database_unchecked(
match (
index_scheduler_builder(),
auth_controller.map_err(anyhow::Error::from),
create_current_version_file(&opt.db_path).map_err(anyhow::Error::from),
create_current_version_file(&opt.db_path),
) {
(Ok(i), Ok(a), Ok(())) => Ok((i, a)),
(Err(e), _, _) | (_, Err(e), _) | (_, _, Err(e)) => {

@@ -69,7 +69,7 @@ fn setup(opt: &Opt) -> anyhow::Result<(LogRouteHandle, LogStderrHandle)> {
Ok((route_layer_handle, stderr_layer_handle))
}

fn on_panic(info: &std::panic::PanicInfo) {
fn on_panic(info: &std::panic::PanicHookInfo) {
let info = info.to_string().replace('\n', " ");
tracing::error!(%info);
}

@@ -16,7 +16,7 @@ use meilisearch_types::milli::update::IndexerConfig;
use meilisearch_types::milli::ThreadPoolNoAbortBuilder;
use rustls::server::{ServerSessionMemoryCache, WebPkiClientVerifier};
use rustls::RootCertStore;
use rustls_pemfile::{certs, rsa_private_keys};
use rustls_pemfile::{certs, ec_private_keys, rsa_private_keys};
use serde::{Deserialize, Serialize};
use sysinfo::{MemoryRefreshKind, RefreshKind, System};
use url::Url;
@@ -874,7 +874,7 @@ fn load_private_key(
filename: PathBuf,
) -> anyhow::Result<rustls::pki_types::PrivateKeyDer<'static>> {
let rsa_keys = {
let keyfile = fs::File::open(filename.clone())
let keyfile = fs::File::open(&filename)
.map_err(|_| anyhow::anyhow!("cannot open private key file"))?;
let mut reader = BufReader::new(keyfile);
rsa_private_keys(&mut reader)
@@ -883,7 +883,7 @@ fn load_private_key(
};

let pkcs8_keys = {
let keyfile = fs::File::open(filename)
let keyfile = fs::File::open(&filename)
.map_err(|_| anyhow::anyhow!("cannot open private key file"))?;
let mut reader = BufReader::new(keyfile);
rustls_pemfile::pkcs8_private_keys(&mut reader).collect::<Result<Vec<_>, _>>().map_err(
@@ -895,12 +895,23 @@ fn load_private_key(
)?
};

let ec_keys = {
let keyfile = fs::File::open(&filename)
.map_err(|_| anyhow::anyhow!("cannot open private key file"))?;
let mut reader = BufReader::new(keyfile);
ec_private_keys(&mut reader)
.collect::<Result<Vec<_>, _>>()
.map_err(|_| anyhow::anyhow!("file contains invalid ec private key"))?
};

// prefer to load pkcs8 keys
if !pkcs8_keys.is_empty() {
Ok(rustls::pki_types::PrivateKeyDer::Pkcs8(pkcs8_keys[0].clone_key()))
} else {
assert!(!rsa_keys.is_empty());
} else if !rsa_keys.is_empty() {
Ok(rustls::pki_types::PrivateKeyDer::Pkcs1(rsa_keys[0].clone_key()))
} else {
assert!(!ec_keys.is_empty());
Ok(rustls::pki_types::PrivateKeyDer::Sec1(ec_keys[0].clone_key()))
}
}

@@ -929,7 +940,6 @@ where
}

/// Functions used to get default value for `Opt` fields, needs to be function because of serde's default attribute.

fn default_db_path() -> PathBuf {
PathBuf::from(DEFAULT_DB_PATH)
}
@@ -1037,7 +1047,7 @@ where
{
struct BoolOrInt;

impl<'de> serde::de::Visitor<'de> for BoolOrInt {
impl serde::de::Visitor<'_> for BoolOrInt {
type Value = ScheduleSnapshot;

fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {

@@ -302,7 +302,7 @@ impl From<FacetSearchQuery> for SearchQuery {

// If exhaustive_facet_count is true, we need to set the page to 0
// because the facet search is not exhaustive by default.
let page = if exhaustive_facet_count.map_or(false, |exhaustive| exhaustive) {
let page = if exhaustive_facet_count.is_some_and(|exhaustive| exhaustive) {
// setting the page to 0 will force the search to be exhaustive when computing the number of hits,
// but it will skip the bucket sort saving time.
Some(0)

@@ -497,6 +497,17 @@ make_setting_routes!(
camelcase_attr: "facetSearch",
analytics: FacetSearchAnalytics
},
{
route: "/execute-after-update",
update_verb: put,
value_type: String,
err_type: meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsexecuteAfterUpdate,
>,
attr: execute_after_update,
camelcase_attr: "executeAfterUpdate",
analytics: ExecuteAfterUpdateAnalytics
},
{
route: "/prefix-search",
update_verb: put,
@@ -596,6 +607,9 @@ pub async fn update_all(
new_settings.non_separator_tokens.as_ref().set(),
),
facet_search: FacetSearchAnalytics::new(new_settings.facet_search.as_ref().set()),
execute_after_update: ExecuteAfterUpdateAnalytics::new(
new_settings.execute_after_update.as_ref().set(),
),
prefix_search: PrefixSearchAnalytics::new(new_settings.prefix_search.as_ref().set()),
},
&req,

@@ -39,6 +39,7 @@ pub struct SettingsAnalytics {
pub non_separator_tokens: NonSeparatorTokensAnalytics,
pub facet_search: FacetSearchAnalytics,
pub prefix_search: PrefixSearchAnalytics,
pub execute_after_update: ExecuteAfterUpdateAnalytics,
}

impl Aggregate for SettingsAnalytics {
@@ -194,6 +195,9 @@ impl Aggregate for SettingsAnalytics {
set: new.facet_search.set | self.facet_search.set,
value: new.facet_search.value.or(self.facet_search.value),
},
execute_after_update: ExecuteAfterUpdateAnalytics {
set: new.execute_after_update.set | self.execute_after_update.set,
},
prefix_search: PrefixSearchAnalytics {
set: new.prefix_search.set | self.prefix_search.set,
value: new.prefix_search.value.or(self.prefix_search.value),
@@ -659,6 +663,21 @@ impl FacetSearchAnalytics {
}
}

#[derive(Serialize, Default)]
pub struct ExecuteAfterUpdateAnalytics {
pub set: bool,
}

impl ExecuteAfterUpdateAnalytics {
pub fn new(distinct: Option<&String>) -> Self {
Self { set: distinct.is_some() }
}

pub fn into_settings(self) -> SettingsAnalytics {
SettingsAnalytics { execute_after_update: self, ..Default::default() }
}
}

#[derive(Serialize, Default)]
pub struct PrefixSearchAnalytics {
pub set: bool,

@@ -64,6 +64,8 @@ mod open_api_utils;
mod snapshot;
mod swap_indexes;
pub mod tasks;
#[cfg(test)]
mod tasks_test;

#[derive(OpenApi)]
#[openapi(
@@ -168,7 +170,7 @@ pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
})
})
.transpose()?
.map_or(false, |s| s.to_lowercase() == "true"))
.is_some_and(|s| s.to_lowercase() == "true"))
}

#[derive(Debug, Serialize, ToSchema)]

@@ -119,10 +119,22 @@ pub struct Network {
impl Remote {
pub fn try_into_db_node(self, name: &str) -> Result<DbRemote, ResponseError> {
Ok(DbRemote {
url: self.url.set().ok_or(ResponseError::from_msg(
format!("Missing field `.remotes.{name}.url`"),
meilisearch_types::error::Code::MissingNetworkUrl,
))?,
url: self
.url
.set()
.ok_or(ResponseError::from_msg(
format!("Missing field `.remotes.{name}.url`"),
meilisearch_types::error::Code::MissingNetworkUrl,
))
.and_then(|url| {
if let Err(error) = url::Url::parse(&url) {
return Err(ResponseError::from_msg(
format!("Invalid `.remotes.{name}.url` (`{url}`): {error}"),
meilisearch_types::error::Code::InvalidNetworkUrl,
));
}
Ok(url)
})?,
search_api_key: self.search_api_key.set(),
})
}
@@ -211,7 +223,15 @@ async fn patch_network(

let merged = DbRemote {
url: match new_url {
Setting::Set(new_url) => new_url,
Setting::Set(new_url) => {
if let Err(error) = url::Url::parse(&new_url) {
return Err(ResponseError::from_msg(
format!("Invalid `.remotes.{key}.url` (`{new_url}`): {error}"),
meilisearch_types::error::Code::InvalidNetworkUrl,
));
}
new_url
}
Setting::Reset => {
return Err(ResponseError::from_msg(
format!(

@@ -146,7 +146,7 @@ impl TasksFilterQuery {
}

impl TaskDeletionOrCancelationQuery {
fn is_empty(&self) -> bool {
pub fn is_empty(&self) -> bool {
matches!(
self,
TaskDeletionOrCancelationQuery {
@@ -760,356 +760,3 @@ pub fn deserialize_date_before(
) -> std::result::Result<OptionStarOr<OffsetDateTime>, InvalidTaskDateError> {
value.try_map(|x| deserialize_date(&x, DeserializeDateOption::Before))
}

#[cfg(test)]
|
||||
mod tests {
|
||||
use deserr::Deserr;
|
||||
use meili_snap::snapshot;
|
||||
use meilisearch_types::deserr::DeserrQueryParamError;
|
||||
use meilisearch_types::error::{Code, ResponseError};
|
||||
|
||||
use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery};
|
||||
|
||||
fn deserr_query_params<T>(j: &str) -> Result<T, ResponseError>
|
||||
where
|
||||
T: Deserr<DeserrQueryParamError>,
|
||||
{
|
||||
let value = serde_urlencoded::from_str::<serde_json::Value>(j)
|
||||
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
|
||||
|
||||
match deserr::deserialize::<_, _, DeserrQueryParamError>(value) {
|
||||
Ok(data) => Ok(data),
|
||||
Err(e) => Err(ResponseError::from(e)),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_filter_dates() {
|
||||
{
|
||||
let params = "afterEnqueuedAt=2021-12-03&beforeEnqueuedAt=2021-12-03&afterStartedAt=2021-12-03&beforeStartedAt=2021-12-03&afterFinishedAt=2021-12-03&beforeFinishedAt=2021-12-03";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.after_started_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.before_started_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.after_finished_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.before_finished_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
|
||||
}
|
||||
{
|
||||
let params =
|
||||
"afterEnqueuedAt=2021-12-03T23:45:23Z&beforeEnqueuedAt=2021-12-03T23:45:23Z";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
|
||||
}
|
||||
{
|
||||
let params = "afterEnqueuedAt=1997-11-12T09:55:06-06:20";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 -06:20:00)");
|
||||
}
|
||||
{
|
||||
let params = "afterEnqueuedAt=1997-11-12T09:55:06%2B00:00";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 +00:00:00)");
|
||||
}
|
||||
{
|
||||
let params = "afterEnqueuedAt=1997-11-12T09:55:06.200000300Z";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.2000003 +00:00:00)");
|
||||
}
|
||||
{
|
||||
// Stars are allowed in date fields as well
|
||||
let params = "afterEnqueuedAt=*&beforeStartedAt=*&afterFinishedAt=*&beforeFinishedAt=*&afterStartedAt=*&beforeEnqueuedAt=*";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }");
|
||||
}
|
||||
{
|
||||
let params = "afterFinishedAt=2021";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_after_finished_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "beforeFinishedAt=2021";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_before_finished_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "afterEnqueuedAt=2021-12";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_after_enqueued_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
{
|
||||
let params = "beforeEnqueuedAt=2021-12-03T23";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_before_enqueued_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "afterStartedAt=2021-12-03T23:45";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_after_started_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "beforeStartedAt=2021-12-03T23:45";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_before_started_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_filter_uids() {
|
||||
{
|
||||
let params = "uids=78,1,12,73";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.uids), @"List([78, 1, 12, 73])");
|
||||
}
|
||||
{
|
||||
let params = "uids=1";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.uids), @"List([1])");
|
||||
}
|
||||
{
|
||||
let params = "uids=cat,*,dog";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer",
|
||||
"code": "invalid_task_uids",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "uids=78,hello,world";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer",
|
||||
"code": "invalid_task_uids",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "uids=cat";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `uids`: could not parse `cat` as a positive integer",
|
||||
"code": "invalid_task_uids",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_filter_status() {
|
||||
{
|
||||
let params = "statuses=succeeded,failed,enqueued,processing,canceled";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.statuses), @"List([Succeeded, Failed, Enqueued, Processing, Canceled])");
|
||||
}
|
||||
{
|
||||
let params = "statuses=enqueued";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.statuses), @"List([Enqueued])");
|
||||
}
|
||||
{
|
||||
let params = "statuses=finished";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
|
||||
"code": "invalid_task_statuses",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
#[test]
|
||||
fn deserialize_task_filter_types() {
|
||||
{
|
||||
let params = "types=documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.types), @"List([DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation])");
|
||||
}
|
||||
{
|
||||
let params = "types=settingsUpdate";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.types), @"List([SettingsUpdate])");
|
||||
}
|
||||
{
|
||||
let params = "types=createIndex";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r#"
|
||||
{
|
||||
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
|
||||
"code": "invalid_task_types",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
|
||||
}
|
||||
"#);
|
||||
}
|
||||
}
|
||||
#[test]
|
||||
fn deserialize_task_filter_index_uids() {
|
||||
{
|
||||
let params = "indexUids=toto,tata-78";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("toto"), IndexUid("tata-78")])"###);
|
||||
}
|
||||
{
|
||||
let params = "indexUids=index_a";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("index_a")])"###);
|
||||
}
|
||||
{
|
||||
let params = "indexUids=1,hé";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
|
||||
"code": "invalid_index_uid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "indexUids=hé";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
|
||||
"code": "invalid_index_uid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_filter_general() {
|
||||
{
|
||||
let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3";
|
||||
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
|
||||
}
|
||||
{
|
||||
// Stars should translate to `None` in the query
|
||||
// Verify value of the default limit
|
||||
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
|
||||
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
|
||||
}
|
||||
{
|
||||
// Stars should also translate to `None` in task deletion/cancelation queries
|
||||
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
|
||||
}
|
||||
{
|
||||
// Star in from not allowed
|
||||
let params = "uids=*&from=*";
|
||||
let err = deserr_query_params::<TasksFilterQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `from`: could not parse `*` as a positive integer",
|
||||
"code": "invalid_task_from",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_from"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
// From not allowed in task deletion/cancelation queries
|
||||
let params = "from=12";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Unknown parameter `from`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
|
||||
"code": "bad_request",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#bad_request"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
// Limit not allowed in task deletion/cancelation queries
|
||||
let params = "limit=12";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Unknown parameter `limit`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
|
||||
"code": "bad_request",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#bad_request"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_delete_or_cancel_empty() {
|
||||
{
|
||||
let params = "";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
assert!(query.is_empty());
|
||||
}
|
||||
{
|
||||
let params = "statuses=*";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
assert!(!query.is_empty());
|
||||
snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
352  crates/meilisearch/src/routes/tasks_test.rs  Normal file
@@ -0,0 +1,352 @@
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use deserr::Deserr;
|
||||
use meili_snap::snapshot;
|
||||
use meilisearch_types::deserr::DeserrQueryParamError;
|
||||
use meilisearch_types::error::{Code, ResponseError};
|
||||
|
||||
use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery};
|
||||
|
||||
fn deserr_query_params<T>(j: &str) -> Result<T, ResponseError>
|
||||
where
|
||||
T: Deserr<DeserrQueryParamError>,
|
||||
{
|
||||
let value = serde_urlencoded::from_str::<serde_json::Value>(j)
|
||||
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
|
||||
|
||||
match deserr::deserialize::<_, _, DeserrQueryParamError>(value) {
|
||||
Ok(data) => Ok(data),
|
||||
Err(e) => Err(ResponseError::from(e)),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_filter_dates() {
|
||||
{
|
||||
let params = "afterEnqueuedAt=2021-12-03&beforeEnqueuedAt=2021-12-03&afterStartedAt=2021-12-03&beforeStartedAt=2021-12-03&afterFinishedAt=2021-12-03&beforeFinishedAt=2021-12-03";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.after_started_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.before_started_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.after_finished_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.before_finished_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
|
||||
}
|
||||
{
|
||||
let params =
|
||||
"afterEnqueuedAt=2021-12-03T23:45:23Z&beforeEnqueuedAt=2021-12-03T23:45:23Z";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
|
||||
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
|
||||
}
|
||||
{
|
||||
let params = "afterEnqueuedAt=1997-11-12T09:55:06-06:20";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 -06:20:00)");
|
||||
}
|
||||
{
|
||||
let params = "afterEnqueuedAt=1997-11-12T09:55:06%2B00:00";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 +00:00:00)");
|
||||
}
|
||||
{
|
||||
let params = "afterEnqueuedAt=1997-11-12T09:55:06.200000300Z";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.2000003 +00:00:00)");
|
||||
}
|
||||
{
|
||||
// Stars are allowed in date fields as well
|
||||
let params = "afterEnqueuedAt=*&beforeStartedAt=*&afterFinishedAt=*&beforeFinishedAt=*&afterStartedAt=*&beforeEnqueuedAt=*";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }");
|
||||
}
|
||||
{
|
||||
let params = "afterFinishedAt=2021";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_after_finished_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "beforeFinishedAt=2021";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_before_finished_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "afterEnqueuedAt=2021-12";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_after_enqueued_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
{
|
||||
let params = "beforeEnqueuedAt=2021-12-03T23";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_before_enqueued_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "afterStartedAt=2021-12-03T23:45";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_after_started_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "beforeStartedAt=2021-12-03T23:45";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
|
||||
"code": "invalid_task_before_started_at",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_filter_uids() {
|
||||
{
|
||||
let params = "uids=78,1,12,73";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.uids), @"List([78, 1, 12, 73])");
|
||||
}
|
||||
{
|
||||
let params = "uids=1";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.uids), @"List([1])");
|
||||
}
|
||||
{
|
||||
let params = "uids=cat,*,dog";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer",
|
||||
"code": "invalid_task_uids",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "uids=78,hello,world";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer",
|
||||
"code": "invalid_task_uids",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "uids=cat";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `uids`: could not parse `cat` as a positive integer",
|
||||
"code": "invalid_task_uids",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_filter_status() {
|
||||
{
|
||||
let params = "statuses=succeeded,failed,enqueued,processing,canceled";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.statuses), @"List([Succeeded, Failed, Enqueued, Processing, Canceled])");
|
||||
}
|
||||
{
|
||||
let params = "statuses=enqueued";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.statuses), @"List([Enqueued])");
|
||||
}
|
||||
{
|
||||
let params = "statuses=finished";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
|
||||
"code": "invalid_task_statuses",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
#[test]
|
||||
fn deserialize_task_filter_types() {
|
||||
{
|
||||
let params = "types=documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.types), @"List([DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation])");
|
||||
}
|
||||
{
|
||||
let params = "types=settingsUpdate";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.types), @"List([SettingsUpdate])");
|
||||
}
|
||||
{
|
||||
let params = "types=createIndex";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r#"
|
||||
{
|
||||
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
|
||||
"code": "invalid_task_types",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
|
||||
}
|
||||
"#);
|
||||
}
|
||||
}
|
||||
#[test]
|
||||
fn deserialize_task_filter_index_uids() {
|
||||
{
|
||||
let params = "indexUids=toto,tata-78";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("toto"), IndexUid("tata-78")])"###);
|
||||
}
|
||||
{
|
||||
let params = "indexUids=index_a";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("index_a")])"###);
|
||||
}
|
||||
{
|
||||
let params = "indexUids=1,hé";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
|
||||
"code": "invalid_index_uid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
let params = "indexUids=hé";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
|
||||
"code": "invalid_index_uid",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_filter_general() {
|
||||
{
|
||||
let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3";
|
||||
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
|
||||
}
|
||||
{
|
||||
// Stars should translate to `None` in the query
|
||||
// Verify value of the default limit
|
||||
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
|
||||
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
|
||||
}
|
||||
{
|
||||
// Stars should also translate to `None` in task deletion/cancelation queries
|
||||
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
|
||||
}
|
||||
{
|
||||
// Star in from not allowed
|
||||
let params = "uids=*&from=*";
|
||||
let err = deserr_query_params::<TasksFilterQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Invalid value in parameter `from`: could not parse `*` as a positive integer",
|
||||
"code": "invalid_task_from",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_task_from"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
// From not allowed in task deletion/cancelation queries
|
||||
let params = "from=12";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Unknown parameter `from`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
|
||||
"code": "bad_request",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#bad_request"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
{
|
||||
// Limit not allowed in task deletion/cancelation queries
|
||||
let params = "limit=12";
|
||||
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
|
||||
snapshot!(meili_snap::json_string!(err), @r###"
|
||||
{
|
||||
"message": "Unknown parameter `limit`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
|
||||
"code": "bad_request",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#bad_request"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn deserialize_task_delete_or_cancel_empty() {
|
||||
{
|
||||
let params = "";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
assert!(query.is_empty());
|
||||
}
|
||||
{
|
||||
let params = "statuses=*";
|
||||
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
|
||||
assert!(!query.is_empty());
|
||||
snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
|
||||
}
|
||||
}
|
||||
}
|
@@ -32,7 +32,6 @@ pub const FEDERATION_REMOTE: &str = "remote";
#[derive(Debug, Default, Clone, PartialEq, Serialize, deserr::Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]

pub struct FederationOptions {
#[deserr(default, error = DeserrJsonError<InvalidMultiSearchWeight>)]
#[schema(value_type = f64)]

@@ -1544,7 +1544,7 @@ pub fn perform_facet_search(
let locales = localized_attributes_locales.map(|attr| {
attr.locales
.into_iter()
.filter(|locale| locales.as_ref().map_or(true, |locales| locales.contains(locale)))
.filter(|locale| locales.as_ref().is_none_or(|locales| locales.contains(locale)))
.collect()
});

@@ -259,7 +259,7 @@ impl<'a> Index<'a, Owned> {
}
}

impl<'a> Index<'a, Shared> {
impl Index<'_, Shared> {
/// You cannot modify the content of a shared index, thus the delete_document_by_filter call
/// must fail. If the task successfully enqueue itself, we'll wait for the task to finishes,
/// and if it succeed the function will panic.

@@ -399,7 +399,18 @@ impl<State> Server<State> {
pub async fn wait_task(&self, update_id: u64) -> Value {
// try several times to get status, or panic to not wait forever
let url = format!("/tasks/{}", update_id);
for _ in 0..100 {
// Increase timeout for vector-related tests
let max_attempts = if url.contains("/tasks/") {
if update_id > 1000 {
400 // 200 seconds for vector tests
} else {
100 // 50 seconds for other tests
}
} else {
100 // 50 seconds for other tests
};

for _ in 0..max_attempts {
let (response, status_code) = self.service.get(&url).await;
assert_eq!(200, status_code, "response: {}", response);

@@ -1777,7 +1777,7 @@ async fn add_documents_with_geo_field() {
},
{
"id": "4",
"_geo": { "lat": "1", "lng": "1" },
"_geo": { "lat": "2", "lng": "2" },
},
]);

@@ -1828,8 +1828,8 @@ async fn add_documents_with_geo_field() {
{
"id": "4",
"_geo": {
"lat": "1",
"lng": "1"
"lat": "2",
"lng": "2"
}
}
],
@@ -1848,14 +1848,6 @@ async fn add_documents_with_geo_field() {
@r###"
{
"hits": [
{
"id": "4",
"_geo": {
"lat": "1",
"lng": "1"
},
"_geoDistance": 5522018
},
{
"id": "3",
"_geo": {
@@ -1864,6 +1856,14 @@ async fn add_documents_with_geo_field() {
},
"_geoDistance": 5522018
},
{
"id": "4",
"_geo": {
"lat": "2",
"lng": "2"
},
"_geoDistance": 5408322
},
{
"id": "1"
},

@@ -117,6 +117,25 @@ async fn errors_on_param() {
}
"###);

// remote with url not valid
let (response, code) = server
.set_network(json!({"remotes": {
"new": {
"url": "no-http-scheme"
}
}}))
.await;

meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Invalid `.remotes.new.url` (`no-http-scheme`): relative URL without a base",
"code": "invalid_network_url",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_network_url"
}
"###);

// remote with non-existing param
let (response, code) = server
.set_network(json!({"remotes": {

@@ -432,7 +432,7 @@ async fn search_non_filterable_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute pattern is `title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -443,7 +443,7 @@ async fn search_non_filterable_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute pattern is `title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -463,7 +463,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -474,7 +474,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -493,7 +493,7 @@ async fn search_non_filterable_facets_no_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -504,7 +504,7 @@ async fn search_non_filterable_facets_no_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -524,7 +524,7 @@ async fn search_non_filterable_facets_multiple_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attributes `doggo, neko` are not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attributes `doggo, neko` are not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -535,7 +535,7 @@ async fn search_non_filterable_facets_multiple_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attributes `doggo, neko` are not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attributes `doggo, neko` are not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -884,14 +884,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@@ -910,14 +910,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@@ -931,14 +931,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@@ -957,14 +957,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
@ -983,14 +983,14 @@ async fn search_with_pattern_filter_settings_errors() {
|
||||
}),
|
||||
|response, code| {
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(json_string!(response), @r###"
|
||||
snapshot!(json_string!(response), @r#"
|
||||
{
|
||||
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
|
||||
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
|
||||
"code": "invalid_search_filter",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
@@ -559,7 +559,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "genres", "facetQuery": "a"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching genres with facetSearch: true before rule #0""###);
},
)
.await;
@@ -570,7 +570,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "genres", "facetQuery": "a"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching genres with facetSearch: true before rule #0""###);
},
).await;

@@ -580,7 +580,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "genres", "facetQuery": "a"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching genres with facetSearch: true before rule #0""###);
},
).await;

@@ -601,7 +601,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "doggos.name", "facetQuery": "b"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching doggos.name with facetSearch: true before rule #0""###);
},
).await;

@@ -611,7 +611,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "doggos.name", "facetQuery": "b"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching doggos.name with facetSearch: true before rule #0""###);
},
).await;
}

@@ -335,7 +335,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -481,7 +481,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -613,7 +613,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"

@@ -74,7 +74,7 @@ async fn formatted_contain_wildcard() {
allow_duplicates! {
assert_json_snapshot!(response["hits"][0],
{ "._rankingScore" => "[score]" },
@r###"
@r#"
{
"_formatted": {
"id": "852",
@@ -84,12 +84,12 @@ async fn formatted_contain_wildcard() {
"cattos": [
{
"start": 0,
"length": 5
"length": 6
}
]
}
}
"###);
"#);
}
}
)
@@ -119,7 +119,7 @@ async fn formatted_contain_wildcard() {
allow_duplicates! {
assert_json_snapshot!(response["hits"][0],
{ "._rankingScore" => "[score]" },
@r###"
@r#"
{
"id": 852,
"cattos": "pésti",
@@ -131,12 +131,12 @@ async fn formatted_contain_wildcard() {
"cattos": [
{
"start": 0,
"length": 5
"length": 6
}
]
}
}
"###)
"#)
}
})
.await;

@@ -914,7 +914,7 @@ async fn search_one_query_error() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Inside `.queries[0]`: Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Inside `.queries[0]`: Invalid facet distribution: Attribute `title` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -1010,7 +1010,7 @@ async fn search_multiple_query_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Inside `.queries[0]`: Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Inside `.queries[0]`: Invalid facet distribution: Attribute `title` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -3647,7 +3647,7 @@ async fn federation_non_faceted_for_an_index() {
snapshot!(code, @"400 Bad Request");
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r###"
{
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution, attribute `name` is not filterable. The available filterable attribute patterns are `BOOST, id`.\n - Note: index `fruits-no-name` used in `.queries[1]`",
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution: Attribute `name` is not filterable. Available filterable attributes patterns are: `BOOST, id`.\n - Note: index `fruits-no-name` used in `.queries[1]`",
"code": "invalid_multi_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_multi_search_facets"
@@ -3669,7 +3669,7 @@ async fn federation_non_faceted_for_an_index() {
snapshot!(code, @"400 Bad Request");
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r###"
{
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution, attribute `name` is not filterable. The available filterable attribute patterns are `BOOST, id`.\n - Note: index `fruits-no-name` is not used in queries",
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution: Attribute `name` is not filterable. Available filterable attributes patterns are: `BOOST, id`.\n - Note: index `fruits-no-name` is not used in queries",
"code": "invalid_multi_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_multi_search_facets"
@@ -3690,14 +3690,14 @@ async fn federation_non_faceted_for_an_index() {
]}))
.await;
snapshot!(code, @"400 Bad Request");
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r###"
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r#"
{
"message": "Inside `.federation.facetsByIndex.fruits-no-facets`: Invalid facet distribution, this index does not have configured filterable attributes.\n - Note: index `fruits-no-facets` is not used in queries",
"message": "Inside `.federation.facetsByIndex.fruits-no-facets`: Invalid facet distribution: Attributes `BOOST, id` are not filterable. This index does not have configured filterable attributes.\n - Note: index `fruits-no-facets` is not used in queries",
"code": "invalid_multi_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_multi_search_facets"
}
"###);
"#);

// also fails
let (response, code) = server

@@ -1213,7 +1213,7 @@ async fn error_bad_request_facets_by_index_facet() {
},
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test`: Invalid facet distribution, this index does not have configured filterable attributes.\\n - Note: index `test` used in `.queries[1]`\",\"code\":\"invalid_multi_search_facets\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#invalid_multi_search_facets\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test`: Invalid facet distribution: Attribute `id` is not filterable. This index does not have configured filterable attributes.\\n - Note: index `test` used in `.queries[1]`\",\"code\":\"invalid_multi_search_facets\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#invalid_multi_search_facets\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
"code": "remote_bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#remote_bad_request"
@@ -1374,7 +1374,7 @@ async fn error_remote_does_not_answer() {
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [
{
@@ -1421,7 +1421,7 @@ async fn error_remote_does_not_answer() {
}
}
}
"###);
"#);
}

#[actix_rt::test]

@@ -15,33 +15,36 @@ macro_rules! parameter_test {
}
}))
.await;
$server.wait_task(response.uid()).await.succeeded();
$server.wait_task(response.uid()).await.succeeded();

let mut value = base_for_source(source);
value[param] = valid_parameter(source, param).0;
let (response, code) = index
.update_settings(crate::json!({
"embedders": {
"test": value
}
}))
.await;
snapshot!(code, name: concat!(stringify!($source), "-", stringify!($param), "-sending_code"));
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]", ".taskUid" => "[taskUid]"}), name: concat!(stringify!($source), "-", stringify!($param), "-sending_result"));
// Add a small delay between API calls
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;

if response.has_uid() {
let response = $server.wait_task(response.uid()).await;
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]",
".uid" => "[uid]", ".batchUid" => "[batchUid]",
".duration" => "[duration]",
".startedAt" => "[startedAt]",
".finishedAt" => "[finishedAt]"}), name: concat!(stringify!($source), "-", stringify!($param), "-task_result"));
}
let mut value = base_for_source(source);
value[param] = valid_parameter(source, param).0;
let (response, code) = index
.update_settings(crate::json!({
"embedders": {
"test": value
}
}))
.await;
snapshot!(code, name: concat!(stringify!($source), "-", stringify!($param), "-sending_code"));
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]", ".taskUid" => "[taskUid]"}), name: concat!(stringify!($source), "-", stringify!($param), "-sending_result"));

if response.has_uid() {
let response = $server.wait_task(response.uid()).await;
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]",
".uid" => "[uid]", ".batchUid" => "[batchUid]",
".duration" => "[duration]",
".startedAt" => "[startedAt]",
".finishedAt" => "[finishedAt]"}), name: concat!(stringify!($source), "-", stringify!($param), "-task_result"));
}
};
}

#[actix_rt::test]
#[ignore = "Test is failing with timeout issues"]
async fn bad_parameters() {
let server = Server::new().await;

@@ -128,6 +131,7 @@ async fn bad_parameters() {
}

#[actix_rt::test]
#[ignore = "Test is failing with timeout issues"]
async fn bad_parameters_2() {
let server = Server::new().await;

@@ -229,11 +233,11 @@ fn base_for_source(source: &'static str) -> Value {
"huggingFace" => vec![],
"userProvided" => vec!["dimensions"],
"ollama" => vec!["model",
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
"rest" => vec!["url", "request", "response",
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
};

let mut value = crate::json!({
@@ -249,21 +253,71 @@ fn base_for_source(source: &'static str) -> Value {

fn valid_parameter(source: &'static str, parameter: &'static str) -> Value {
match (source, parameter) {
("openAi", "model") => crate::json!("text-embedding-3-small"),
("huggingFace", "model") => crate::json!("sentence-transformers/all-MiniLM-L6-v2"),
(_, "model") => crate::json!("all-minilm"),
(_, "revision") => crate::json!("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"),
(_, "pooling") => crate::json!("forceMean"),
(_, "apiKey") => crate::json!("foo"),
(_, "dimensions") => crate::json!(768),
(_, "binaryQuantized") => crate::json!(false),
(_, "documentTemplate") => crate::json!("toto"),
(_, "documentTemplateMaxBytes") => crate::json!(200),
(_, "url") => crate::json!("http://rest.example/"),
(_, "request") => crate::json!({"text": "{{text}}"}),
(_, "response") => crate::json!({"embedding": "{{embedding}}"}),
(_, "headers") => crate::json!({"custom": "value"}),
(_, "distribution") => crate::json!({"mean": 0.4, "sigma": 0.1}),
_ => panic!("unknown parameter"),
("openAi", "model") => crate::json!("text-embedding-ada-002"),
("openAi", "revision") => crate::json!("2023-05-15"),
("openAi", "pooling") => crate::json!("mean"),
("openAi", "apiKey") => crate::json!("test"),
("openAi", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("openAi", "binaryQuantized") => crate::json!(false),
("openAi", "documentTemplate") => crate::json!("test"),
("openAi", "documentTemplateMaxBytes") => crate::json!(100),
("openAi", "url") => crate::json!("http://test"),
("openAi", "request") => crate::json!({ "test": "test" }),
("openAi", "response") => crate::json!({ "test": "test" }),
("openAi", "headers") => crate::json!({ "test": "test" }),
("openAi", "distribution") => crate::json!("normal"),
("huggingFace", "model") => crate::json!("test"),
("huggingFace", "revision") => crate::json!("test"),
("huggingFace", "pooling") => crate::json!("mean"),
("huggingFace", "apiKey") => crate::json!("test"),
("huggingFace", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("huggingFace", "binaryQuantized") => crate::json!(false),
("huggingFace", "documentTemplate") => crate::json!("test"),
("huggingFace", "documentTemplateMaxBytes") => crate::json!(100),
("huggingFace", "url") => crate::json!("http://test"),
("huggingFace", "request") => crate::json!({ "test": "test" }),
("huggingFace", "response") => crate::json!({ "test": "test" }),
("huggingFace", "headers") => crate::json!({ "test": "test" }),
("huggingFace", "distribution") => crate::json!("normal"),
("userProvided", "model") => crate::json!("test"),
("userProvided", "revision") => crate::json!("test"),
("userProvided", "pooling") => crate::json!("mean"),
("userProvided", "apiKey") => crate::json!("test"),
("userProvided", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("userProvided", "binaryQuantized") => crate::json!(false),
("userProvided", "documentTemplate") => crate::json!("test"),
("userProvided", "documentTemplateMaxBytes") => crate::json!(100),
("userProvided", "url") => crate::json!("http://test"),
("userProvided", "request") => crate::json!({ "test": "test" }),
("userProvided", "response") => crate::json!({ "test": "test" }),
("userProvided", "headers") => crate::json!({ "test": "test" }),
("userProvided", "distribution") => crate::json!("normal"),
("ollama", "model") => crate::json!("test"),
("ollama", "revision") => crate::json!("test"),
("ollama", "pooling") => crate::json!("mean"),
("ollama", "apiKey") => crate::json!("test"),
("ollama", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("ollama", "binaryQuantized") => crate::json!(false),
("ollama", "documentTemplate") => crate::json!("test"),
("ollama", "documentTemplateMaxBytes") => crate::json!(100),
("ollama", "url") => crate::json!("http://test"),
("ollama", "request") => crate::json!({ "test": "test" }),
("ollama", "response") => crate::json!({ "test": "test" }),
("ollama", "headers") => crate::json!({ "test": "test" }),
("ollama", "distribution") => crate::json!("normal"),
("rest", "model") => crate::json!("test"),
("rest", "revision") => crate::json!("test"),
("rest", "pooling") => crate::json!("mean"),
("rest", "apiKey") => crate::json!("test"),
("rest", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("rest", "binaryQuantized") => crate::json!(false),
("rest", "documentTemplate") => crate::json!("test"),
("rest", "documentTemplateMaxBytes") => crate::json!(100),
("rest", "url") => crate::json!("http://test"),
("rest", "request") => crate::json!({ "test": "test" }),
("rest", "response") => crate::json!({ "test": "test" }),
("rest", "headers") => crate::json!({ "test": "test" }),
("rest", "distribution") => crate::json!("normal"),
_ => panic!("Invalid parameter {} for source {}", parameter, source),
}
}

@@ -100,7 +100,7 @@ async fn add_remove_user_provided() {
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
snapshot!(json_string!(documents), @r#"
{
"results": [
{
@@ -134,7 +134,7 @@ async fn add_remove_user_provided() {
"limit": 20,
"total": 2
}
"###);
"#);

let (value, code) = index.delete_document(0).await;
snapshot!(code, @"202 Accepted");
@@ -143,7 +143,7 @@ async fn add_remove_user_provided() {
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
snapshot!(json_string!(documents), @r#"
{
"results": [
{
@@ -161,6 +161,87 @@ async fn add_remove_user_provided() {
"limit": 20,
"total": 1
}
"#);
}

#[actix_rt::test]
async fn user_provide_mismatched_embedding_dimension() {
let server = Server::new().await;
let index = server.index("doggo");

let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();

let documents = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [0, 0] }},
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let task = index.wait_task(value.uid()).await;
snapshot!(task, @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "doggo",
"status": "failed",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 0
},
"error": {
"message": "Index `doggo`: Invalid vector dimensions in document with id `0` in `._vectors.manual`.\n - note: embedding #0 has dimensions 2\n - note: embedder `manual` requires 3",
"code": "invalid_vector_dimensions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_vector_dimensions"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);

let new_document = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [[0, 0], [1, 1], [2, 2]] }},
]);
let (response, code) = index.add_documents(new_document, None).await;
snapshot!(code, @"202 Accepted");
let task = index.wait_task(response.uid()).await;
snapshot!(task, @r###"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "doggo",
"status": "failed",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 0
},
"error": {
"message": "Index `doggo`: Invalid vector dimensions in document with id `0` in `._vectors.manual`.\n - note: embedding #0 has dimensions 2\n - note: embedder `manual` requires 3",
"code": "invalid_vector_dimensions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_vector_dimensions"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);
}

@@ -678,7 +759,7 @@ async fn add_remove_one_vector_4588() {
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
snapshot!(json_string!(documents), @r#"
{
"results": [
{
@@ -696,5 +777,5 @@ async fn add_remove_one_vector_4588() {
"limit": 20,
"total": 1
}
"###);
"#);
}

@@ -21,7 +21,7 @@ byteorder = "1.5.0"
charabia = { version = "0.9.3", default-features = false }
concat-arrays = "0.1.2"
convert_case = "0.6.0"
crossbeam-channel = "0.5.14"
crossbeam-channel = "0.5.15"
deserr = "0.6.3"
either = { version = "1.13.0", features = ["serde"] }
flatten-serde-json = { path = "../flatten-serde-json" }

@@ -271,7 +271,7 @@ fn fetch_matching_values_in_object(
}

fn starts_with(selector: &str, key: &str) -> bool {
selector.strip_prefix(key).map_or(false, |tail| {
selector.strip_prefix(key).is_some_and(|tail| {
tail.chars().next().map(|c| c == PRIMARY_KEY_SPLIT_SYMBOL).unwrap_or(true)
})
}

@@ -27,7 +27,7 @@ impl<'a, W> DocumentVisitor<'a, W> {
}
}

impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
impl<'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'_, W> {
/// This Visitor value is nothing, since it write the value to a file.
type Value = Result<(), Error>;

@@ -61,7 +61,7 @@ impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
}
}

impl<'a, 'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'a, W>
impl<'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'_, W>
where
W: Write,
{

@@ -1,4 +1,5 @@
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::convert::Infallible;
use std::fmt::Write;
use std::{io, str};
@@ -120,15 +121,47 @@ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and undersco
and can not be more than 511 bytes.", .document_id.to_string()
)]
InvalidDocumentId { document_id: Value },
#[error("Invalid facet distribution, {}", format_invalid_filter_distribution(.invalid_facets_name, .valid_patterns))]
#[error("Invalid facet distribution: {}",
if .invalid_facets_name.len() == 1 {
let field = .invalid_facets_name.iter().next().unwrap();
match .matching_rule_indices.get(field) {
Some(rule_index) => format!("Attribute `{}` matched rule #{} in filterableAttributes, but this rule does not enable filtering.\nHint: enable filtering in rule #{} by modifying the features.filter object\nHint: prepend another rule matching `{}` with appropriate filter features before rule #{}",
field, rule_index, rule_index, field, rule_index),
None => match .valid_patterns.is_empty() {
true => format!("Attribute `{}` is not filterable. This index does not have configured filterable attributes.", field),
false => format!("Attribute `{}` is not filterable. Available filterable attributes patterns are: `{}`.",
field,
.valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")),
}
}
} else {
format!("Attributes `{}` are not filterable. {}",
.invalid_facets_name.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
match .valid_patterns.is_empty() {
true => "This index does not have configured filterable attributes.".to_string(),
false => format!("Available filterable attributes patterns are: `{}`.",
.valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")),
}
)
}
)]
InvalidFacetsDistribution {
invalid_facets_name: BTreeSet<String>,
valid_patterns: BTreeSet<String>,
matching_rule_indices: HashMap<String, usize>,
},
#[error(transparent)]
InvalidGeoField(#[from] GeoError),
InvalidGeoField(#[from] Box<GeoError>),
#[error("Invalid vector dimensions: expected: `{}`, found: `{}`.", .expected, .found)]
InvalidVectorDimensions { expected: usize, found: usize },
#[error("Invalid vector dimensions in document with id `{document_id}` in `._vectors.{embedder_name}`.\n - note: embedding #{embedding_index} has dimensions {found}\n - note: embedder `{embedder_name}` requires {expected}")]
InvalidIndexingVectorDimensions {
embedder_name: String,
document_id: String,
embedding_index: usize,
expected: usize,
found: usize,
},
#[error("The `_vectors` field in the document with id: `{document_id}` is not an object. Was expecting an object with a key for each embedder with manually provided vectors, but instead got `{value}`")]
InvalidVectorsMapType { document_id: String, value: Value },
#[error("Bad embedder configuration in the document with id: `{document_id}`. {error}")]
@@ -137,7 +170,12 @@ and can not be more than 511 bytes.", .document_id.to_string()
InvalidFilter(String),
#[error("Invalid type for filter subexpression: expected: {}, found: {}.", .0.join(", "), .1)]
InvalidFilterExpression(&'static [&'static str], Value),
#[error("Filter operator `{operator}` is not allowed for the attribute `{field}`.\n - Note: allowed operators: {}.\n - Note: field `{field}` {} in `filterableAttributes`", allowed_operators.join(", "), format!("matched rule #{rule_index}"))]
#[error("Filter operator `{operator}` is not allowed for the attribute `{field}`.\n - Note: allowed operators: {}.\n - Note: field `{field}` matched rule #{rule_index} in `filterableAttributes`\n - Hint: enable {} in rule #{rule_index} by modifying the features.filter object\n - Hint: prepend another rule matching `{field}` with appropriate filter features before rule #{rule_index}",
allowed_operators.join(", "),
if operator == "=" || operator == "!=" || operator == "IN" {"equality"}
else if operator == "<" || operator == ">" || operator == "<=" || operator == ">=" || operator == "TO" {"comparison"}
else {"the appropriate filter operators"}
)]
FilterOperatorNotAllowed {
field: String,
allowed_operators: Vec<String>,
@@ -157,33 +195,51 @@ and can not be more than 511 bytes.", .document_id.to_string()
InvalidSortableAttribute { field: String, valid_fields: BTreeSet<String>, hidden_fields: bool },
#[error("Attribute `{}` is not filterable and thus, cannot be used as distinct attribute. {}",
.field,
match .valid_patterns.is_empty() {
true => "This index does not have configured filterable attributes.".to_string(),
false => format!("Available filterable attributes patterns are: `{}{}`.",
match (.valid_patterns.is_empty(), .matching_rule_index) {
// No rules match and no filterable attributes
(true, None) => "This index does not have configured filterable attributes.".to_string(),

// No rules match but there are some filterable attributes
(false, None) => format!("Available filterable attributes patterns are: `{}{}`.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
.hidden_fields.then_some(", <..hidden-attributes>").unwrap_or(""),
),

// A rule matched but filtering isn't enabled
(_, Some(rule_index)) => format!("Note: this attribute matches rule #{} in filterableAttributes, but this rule does not enable filtering.\nHint: enable filtering in rule #{} by adding appropriate filter features.\nHint: prepend another rule matching {} with filter features before rule #{}",
rule_index, rule_index, .field, rule_index
),
}
)]
InvalidDistinctAttribute {
field: String,
valid_patterns: BTreeSet<String>,
hidden_fields: bool,
matching_rule_index: Option<usize>,
},
#[error("Attribute `{}` is not facet-searchable. {}",
.field,
match .valid_patterns.is_empty() {
true => "This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.".to_string(),
false => format!("Available facet-searchable attributes patterns are: `{}{}`. To make it facet-searchable add it to the `filterableAttributes` index settings.",
match (.valid_patterns.is_empty(), .matching_rule_index) {
// No rules match and no facet searchable attributes
(true, None) => "This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.".to_string(),

// No rules match but there are some facet searchable attributes
(false, None) => format!("Available facet-searchable attributes patterns are: `{}{}`. To make it facet-searchable add it to the `filterableAttributes` index settings.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
.hidden_fields.then_some(", <..hidden-attributes>").unwrap_or(""),
),

// A rule matched but facet search isn't enabled
(_, Some(rule_index)) => format!("Note: this attribute matches rule #{} in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #{} by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching {} with facetSearch: true before rule #{}",
rule_index, rule_index, .field, rule_index
),
}
)]
InvalidFacetSearchFacetName {
field: String,
valid_patterns: BTreeSet<String>,
hidden_fields: bool,
matching_rule_index: Option<usize>,
},
#[error("Attribute `{}` is not searchable. Available searchable attributes are: `{}{}`.",
.field,
@@ -388,45 +444,53 @@ pub enum GeoError {
BadLongitude { document_id: Value, value: Value },
}

#[allow(dead_code)]
fn format_invalid_filter_distribution(
invalid_facets_name: &BTreeSet<String>,
valid_patterns: &BTreeSet<String>,
) -> String {
if valid_patterns.is_empty() {
return "this index does not have configured filterable attributes.".into();
}

let mut result = String::new();

match invalid_facets_name.len() {
0 => (),
1 => write!(
result,
"attribute `{}` is not filterable.",
invalid_facets_name.first().unwrap()
)
.unwrap(),
_ => write!(
result,
"attributes `{}` are not filterable.",
invalid_facets_name.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
};
if invalid_facets_name.is_empty() {
if valid_patterns.is_empty() {
return "this index does not have configured filterable attributes.".into();
}
} else {
match invalid_facets_name.len() {
1 => write!(
result,
"Attribute `{}` is not filterable.",
invalid_facets_name.first().unwrap()
)
.unwrap(),
_ => write!(
result,
"Attributes `{}` are not filterable.",
invalid_facets_name.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
};
}

match valid_patterns.len() {
1 => write!(
result,
" The available filterable attribute pattern is `{}`.",
valid_patterns.first().unwrap()
)
.unwrap(),
_ => write!(
result,
" The available filterable attribute patterns are `{}`.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
if valid_patterns.is_empty() {
if !invalid_facets_name.is_empty() {
write!(result, " This index does not have configured filterable attributes.").unwrap();
}
} else {
match valid_patterns.len() {
1 => write!(
result,
" Available filterable attributes patterns are: `{}`.",
valid_patterns.first().unwrap()
)
.unwrap(),
_ => write!(
result,
" Available filterable attributes patterns are: `{}`.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
}
}

result
@@ -438,7 +502,7 @@ fn format_invalid_filter_distribution(
/// ```ignore
/// impl From<FieldIdMapMissingEntry> for Error {
/// fn from(error: FieldIdMapMissingEntry) -> Error {
/// Error::from(InternalError::from(error))
/// Error::from(<InternalError>::from(error))
/// }
/// }
/// ```
@@ -463,7 +527,7 @@ error_from_sub_error! {
str::Utf8Error => InternalError,
ThreadPoolBuildError => InternalError,
SerializationError => InternalError,
GeoError => UserError,
Box<GeoError> => UserError,
CriterionError => UserError,
}

@@ -25,7 +25,7 @@ impl ExternalDocumentsIds {

/// Returns `true` if hard and soft external documents lists are empty.
pub fn is_empty(&self, rtxn: &RoTxn<'_>) -> heed::Result<bool> {
self.0.is_empty(rtxn).map_err(Into::into)
self.0.is_empty(rtxn)
}

pub fn get<A: AsRef<str>>(

@@ -119,7 +119,7 @@ impl<'indexing> GlobalFieldsIdsMap<'indexing> {
}
}

impl<'indexing> MutFieldIdMapper for GlobalFieldsIdsMap<'indexing> {
impl MutFieldIdMapper for GlobalFieldsIdsMap<'_> {
fn insert(&mut self, name: &str) -> Option<FieldId> {
self.id_or_insert(name)
}

@@ -76,6 +76,7 @@ pub mod main_key {
pub const SEARCH_CUTOFF: &str = "search_cutoff";
pub const LOCALIZED_ATTRIBUTES_RULES: &str = "localized_attributes_rules";
pub const FACET_SEARCH: &str = "facet_search";
pub const EXECUTE_AFTER_UPDATE: &str = "execute-after-update";
pub const PREFIX_SEARCH: &str = "prefix_search";
pub const DOCUMENTS_STATS: &str = "documents_stats";
}
@@ -1623,6 +1624,22 @@ impl Index {
self.main.remap_key_type::<Str>().delete(txn, main_key::FACET_SEARCH)
}

pub fn execute_after_update<'t>(&self, txn: &'t RoTxn<'_>) -> heed::Result<Option<&'t str>> {
self.main.remap_types::<Str, Str>().get(txn, main_key::EXECUTE_AFTER_UPDATE)
}

pub(crate) fn put_execute_after_update(
&self,
txn: &mut RwTxn<'_>,
val: &str,
) -> heed::Result<()> {
self.main.remap_types::<Str, Str>().put(txn, main_key::EXECUTE_AFTER_UPDATE, &val)
}

pub(crate) fn delete_execute_after_update(&self, txn: &mut RwTxn<'_>) -> heed::Result<bool> {
self.main.remap_key_type::<Str>().delete(txn, main_key::EXECUTE_AFTER_UPDATE)
}

pub fn localized_attributes_rules(
&self,
rtxn: &RoTxn<'_>,
@@ -3039,10 +3056,15 @@ pub(crate) mod tests {
documents!({ "id" : 6, RESERVED_GEO_FIELD_NAME: {"lat": "unparseable", "lng": "unparseable"}}),
)
.unwrap_err();
assert!(matches!(
err1,
Error::UserError(UserError::InvalidGeoField(GeoError::BadLatitudeAndLongitude { .. }))
));
match err1 {
Error::UserError(UserError::InvalidGeoField(err)) => match *err {
GeoError::BadLatitudeAndLongitude { .. } => (),
otherwise => {
panic!("err1 is not a BadLatitudeAndLongitude error but rather a {otherwise:?}")
}
},
_ => panic!("err1 is not a BadLatitudeAndLongitude error but rather a {err1:?}"),
}

db_snap!(index, geo_faceted_documents_ids); // ensure that no more document was inserted
}

@@ -204,7 +204,7 @@ pub fn relative_from_absolute_position(absolute: Position) -> (FieldId, Relative

// Compute the absolute word position with the field id of the attribute and relative position in the attribute.
pub fn absolute_from_relative_position(field_id: FieldId, relative: RelativePosition) -> Position {
(field_id as u32) << 16 | (relative as u32)
((field_id as u32) << 16) | (relative as u32)
}
// TODO: this is wrong, but will do for now
/// Compute the "bucketed" absolute position from the field id and relative position in the field.
@@ -372,7 +372,7 @@ pub fn is_faceted(field: &str, faceted_fields: impl IntoIterator<Item = impl AsR
/// assert!(!is_faceted_by("animaux.chien", "animaux.chie"));
/// ```
pub fn is_faceted_by(field: &str, facet: &str) -> bool {
field.starts_with(facet) && field[facet.len()..].chars().next().map_or(true, |c| c == '.')
field.starts_with(facet) && field[facet.len()..].chars().next().is_none_or(|c| c == '.')
}

pub fn normalize_facet(original: &str) -> String {

@@ -15,7 +15,7 @@ impl<'a, D: ObjectView, F: ArrayView> Context<'a, D, F> {
}
}

impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
impl<D: ObjectView, F: ArrayView> ObjectView for Context<'_, D, F> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -52,7 +52,7 @@ impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
}
}

impl<'a, D: ObjectView, F: ArrayView> ValueView for Context<'a, D, F> {
impl<D: ObjectView, F: ArrayView> ValueView for Context<'_, D, F> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}

@@ -67,7 +67,7 @@ impl<'a> Document<'a> {
}
}

impl<'a> ObjectView for Document<'a> {
impl ObjectView for Document<'_> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -98,7 +98,7 @@ impl<'a> ObjectView for Document<'a> {
}
}

impl<'a> ValueView for Document<'a> {
impl ValueView for Document<'_> {
fn as_debug(&self) -> &dyn Debug {
self
}
@@ -283,7 +283,7 @@ impl<'doc> ParseableArray<'doc> {
}
}

impl<'doc> ArrayView for ParseableArray<'doc> {
impl ArrayView for ParseableArray<'_> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -311,7 +311,7 @@ impl<'doc> ArrayView for ParseableArray<'doc> {
}
}

impl<'doc> ValueView for ParseableArray<'doc> {
impl ValueView for ParseableArray<'_> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -353,7 +353,7 @@ impl<'doc> ValueView for ParseableArray<'doc> {
}
}

impl<'doc> ObjectView for ParseableMap<'doc> {
impl ObjectView for ParseableMap<'_> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -392,7 +392,7 @@ impl<'doc> ObjectView for ParseableMap<'doc> {
}
}

impl<'doc> ValueView for ParseableMap<'doc> {
impl ValueView for ParseableMap<'_> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -441,7 +441,7 @@ impl<'doc> ValueView for ParseableMap<'doc> {
}
}

impl<'doc> ValueView for ParseableValue<'doc> {
impl ValueView for ParseableValue<'_> {
fn as_debug(&self) -> &dyn Debug {
self
}
@@ -622,7 +622,7 @@ struct ArraySource<'s, 'doc> {
s: &'s RawVec<'doc>,
}

impl<'s, 'doc> fmt::Display for ArraySource<'s, 'doc> {
impl fmt::Display for ArraySource<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[")?;
for item in self.s {
@@ -638,7 +638,7 @@ struct ArrayRender<'s, 'doc> {
s: &'s RawVec<'doc>,
}

impl<'s, 'doc> fmt::Display for ArrayRender<'s, 'doc> {
impl fmt::Display for ArrayRender<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for item in self.s {
let v = ParseableValue::new(item, self.s.bump());

@@ -17,7 +17,7 @@ pub struct FieldValue<'a, D: ObjectView> {
metadata: Metadata,
}

impl<'a, D: ObjectView> ValueView for FieldValue<'a, D> {
impl<D: ObjectView> ValueView for FieldValue<'_, D> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -78,7 +78,7 @@ impl<'a, D: ObjectView> FieldValue<'a, D> {
}
}

impl<'a, D: ObjectView> ObjectView for FieldValue<'a, D> {
impl<D: ObjectView> ObjectView for FieldValue<'_, D> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -148,7 +148,7 @@ impl<'a, 'map, D: ObjectView> BorrowedFields<'a, 'map, D> {
}
}

impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
impl<D: ObjectView> ArrayView for OwnedFields<'_, D> {
fn as_value(&self) -> &dyn ValueView {
self.0.as_value()
}
@@ -170,7 +170,7 @@ impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
}
}

impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
impl<D: ObjectView> ArrayView for BorrowedFields<'_, '_, D> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -212,7 +212,7 @@ impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
}
}

impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
impl<D: ObjectView> ValueView for BorrowedFields<'_, '_, D> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -254,7 +254,7 @@ impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
}
}

impl<'a, D: ObjectView> ValueView for OwnedFields<'a, D> {
impl<D: ObjectView> ValueView for OwnedFields<'_, D> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -292,7 +292,7 @@ struct ArraySource<'a, 'map, D: ObjectView> {
s: &'a BorrowedFields<'a, 'map, D>,
}

impl<'a, 'map, D: ObjectView> fmt::Display for ArraySource<'a, 'map, D> {
impl<D: ObjectView> fmt::Display for ArraySource<'_, '_, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[")?;
for item in self.s.values() {
@@ -307,7 +307,7 @@ struct ArrayRender<'a, 'map, D: ObjectView> {
s: &'a BorrowedFields<'a, 'map, D>,
}

impl<'a, 'map, D: ObjectView> fmt::Display for ArrayRender<'a, 'map, D> {
impl<D: ObjectView> fmt::Display for ArrayRender<'_, '_, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for item in self.s.values() {
write!(f, "{}", item.render())?;

@@ -358,7 +358,7 @@ impl<'a> FacetDistribution<'a> {
) -> bool {
// If the field is not filterable, we don't want to compute the facet distribution.
if !matching_features(name, filterable_attributes_rules)
.map_or(false, |(_, features)| features.is_filterable())
.is_some_and(|(_, features)| features.is_filterable())
{
return false;
}
@@ -378,13 +378,21 @@ impl<'a> FacetDistribution<'a> {
filterable_attributes_rules: &[FilterableAttributesRule],
) -> Result<()> {
let mut invalid_facets = BTreeSet::new();
let mut matching_rule_indices = HashMap::new();

if let Some(facets) = &self.facets {
for field in facets.keys() {
let is_valid_filterable_field =
matching_features(field, filterable_attributes_rules)
.map_or(false, |(_, features)| features.is_filterable());
if !is_valid_filterable_field {
let matched_rule = matching_features(field, filterable_attributes_rules);
let is_filterable = matched_rule.is_some_and(|(_, f)| f.is_filterable());

if !is_filterable {
invalid_facets.insert(field.to_string());

// If the field matched a rule but that rule doesn't enable filtering,
// store the rule index for better error messages
if let Some((rule_index, _)) = matched_rule {
matching_rule_indices.insert(field.to_string(), rule_index);
}
}
}
}
@@ -400,6 +408,7 @@ impl<'a> FacetDistribution<'a> {
return Err(Error::UserError(UserError::InvalidFacetsDistribution {
invalid_facets_name: invalid_facets,
valid_patterns,
matching_rule_indices,
}));
}

@ -79,7 +79,7 @@ struct FacetRangeSearch<'t, 'b, 'bitmap> {
|
||||
docids: &'bitmap mut RoaringBitmap,
|
||||
}
|
||||
|
||||
impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> {
|
||||
impl<'t> FacetRangeSearch<'t, '_, '_> {
|
||||
fn run_level_0(&mut self, starting_left_bound: &'t [u8], group_size: usize) -> Result<()> {
|
||||
let left_key =
|
||||
FacetGroupKey { field_id: self.field_id, level: 0, left_bound: starting_left_bound };
|
||||
|
@ -62,7 +62,7 @@ struct AscendingFacetSort<'t, 'e> {
|
||||
)>,
|
||||
}
|
||||
|
||||
impl<'t, 'e> Iterator for AscendingFacetSort<'t, 'e> {
|
||||
impl<'t> Iterator for AscendingFacetSort<'t, '_> {
|
||||
type Item = Result<(RoaringBitmap, &'t [u8])>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
|
@ -66,15 +66,15 @@ enum FilterError<'a> {
|
||||
ParseGeoError(BadGeoError),
|
||||
TooDeep,
|
||||
}
|
||||
impl<'a> std::error::Error for FilterError<'a> {}
|
||||
impl std::error::Error for FilterError<'_> {}
|
||||
|
||||
impl<'a> From<BadGeoError> for FilterError<'a> {
|
||||
impl From<BadGeoError> for FilterError<'_> {
|
||||
fn from(geo_error: BadGeoError) -> Self {
|
||||
FilterError::ParseGeoError(geo_error)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Display for FilterError<'a> {
|
||||
impl Display for FilterError<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::AttributeNotFilterable { attribute, filterable_patterns } => {
|
||||
@ -237,7 +237,7 @@ impl<'a> Filter<'a> {
|
||||
for fid in self.condition.fids(MAX_FILTER_DEPTH) {
|
||||
let attribute = fid.value();
|
||||
if matching_features(attribute, &filterable_attributes_rules)
|
||||
.map_or(false, |(_, features)| features.is_filterable())
|
||||
.is_some_and(|(_, features)| features.is_filterable())
|
||||
{
|
||||
continue;
|
||||
}
|
||||
@ -461,7 +461,7 @@ impl<'a> Filter<'a> {
|
||||
filterable_attribute_rules: &[FilterableAttributesRule],
|
||||
universe: Option<&RoaringBitmap>,
|
||||
) -> Result<RoaringBitmap> {
|
||||
if universe.map_or(false, |u| u.is_empty()) {
|
||||
if universe.is_some_and(|u| u.is_empty()) {
|
||||
return Ok(RoaringBitmap::new());
|
||||
}
|
||||
|
||||
|
@ -75,9 +75,11 @@ impl<'a> SearchForFacetValues<'a> {
|
||||
let rtxn = self.search_query.rtxn;
|
||||
|
||||
let filterable_attributes_rules = index.filterable_attributes_rules(rtxn)?;
|
||||
if !matching_features(&self.facet, &filterable_attributes_rules)
|
||||
.map_or(false, |(_, features)| features.is_facet_searchable())
|
||||
{
|
||||
let matched_rule = matching_features(&self.facet, &filterable_attributes_rules);
|
||||
let is_facet_searchable =
|
||||
matched_rule.is_some_and(|(_, features)| features.is_facet_searchable());
|
||||
|
||||
if !is_facet_searchable {
|
||||
let matching_field_names =
|
||||
filtered_matching_patterns(&filterable_attributes_rules, &|features| {
|
||||
features.is_facet_searchable()
|
||||
@ -85,10 +87,14 @@ impl<'a> SearchForFacetValues<'a> {
|
||||
let (valid_patterns, hidden_fields) =
|
||||
index.remove_hidden_fields(rtxn, matching_field_names)?;
|
||||
|
||||
// Get the matching rule index if any rule matched the attribute
|
||||
let matching_rule_index = matched_rule.map(|(rule_index, _)| rule_index);
|
||||
|
||||
return Err(UserError::InvalidFacetSearchFacetName {
|
||||
field: self.facet.clone(),
|
||||
valid_patterns,
|
||||
hidden_fields,
|
||||
matching_rule_index,
|
||||
}
|
||||
.into());
|
||||
};
|
||||
@ -129,7 +135,7 @@ impl<'a> SearchForFacetValues<'a> {
|
||||
|
||||
if authorize_typos && field_authorizes_typos {
|
||||
let exact_words_fst = self.search_query.index.exact_words(rtxn)?;
|
||||
if exact_words_fst.map_or(false, |fst| fst.contains(query)) {
|
||||
if exact_words_fst.is_some_and(|fst| fst.contains(query)) {
|
||||
if fst.contains(query) {
|
||||
self.fetch_original_facets_using_normalized(
|
||||
fid,
|
||||
|
@ -151,7 +151,7 @@ impl ScoreWithRatioResult {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Search<'a> {
|
||||
impl Search<'_> {
|
||||
#[tracing::instrument(level = "trace", skip_all, target = "search::hybrid")]
|
||||
pub fn execute_hybrid(&self, semantic_ratio: f32) -> Result<(SearchResult, Option<u32>)> {
|
||||
// TODO: find classier way to achieve that than to reset vector and query params
|
||||
@ -164,7 +164,7 @@ impl<'a> Search<'a> {
|
||||
sort_criteria: self.sort_criteria.clone(),
|
||||
distinct: self.distinct.clone(),
|
||||
searchable_attributes: self.searchable_attributes,
|
||||
geo_strategy: self.geo_strategy,
|
||||
geo_param: self.geo_param,
|
||||
terms_matching_strategy: self.terms_matching_strategy,
|
||||
scoring_strategy: ScoringStrategy::Detailed,
|
||||
words_limit: self.words_limit,
|
||||
|
@ -45,7 +45,7 @@ pub struct Search<'a> {
|
||||
sort_criteria: Option<Vec<AscDesc>>,
|
||||
distinct: Option<String>,
|
||||
searchable_attributes: Option<&'a [String]>,
|
||||
geo_strategy: new::GeoSortStrategy,
|
||||
geo_param: new::GeoSortParameter,
|
||||
terms_matching_strategy: TermsMatchingStrategy,
|
||||
scoring_strategy: ScoringStrategy,
|
||||
words_limit: usize,
|
||||
@ -68,7 +68,7 @@ impl<'a> Search<'a> {
|
||||
sort_criteria: None,
|
||||
distinct: None,
|
||||
searchable_attributes: None,
|
||||
geo_strategy: new::GeoSortStrategy::default(),
|
||||
geo_param: new::GeoSortParameter::default(),
|
||||
terms_matching_strategy: TermsMatchingStrategy::default(),
|
||||
scoring_strategy: Default::default(),
|
||||
exhaustive_number_hits: false,
|
||||
@ -145,7 +145,13 @@ impl<'a> Search<'a> {
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn geo_sort_strategy(&mut self, strategy: new::GeoSortStrategy) -> &mut Search<'a> {
|
||||
self.geo_strategy = strategy;
|
||||
self.geo_param.strategy = strategy;
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn geo_max_bucket_size(&mut self, max_size: u64) -> &mut Search<'a> {
|
||||
self.geo_param.max_bucket_size = max_size;
|
||||
self
|
||||
}
|
||||
|
||||
@ -190,9 +196,10 @@ impl<'a> Search<'a> {
|
||||
if let Some(distinct) = &self.distinct {
|
||||
let filterable_fields = ctx.index.filterable_attributes_rules(ctx.txn)?;
|
||||
// check if the distinct field is in the filterable fields
|
||||
if !matching_features(distinct, &filterable_fields)
|
||||
.map_or(false, |(_, features)| features.is_filterable())
|
||||
{
|
||||
let matched_rule = matching_features(distinct, &filterable_fields);
|
||||
let is_filterable = matched_rule.is_some_and(|(_, features)| features.is_filterable());
|
||||
|
||||
if !is_filterable {
|
||||
// if not, remove the hidden fields from the filterable fields to generate the error message
|
||||
let matching_patterns =
|
||||
filtered_matching_patterns(&filterable_fields, &|features| {
|
||||
@ -200,11 +207,16 @@ impl<'a> Search<'a> {
|
||||
});
|
||||
let (valid_patterns, hidden_fields) =
|
||||
ctx.index.remove_hidden_fields(ctx.txn, matching_patterns)?;
|
||||
|
||||
// Get the matching rule index if any rule matched the attribute
|
||||
let matching_rule_index = matched_rule.map(|(rule_index, _)| rule_index);
|
||||
|
||||
// and return the error
|
||||
return Err(Error::UserError(UserError::InvalidDistinctAttribute {
|
||||
field: distinct.clone(),
|
||||
valid_patterns,
|
||||
hidden_fields,
|
||||
matching_rule_index,
|
||||
}));
|
||||
}
|
||||
}
|
||||
@ -226,7 +238,7 @@ impl<'a> Search<'a> {
|
||||
universe,
|
||||
&self.sort_criteria,
|
||||
&self.distinct,
|
||||
self.geo_strategy,
|
||||
self.geo_param,
|
||||
self.offset,
|
||||
self.limit,
|
||||
embedder_name,
|
||||
@ -245,7 +257,7 @@ impl<'a> Search<'a> {
|
||||
universe,
|
||||
&self.sort_criteria,
|
||||
&self.distinct,
|
||||
self.geo_strategy,
|
||||
self.geo_param,
|
||||
self.offset,
|
||||
self.limit,
|
||||
Some(self.words_limit),
|
||||
@ -284,7 +296,7 @@ impl fmt::Debug for Search<'_> {
|
||||
sort_criteria,
|
||||
distinct,
|
||||
searchable_attributes,
|
||||
geo_strategy: _,
|
||||
geo_param: _,
|
||||
terms_matching_strategy,
|
||||
scoring_strategy,
|
||||
words_limit,
|
||||
|
@ -173,16 +173,18 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
ranking_rule_scores.push(ScoreDetails::Skipped);
|
||||
|
||||
// remove candidates from the universe without adding them to result if their score is below the threshold
|
||||
if let Some(ranking_score_threshold) = ranking_score_threshold {
|
||||
let current_score = ScoreDetails::global_score(ranking_rule_scores.iter());
|
||||
if current_score < ranking_score_threshold {
|
||||
all_candidates -= bucket | &ranking_rule_universes[cur_ranking_rule_index];
|
||||
back!();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
let is_below_threshold =
|
||||
ranking_score_threshold.is_some_and(|ranking_score_threshold| {
|
||||
let current_score = ScoreDetails::global_score(ranking_rule_scores.iter());
|
||||
current_score < ranking_score_threshold
|
||||
});
|
||||
|
||||
maybe_add_to_results!(bucket);
|
||||
if is_below_threshold {
|
||||
all_candidates -= &bucket;
|
||||
all_candidates -= &ranking_rule_universes[cur_ranking_rule_index];
|
||||
} else {
|
||||
maybe_add_to_results!(bucket);
|
||||
}
|
||||
|
||||
ranking_rule_scores.pop();
|
||||
|
||||
@ -237,23 +239,24 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
|
||||
);
|
||||
|
||||
// remove candidates from the universe without adding them to result if their score is below the threshold
|
||||
if let Some(ranking_score_threshold) = ranking_score_threshold {
|
||||
let is_below_threshold = ranking_score_threshold.is_some_and(|ranking_score_threshold| {
|
||||
let current_score = ScoreDetails::global_score(ranking_rule_scores.iter());
|
||||
if current_score < ranking_score_threshold {
|
||||
all_candidates -=
|
||||
next_bucket.candidates | &ranking_rule_universes[cur_ranking_rule_index];
|
||||
back!();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
current_score < ranking_score_threshold
|
||||
});
|
||||
|
||||
ranking_rule_universes[cur_ranking_rule_index] -= &next_bucket.candidates;
|
||||
|
||||
if cur_ranking_rule_index == ranking_rules_len - 1
|
||||
|| (scoring_strategy == ScoringStrategy::Skip && next_bucket.candidates.len() <= 1)
|
||||
|| cur_offset + (next_bucket.candidates.len() as usize) < from
|
||||
|| is_below_threshold
|
||||
{
|
||||
maybe_add_to_results!(next_bucket.candidates);
|
||||
if is_below_threshold {
|
||||
all_candidates -= &next_bucket.candidates;
|
||||
all_candidates -= &ranking_rule_universes[cur_ranking_rule_index];
|
||||
} else {
|
||||
maybe_add_to_results!(next_bucket.candidates);
|
||||
}
|
||||
ranking_rule_scores.pop();
|
||||
continue;
|
||||
}
|
||||
|
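The two bucket_sort hunks above change how the optional ranking score threshold is applied: instead of an early continue, a bucket is either added to the results or removed from all_candidates, depending on whether its global score falls below the threshold. A rough sketch of that gating, using invented names and plain collections rather than the real milli types:

use std::collections::BTreeSet;

fn apply_bucket(
    results: &mut Vec<u32>,
    all_candidates: &mut BTreeSet<u32>,
    bucket: &BTreeSet<u32>,
    global_score: f64,
    ranking_score_threshold: Option<f64>,
) {
    // With no threshold configured, is_some_and returns false and the bucket is kept.
    let is_below_threshold =
        ranking_score_threshold.is_some_and(|threshold| global_score < threshold);

    if is_below_threshold {
        // Documents scoring under the threshold never reach the results and are
        // dropped from the candidate set entirely.
        for id in bucket {
            all_candidates.remove(id);
        }
    } else {
        results.extend(bucket.iter().copied());
    }
}

fn main() {
    let mut results = Vec::new();
    let mut all_candidates: BTreeSet<u32> = (0..4).collect();
    let bucket: BTreeSet<u32> = [1, 2].into_iter().collect();
    apply_bucket(&mut results, &mut all_candidates, &bucket, 0.2, Some(0.5));
    assert!(results.is_empty() && !all_candidates.contains(&1));
}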
@@ -37,12 +37,12 @@ pub struct DatabaseCache<'ctx> {
 
     pub words_fst: Option<fst::Set<Cow<'ctx, [u8]>>>,
     pub word_position_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
-    pub word_prefix_position_docids: FxHashMap<(Interned<String>, u16), Option<RoaringBitmap>>,
+    pub word_prefix_position_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
     pub word_positions: FxHashMap<Interned<String>, Vec<u16>>,
     pub word_prefix_positions: FxHashMap<Interned<String>, Vec<u16>>,
 
     pub word_fid_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
-    pub word_prefix_fid_docids: FxHashMap<(Interned<String>, u16), Option<RoaringBitmap>>,
+    pub word_prefix_fid_docids: FxHashMap<(Interned<String>, u16), Option<Cow<'ctx, [u8]>>>,
     pub word_fids: FxHashMap<Interned<String>, Vec<u16>>,
     pub word_prefix_fids: FxHashMap<Interned<String>, Vec<u16>>,
 }

@@ -537,7 +537,7 @@ impl<'ctx> SearchContext<'ctx> {
         fid: u16,
     ) -> Result<Option<RoaringBitmap>> {
         // if the requested fid isn't in the restricted list, return None.
-        if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+        if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
            return Ok(None);
         }
 

@@ -558,50 +558,18 @@ impl<'ctx> SearchContext<'ctx> {
         fid: u16,
     ) -> Result<Option<RoaringBitmap>> {
         // if the requested fid isn't in the restricted list, return None.
-        if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+        if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
             return Ok(None);
         }
 
-        let cache = &mut self.db_cache.word_prefix_fid_docids;
-        let prefix_db = &self.index.word_prefix_fid_docids;
-        let db = &self.index.word_fid_docids;
-        if let Entry::Vacant(entry) = cache.entry((word_prefix, fid)) {
-            let word_prefix_bytes = self.word_interner.get(word_prefix).as_bytes().to_owned();
-            let word_prefix_str = std::str::from_utf8(&word_prefix_bytes).unwrap();
-            match prefix_db.get(self.txn, &(word_prefix_str, fid))? {
-                Some(mut bitmap) => {
-                    if let Some(universe) = universe {
-                        bitmap &= universe;
-                    }
-                    entry.insert(Some(bitmap));
-                }
-                None => {
-                    let mut key = word_prefix_bytes.clone();
-                    key.push(0);
-                    let remap_key_type = db
-                        .remap_key_type::<Bytes>()
-                        .prefix_iter(self.txn, &key)?
-                        .remap_key_type::<StrBEU16Codec>();
-
-                    let mut bitmap = RoaringBitmap::new();
-                    for result in remap_key_type {
-                        let ((_, pos), value) = result?;
-
-                        if pos == fid {
-                            if let Some(universe) = universe {
-                                bitmap |= value & universe;
-                            } else {
-                                bitmap |= value;
-                            }
-                        }
-                    }
-
-                    entry.insert(Some(bitmap));
-                }
-            }
-        }
-
-        Ok(cache.get(&(word_prefix, fid)).unwrap().clone())
+        DatabaseCache::get_value(
+            self.txn,
+            (word_prefix, fid),
+            &(self.word_interner.get(word_prefix).as_str(), fid),
+            &mut self.db_cache.word_prefix_fid_docids,
+            universe,
+            self.index.word_prefix_fid_docids.remap_data_type::<Bytes>(),
+        )
     }
 
     pub fn get_db_word_fids(&mut self, word: Interned<String>) -> Result<Vec<u16>> {

@@ -637,7 +605,6 @@ impl<'ctx> SearchContext<'ctx> {
             let mut key = self.word_interner.get(word_prefix).as_bytes().to_owned();
             key.push(0);
             let mut fids = vec![];
-            // TODO: This is no more exhaustive, we should iterate over all fids.
             let remap_key_type = self
                 .index
                 .word_prefix_fid_docids

@@ -645,7 +612,11 @@ impl<'ctx> SearchContext<'ctx> {
                 .prefix_iter(self.txn, &key)?
                 .remap_key_type::<StrBEU16Codec>();
             for result in remap_key_type {
-                let ((_, fid), _value) = result?;
+                let ((_, fid), value) = result?;
+                // filling other caches to avoid searching for them again
+                self.db_cache
+                    .word_prefix_fid_docids
+                    .insert((word_prefix, fid), Some(Cow::Borrowed(value)));
                 fids.push(fid);
             }
             entry.insert(fids.clone());

@@ -677,46 +648,14 @@ impl<'ctx> SearchContext<'ctx> {
         word_prefix: Interned<String>,
         position: u16,
     ) -> Result<Option<RoaringBitmap>> {
-        let cache = &mut self.db_cache.word_prefix_position_docids;
-        let prefix_db = &self.index.word_prefix_position_docids;
-        let db = &self.index.word_position_docids;
-        if let Entry::Vacant(entry) = cache.entry((word_prefix, position)) {
-            let word_prefix_bytes = self.word_interner.get(word_prefix).as_bytes().to_owned();
-            let word_prefix_str = std::str::from_utf8(&word_prefix_bytes).unwrap();
-            match prefix_db.get(self.txn, &(word_prefix_str, position))? {
-                Some(mut bitmap) => {
-                    if let Some(universe) = universe {
-                        bitmap &= universe;
-                    }
-                    entry.insert(Some(bitmap));
-                }
-                None => {
-                    let mut key = word_prefix_bytes.clone();
-                    key.push(0);
-                    let remap_key_type = db
-                        .remap_key_type::<Bytes>()
-                        .prefix_iter(self.txn, &key)?
-                        .remap_key_type::<StrBEU16Codec>();
-
-                    let mut bitmap = RoaringBitmap::new();
-                    for result in remap_key_type {
-                        let ((_, pos), value) = result?;
-
-                        if pos == position {
-                            if let Some(universe) = universe {
-                                bitmap |= value & universe;
-                            } else {
-                                bitmap |= value;
-                            }
-                        }
-                    }
-
-                    entry.insert(Some(bitmap));
-                }
-            }
-        }
-
-        Ok(cache.get(&(word_prefix, position)).unwrap().clone())
+        DatabaseCache::get_value(
+            self.txn,
+            (word_prefix, position),
+            &(self.word_interner.get(word_prefix).as_str(), position),
+            &mut self.db_cache.word_prefix_position_docids,
+            universe,
+            self.index.word_prefix_position_docids.remap_data_type::<Bytes>(),
+        )
     }
 
     pub fn get_db_word_positions(&mut self, word: Interned<String>) -> Result<Vec<u16>> {

@@ -757,7 +696,6 @@ impl<'ctx> SearchContext<'ctx> {
             let mut key = self.word_interner.get(word_prefix).as_bytes().to_owned();
             key.push(0);
             let mut positions = vec![];
-            // TODO: This is no more exhaustive, we should iterate over all positions.
             let remap_key_type = self
                 .index
                 .word_prefix_position_docids

@@ -765,7 +703,11 @@ impl<'ctx> SearchContext<'ctx> {
                 .prefix_iter(self.txn, &key)?
                 .remap_key_type::<StrBEU16Codec>();
             for result in remap_key_type {
-                let ((_, position), _value) = result?;
+                let ((_, position), value) = result?;
+                // filling other caches to avoid searching for them again
+                self.db_cache
+                    .word_prefix_position_docids
+                    .insert((word_prefix, position), Some(Cow::Borrowed(value)));
                 positions.push(position);
             }
             entry.insert(positions.clone());

@@ -1,10 +1,8 @@
-use std::collections::VecDeque;
-use std::iter::FromIterator;
-
 use heed::types::{Bytes, Unit};
 use heed::{RoPrefix, RoTxn};
 use roaring::RoaringBitmap;
 use rstar::RTree;
+use std::collections::VecDeque;
 
 use super::facet_string_values;
 use super::ranking_rules::{RankingRule, RankingRuleOutput, RankingRuleQueryTrait};

@@ -41,6 +39,21 @@ fn facet_number_values<'a>(
     Ok(iter)
 }
 
+#[derive(Debug, Clone, Copy)]
+pub struct Parameter {
+    // Define the strategy used by the geo sort
+    pub strategy: Strategy,
+    // Limit the number of docs in a single bucket to avoid unexpectedly large overhead
+    pub max_bucket_size: u64,
+    // Considering the errors of GPS and geographical calculations, distances less than distance_error_margin will be treated as equal
+    pub distance_error_margin: f64,
+}
+
+impl Default for Parameter {
+    fn default() -> Self {
+        Self { strategy: Strategy::default(), max_bucket_size: 1000, distance_error_margin: 1.0 }
+    }
+}
 /// Define the strategy used by the geo sort.
 /// The parameter represents the cache size, and, in the case of the Dynamic strategy,
 /// the point where we move from using the iterative strategy to the rtree.
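The Parameter struct added here bundles the geo sort strategy with the two bucketing knobs and gives them defaults (1000 documents per bucket, a 1.0 distance margin). A sketch of how a caller could override a single knob; the types below are stand-ins mirroring the diff (Strategy is reduced to a unit enum, and the 500 value is arbitrary):

// Stand-in mirroring the struct added in the diff; not the real milli definitions.
#[derive(Debug, Clone, Copy, Default)]
enum Strategy {
    #[default]
    Dynamic,
}

#[derive(Debug, Clone, Copy)]
struct Parameter {
    strategy: Strategy,
    max_bucket_size: u64,
    distance_error_margin: f64,
}

impl Default for Parameter {
    fn default() -> Self {
        Self { strategy: Strategy::default(), max_bucket_size: 1000, distance_error_margin: 1.0 }
    }
}

fn main() {
    // Override only the bucket cap, keep the other defaults
    // (the margin is assumed to use the same unit as the computed distances).
    let geo_param = Parameter { max_bucket_size: 500, ..Parameter::default() };
    println!("{geo_param:?}");
}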
@@ -84,15 +97,21 @@ pub struct GeoSort<Q: RankingRuleQueryTrait> {
 
     cached_sorted_docids: VecDeque<(u32, [f64; 2])>,
     geo_candidates: RoaringBitmap,
+
+    // Limit the number of docs in a single bucket to avoid unexpectedly large overhead
+    max_bucket_size: u64,
+    // Considering the errors of GPS and geographical calculations, distances less than distance_error_margin will be treated as equal
+    distance_error_margin: f64,
 }
 
 impl<Q: RankingRuleQueryTrait> GeoSort<Q> {
     pub fn new(
-        strategy: Strategy,
+        parameter: Parameter,
         geo_faceted_docids: RoaringBitmap,
         point: [f64; 2],
         ascending: bool,
     ) -> Result<Self> {
+        let Parameter { strategy, max_bucket_size, distance_error_margin } = parameter;
         Ok(Self {
             query: None,
             strategy,

@@ -102,6 +121,8 @@ impl<Q: RankingRuleQueryTrait> GeoSort<Q> {
             field_ids: None,
             rtree: None,
             cached_sorted_docids: VecDeque::new(),
+            max_bucket_size,
+            distance_error_margin,
         })
     }
 

@@ -240,12 +261,12 @@ impl<'ctx, Q: RankingRuleQueryTrait> RankingRule<'ctx, Q> for GeoSort<Q> {
     fn next_bucket(
         &mut self,
         ctx: &mut SearchContext<'ctx>,
-        logger: &mut dyn SearchLogger<Q>,
+        _logger: &mut dyn SearchLogger<Q>,
         universe: &RoaringBitmap,
     ) -> Result<Option<RankingRuleOutput<Q>>> {
         let query = self.query.as_ref().unwrap().clone();
 
-        let geo_candidates = &self.geo_candidates & universe;
+        let mut geo_candidates = &self.geo_candidates & universe;
 
         if geo_candidates.is_empty() {
             return Ok(Some(RankingRuleOutput {

@@ -267,24 +288,102 @@ impl<'ctx, Q: RankingRuleQueryTrait> RankingRule<'ctx, Q> for GeoSort<Q> {
                 cache.pop_back()
             }
         };
-        while let Some((id, point)) = next(&mut self.cached_sorted_docids) {
-            if geo_candidates.contains(id) {
-                return Ok(Some(RankingRuleOutput {
-                    query,
-                    candidates: RoaringBitmap::from_iter([id]),
-                    score: ScoreDetails::GeoSort(score_details::GeoSort {
-                        target_point: self.point,
-                        ascending: self.ascending,
-                        value: Some(point),
-                    }),
-                }));
+        let put_back = |cache: &mut VecDeque<_>, x: _| {
+            if ascending {
+                cache.push_front(x)
+            } else {
+                cache.push_back(x)
+            }
+        };
+
+        let mut current_bucket = RoaringBitmap::new();
+        // current_distance stores the first point and distance in current bucket
+        let mut current_distance: Option<([f64; 2], f64)> = None;
+        loop {
+            // The loop will only exit when we have found all points with equal distance or have exhausted the candidates.
+            if let Some((id, point)) = next(&mut self.cached_sorted_docids) {
+                if geo_candidates.contains(id) {
+                    let distance = distance_between_two_points(&self.point, &point);
+                    if let Some((point0, bucket_distance)) = current_distance.as_ref() {
+                        if (bucket_distance - distance).abs() > self.distance_error_margin {
+                            // different distance, point belongs to next bucket
+                            put_back(&mut self.cached_sorted_docids, (id, point));
+                            return Ok(Some(RankingRuleOutput {
+                                query,
+                                candidates: current_bucket,
+                                score: ScoreDetails::GeoSort(score_details::GeoSort {
+                                    target_point: self.point,
+                                    ascending: self.ascending,
+                                    value: Some(point0.to_owned()),
+                                }),
+                            }));
+                        } else {
+                            // same distance, point belongs to current bucket
+                            current_bucket.insert(id);
+                            // remove from cadidates to prevent it from being added to the cache again
+                            geo_candidates.remove(id);
+                            // current bucket size reaches limit, force return
+                            if current_bucket.len() == self.max_bucket_size {
+                                return Ok(Some(RankingRuleOutput {
+                                    query,
+                                    candidates: current_bucket,
+                                    score: ScoreDetails::GeoSort(score_details::GeoSort {
+                                        target_point: self.point,
+                                        ascending: self.ascending,
+                                        value: Some(point0.to_owned()),
+                                    }),
+                                }));
+                            }
+                        }
+                    } else {
+                        // first doc in current bucket
+                        current_distance = Some((point, distance));
+                        current_bucket.insert(id);
+                        geo_candidates.remove(id);
+                        // current bucket size reaches limit, force return
+                        if current_bucket.len() == self.max_bucket_size {
+                            return Ok(Some(RankingRuleOutput {
+                                query,
+                                candidates: current_bucket,
+                                score: ScoreDetails::GeoSort(score_details::GeoSort {
+                                    target_point: self.point,
+                                    ascending: self.ascending,
+                                    value: Some(point.to_owned()),
+                                }),
+                            }));
+                        }
+                    }
+                }
+            } else {
+                // cache exhausted, we need to refill it
+                self.fill_buffer(ctx, &geo_candidates)?;
+
+                if self.cached_sorted_docids.is_empty() {
+                    // candidates exhausted, exit
+                    if let Some((point0, _)) = current_distance.as_ref() {
+                        return Ok(Some(RankingRuleOutput {
+                            query,
+                            candidates: current_bucket,
+                            score: ScoreDetails::GeoSort(score_details::GeoSort {
+                                target_point: self.point,
+                                ascending: self.ascending,
+                                value: Some(point0.to_owned()),
+                            }),
+                        }));
+                    } else {
+                        return Ok(Some(RankingRuleOutput {
+                            query,
+                            candidates: universe.clone(),
+                            score: ScoreDetails::GeoSort(score_details::GeoSort {
+                                target_point: self.point,
+                                ascending: self.ascending,
+                                value: None,
+                            }),
+                        }));
+                    }
+                }
+            }
             }
         }
-
-        // if we got out of this loop it means we've exhausted our cache.
-        // we need to refill it and run the function again.
-        self.fill_buffer(ctx, &geo_candidates)?;
-        self.next_bucket(ctx, logger, universe)
     }
 
     #[tracing::instrument(level = "trace", skip_all, target = "search::geo_sort")]
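The rewritten next_bucket now returns one bucket of documents whose distances to the target point are equal within distance_error_margin, capped at max_bucket_size, instead of a single document at a time. The grouping idea in isolation, over already-sorted pairs of id and distance (a sketch with invented names, not the ranking-rule code itself):

/// Groups pre-sorted (id, distance) pairs into buckets of "equal" distance.
fn bucket_by_distance(
    sorted: &[(u32, f64)],
    distance_error_margin: f64,
    max_bucket_size: usize,
) -> Vec<Vec<u32>> {
    let mut buckets: Vec<Vec<u32>> = Vec::new();
    let mut bucket_distance: Option<f64> = None;

    for &(id, distance) in sorted {
        // A document stays in the current bucket only if its distance is within the
        // margin of the bucket's first distance and the bucket hasn't hit the cap.
        let fits_current_bucket = match (bucket_distance, buckets.last()) {
            (Some(first), Some(bucket)) => {
                (first - distance).abs() <= distance_error_margin
                    && bucket.len() < max_bucket_size
            }
            _ => false,
        };
        if fits_current_bucket {
            buckets.last_mut().unwrap().push(id);
        } else {
            bucket_distance = Some(distance);
            buckets.push(vec![id]);
        }
    }
    buckets
}

fn main() {
    let sorted: &[(u32, f64)] = &[(1, 10.0), (2, 10.4), (3, 10.6), (4, 25.0)];
    // With a 0.5 margin, documents 1 and 2 share a bucket; 3 starts the next one.
    let buckets = bucket_by_distance(sorted, 0.5, 1000);
    assert_eq!(buckets, vec![vec![1, 2], vec![3], vec![4]]);
}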
@@ -72,7 +72,7 @@ pub fn find_best_match_interval(matches: &[Match], crop_size: usize) -> [&Match; 2] {
             let interval_score = get_interval_score(&matches[interval_first..=interval_last]);
             let is_interval_score_better = &best_interval
                 .as_ref()
-                .map_or(true, |MatchIntervalWithScore { score, .. }| interval_score > *score);
+                .is_none_or(|MatchIntervalWithScore { score, .. }| interval_score > *score);
 
             if *is_interval_score_better {
                 best_interval = Some(MatchIntervalWithScore {

@@ -8,6 +8,7 @@ use std::cmp::{max, min};
 
 use charabia::{Language, SeparatorKind, Token, Tokenizer};
 use either::Either;
+use itertools::Itertools;
 pub use matching_words::MatchingWords;
 use matching_words::{MatchType, PartialMatch};
 use r#match::{Match, MatchPosition};

@@ -122,7 +123,7 @@ pub struct Matcher<'t, 'tokenizer, 'b, 'lang> {
     matches: Option<(Vec<Token<'t>>, Vec<Match>)>,
 }
 
-impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
+impl<'t> Matcher<'t, '_, '_, '_> {
     /// Iterates over tokens and save any of them that matches the query.
     fn compute_matches(&mut self) -> &mut Self {
         /// some words are counted as matches only if they are close together and in the good order,

@@ -229,8 +230,7 @@ impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
                 .iter()
                 .map(|m| MatchBounds {
                     start: tokens[m.get_first_token_pos()].byte_start,
-                    // TODO: Why is this in chars, while start is in bytes?
-                    length: m.char_count,
+                    length: self.calc_byte_length(tokens, m),
                     indices: if array_indices.is_empty() {
                         None
                     } else {

@@ -241,6 +241,18 @@ impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
         }
     }
 
+    fn calc_byte_length(&self, tokens: &[Token<'t>], m: &Match) -> usize {
+        (m.get_first_token_pos()..=m.get_last_token_pos())
+            .flat_map(|i| match &tokens[i].char_map {
+                Some(char_map) => {
+                    char_map.iter().map(|(original, _)| *original as usize).collect_vec()
+                }
+                None => tokens[i].lemma().chars().map(|c| c.len_utf8()).collect_vec(),
+            })
+            .take(m.char_count)
+            .sum()
+    }
+
     /// Returns the bounds in byte index of the crop window.
     fn crop_bounds(&self, tokens: &[Token<'_>], matches: &[Match], crop_size: usize) -> [usize; 2] {
         let (
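The new calc_byte_length converts a match's char_count into a byte length so that start and length in MatchBounds use the same unit. Stripped of the Token and char_map handling, the underlying idea is simply to sum the UTF-8 width of the first char_count characters (plain Rust, unrelated to the code above):

fn main() {
    let text = "héllo wörld";
    let char_count = 5; // a match covering the first five characters, "héllo"

    // Summing len_utf8() over the first char_count chars gives the byte length
    // of that prefix, which is what a byte-indexed highlight range needs.
    let byte_length: usize = text.chars().take(char_count).map(char::len_utf8).sum();

    assert_eq!(byte_length, "héllo".len()); // 6 bytes, since 'é' takes 2 bytes in UTF-8
}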
@@ -501,7 +513,7 @@ mod tests {
                 universe,
                 &None,
                 &None,
-                crate::search::new::GeoSortStrategy::default(),
+                crate::search::new::GeoSortParameter::default(),
                 0,
                 100,
                 Some(10),

@@ -45,6 +45,7 @@ use sort::Sort;
 
 use self::distinct::facet_string_values;
 use self::geo_sort::GeoSort;
+pub use self::geo_sort::Parameter as GeoSortParameter;
 pub use self::geo_sort::Strategy as GeoSortStrategy;
 use self::graph_based_ranking_rule::Words;
 use self::interner::Interned;

@@ -274,7 +275,7 @@ fn resolve_negative_phrases(
 fn get_ranking_rules_for_placeholder_search<'ctx>(
     ctx: &SearchContext<'ctx>,
     sort_criteria: &Option<Vec<AscDesc>>,
-    geo_strategy: geo_sort::Strategy,
+    geo_param: geo_sort::Parameter,
 ) -> Result<Vec<BoxRankingRule<'ctx, PlaceholderQuery>>> {
     let mut sort = false;
     let mut sorted_fields = HashSet::new();

@@ -299,7 +300,7 @@ fn get_ranking_rules_for_placeholder_search<'ctx>(
                 &mut ranking_rules,
                 &mut sorted_fields,
                 &mut geo_sorted,
-                geo_strategy,
+                geo_param,
             )?;
             sort = true;
         }

@@ -326,7 +327,7 @@ fn get_ranking_rules_for_placeholder_search<'ctx>(
 fn get_ranking_rules_for_vector<'ctx>(
     ctx: &SearchContext<'ctx>,
     sort_criteria: &Option<Vec<AscDesc>>,
-    geo_strategy: geo_sort::Strategy,
+    geo_param: geo_sort::Parameter,
     limit_plus_offset: usize,
     target: &[f32],
     embedder_name: &str,

@@ -375,7 +376,7 @@ fn get_ranking_rules_for_vector<'ctx>(
                 &mut ranking_rules,
                 &mut sorted_fields,
                 &mut geo_sorted,
-                geo_strategy,
+                geo_param,
             )?;
             sort = true;
         }

@@ -403,7 +404,7 @@ fn get_ranking_rules_for_vector<'ctx>(
 fn get_ranking_rules_for_query_graph_search<'ctx>(
     ctx: &SearchContext<'ctx>,
     sort_criteria: &Option<Vec<AscDesc>>,
-    geo_strategy: geo_sort::Strategy,
+    geo_param: geo_sort::Parameter,
     terms_matching_strategy: TermsMatchingStrategy,
 ) -> Result<Vec<BoxRankingRule<'ctx, QueryGraph>>> {
     // query graph search

@@ -477,7 +478,7 @@ fn get_ranking_rules_for_query_graph_search<'ctx>(
                 &mut ranking_rules,
                 &mut sorted_fields,
                 &mut geo_sorted,
-                geo_strategy,
+                geo_param,
             )?;
             sort = true;
         }

@@ -514,7 +515,7 @@ fn resolve_sort_criteria<'ctx, Query: RankingRuleQueryTrait>(
     ranking_rules: &mut Vec<BoxRankingRule<'ctx, Query>>,
     sorted_fields: &mut HashSet<String>,
     geo_sorted: &mut bool,
-    geo_strategy: geo_sort::Strategy,
+    geo_param: geo_sort::Parameter,
 ) -> Result<()> {
     let sort_criteria = sort_criteria.clone().unwrap_or_default();
     ranking_rules.reserve(sort_criteria.len());

@@ -540,7 +541,7 @@ fn resolve_sort_criteria<'ctx, Query: RankingRuleQueryTrait>(
                 }
                 let geo_faceted_docids = ctx.index.geo_faceted_documents_ids(ctx.txn)?;
                 ranking_rules.push(Box::new(GeoSort::new(
-                    geo_strategy,
+                    geo_param,
                     geo_faceted_docids,
                     point,
                     true,

@@ -552,7 +553,7 @@ fn resolve_sort_criteria<'ctx, Query: RankingRuleQueryTrait>(
                 }
                 let geo_faceted_docids = ctx.index.geo_faceted_documents_ids(ctx.txn)?;
                 ranking_rules.push(Box::new(GeoSort::new(
-                    geo_strategy,
+                    geo_param,
                     geo_faceted_docids,
                     point,
                     false,

@@ -584,7 +585,7 @@ pub fn execute_vector_search(
     universe: RoaringBitmap,
     sort_criteria: &Option<Vec<AscDesc>>,
     distinct: &Option<String>,
-    geo_strategy: geo_sort::Strategy,
+    geo_param: geo_sort::Parameter,
     from: usize,
     length: usize,
     embedder_name: &str,

@@ -600,7 +601,7 @@ pub fn execute_vector_search(
     let ranking_rules = get_ranking_rules_for_vector(
         ctx,
         sort_criteria,
-        geo_strategy,
+        geo_param,
         from + length,
         vector,
         embedder_name,

@@ -647,7 +648,7 @@ pub fn execute_search(
     mut universe: RoaringBitmap,
     sort_criteria: &Option<Vec<AscDesc>>,
     distinct: &Option<String>,
-    geo_strategy: geo_sort::Strategy,
+    geo_param: geo_sort::Parameter,
     from: usize,
     length: usize,
     words_limit: Option<usize>,

@@ -761,7 +762,7 @@ pub fn execute_search(
         let ranking_rules = get_ranking_rules_for_query_graph_search(
             ctx,
             sort_criteria,
-            geo_strategy,
+            geo_param,
             terms_matching_strategy,
         )?;
 

@@ -783,7 +784,7 @@ pub fn execute_search(
         )?
     } else {
         let ranking_rules =
-            get_ranking_rules_for_placeholder_search(ctx, sort_criteria, geo_strategy)?;
+            get_ranking_rules_for_placeholder_search(ctx, sort_criteria, geo_param)?;
         bucket_sort(
             ctx,
             ranking_rules,

@@ -327,7 +327,7 @@ impl QueryGraph {
         let mut peekable = term_with_frequency.into_iter().peekable();
         while let Some((idx, frequency)) = peekable.next() {
             term_weight.insert(idx, weight);
-            if peekable.peek().map_or(false, |(_, f)| frequency != *f) {
+            if peekable.peek().is_some_and(|(_, f)| frequency != *f) {
                 weight += 1;
             }
         }

@@ -398,7 +398,7 @@ fn split_best_frequency(
         let right = ctx.word_interner.insert(right.to_owned());
 
         if let Some(frequency) = ctx.get_db_word_pair_proximity_docids_len(None, left, right, 1)? {
-            if best.map_or(true, |(old, _, _)| frequency > old) {
+            if best.is_none_or(|(old, _, _)| frequency > old) {
                 best = Some((frequency, left, right));
             }
         }

@@ -203,7 +203,7 @@ pub fn number_of_typos_allowed<'ctx>(
     Ok(Box::new(move |word: &str| {
         if !authorize_typos
             || word.len() < min_len_one_typo as usize
-            || exact_words.as_ref().map_or(false, |fst| fst.contains(word))
+            || exact_words.as_ref().is_some_and(|fst| fst.contains(word))
         {
             0
         } else if word.len() < min_len_two_typos as usize {

@@ -17,7 +17,7 @@ use crate::Result;
 pub struct PhraseDocIdsCache {
     pub cache: FxHashMap<Interned<Phrase>, RoaringBitmap>,
 }
-impl<'ctx> SearchContext<'ctx> {
+impl SearchContext<'_> {
     /// Get the document ids associated with the given phrase
     pub fn get_phrase_docids(&mut self, phrase: Interned<Phrase>) -> Result<&RoaringBitmap> {
         if self.phrase_docids.cache.contains_key(&phrase) {

@@ -263,7 +263,7 @@ impl SmallBitmapInternal {
 
     pub fn contains(&self, x: u16) -> bool {
         let (set, x) = self.get_set_index(x);
-        set & 0b1 << x != 0
+        set & (0b1 << x) != 0
     }
 
     pub fn insert(&mut self, x: u16) {
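The contains change above is purely cosmetic: in Rust the shift operator binds tighter than the bitwise AND, which in turn binds tighter than the comparison, so set & 0b1 << x != 0 already parses as (set & (0b1 << x)) != 0. The parentheses document the intent rather than change behaviour. A quick check with made-up values:

fn main() {
    let set: u64 = 0b1010;
    let x: u16 = 3;

    let implicit = set & 0b1 << x != 0;
    let explicit = (set & (0b1 << x)) != 0;
    assert_eq!(implicit, explicit);
    assert!(explicit); // bit 3 is set in 0b1010
}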
@@ -381,7 +381,7 @@ pub enum SmallBitmapInternalIter<'b> {
     Tiny(u64),
     Small { cur: u64, next: &'b [u64], base: u16 },
 }
-impl<'b> Iterator for SmallBitmapInternalIter<'b> {
+impl Iterator for SmallBitmapInternalIter<'_> {
     type Item = u16;
 
     fn next(&mut self) -> Option<Self::Item> {
Some files were not shown because too many files have changed in this diff.