Compare commits

...

59 Commits

Author SHA1 Message Date
Clément Renault
aa87064a13 Fix some of the edition 2024 warnings 2025-04-01 12:21:33 +02:00
Clément Renault
2762d5a32a Bump crates versions 2025-04-01 11:38:28 +02:00
Clément Renault
a0bfcf8872 Make cargo fmt happy 2025-04-01 11:27:41 +02:00
Clément Renault
64477aac60 Box the large GeoError error variant 2025-04-01 11:26:34 +02:00
Clément Renault
4d90e3d2ec Make Cargo and Clippy happy 2025-04-01 11:26:34 +02:00
Kerollmops
249da5846c Bump version in Dockerfile 2025-03-31 16:46:12 +02:00
Kerollmops
ee15d4fe77 Bump version in the CIs 2025-03-31 16:45:08 +02:00
Kerollmops
f0f6c3000f Bump version in the rust-toolchain TOML 2025-03-31 16:43:36 +02:00
Tamo
5607802fe1 Merge pull request #5449 from vuthanhtung2412/fix-dim-mismatch
Display more detailed error message instead of panic on embeddings dimension mismatch
2025-03-27 10:52:23 +00:00
Tamo
a8afd5dbcb fix warn and show what meilisearch understood of the vectors in the cursed test 2025-03-27 11:07:01 +01:00
Many the fish
55f620a986 Merge pull request #5425 from CodeMan62/enhance-filterable-error-messages
Enhance filterable error messages
2025-03-27 09:18:37 +00:00
Clément Renault
be6abb952d Merge pull request #5466 from meilisearch/update-charabia-v0.9.3
Update charabia v0.9.3
2025-03-26 18:23:31 +00:00
ManyTheFish
2f07afa97e Update Charabia v0.9.3 2025-03-26 17:43:19 +01:00
vuthanhtung2412
bf3a29b60d Document problematic case in test and acknowledge PR comment 2025-03-26 12:57:25 +01:00
CodeMan62
3acf036526 fix: improve error messages for filterable attributes and fix formatting 2025-03-25 21:44:39 +05:30
Many the fish
eefefc482b Merge pull request #5446 from shaokeyibb/main
Fix _matchesPosition length calculate
2025-03-25 14:16:38 +00:00
vuthanhtung2412
43c8a206b4 detail comments 2025-03-25 13:07:17 +01:00
vuthanhtung2412
a8c407fa36 fix failling tests 2025-03-25 13:06:11 +01:00
vuthanhtung2412
18bc56f1fa update cargo insta 2025-03-25 12:54:49 +01:00
vuthanhtung2412
38b3e03dde add embedding with dimension mismatch test case 2025-03-25 12:51:36 +01:00
vuthanhtung2412
6b1c262b74 fix all tests 2025-03-25 12:43:15 +01:00
Clément Renault
0f654e45c9 Merge pull request #5458 from meilisearch/update-again-ph-link
Fix the PH link on the README
2025-03-25 11:27:31 +00:00
vuthanhtung2412
d71c6f3483 allow multiple embedding in per document per embedder to pass 2025-03-25 12:04:25 +01:00
Kerollmops
8b4166410c Fix the PH link on the README 2025-03-25 11:45:47 +01:00
HikariLan
9d3037aa1a Fix clippy error 2025-03-25 18:12:36 +08:00
Tamo
5414887bff Merge pull request #5455 from meilisearch/update-readme-ph-link
Fix the Product Hunt link
2025-03-25 09:44:09 +00:00
Kerollmops
03a0550b63 Fix the Product Hunt link to link to meilisearch-ai 2025-03-25 10:00:24 +01:00
HikariLan
2800e42243 Separate calc_byte_length function 2025-03-25 00:47:17 +08:00
Many the fish
5759afac41 Merge pull request #5424 from shu-kitamura/split-tasks-test
Split unit test in tasks.rs
2025-03-24 09:55:50 +00:00
vuthanhtung2412
868c902935 fix meilisearch integration vector tests 2025-03-24 00:24:50 +01:00
vuthanhtung2412
e019ad7692 Display more detailed error message instead of panic 2025-03-21 15:41:31 +01:00
CodeMan62
1f67f373d1 fixed all the tests failing will "cargo insta test --accept" 2025-03-20 22:51:56 +05:30
Clément Renault
2c0bd35923 Merge pull request #5447 from meilisearch/clean-up-bors
Remove bors references from the repository
2025-03-20 16:11:11 +00:00
Kerollmops
b3aaa64de5 Remove the bors file 2025-03-20 16:28:08 +01:00
Kerollmops
7b3072ad28 Remove bors references from the repository 2025-03-20 15:57:05 +01:00
Louis Dureuil
db26c1e5bf Merge pull request #5395 from meilisearch/update-process-for-dumpless-upgrade
Update process for dumpless upgrade
2025-03-20 13:42:50 +00:00
CodeMan62
9aee12c906 fixed the failing tests from snapshots 2025-03-20 17:55:12 +05:30
HikariLan
debd2b21b8 Merge branch 'meilisearch:main' into main 2025-03-20 20:10:00 +08:00
HikariLan
39aca661dd Make _matchesPosition length byte based instead of char based 2025-03-20 20:02:51 +08:00
Tamo
5b51e8a083 simplify the sprint issue to only tell you to add a label on your PR 2025-03-20 12:41:34 +01:00
Tamo
3928fb36b3 Introduce a second github action that post the right message when we declare there are db changes 2025-03-20 12:41:34 +01:00
Tamo
2ddc1d2258 update the CI to enforce the db change label on PR 2025-03-20 12:41:34 +01:00
Tamo
7c267a8a0e update the issue template for the sprint issue 2025-03-20 12:41:34 +01:00
Clément Renault
d39d915a7e Merge pull request #5445 from meilisearch/support-merge-grouping
Make the CI work with merge queue grouping
2025-03-20 12:30:52 +01:00
Kerollmops
3160ddf9df Make the CI work with merge queue grouping 2025-03-20 12:29:08 +01:00
Clément Renault
d286e63f15 Merge pull request #5444 from meilisearch/setup-ci-with-rulesets
Setup the Milestone CI to update the Ruleset
2025-03-20 12:12:57 +01:00
Kerollmops
9ee6254eec Setup the Milestone CI to update the Ruleset 2025-03-20 11:28:03 +01:00
CodeMan62
e2c824a7cd fixed all test fails in the run 2025-03-20 15:21:47 +05:30
CodeMan62
0dd65caffe test: update test snapshots to match new error message format 2025-03-20 10:59:21 +05:30
CodeMan62
4397b7d170 chore: revert Cargo.lock changes 2025-03-20 10:54:14 +05:30
CodeMan62
15db203b7d refactor: update error message format for filterable attributes 2025-03-20 00:08:37 +05:30
CodeMan62
041f635214 Fix: Add #[allow(dead_code)] to format_invalid_filter_distribution function 2025-03-19 20:13:28 +05:30
shu-kitamura
537bf27e7c Update crates/meilisearch/src/routes/tasks_test.rs
Co-authored-by: Many the fish <many@meilisearch.com>
2025-03-19 19:11:04 +09:00
Clément Renault
cf31a65a88 Merge pull request #5431 from meilisearch/add-ph-readme-banner
Display the ProductHunt banner on the README
2025-03-18 11:26:45 +01:00
Kerollmops
0f7d71041f Display the ProductHunt banner on the README 2025-03-18 11:21:07 +01:00
CodeMan62
91d221ebe7 revert: Remove unintended Cargo.lock changes 2025-03-17 22:13:59 +05:30
CodeMan62
9162e8ba04 Enhance error messages for filterable attributes and improve error handling 2025-03-17 22:04:18 +05:30
shu-kitamura
2118cc092e rm db.snapshot 2025-03-17 23:04:13 +09:00
shu-kitamura
c7564d500f Split unit test in tasks.rs 2025-03-17 22:55:23 +09:00
168 changed files with 2817 additions and 2145 deletions

View File

@@ -22,6 +22,10 @@ Related product discussion:
<!---If necessary, create a list with technical/product steps-->
### Are you modifying a database?
- [ ] If not, add the `no db change` label to your PR, and you're good to merge.
- [ ] If yes, add the `db change` label to your PR. You'll receive a message explaining you what to do.
### Reminders when modifying the API
- [ ] Update the openAPI file with utoipa:

View File

@@ -1,28 +1,27 @@
name: Bench (manual)
on:
workflow_dispatch:
inputs:
workload:
description: 'The path to the workloads to execute (workloads/...)'
required: true
default: 'workloads/movies.json'
workflow_dispatch:
inputs:
workload:
description: "The path to the workloads to execute (workloads/...)"
required: true
default: "workloads/movies.json"
env:
WORKLOAD_NAME: ${{ github.event.inputs.workload }}
WORKLOAD_NAME: ${{ github.event.inputs.workload }}
jobs:
benchmarks:
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
with:
profile: minimal
- name: Run benchmarks - workload ${WORKLOAD_NAME} - branch ${{ github.ref }} - commit ${{ github.sha }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Manual [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- ${WORKLOAD_NAME}
benchmarks:
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
- name: Run benchmarks - workload ${WORKLOAD_NAME} - branch ${{ github.ref }} - commit ${{ github.sha }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Manual [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- ${WORKLOAD_NAME}

View File

@@ -1,82 +1,82 @@
name: Bench (PR)
on:
issue_comment:
types: [created]
issue_comment:
types: [created]
permissions:
issues: write
issues: write
env:
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
jobs:
run-benchmarks-on-comment:
if: startsWith(github.event.comment.body, '/bench')
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- name: Check permissions
id: permission
env:
PR_AUTHOR: ${{github.event.issue.user.login }}
COMMENT_AUTHOR: ${{github.event.comment.user.login }}
REPOSITORY: ${{github.repository}}
PR_ID: ${{github.event.issue.number}}
run: |
PR_REPOSITORY=$(gh api /repos/"$REPOSITORY"/pulls/"$PR_ID" --jq .head.repo.full_name)
if $(gh api /repos/"$REPOSITORY"/collaborators/"$PR_AUTHOR"/permission --jq .user.permissions.push)
then
echo "::notice title=Authentication success::PR author authenticated"
else
echo "::error title=Authentication error::PR author doesn't have push permission on this repository"
exit 1
fi
if $(gh api /repos/"$REPOSITORY"/collaborators/"$COMMENT_AUTHOR"/permission --jq .user.permissions.push)
then
echo "::notice title=Authentication success::Comment author authenticated"
else
echo "::error title=Authentication error::Comment author doesn't have push permission on this repository"
exit 1
fi
if [ "$PR_REPOSITORY" = "$REPOSITORY" ]
then
echo "::notice title=Authentication success::PR started from main repository"
else
echo "::error title=Authentication error::PR started from a fork"
exit 1
fi
run-benchmarks-on-comment:
if: startsWith(github.event.comment.body, '/bench')
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- name: Check permissions
id: permission
env:
PR_AUTHOR: ${{github.event.issue.user.login }}
COMMENT_AUTHOR: ${{github.event.comment.user.login }}
REPOSITORY: ${{github.repository}}
PR_ID: ${{github.event.issue.number}}
run: |
PR_REPOSITORY=$(gh api /repos/"$REPOSITORY"/pulls/"$PR_ID" --jq .head.repo.full_name)
if $(gh api /repos/"$REPOSITORY"/collaborators/"$PR_AUTHOR"/permission --jq .user.permissions.push)
then
echo "::notice title=Authentication success::PR author authenticated"
else
echo "::error title=Authentication error::PR author doesn't have push permission on this repository"
exit 1
fi
if $(gh api /repos/"$REPOSITORY"/collaborators/"$COMMENT_AUTHOR"/permission --jq .user.permissions.push)
then
echo "::notice title=Authentication success::Comment author authenticated"
else
echo "::error title=Authentication error::Comment author doesn't have push permission on this repository"
exit 1
fi
if [ "$PR_REPOSITORY" = "$REPOSITORY" ]
then
echo "::notice title=Authentication success::PR started from main repository"
else
echo "::error title=Authentication error::PR started from a fork"
exit 1
fi
- name: Check for Command
id: command
uses: xt0rted/slash-command-action@v2
with:
command: bench
reaction-type: "rocket"
repo-token: ${{ env.GH_TOKEN }}
- name: Check for Command
id: command
uses: xt0rted/slash-command-action@v2
with:
command: bench
reaction-type: "rocket"
repo-token: ${{ env.GH_TOKEN }}
- uses: xt0rted/pull-request-comment-branch@v3
id: comment-branch
with:
repo_token: ${{ env.GH_TOKEN }}
- uses: xt0rted/pull-request-comment-branch@v3
id: comment-branch
with:
repo_token: ${{ env.GH_TOKEN }}
- uses: actions/checkout@v3
if: success()
with:
fetch-depth: 0 # fetch full history to be able to get main commit sha
ref: ${{ steps.comment-branch.outputs.head_ref }}
- uses: actions/checkout@v3
if: success()
with:
fetch-depth: 0 # fetch full history to be able to get main commit sha
ref: ${{ steps.comment-branch.outputs.head_ref }}
- uses: dtolnay/rust-toolchain@1.81
with:
profile: minimal
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
- name: Run benchmarks on PR ${{ github.event.issue.id }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" \
--dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" \
--reason "[Comment](${{ github.event.comment.html_url }}) on [#${{ github.event.issue.number }}](${{ github.event.issue.html_url }})" \
-- ${{ steps.command.outputs.command-arguments }} > benchlinks.txt
- name: Run benchmarks on PR ${{ github.event.issue.id }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" \
--dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" \
--reason "[Comment](${{ github.event.comment.html_url }}) on [#${{ github.event.issue.number }}](${{ github.event.issue.html_url }})" \
-- ${{ steps.command.outputs.command-arguments }} > benchlinks.txt
- name: Send comment in PR
run: |
gh pr comment ${{github.event.issue.number}} --body-file benchlinks.txt
- name: Send comment in PR
run: |
gh pr comment ${{github.event.issue.number}} --body-file benchlinks.txt

View File

@@ -1,23 +1,22 @@
name: Indexing bench (push)
on:
push:
branches:
- main
push:
branches:
- main
jobs:
benchmarks:
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
with:
profile: minimal
# Run benchmarks
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Push on `main` [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- workloads/*.json
benchmarks:
name: Run and upload benchmarks
runs-on: benchmarks
timeout-minutes: 180 # 3h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
# Run benchmarks
- name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
run: |
cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Push on `main` [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- workloads/*.json

View File

@@ -4,9 +4,9 @@ on:
workflow_dispatch:
inputs:
dataset_name:
description: 'The name of the dataset used to benchmark (search_songs, search_wiki, search_geo or indexing)'
description: "The name of the dataset used to benchmark (search_songs, search_wiki, search_geo or indexing)"
required: false
default: 'search_songs'
default: "search_songs"
env:
BENCH_NAME: ${{ github.event.inputs.dataset_name }}
@@ -18,7 +18,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
@@ -67,7 +67,7 @@ jobs:
out_dir: critcmp_results
# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'

View File

@@ -44,7 +44,7 @@ jobs:
exit 1
fi
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

View File

@@ -16,7 +16,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
@@ -69,7 +69,7 @@ jobs:
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug
# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'

View File

@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
@@ -68,7 +68,7 @@ jobs:
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug
# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'

View File

@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
@@ -68,7 +68,7 @@ jobs:
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug
# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'

View File

@@ -15,7 +15,7 @@ jobs:
runs-on: benchmarks
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
@@ -68,7 +68,7 @@ jobs:
run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug
# Helper
- name: 'README: compare with another benchmark'
- name: "README: compare with another benchmark"
run: |
echo "${{ steps.file.outputs.basename }}.json has just been pushed."
echo 'How to compare this benchmark with another one?'

View File

@@ -0,0 +1,57 @@
name: Comment when db change labels are added
on:
pull_request:
types: [labeled]
env:
MESSAGE: |
### Hello, I'm a bot 🤖
You are receiving this message because you declared that this PR make changes to the Meilisearch database.
Depending on the nature of the change, additional actions might be required on your part. The following sections detail the additional actions depending on the nature of the change, please copy the relevant section in the description of your PR, and make sure to perform the required actions.
Thank you for contributing to Meilisearch :heart:
## This PR makes forward-compatible changes
*Forward-compatible changes are changes to the database such that databases created in an older version of Meilisearch are still valid in the new version of Meilisearch. They usually represent additive changes, like adding a new optional attribute or setting.*
- [ ] Detail the change to the DB format and why they are forward compatible
- [ ] Forward-compatibility: A database created before this PR and using the features touched by this PR was able to be opened by a Meilisearch produced by the code of this PR.
## This PR makes breaking changes
*Breaking changes are changes to the database such that databases created in an older version of Meilisearch need changes to remain valid in the new version of Meilisearch. This typically happens when the way to store the data changed (change of database, new required key, etc). This can also happen due to breaking changes in the API of an experimental feature. ⚠️ This kind of changes are more difficult to achieve safely, so proceed with caution and test dumpless upgrade right before merging the PR.*
- [ ] Detail the changes to the DB format,
- [ ] which are compatible, and why
- [ ] which are not compatible, why, and how they will be fixed up in the upgrade
- [ ] /!\ Ensure all the read operations still work!
- If the change happened in milli, you may need to check the version of the database before doing any read operation
- If the change happened in the index-scheduler, make sure the new code can immediately read the old database
- If the change happened in the meilisearch-auth database, reach out to the team; we don't know yet how to handle these changes
- [ ] Write the code to go from the old database to the new one
- If the change happened in milli, the upgrade function should be written and called [here](https://github.com/meilisearch/meilisearch/blob/3fd86e8d76d7d468b0095d679adb09211ca3b6c0/crates/milli/src/update/upgrade/mod.rs#L24-L47)
- If the change happened in the index-scheduler, we've never done it yet, but the right place to do it should be [here](https://github.com/meilisearch/meilisearch/blob/3fd86e8d76d7d468b0095d679adb09211ca3b6c0/crates/index-scheduler/src/scheduler/process_upgrade/mod.rs#L13)
- [ ] Write an integration test [here](https://github.com/meilisearch/meilisearch/blob/main/crates/meilisearch/tests/upgrade/mod.rs) ensuring you can read the old database, upgrade to the new database, and read the new database as expected
jobs:
add-comment:
runs-on: ubuntu-latest
if: github.event.label.name == 'db change'
steps:
- name: Add comment
uses: actions/github-script@v6
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const message = process.env.MESSAGE;
github.rest.issues.createComment({
issue_number: context.issue.number,
owner: context.repo.owner,
repo: context.repo.repo,
body: message
})

.github/workflows/db-change-missing.yml vendored Normal file
View File

@@ -0,0 +1,28 @@
name: Check db change labels
on:
pull_request:
types: [opened, synchronize, reopened, labeled, unlabeled]
env:
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
jobs:
check-labels:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Check db change labels
id: check_labels
run: |
URL=/repos/meilisearch/meilisearch/pulls/${{ github.event.pull_request.number }}/labels
echo ${{ github.event.pull_request.number }}
echo $URL
LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/meilisearch/meilisearch/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then
echo "::error::Pull request must contain either the 'db change' or 'no db change' label."
exit 1
else
echo "The label is set"
fi

View File

@@ -17,7 +17,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Install cargo-flaky
run: cargo install cargo-flaky
- name: Run cargo flaky in the dumps

View File

@@ -12,7 +12,7 @@ jobs:
timeout-minutes: 4320 # 72h
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal

View File

@@ -5,6 +5,7 @@ name: Milestone's workflow
# For each Milestone created (not opened!), and if the release is NOT a patch release (only the patch changed)
# - the roadmap issue is created, see https://github.com/meilisearch/engine-team/blob/main/issue-templates/roadmap-issue.md
# - the changelog issue is created, see https://github.com/meilisearch/engine-team/blob/main/issue-templates/changelog-issue.md
# - update the ruleset to add the current release version to the list of allowed versions and be able to use the merge queue.
# For each Milestone closed
# - the `release_version` label is created
@@ -21,10 +22,9 @@ env:
GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
jobs:
# -----------------
# MILESTONE CREATED
# -----------------
# -----------------
# MILESTONE CREATED
# -----------------
get-release-version:
if: github.event.action == 'created'
@@ -148,9 +148,37 @@ jobs:
--body-file $ISSUE_TEMPLATE \
--milestone $MILESTONE_VERSION
# ----------------
# MILESTONE CLOSED
# ----------------
update-ruleset:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Install jq
run: |
sudo apt-get update
sudo apt-get install -y jq
- name: Update ruleset
env:
# gh api repos/meilisearch/meilisearch/rulesets --jq '.[] | {name: .name, id: .id}'
RULESET_ID: 4253297
BRANCH_NAME: ${{ github.event.inputs.branch_name }}
run: |
# Get current ruleset conditions
CONDITIONS=$(gh api repos/meilisearch/meilisearch/rulesets/$RULESET_ID --jq '{ conditions: .conditions }')
# Update the conditions by appending the milestone version
UPDATED_CONDITIONS=$(echo $CONDITIONS | jq '.conditions.ref_name.include += ["refs/heads/release-'$MILESTONE_VERSION'"]')
# Update the ruleset from stdin (-)
echo $UPDATED_CONDITIONS |
gh api repos/meilisearch/meilisearch/rulesets/$RULESET_ID \
--method PUT \
-H "Accept: application/vnd.github+json" \
-H "X-GitHub-Api-Version: 2022-11-28" \
--input -
# ----------------
# MILESTONE CLOSED
# ----------------
create-release-label:
if: github.event.action == 'closed'

View File

@@ -25,7 +25,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Install cargo-deb
run: cargo install cargo-deb
- uses: actions/checkout@v3

View File

@@ -45,7 +45,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Build
run: cargo build --release --locked
# No need to upload binaries for dry run (cron)
@@ -75,7 +75,7 @@ jobs:
asset_name: meilisearch-windows-amd64.exe
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Build
run: cargo build --release --locked
# No need to upload binaries for dry run (cron)
@@ -101,7 +101,7 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v3
- name: Installing Rust toolchain
uses: dtolnay/rust-toolchain@1.81
uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
target: ${{ matrix.target }}
@@ -148,7 +148,7 @@ jobs:
add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
apt-get update -y && apt-get install -y docker-ce
- name: Installing Rust toolchain
uses: dtolnay/rust-toolchain@1.81
uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
target: ${{ matrix.target }}

View File

@@ -6,11 +6,7 @@ on:
# Everyday at 5:00am
- cron: "0 5 * * *"
pull_request:
push:
# trying and staging branches are for Bors config
branches:
- trying
- staging
merge_group:
env:
CARGO_TERM_COLOR: always
@@ -31,7 +27,7 @@ jobs:
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- name: Setup test with Rust stable
uses: dtolnay/rust-toolchain@1.81
uses: dtolnay/rust-toolchain@1.85
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.7
- name: Run cargo check without any default features
@@ -56,7 +52,7 @@ jobs:
- uses: actions/checkout@v3
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.7
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Run cargo check without any default features
uses: actions-rs/cargo@v1
with:
@@ -81,7 +77,7 @@ jobs:
run: |
apt-get update
apt-get install --assume-yes build-essential curl
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Run cargo build with almost all features
run: |
cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"
@@ -133,7 +129,7 @@ jobs:
run: |
apt-get update
apt-get install --assume-yes build-essential curl
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Run cargo tree without default features and check lindera is not present
run: |
if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then
@@ -157,7 +153,7 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
- name: Cache dependencies
uses: Swatinem/rust-cache@v2.7.7
- name: Run tests in debug
@@ -171,7 +167,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
components: clippy
@@ -188,7 +184,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
toolchain: nightly-2024-07-09

View File

@@ -4,7 +4,7 @@ on:
workflow_dispatch:
inputs:
new_version:
description: 'The new version (vX.Y.Z)'
description: "The new version (vX.Y.Z)"
required: true
env:
@@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: dtolnay/rust-toolchain@1.81
- uses: dtolnay/rust-toolchain@1.85
with:
profile: minimal
- name: Install sd

View File

@@ -150,7 +150,7 @@ Some notes on GitHub PRs:
- The PR title should be accurate and descriptive of the changes.
- [Convert your PR as a draft](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/changing-the-stage-of-a-pull-request) if your changes are a work in progress: no one will review it until you pass your PR as ready for review.<br>
The draft PRs are recommended when you want to show that you are working on something and make your work visible.
- The branch related to the PR must be **up-to-date with `main`** before merging. Fortunately, this project uses [Bors](https://github.com/bors-ng/bors-ng) to automatically enforce this requirement without the PR author having to rebase manually.
- The branch related to the PR must be **up-to-date with `main`** before merging. Fortunately, this project uses [GitHub Merge Queues](https://github.blog/news-insights/product-news/github-merge-queue-is-generally-available/) to automatically enforce this requirement without the PR author having to rebase manually.
## Release Process (for internal team only)
@@ -158,8 +158,7 @@ Meilisearch tools follow the [Semantic Versioning Convention](https://semver.org
### Automation to rebase and Merge the PRs
This project integrates a bot that helps us manage pull requests merging.<br>
_[Read more about this](https://github.com/meilisearch/integration-guides/blob/main/resources/bors.md)._
This project uses GitHub Merge Queues that helps us manage pull requests merging.
### How to Publish a new Release

Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -30,7 +30,7 @@ authors = [
description = "Meilisearch HTTP server"
homepage = "https://meilisearch.com"
readme = "README.md"
edition = "2021"
edition = "2024"
license = "MIT"
[profile.release]

View File

@@ -1,5 +1,5 @@
# Compile
FROM rust:1.81.0-alpine3.20 AS compiler
FROM rust:1.85-alpine3.20 AS compiler
RUN apk add -q --no-cache build-base openssl-dev

View File

@@ -20,7 +20,13 @@
<p align="center">
<a href="https://deps.rs/repo/github/meilisearch/meilisearch"><img src="https://deps.rs/repo/github/meilisearch/meilisearch/status.svg" alt="Dependency status"></a>
<a href="https://github.com/meilisearch/meilisearch/blob/main/LICENSE"><img src="https://img.shields.io/badge/license-MIT-informational" alt="License"></a>
<a href="https://ms-bors.herokuapp.com/repositories/52"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
<a href="https://github.com/meilisearch/meilisearch/queue"><img alt="Merge Queues enabled" src="https://img.shields.io/badge/Merge_Queues-enabled-%2357cf60?logo=github"></a>
</p>
<p align="center" name="ph-banner">
<a href="https://www.producthunt.com/posts/meilisearch-ai">
<img src="assets/ph-banner.png" alt="Meilisearch AI-powered search general availability announcement on ProductHunt">
</a>
</p>
<p align="center">⚡ A lightning-fast search engine that fits effortlessly into your apps, websites, and workflow 🔍</p>

assets/ph-banner.png: new binary file, 578 KiB (not shown)

View File

@@ -1,10 +0,0 @@
status = [
'Tests on ubuntu-22.04',
'Tests on macos-13',
'Tests on windows-2022',
'Run Clippy',
'Run Rustfmt',
'Run tests in debug',
]
# 3 hours timeout
timeout-sec = 10800

View File

@@ -108,7 +108,7 @@ where
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);
impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: Display,
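
The hunk above (repeated for the other `StarOrVisitor` copies and for the `RankingRuleView` and `BoolOrInt` visitors later in this compare) is a clippy-era lifetime cleanup: when an `impl` header declares a lifetime that is only handed straight to the trait, it can be elided as `'_`. A minimal, self-contained sketch of the same pattern, assuming the `serde` crate (names otherwise hypothetical):

    use std::fmt::{self, Display};
    use std::marker::PhantomData;
    use std::str::FromStr;

    struct StarOrVisitor<T>(PhantomData<T>);

    // Before: impl<'de, T, FE> serde::de::Visitor<'de> for StarOrVisitor<T>
    // After: the named `'de` is never used by the body, so it is elided.
    impl<T, FE> serde::de::Visitor<'_> for StarOrVisitor<T>
    where
        T: FromStr<Err = FE>,
        FE: Display,
    {
        type Value = T;

        fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
            f.write_str("a string or `*`")
        }
    }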

View File

@@ -99,7 +99,7 @@ impl Task {
/// Return true when a task is finished.
/// A task is finished when its last state is either `Succeeded` or `Failed`.
pub fn is_finished(&self) -> bool {
self.events.last().map_or(false, |event| {
self.events.last().is_some_and(|event| {
matches!(event, TaskEvent::Succeded { .. } | TaskEvent::Failed { .. })
})
}
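
This hunk (and matching ones in the queue, the autobatcher, and the facet-search route below) swaps `Option::map_or(false, ...)` for `Option::is_some_and`, stable since Rust 1.70. The two are equivalent; the latter reads as intent rather than as a fold with a default. A small sketch of the equivalence (names hypothetical):

    fn last_event_succeeded(events: &[&str]) -> bool {
        // Before: `map_or(false, ...)` folds with an explicit default.
        let before = events.last().map_or(false, |e| *e == "Succeeded");
        // After: `is_some_and(...)` has the same truth table, clearer intent.
        let after = events.last().is_some_and(|e| *e == "Succeeded");
        assert_eq!(before, after);
        after
    }

    fn main() {
        assert!(last_event_succeeded(&["Enqueued", "Succeeded"]));
        assert!(!last_event_succeeded(&[]));
    }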

View File

@@ -108,7 +108,7 @@ where
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);
impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: Display,

View File

@@ -114,7 +114,7 @@ impl Task {
/// Return true when a task is finished.
/// A task is finished when its last state is either `Succeeded` or `Failed`.
pub fn is_finished(&self) -> bool {
self.events.last().map_or(false, |event| {
self.events.last().is_some_and(|event| {
matches!(event, TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. })
})
}
@@ -275,19 +275,19 @@ impl From<Task> for TaskView {
match (result, &mut details) {
(
TaskResult::DocumentAddition { indexed_documents: num, .. },
Some(TaskDetails::DocumentAddition { ref mut indexed_documents, .. }),
Some(TaskDetails::DocumentAddition { indexed_documents, .. }),
) => {
indexed_documents.replace(*num);
}
(
TaskResult::DocumentDeletion { deleted_documents: docs, .. },
Some(TaskDetails::DocumentDeletion { ref mut deleted_documents, .. }),
Some(TaskDetails::DocumentDeletion { deleted_documents, .. }),
) => {
deleted_documents.replace(*docs);
}
(
TaskResult::ClearAll { deleted_documents: docs },
Some(TaskDetails::ClearAll { ref mut deleted_documents }),
Some(TaskDetails::ClearAll { deleted_documents }),
) => {
deleted_documents.replace(*docs);
}

View File

@@ -170,14 +170,14 @@ impl UpdateFile {
}
pub fn push_document(&mut self, document: &Document) -> Result<()> {
if let Some(mut writer) = self.writer.as_mut() {
match self.writer.as_mut() { Some(mut writer) => {
serde_json::to_writer(&mut writer, &document)?;
writer.write_all(b"\n")?;
} else {
} _ => {
let file = File::create(&self.path).unwrap();
self.writer = Some(BufWriter::new(file));
self.push_document(document)?;
}
}}
Ok(())
}
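
The `if let ... else` blocks rewritten as `match ... { Some(...) => { ... } _ => { ... }}` here, and in many hunks below (the index mapper, the scheduler, `main.rs`, the routes), look like the mechanical output of the Rust 2024 migration: the new edition drops temporaries in an `if let` scrutinee before the `else` branch runs, so `cargo fix` rewrites the statement as a `match`, which keeps the 2021 drop order. A simplified, hypothetical illustration of why that drop order can matter:

    use std::sync::Mutex;

    fn take_or_default(slot: &Mutex<Option<String>>) -> String {
        // With `if let Some(value) = slot.lock().unwrap().take() { ... } else { ... }`,
        // edition 2024 drops the temporary `MutexGuard` before entering the
        // `else` arm, while edition 2021 keeps it alive for the whole statement.
        // The `match` form below behaves like 2021 in both editions, which is
        // why the migration prefers it (hence the unusual brace style).
        match slot.lock().unwrap().take() { Some(value) => {
            value
        } _ => {
            String::from("default")
        }}
    }

    fn main() {
        let slot = Mutex::new(Some(String::from("ready")));
        assert_eq!(take_or_default(&slot), "ready");
        assert_eq!(take_or_default(&slot), "default");
    }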

View File

@@ -111,7 +111,7 @@ impl FileStore {
}
/// List the Uuids of the files in the FileStore
pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>>> {
pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>> + use<>> {
Ok(self.path.read_dir()?.filter_map(|entry| {
let file_name = match entry {
Ok(entry) => entry.file_name(),
@@ -158,19 +158,19 @@ impl File {
impl Write for File {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
if let Some(file) = self.file.as_mut() {
match self.file.as_mut() { Some(file) => {
file.write(buf)
} else {
} _ => {
Ok(buf.len())
}
}}
}
fn flush(&mut self) -> std::io::Result<()> {
if let Some(file) = self.file.as_mut() {
match self.file.as_mut() { Some(file) => {
file.flush()
} else {
} _ => {
Ok(())
}
}}
}
}
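
The `+ use<>` appended to `all_uuids` in the first hunk of this file is precise capturing, stable since Rust 1.82: under edition 2024, a return-position `impl Trait` captures every in-scope lifetime by default, including that of `&self`, and an empty `use<>` opts out, promising the returned iterator borrows nothing from the `FileStore`. A small sketch under hypothetical names:

    struct Store {
        ids: Vec<u32>,
    }

    impl Store {
        // Without `use<>`, edition 2024 treats the returned iterator as
        // capturing the lifetime of `&self`, and the `push` below would
        // fail to borrow-check.
        fn all_ids(&self) -> impl Iterator<Item = u32> + use<> {
            self.ids.clone().into_iter()
        }
    }

    fn main() {
        let mut store = Store { ids: vec![1, 2, 3] };
        let iter = store.all_ids();
        store.ids.push(4); // fine: `iter` holds no borrow of `store`
        assert_eq!(iter.sum::<u32>(), 6);
    }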

View File

@@ -3,7 +3,7 @@ name = "filter-parser-fuzz"
version = "0.0.0"
authors = ["Automatically generated"]
publish = false
edition = "2018"
edition = "2024"
[package.metadata]
cargo-fuzz = true

View File

@@ -35,7 +35,7 @@ impl<E> NomErrorExt<E> for nom::Err<E> {
pub fn cut_with_err<'a, O>(
mut parser: impl FnMut(Span<'a>) -> IResult<'a, O>,
mut with: impl FnMut(Error<'a>) -> Error<'a>,
) -> impl FnMut(Span<'a>) -> IResult<O> {
) -> impl FnMut(Span<'a>) -> IResult<'a, O> {
move |input| match parser.parse(input) {
Err(nom::Err::Error(e)) => Err(nom::Err::Failure(with(e))),
rest => rest,
@@ -121,7 +121,7 @@ impl<'a> ParseError<Span<'a>> for Error<'a> {
}
}
impl<'a> Display for Error<'a> {
impl Display for Error<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let input = self.context.fragment();
// When printing our error message we want to escape all `\n` to be sure we keep our format with the
@@ -198,7 +198,7 @@ impl<'a> Display for Error<'a> {
f,
"Encountered an internal `{:?}` error while parsing your filter. Please fill an issue", kind
)?,
ErrorKind::External(ref error) => writeln!(f, "{}", error)?,
ErrorKind::External(error) => writeln!(f, "{}", error)?,
}
let base_column = self.context.get_utf8_column();
let size = self.context.fragment().chars().count();
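
The `cut_with_err` change in the first hunk of this file makes a previously elided lifetime explicit: `IResult<O>` became `IResult<'a, O>`, so the signature now states that the returned parser's output borrows from the `'a` input span (the `word_not_keyword` hunk below gets the same treatment). A hedged sketch of the idea with a plain type alias, names hypothetical:

    type ParseResult<'a, O> = Result<(&'a str, O), String>;

    // Before the cleanup, the lifetime in the return alias was elided:
    //     fn first_char(input: &str) -> ParseResult<char>
    // After, it is spelled out, as in the `cut_with_err` hunk above:
    fn first_char<'a>(input: &'a str) -> ParseResult<'a, char> {
        let mut chars = input.chars();
        match chars.next() {
            Some(c) => Ok((chars.as_str(), c)),
            None => Err(String::from("empty input")),
        }
    }

    fn main() {
        assert_eq!(first_char("abc"), Ok(("bc", 'a')));
    }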

View File

@@ -80,7 +80,7 @@ pub struct Token<'a> {
value: Option<String>,
}
impl<'a> PartialEq for Token<'a> {
impl PartialEq for Token<'_> {
fn eq(&self, other: &Self) -> bool {
self.span.fragment() == other.span.fragment()
}
@@ -226,7 +226,7 @@ impl<'a> FilterCondition<'a> {
}
}
pub fn parse(input: &'a str) -> Result<Option<Self>, Error> {
pub fn parse(input: &'a str) -> Result<Option<Self>, Error<'a>> {
if input.trim().is_empty() {
return Ok(None);
}
@@ -527,7 +527,7 @@ pub fn parse_filter(input: Span) -> IResult<FilterCondition> {
terminated(|input| parse_expression(input, 0), eof)(input)
}
impl<'a> std::fmt::Display for FilterCondition<'a> {
impl std::fmt::Display for FilterCondition<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
FilterCondition::Not(filter) => {
@@ -576,7 +576,8 @@ impl<'a> std::fmt::Display for FilterCondition<'a> {
}
}
}
impl<'a> std::fmt::Display for Condition<'a> {
impl std::fmt::Display for Condition<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Condition::GreaterThan(token) => write!(f, "> {token}"),
@@ -594,7 +595,8 @@ impl<'a> std::fmt::Display for Condition<'a> {
}
}
}
impl<'a> std::fmt::Display for Token<'a> {
impl std::fmt::Display for Token<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{{{}}}", self.value())
}

View File

@@ -52,7 +52,7 @@ fn quoted_by(quote: char, input: Span) -> IResult<Token> {
}
// word = (alphanumeric | _ | - | .)+ except for reserved keywords
pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<Token<'a>> {
pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<'a, Token<'a>> {
let (input, word): (_, Token<'a>) =
take_while1(is_value_component)(input).map(|(s, t)| (s, t.into()))?;
if is_keyword(word.value()) {

View File

@@ -3,7 +3,7 @@ name = "flatten-serde-json-fuzz"
version = "0.0.0"
authors = ["Automatically generated"]
publish = false
edition = "2018"
edition = "2024"
[package.metadata]
cargo-fuzz = true

View File

@@ -272,11 +272,11 @@ impl IndexMapper {
if tries >= 100 {
panic!("Too many attempts to close index {name} prior to deletion.")
}
let reopen = if let Some(reopen) = reopen.wait_timeout(Duration::from_secs(6)) {
let reopen = match reopen.wait_timeout(Duration::from_secs(6)) { Some(reopen) => {
reopen
} else {
} _ => {
continue;
};
}};
reopen.close(&mut self.index_map.write().unwrap());
continue;
}
@@ -382,11 +382,11 @@ impl IndexMapper {
Available(index) => break index,
Closing(reopen) => {
// Avoiding deadlocks: no lock taken while doing this operation.
let reopen = if let Some(reopen) = reopen.wait_timeout(Duration::from_secs(6)) {
let reopen = match reopen.wait_timeout(Duration::from_secs(6)) { Some(reopen) => {
reopen
} else {
} _ => {
continue;
};
}};
let index_path = self.base_path.join(uuid.to_string());
// take the lock to reopen the environment.
reopen

View File

@@ -355,19 +355,19 @@ impl IndexScheduler {
}
fn is_good_heed(tasks_path: &Path, map_size: usize) -> bool {
if let Ok(env) = unsafe {
match unsafe {
heed::EnvOpenOptions::new().map_size(clamp_to_page_size(map_size)).open(tasks_path)
} {
} { Ok(env) => {
env.prepare_for_closing().wait();
true
} else {
} _ => {
// We're treating all errors equally here, not only allocation errors.
// This means there's a possiblity for the budget to lower due to errors different from allocation errors.
// For persistent errors, this is OK as long as the task db is then reopened normally without ignoring the error this time.
// For transient errors, this could lead to an instance with too low a budget.
// However transient errors are: 1) less likely than persistent errors 2) likely to cause other issues down the line anyway.
false
}
}}
}
pub fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
@@ -696,7 +696,7 @@ impl IndexScheduler {
written: usize,
}
impl<'a, 'b> Read for TaskReader<'a, 'b> {
impl Read for TaskReader<'_, '_> {
fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
if self.buffer.is_empty() {
match self.tasks.next() {

View File

@@ -315,7 +315,7 @@ impl Queue {
if let Some(batch_uids) = batch_uids {
let mut batch_tasks = RoaringBitmap::new();
for batch_uid in batch_uids {
if processing_batch.as_ref().map_or(false, |batch| batch.uid == *batch_uid) {
if processing_batch.as_ref().is_some_and(|batch| batch.uid == *batch_uid) {
batch_tasks |= &**processing_tasks;
} else {
batch_tasks |= self.tasks_in_batch(rtxn, *batch_uid)?;

View File

@@ -219,7 +219,7 @@ impl BatchKind {
primary_key.is_some() &&
// 2.1.1 If the task we're trying to accumulate have a pk it must be equal to our primary key
// 2.1.2 If the task don't have a primary-key -> we can continue
kind.primary_key().map_or(true, |pk| pk == primary_key)
kind.primary_key().is_none_or(|pk| pk == primary_key)
) ||
// 2.2 If we don't have a primary-key ->
(
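
`Option::is_none_or`, stable since Rust 1.82, is the dual of the `is_some_and` swap seen earlier: it replaces `map_or(true, ...)` and returns `true` for `None`. A small sketch mirroring the primary-key check above (names hypothetical):

    fn pk_is_compatible(task_pk: Option<&str>, index_pk: &str) -> bool {
        // Before: `map_or(true, ...)`, where `None` (no primary key set) passes.
        // After: `is_none_or(...)`, identical behavior, and it reads as
        // "either there is no primary key, or it matches ours".
        task_pk.is_none_or(|pk| pk == index_pk)
    }

    fn main() {
        assert!(pk_is_compatible(None, "id"));
        assert!(pk_is_compatible(Some("id"), "id"));
        assert!(!pk_is_compatible(Some("uid"), "id"));
    }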

View File

@@ -10,7 +10,7 @@ use crate::TaskId;
#[macro_export]
macro_rules! debug_snapshot {
($value:expr, @$snapshot:literal) => {{
($value:expr_2021, @$snapshot:literal) => {{
let value = format!("{:?}", $value);
meili_snap::snapshot!(value, @$snapshot);
}};
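
The `$value:expr` to `$value:expr_2021` renames here, and in the `meili-snap` macros below, come with the edition bump: in Rust 2024 the `expr` fragment also matches `const { ... }` blocks and `_`, so the migration pins existing matchers to the older grammar with `expr_2021` to keep macro behavior unchanged. A minimal sketch:

    // `expr_2021` keeps the Rust 2021 expression grammar: unlike `expr`
    // under edition 2024, it will not match `const { ... }` blocks.
    macro_rules! debug_string {
        ($value:expr_2021) => {
            format!("{:?}", $value)
        };
    }

    fn main() {
        assert_eq!(debug_string!(vec![1, 2]), "[1, 2]");
    }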

View File

@@ -499,13 +499,13 @@ impl IndexScheduler {
// create the batch directly. Otherwise, get the index name associated with the task
// and use the autobatcher to batch the enqueued tasks associated with it
let index_name = if let Some(&index_name) = task.indexes().first() {
let index_name = match task.indexes().first() { Some(&index_name) => {
index_name
} else {
} _ => {
assert!(matches!(&task.kind, KindWithContent::IndexSwap { swaps } if swaps.is_empty()));
current_batch.processing(Some(&mut task));
return Ok(Some((Batch::IndexSwap { task }, current_batch)));
};
}};
let index_already_exists = self.index_mapper.exists(rtxn, index_name)?;
let mut primary_key = None;

View File

@@ -47,11 +47,11 @@ impl IndexScheduler {
Batch::TaskCancelation { mut task } => {
// 1. Retrieve the tasks that matched the query at enqueue-time.
let matched_tasks =
if let KindWithContent::TaskCancelation { tasks, query: _ } = &task.kind {
match &task.kind { KindWithContent::TaskCancelation { tasks, query: _ } => {
tasks
} else {
} _ => {
unreachable!()
};
}};
let rtxn = self.env.read_txn()?;
let mut canceled_tasks = self.cancel_matched_tasks(
@@ -83,11 +83,11 @@ impl IndexScheduler {
let mut matched_tasks = RoaringBitmap::new();
for task in tasks.iter() {
if let KindWithContent::TaskDeletion { tasks, query: _ } = &task.kind {
match &task.kind { KindWithContent::TaskDeletion { tasks, query: _ } => {
matched_tasks |= tasks;
} else {
} _ => {
unreachable!()
}
}}
}
let mut wtxn = self.env.write_txn()?;
@@ -279,11 +279,11 @@ impl IndexScheduler {
progress.update_progress(SwappingTheIndexes::EnsuringCorrectnessOfTheSwap);
let mut wtxn = self.env.write_txn()?;
let swaps = if let KindWithContent::IndexSwap { swaps } = &task.kind {
let swaps = match &task.kind { KindWithContent::IndexSwap { swaps } => {
swaps
} else {
} _ => {
unreachable!()
};
}};
let mut not_found_indexes = BTreeSet::new();
for IndexSwap { indexes: (lhs, rhs) } in swaps {
for index in [lhs, rhs] {
@@ -532,7 +532,7 @@ impl IndexScheduler {
// We must remove the batch entirely
if tasks.is_empty() {
if let Some(batch) = self.queue.batches.get_batch(wtxn, batch_id)? {
if let Some(BatchEnqueuedAt { earliest, oldest }) = batch.enqueued_at {
match batch.enqueued_at { Some(BatchEnqueuedAt { earliest, oldest }) => {
remove_task_datetime(
wtxn,
self.queue.batches.enqueued_at,
@@ -545,7 +545,7 @@ impl IndexScheduler {
oldest,
batch_id,
)?;
} else {
} _ => {
// If we don't have the enqueued at in the batch it means the database comes from the v1.12
// and we still need to find the date by scrolling the database
remove_n_tasks_datetime_earlier_than(
@@ -555,7 +555,7 @@ impl IndexScheduler {
batch.stats.total_nb_tasks.clamp(1, 2) as usize,
batch_id,
)?;
}
}}
remove_task_datetime(
wtxn,
self.queue.batches.started_at,

View File

@@ -26,11 +26,11 @@ impl IndexScheduler {
progress.update_progress(DumpCreationProgress::StartTheDumpCreation);
let started_at = OffsetDateTime::now_utc();
let (keys, instance_uid) =
if let KindWithContent::DumpCreation { keys, instance_uid } = &task.kind {
match &task.kind { KindWithContent::DumpCreation { keys, instance_uid } => {
(keys, instance_uid)
} else {
} _ => {
unreachable!();
};
}};
let dump = dump::DumpWriter::new(*instance_uid)?;
// 1. dump the keys
@@ -206,14 +206,14 @@ impl IndexScheduler {
let user_err =
milli::Error::UserError(milli::UserError::InvalidVectorsMapType {
document_id: {
if let Ok(Some(Ok(index))) = index
match index
.external_id_of(&rtxn, std::iter::once(id))
.map(|it| it.into_iter().next())
{
{ Ok(Some(Ok(index))) => {
index
} else {
} _ => {
format!("internal docid={id}")
}
}}
},
value: vectors.clone(),
});

View File

@@ -206,17 +206,17 @@ impl IndexScheduler {
IndexOperation::DocumentEdition { index_uid, mut task } => {
progress.update_progress(DocumentEditionProgress::RetrievingConfig);
let (filter, code) = if let KindWithContent::DocumentEdition {
let (filter, code) = match &task.kind
{ KindWithContent::DocumentEdition {
filter_expr,
context: _,
function,
..
} = &task.kind
{
} => {
(filter_expr, function)
} else {
} _ => {
unreachable!()
};
}};
let candidates = match filter.as_ref().map(Filter::from_json) {
Some(Ok(Some(filter))) => filter
@@ -226,18 +226,18 @@ impl IndexScheduler {
Some(Err(e)) => return Err(Error::from_milli(e, Some(index_uid.clone()))),
};
let (original_filter, context, function) = if let Some(Details::DocumentEdition {
let (original_filter, context, function) = match task.details
{ Some(Details::DocumentEdition {
original_filter,
context,
function,
..
}) = task.details
{
}) => {
(original_filter, context, function)
} else {
} _ => {
// In the case of a `documentEdition` the details MUST be set
unreachable!();
};
}};
if candidates.is_empty() {
task.status = Status::Succeeded;
@@ -397,16 +397,16 @@ impl IndexScheduler {
};
}
let will_be_removed = to_delete.len() - before;
if let Some(Details::DocumentDeletionByFilter {
match &mut task.details
{ Some(Details::DocumentDeletionByFilter {
original_filter: _,
deleted_documents,
}) = &mut task.details
{
}) => {
*deleted_documents = Some(will_be_removed);
} else {
} _ => {
// In the case of a `documentDeleteByFilter` the details MUST be set
unreachable!()
}
}}
}
_ => unreachable!(),
}

View File

@@ -307,7 +307,7 @@ pub(crate) fn filter_out_references_to_newer_tasks(task: &mut Task) {
pub(crate) fn check_index_swap_validity(task: &Task) -> Result<()> {
let swaps =
if let KindWithContent::IndexSwap { swaps } = &task.kind { swaps } else { return Ok(()) };
match &task.kind { KindWithContent::IndexSwap { swaps } => { swaps } _ => { return Ok(()) }};
let mut all_indexes = HashSet::new();
let mut duplicate_indexes = BTreeSet::new();
for IndexSwap { indexes: (lhs, rhs) } in swaps {
@@ -501,15 +501,15 @@ impl crate::IndexScheduler {
} => {
assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
let (index_uid, documents_ids) =
if let KindWithContent::DocumentDeletion {
match kind
{ KindWithContent::DocumentDeletion {
ref index_uid,
ref documents_ids,
} = kind
{
} => {
(index_uid, documents_ids)
} else {
} _ => {
unreachable!()
};
}};
assert_eq!(&task_index_uid.unwrap(), index_uid);
match status {
@@ -526,15 +526,15 @@ impl crate::IndexScheduler {
}
Details::DocumentDeletionByFilter { deleted_documents, original_filter: _ } => {
assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
let (index_uid, _) = if let KindWithContent::DocumentDeletionByFilter {
let (index_uid, _) = match kind
{ KindWithContent::DocumentDeletionByFilter {
ref index_uid,
ref filter_expr,
} = kind
{
} => {
(index_uid, filter_expr)
} else {
} _ => {
unreachable!()
};
}};
assert_eq!(&task_index_uid.unwrap(), index_uid);
match status {

View File

@@ -3,7 +3,7 @@ name = "json-depth-checker"
version = "0.0.0"
authors = ["Automatically generated"]
publish = false
edition = "2018"
edition = "2024"
[package.metadata]
cargo-fuzz = true

View File

@@ -77,7 +77,7 @@ snapshot_hash!("hello world", name: "snap_name", @"5f93f983524def3dca464469d2cf9
*/
#[macro_export]
macro_rules! snapshot_hash {
($value:expr, @$inline:literal) => {
($value:expr_2021, @$inline:literal) => {
let test_name = {
fn f() {}
fn type_name_of_val<T>(_: T) -> &'static str {
@@ -99,7 +99,7 @@ macro_rules! snapshot_hash {
}
});
};
($value:expr, name: $name:expr, @$inline:literal) => {
($value:expr_2021, name: $name:expr_2021, @$inline:literal) => {
let test_name = {
fn f() {}
fn type_name_of_val<T>(_: T) -> &'static str {
@@ -151,7 +151,7 @@ snapshot!(format!("{:?}", vec![1, 2]), @"[1, 2]");
*/
#[macro_export]
macro_rules! snapshot {
($value:expr, name: $name:expr) => {
($value:expr_2021, name: $name:expr_2021) => {
let test_name = {
fn f() {}
fn type_name_of_val<T>(_: T) -> &'static str {
@@ -172,7 +172,7 @@ macro_rules! snapshot {
}
});
};
($value:expr, @$inline:literal) => {
($value:expr_2021, @$inline:literal) => {
// Note that the name given as argument does not matter since it is only an inline snapshot
// We don't pass None because otherwise `meili-snap` will try to assign it a unique identifier
let (settings, _, _) = $crate::default_snapshot_settings_for_test("", Some("_dummy_argument"));
@@ -183,7 +183,7 @@ macro_rules! snapshot {
}
});
};
($value:expr) => {
($value:expr_2021) => {
let test_name = {
fn f() {}
fn type_name_of_val<T>(_: T) -> &'static str {
@@ -213,13 +213,13 @@ macro_rules! snapshot {
/// refer to the redactions feature in the `insta` guide.
#[macro_export]
macro_rules! json_string {
($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
($value:expr_2021, {$($k:expr_2021 => $v:expr_2021),*$(,)?}) => {
{
let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
snap
}
};
($value:expr) => {{
($value:expr_2021) => {{
let value = meili_snap::insta::_macro_support::serialize_value(
&$value,
meili_snap::insta::_macro_support::SerializationFormat::Json,

View File

@@ -403,7 +403,7 @@ impl ErrorCode for milli::Error {
match self {
Error::InternalError(_) => Code::Internal,
Error::IoError(e) => e.error_code(),
Error::UserError(ref error) => {
Error::UserError(error) => {
match error {
// TODO: wait for spec for new error codes.
UserError::SerdeJson(_)

View File

@@ -33,7 +33,7 @@ impl From<LocalizedAttributesRuleView> for LocalizedAttributesRule {
///
/// this enum implements `Deserr` in order to be used in the API.
macro_rules! make_locale {
($(($iso_639_1:ident, $iso_639_1_str:expr) => ($iso_639_3:ident, $iso_639_3_str:expr),)+) => {
($(($iso_639_1:ident, $iso_639_1_str:expr_2021) => ($iso_639_3:ident, $iso_639_3_str:expr_2021),)+) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr, Serialize, Deserialize, Ord, PartialOrd, ToSchema)]
#[deserr(rename_all = camelCase)]
#[serde(rename_all = "camelCase")]

View File

@@ -572,19 +572,19 @@ pub fn apply_settings_to_builder(
} = settings;
match searchable_attributes.deref() {
Setting::Set(ref names) => builder.set_searchable_fields(names.clone()),
Setting::Set(names) => builder.set_searchable_fields(names.clone()),
Setting::Reset => builder.reset_searchable_fields(),
Setting::NotSet => (),
}
match displayed_attributes.deref() {
Setting::Set(ref names) => builder.set_displayed_fields(names.clone()),
Setting::Set(names) => builder.set_displayed_fields(names.clone()),
Setting::Reset => builder.reset_displayed_fields(),
Setting::NotSet => (),
}
match filterable_attributes {
Setting::Set(ref facets) => {
Setting::Set(facets) => {
builder.set_filterable_fields(facets.clone().into_iter().collect())
}
Setting::Reset => builder.reset_filterable_fields(),
@@ -592,13 +592,13 @@ pub fn apply_settings_to_builder(
}
match sortable_attributes {
Setting::Set(ref fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
Setting::Set(fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
Setting::Reset => builder.reset_sortable_fields(),
Setting::NotSet => (),
}
match ranking_rules {
Setting::Set(ref criteria) => {
Setting::Set(criteria) => {
builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
}
Setting::Reset => builder.reset_criteria(),
@@ -606,13 +606,13 @@ pub fn apply_settings_to_builder(
}
match stop_words {
Setting::Set(ref stop_words) => builder.set_stop_words(stop_words.clone()),
Setting::Set(stop_words) => builder.set_stop_words(stop_words.clone()),
Setting::Reset => builder.reset_stop_words(),
Setting::NotSet => (),
}
match non_separator_tokens {
Setting::Set(ref non_separator_tokens) => {
Setting::Set(non_separator_tokens) => {
builder.set_non_separator_tokens(non_separator_tokens.clone())
}
Setting::Reset => builder.reset_non_separator_tokens(),
@@ -620,7 +620,7 @@ pub fn apply_settings_to_builder(
}
match separator_tokens {
Setting::Set(ref separator_tokens) => {
Setting::Set(separator_tokens) => {
builder.set_separator_tokens(separator_tokens.clone())
}
Setting::Reset => builder.reset_separator_tokens(),
@@ -628,38 +628,38 @@ pub fn apply_settings_to_builder(
}
match dictionary {
Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
Setting::Set(dictionary) => builder.set_dictionary(dictionary.clone()),
Setting::Reset => builder.reset_dictionary(),
Setting::NotSet => (),
}
match synonyms {
Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
Setting::Set(synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
Setting::Reset => builder.reset_synonyms(),
Setting::NotSet => (),
}
match distinct_attribute {
Setting::Set(ref attr) => builder.set_distinct_field(attr.clone()),
Setting::Set(attr) => builder.set_distinct_field(attr.clone()),
Setting::Reset => builder.reset_distinct_field(),
Setting::NotSet => (),
}
match proximity_precision {
Setting::Set(ref precision) => builder.set_proximity_precision((*precision).into()),
Setting::Set(precision) => builder.set_proximity_precision((*precision).into()),
Setting::Reset => builder.reset_proximity_precision(),
Setting::NotSet => (),
}
match localized_attributes_rules {
Setting::Set(ref rules) => builder
Setting::Set(rules) => builder
.set_localized_attributes_rules(rules.iter().cloned().map(|r| r.into()).collect()),
Setting::Reset => builder.reset_localized_attributes_rules(),
Setting::NotSet => (),
}
match typo_tolerance {
Setting::Set(ref value) => {
Setting::Set(value) => {
match value.enabled {
Setting::Set(val) => builder.set_autorize_typos(val),
Setting::Reset => builder.reset_authorize_typos(),
@@ -736,7 +736,7 @@ pub fn apply_settings_to_builder(
}
match pagination {
Setting::Set(ref value) => match value.max_total_hits {
Setting::Set(value) => match value.max_total_hits {
Setting::Set(val) => builder.set_pagination_max_total_hits(val),
Setting::Reset => builder.reset_pagination_max_total_hits(),
Setting::NotSet => (),
@@ -960,7 +960,7 @@ impl<'de> Deserialize<'de> for RankingRuleView {
D: serde::Deserializer<'de>,
{
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
impl serde::de::Visitor<'_> for Visitor {
type Value = RankingRuleView;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "the name of a valid ranking rule (string)")
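
The `ref` keywords dropped throughout `apply_settings_to_builder` lean on match ergonomics: the scrutinees are references (`searchable_attributes.deref()` and friends), so a plain binding is already a reference binding and `ref` is redundant. A minimal sketch with a simplified, hypothetical `Setting` enum:

    enum Setting<T> {
        Set(T),
        Reset,
        NotSet,
    }

    fn describe(setting: &Setting<Vec<String>>) -> String {
        // Matching on `&Setting<_>` makes `names` a `&Vec<String>` automatically;
        // writing `Setting::Set(ref names)` would bind the exact same type.
        match setting {
            Setting::Set(names) => format!("set to {} field(s)", names.len()),
            Setting::Reset => String::from("reset"),
            Setting::NotSet => String::from("not set"),
        }
    }

    fn main() {
        let s = Setting::Set(vec![String::from("title")]);
        assert_eq!(describe(&s), "set to 1 field(s)");
    }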

View File

@@ -66,7 +66,7 @@ where
/// not supported on untagged enums.
struct StarOrVisitor<T>(PhantomData<T>);
impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
impl<T, FE> Visitor<'_> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: fmt::Display,

View File

@@ -89,11 +89,11 @@ fn is_empty_db(db_path: impl AsRef<Path>) -> bool {
if !db_path.exists() {
true
// if we encounter an error or if the db is a file we consider the db non empty
} else if let Ok(dir) = db_path.read_dir() {
} else { match db_path.read_dir() { Ok(dir) => {
dir.count() == 0
} else {
} _ => {
true
}
}}}
}
/// The handle used to update the logs at runtime. Must be accessible from the `main.rs` and the `route/logs.rs`.
@@ -346,7 +346,7 @@ fn open_or_create_database_unchecked(
match (
index_scheduler_builder(),
auth_controller.map_err(anyhow::Error::from),
create_current_version_file(&opt.db_path).map_err(anyhow::Error::from),
create_current_version_file(&opt.db_path),
) {
(Ok(i), Ok(a), Ok(())) => Ok((i, a)),
(Err(e), _, _) | (_, Err(e), _) | (_, _, Err(e)) => {
@@ -466,18 +466,18 @@ fn import_dump(
let reader = File::open(dump_path)?;
let mut dump_reader = dump::DumpReader::open(reader)?;
if let Some(date) = dump_reader.date() {
match dump_reader.date() { Some(date) => {
tracing::info!(
version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
%date,
"Importing a dump of meilisearch"
);
} else {
} _ => {
tracing::info!(
version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
"Importing a dump of meilisearch",
);
}
}}
let instance_uid = dump_reader.instance_uid()?;


@@ -69,7 +69,7 @@ fn setup(opt: &Opt) -> anyhow::Result<(LogRouteHandle, LogStderrHandle)> {
Ok((route_layer_handle, stderr_layer_handle))
}
fn on_panic(info: &std::panic::PanicInfo) {
fn on_panic(info: &std::panic::PanicHookInfo) {
let info = info.to_string().replace('\n', " ");
tracing::error!(%info);
}
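
Rust 1.81 renamed the hook-side type: panic hooks now receive `std::panic::PanicHookInfo`, while `PanicInfo` stays reserved for `#[panic_handler]`. A minimal hook with the updated signature, mirroring the one-line flattening above:

    fn on_panic(info: &std::panic::PanicHookInfo) {
        // collapse the multi-line panic payload onto a single log line
        let info = info.to_string().replace('\n', " ");
        eprintln!("panic: {info}");
    }

    fn main() {
        std::panic::set_hook(Box::new(|info| on_panic(info)));
        panic!("boom");
    }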
@@ -178,11 +178,11 @@ async fn run_http(
.disable_signals()
.keep_alive(KeepAlive::Os);
if let Some(config) = opt_clone.get_ssl_config()? {
match opt_clone.get_ssl_config()? { Some(config) => {
http_server.bind_rustls_0_23(opt_clone.http_addr, config)?.run().await?;
} else {
} _ => {
http_server.bind(&opt_clone.http_addr)?.run().await?;
}
}}
Ok(())
}


@@ -907,7 +907,7 @@ fn load_private_key(
fn load_ocsp(filename: &Option<PathBuf>) -> anyhow::Result<Vec<u8>> {
let mut ret = Vec::new();
if let Some(ref name) = filename {
if let Some(name) = filename {
fs::File::open(name)
.map_err(|_| anyhow::anyhow!("cannot open ocsp file"))?
.read_to_end(&mut ret)
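
Removing `ref` from `if let Some(ref name) = filename` leans on match ergonomics: `filename` here is an `&Option<PathBuf>`, so matching `Some(name)` already binds `name` by reference and the explicit `ref` is redundant. A compact illustration:

    fn open_named(filename: &Option<String>) {
        // Before: `if let Some(ref name) = filename { … }`
        // After: the default binding mode already gives `name: &String`.
        if let Some(name) = filename {
            println!("opening {name}");
        }
    }

    fn main() {
        open_named(&Some("ocsp.der".to_string()));
    }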
@@ -924,12 +924,12 @@ where
T: AsRef<OsStr>,
{
if let Err(VarError::NotPresent) = std::env::var(key) {
std::env::set_var(key, value);
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { std::env::set_var(key, value) };
}
}
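
Rust 2024 makes `std::env::set_var` (and `remove_var`) an `unsafe fn`, since mutating the environment can race with `getenv` calls on other threads; the migration wraps each call in `unsafe { … }` and leaves the audit TODO above. A sketch of the resulting helper, assuming it only runs during single-threaded startup (helper name invented to match the shape of the hunk):

    use std::env::{self, VarError};
    use std::ffi::OsStr;

    /// Set `key` only if it is absent from the environment.
    fn export_to_env_if_not_present<T: AsRef<OsStr>>(key: &str, value: T) {
        if let Err(VarError::NotPresent) = env::var(key) {
            // SAFETY: assumed to be called before any other thread is spawned.
            unsafe { env::set_var(key, value) };
        }
    }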
/// Functions used to get default values for `Opt` fields; they need to be functions because of serde's default attribute.
fn default_db_path() -> PathBuf {
PathBuf::from(DEFAULT_DB_PATH)
}
@@ -1037,7 +1037,7 @@ where
{
struct BoolOrInt;
impl<'de> serde::de::Visitor<'de> for BoolOrInt {
impl serde::de::Visitor<'_> for BoolOrInt {
type Value = ScheduleSnapshot;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {


@@ -97,12 +97,12 @@ async fn get_batch(
let filters = index_scheduler.filters();
let (batches, _) = index_scheduler.get_batches_from_authorized_indexes(&query, filters)?;
if let Some(batch) = batches.first() {
match batches.first() { Some(batch) => {
let batch_view = BatchView::from_batch(batch);
Ok(HttpResponse::Ok().json(batch_view))
} else {
} _ => {
Err(index_scheduler::Error::BatchNotFound(batch_uid).into())
}
}}
}
#[derive(Debug, Serialize, ToSchema)]


@@ -619,7 +619,7 @@ fn documents_by_query(
let retrieve_vectors = RetrieveVectors::new(retrieve_vectors);
let ids = if let Some(ids) = ids {
let ids = match ids { Some(ids) => {
let mut parsed_ids = Vec::with_capacity(ids.len());
for (index, id) in ids.into_iter().enumerate() {
let id = id.try_into().map_err(|error| {
@@ -629,9 +629,9 @@ fn documents_by_query(
parsed_ids.push(id)
}
Some(parsed_ids)
} else {
} _ => {
None
};
}};
let index = index_scheduler.index(&index_uid)?;
let (total, documents) = retrieve_documents(


@@ -302,7 +302,7 @@ impl From<FacetSearchQuery> for SearchQuery {
// If exhaustive_facet_count is true, we need to set the page to 0
// because the facet search is not exhaustive by default.
let page = if exhaustive_facet_count.map_or(false, |exhaustive| exhaustive) {
let page = if exhaustive_facet_count.is_some_and(|exhaustive| exhaustive) {
// setting the page to 0 will force the search to be exhaustive when computing the number of hits,
// but it will skip the bucket sort saving time.
Some(0)
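
`opt.map_or(false, f)` is now spelled `opt.is_some_and(f)` (stable since Rust 1.70); the same substitution shows up below in `is_dry_run`, and its negated cousin `map_or(true, …)` becomes `is_none_or(…)` in `perform_facet_search`. For example:

    fn main() {
        let exhaustive_facet_count: Option<bool> = Some(true);

        // Before:
        let old = exhaustive_facet_count.map_or(false, |exhaustive| exhaustive);
        // After:
        let new = exhaustive_facet_count.is_some_and(|exhaustive| exhaustive);

        assert_eq!(old, new);
    }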


@@ -131,7 +131,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
ret.total_received = 1;
if let Some(ref sort) = sort {
if let Some(sort) = sort {
ret.sort_total_number_of_criteria = 1;
ret.sort_with_geo_point = sort.iter().any(|s| s.contains("_geoPoint("));
ret.sort_sum_of_criteria_terms = sort.len();
@@ -139,7 +139,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
ret.distinct = distinct.is_some();
if let Some(ref filter) = filter {
if let Some(filter) = filter {
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
ret.filter_total_number_of_criteria = 1;
@@ -168,11 +168,11 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
ret.attributes_to_search_on_total_number_of_uses = 1;
}
if let Some(ref q) = q {
if let Some(q) = q {
ret.max_terms_number = q.split_whitespace().count();
}
if let Some(ref vector) = vector {
if let Some(vector) = vector {
ret.max_vector_size = vector.len();
}
ret.retrieve_vectors |= retrieve_vectors;


@@ -67,7 +67,7 @@ impl<Method: AggregateMethod> SimilarAggregator<Method> {
ret.total_received = 1;
if let Some(ref filter) = filter {
if let Some(filter) = filter {
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
ret.filter_total_number_of_criteria = 1;


@@ -341,11 +341,11 @@ pub async fn get_logs(
})
.unwrap();
if let Some(stream) = stream {
match stream { Some(stream) => {
Ok(HttpResponse::Ok().streaming(stream))
} else {
} _ => {
Err(MeilisearchHttpError::AlreadyUsedLogRoute.into())
}
}}
}
/// Stop retrieving logs


@@ -64,6 +64,8 @@ mod open_api_utils;
mod snapshot;
mod swap_indexes;
pub mod tasks;
#[cfg(test)]
mod tasks_test;
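
These two added lines wire the extracted tests back in: `#[cfg(test)]` keeps the module out of non-test builds, and `mod tasks_test;` tells rustc to load the sibling source file. In sketch form (the path is assumed from the surrounding module):

    // in the routes module file: compiled only under `cargo test`
    #[cfg(test)]
    mod tasks_test; // resolves to the adjacent tasks_test.rs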
#[derive(OpenApi)]
#[openapi(
@@ -168,7 +170,7 @@ pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
})
})
.transpose()?
.map_or(false, |s| s.to_lowercase() == "true"))
.is_some_and(|s| s.to_lowercase() == "true"))
}
#[derive(Debug, Serialize, ToSchema)]


@@ -146,7 +146,7 @@ impl TasksFilterQuery {
}
impl TaskDeletionOrCancelationQuery {
fn is_empty(&self) -> bool {
pub fn is_empty(&self) -> bool {
matches!(
self,
TaskDeletionOrCancelationQuery {
@@ -638,12 +638,12 @@ async fn get_task(
let filters = index_scheduler.filters();
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
if let Some(task) = tasks.first() {
match tasks.first() { Some(task) => {
let task_view = TaskView::from_task(task);
Ok(HttpResponse::Ok().json(task_view))
} else {
} _ => {
Err(index_scheduler::Error::TaskNotFound(task_uid).into())
}
}}
}
/// Get a task's documents.
@@ -693,7 +693,7 @@ async fn get_task_documents_file(
let filters = index_scheduler.filters();
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
if let Some(task) = tasks.first() {
match tasks.first() { Some(task) => {
match task.content_uuid() {
Some(uuid) => {
let mut tfile = match index_scheduler.queue.update_file(uuid) {
@@ -711,9 +711,9 @@ async fn get_task_documents_file(
}
None => Err(index_scheduler::Error::TaskFileNotFound(task_uid).into()),
}
} else {
} _ => {
Err(index_scheduler::Error::TaskNotFound(task_uid).into())
}
}}
}
pub enum DeserializeDateOption {
@@ -760,356 +760,3 @@ pub fn deserialize_date_before(
) -> std::result::Result<OptionStarOr<OffsetDateTime>, InvalidTaskDateError> {
value.try_map(|x| deserialize_date(&x, DeserializeDateOption::Before))
}
#[cfg(test)]
mod tests {
use deserr::Deserr;
use meili_snap::snapshot;
use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::{Code, ResponseError};
use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery};
fn deserr_query_params<T>(j: &str) -> Result<T, ResponseError>
where
T: Deserr<DeserrQueryParamError>,
{
let value = serde_urlencoded::from_str::<serde_json::Value>(j)
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
match deserr::deserialize::<_, _, DeserrQueryParamError>(value) {
Ok(data) => Ok(data),
Err(e) => Err(ResponseError::from(e)),
}
}
#[test]
fn deserialize_task_filter_dates() {
{
let params = "afterEnqueuedAt=2021-12-03&beforeEnqueuedAt=2021-12-03&afterStartedAt=2021-12-03&beforeStartedAt=2021-12-03&afterFinishedAt=2021-12-03&beforeFinishedAt=2021-12-03";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.after_started_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_started_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.after_finished_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_finished_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
}
{
let params =
"afterEnqueuedAt=2021-12-03T23:45:23Z&beforeEnqueuedAt=2021-12-03T23:45:23Z";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06-06:20";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 -06:20:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06%2B00:00";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 +00:00:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06.200000300Z";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.2000003 +00:00:00)");
}
{
// Stars are allowed in date fields as well
let params = "afterEnqueuedAt=*&beforeStartedAt=*&afterFinishedAt=*&beforeFinishedAt=*&afterStartedAt=*&beforeEnqueuedAt=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }");
}
{
let params = "afterFinishedAt=2021";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
}
"###);
}
{
let params = "beforeFinishedAt=2021";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
}
"###);
}
{
let params = "afterEnqueuedAt=2021-12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
}
"###);
}
{
let params = "beforeEnqueuedAt=2021-12-03T23";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
}
"###);
}
{
let params = "afterStartedAt=2021-12-03T23:45";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
}
"###);
}
{
let params = "beforeStartedAt=2021-12-03T23:45";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
}
"###);
}
}
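
Note the asymmetry the first block pins down: a date-only `after*` value is bumped to the following midnight (`2021-12-03` becomes `2021-12-04 0:00`), so that "after day D" excludes all of D, while a date-only `before*` value keeps D's own midnight. A sketch of that bound computation with the `time` crate (helper name invented):

    use time::{Date, Month, OffsetDateTime, PrimitiveDateTime, Time};

    // "afterEnqueuedAt=2021-12-03" must exclude the whole of 2021-12-03,
    // so the bound becomes the next day's midnight.
    fn after_bound(date: Date) -> OffsetDateTime {
        PrimitiveDateTime::new(date.next_day().unwrap(), Time::MIDNIGHT).assume_utc()
    }

    fn main() {
        let d = Date::from_calendar_date(2021, Month::December, 3).unwrap();
        println!("{}", after_bound(d)); // 2021-12-04 0:00:00.0 +00:00:00
    }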
#[test]
fn deserialize_task_filter_uids() {
{
let params = "uids=78,1,12,73";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.uids), @"List([78, 1, 12, 73])");
}
{
let params = "uids=1";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.uids), @"List([1])");
}
{
let params = "uids=cat,*,dog";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
{
let params = "uids=78,hello,world";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
{
let params = "uids=cat";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `cat` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
}
#[test]
fn deserialize_task_filter_status() {
{
let params = "statuses=succeeded,failed,enqueued,processing,canceled";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.statuses), @"List([Succeeded, Failed, Enqueued, Processing, Canceled])");
}
{
let params = "statuses=enqueued";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.statuses), @"List([Enqueued])");
}
{
let params = "statuses=finished";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
"code": "invalid_task_statuses",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
}
"###);
}
}
#[test]
fn deserialize_task_filter_types() {
{
let params = "types=documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.types), @"List([DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation])");
}
{
let params = "types=settingsUpdate";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.types), @"List([SettingsUpdate])");
}
{
let params = "types=createIndex";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r#"
{
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
}
"#);
}
}
#[test]
fn deserialize_task_filter_index_uids() {
{
let params = "indexUids=toto,tata-78";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("toto"), IndexUid("tata-78")])"###);
}
{
let params = "indexUids=index_a";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("index_a")])"###);
}
{
let params = "indexUids=1,hé";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}
"###);
}
{
let params = "indexUids=hé";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}
"###);
}
}
#[test]
fn deserialize_task_filter_general() {
{
let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
}
{
// Stars should translate to `None` in the query
// Verify value of the default limit
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Stars should also translate to `None` in task deletion/cancelation queries
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Star in from not allowed
let params = "uids=*&from=*";
let err = deserr_query_params::<TasksFilterQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `from`: could not parse `*` as a positive integer",
"code": "invalid_task_from",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_from"
}
"###);
}
{
// From not allowed in task deletion/cancelation queries
let params = "from=12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `from`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
}
{
// Limit not allowed in task deletion/cancelation queries
let params = "limit=12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `limit`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
}
}
#[test]
fn deserialize_task_delete_or_cancel_empty() {
{
let params = "";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
assert!(query.is_empty());
}
{
let params = "statuses=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
assert!(!query.is_empty());
snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
}
}


@@ -0,0 +1,352 @@
#[cfg(test)]
mod tests {
use deserr::Deserr;
use meili_snap::snapshot;
use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::{Code, ResponseError};
use crate::routes::tasks::{TaskDeletionOrCancelationQuery, TasksFilterQuery};
fn deserr_query_params<T>(j: &str) -> Result<T, ResponseError>
where
T: Deserr<DeserrQueryParamError>,
{
let value = serde_urlencoded::from_str::<serde_json::Value>(j)
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
match deserr::deserialize::<_, _, DeserrQueryParamError>(value) {
Ok(data) => Ok(data),
Err(e) => Err(ResponseError::from(e)),
}
}
#[test]
fn deserialize_task_filter_dates() {
{
let params = "afterEnqueuedAt=2021-12-03&beforeEnqueuedAt=2021-12-03&afterStartedAt=2021-12-03&beforeStartedAt=2021-12-03&afterFinishedAt=2021-12-03&beforeFinishedAt=2021-12-03";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.after_started_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_started_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.after_finished_at), @"Other(2021-12-04 0:00:00.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_finished_at), @"Other(2021-12-03 0:00:00.0 +00:00:00)");
}
{
let params =
"afterEnqueuedAt=2021-12-03T23:45:23Z&beforeEnqueuedAt=2021-12-03T23:45:23Z";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
snapshot!(format!("{:?}", query.before_enqueued_at), @"Other(2021-12-03 23:45:23.0 +00:00:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06-06:20";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 -06:20:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06%2B00:00";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.0 +00:00:00)");
}
{
let params = "afterEnqueuedAt=1997-11-12T09:55:06.200000300Z";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.after_enqueued_at), @"Other(1997-11-12 9:55:06.2000003 +00:00:00)");
}
{
// Stars are allowed in date fields as well
let params = "afterEnqueuedAt=*&beforeStartedAt=*&afterFinishedAt=*&beforeFinishedAt=*&afterStartedAt=*&beforeEnqueuedAt=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: None, index_uids: None, after_enqueued_at: Star, before_enqueued_at: Star, after_started_at: Star, before_started_at: Star, after_finished_at: Star, before_finished_at: Star }");
}
{
let params = "afterFinishedAt=2021";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_finished_at"
}
"###);
}
{
let params = "beforeFinishedAt=2021";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeFinishedAt`: `2021` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_finished_at"
}
"###);
}
{
let params = "afterEnqueuedAt=2021-12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterEnqueuedAt`: `2021-12` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_enqueued_at"
}
"###);
}
{
let params = "beforeEnqueuedAt=2021-12-03T23";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeEnqueuedAt`: `2021-12-03T23` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_enqueued_at"
}
"###);
}
{
let params = "afterStartedAt=2021-12-03T23:45";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `afterStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_after_started_at"
}
"###);
}
{
let params = "beforeStartedAt=2021-12-03T23:45";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `beforeStartedAt`: `2021-12-03T23:45` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_before_started_at"
}
"###);
}
}
#[test]
fn deserialize_task_filter_uids() {
{
let params = "uids=78,1,12,73";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.uids), @"List([78, 1, 12, 73])");
}
{
let params = "uids=1";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.uids), @"List([1])");
}
{
let params = "uids=cat,*,dog";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids[0]`: could not parse `cat` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
{
let params = "uids=78,hello,world";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids[1]`: could not parse `hello` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
{
let params = "uids=cat";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `cat` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_uids"
}
"###);
}
}
#[test]
fn deserialize_task_filter_status() {
{
let params = "statuses=succeeded,failed,enqueued,processing,canceled";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.statuses), @"List([Succeeded, Failed, Enqueued, Processing, Canceled])");
}
{
let params = "statuses=enqueued";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.statuses), @"List([Enqueued])");
}
{
let params = "statuses=finished";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `statuses`: `finished` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
"code": "invalid_task_statuses",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_statuses"
}
"###);
}
}
#[test]
fn deserialize_task_filter_types() {
{
let params = "types=documentAdditionOrUpdate,documentDeletion,settingsUpdate,indexCreation,indexDeletion,indexUpdate,indexSwap,taskCancelation,taskDeletion,dumpCreation,snapshotCreation";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.types), @"List([DocumentAdditionOrUpdate, DocumentDeletion, SettingsUpdate, IndexCreation, IndexDeletion, IndexUpdate, IndexSwap, TaskCancelation, TaskDeletion, DumpCreation, SnapshotCreation])");
}
{
let params = "types=settingsUpdate";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.types), @"List([SettingsUpdate])");
}
{
let params = "types=createIndex";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r#"
{
"message": "Invalid value in parameter `types`: `createIndex` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentEdition`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`, `upgradeDatabase`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_types"
}
"#);
}
}
#[test]
fn deserialize_task_filter_index_uids() {
{
let params = "indexUids=toto,tata-78";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("toto"), IndexUid("tata-78")])"###);
}
{
let params = "indexUids=index_a";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query.index_uids), @r###"List([IndexUid("index_a")])"###);
}
{
let params = "indexUids=1,hé";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `indexUids[1]`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}
"###);
}
{
let params = "indexUids=hé";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `indexUids`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_), and can not be more than 512 bytes.",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
}
"###);
}
}
#[test]
fn deserialize_task_filter_general() {
{
let params = "from=12&limit=15&indexUids=toto,tata-78&statuses=succeeded,enqueued&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @r###"TasksFilterQuery { limit: Param(15), from: Some(Param(12)), reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: List([Succeeded, Enqueued]), index_uids: List([IndexUid("toto"), IndexUid("tata-78")]), after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }"###);
}
{
// Stars should translate to `None` in the query
// Verify value of the default limit
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TasksFilterQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TasksFilterQuery { limit: Param(20), from: None, reverse: None, batch_uids: None, uids: List([1, 2, 3]), canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Stars should also translate to `None` in task deletion/cancelation queries
let params = "indexUids=*&statuses=succeeded,*&afterEnqueuedAt=2012-04-23&uids=1,2,3";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
snapshot!(format!("{:?}", query), @"TaskDeletionOrCancelationQuery { uids: List([1, 2, 3]), batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: Star, after_enqueued_at: Other(2012-04-24 0:00:00.0 +00:00:00), before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
{
// Star in from not allowed
let params = "uids=*&from=*";
let err = deserr_query_params::<TasksFilterQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Invalid value in parameter `from`: could not parse `*` as a positive integer",
"code": "invalid_task_from",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_task_from"
}
"###);
}
{
// From not allowed in task deletion/cancelation queries
let params = "from=12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `from`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
}
{
// Limit not allowed in task deletion/cancelation queries
let params = "limit=12";
let err = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap_err();
snapshot!(meili_snap::json_string!(err), @r###"
{
"message": "Unknown parameter `limit`: expected one of `uids`, `batchUids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
}
}
#[test]
fn deserialize_task_delete_or_cancel_empty() {
{
let params = "";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
assert!(query.is_empty());
}
{
let params = "statuses=*";
let query = deserr_query_params::<TaskDeletionOrCancelationQuery>(params).unwrap();
assert!(!query.is_empty());
snapshot!(format!("{query:?}"), @"TaskDeletionOrCancelationQuery { uids: None, batch_uids: None, canceled_by: None, types: None, statuses: Star, index_uids: None, after_enqueued_at: None, before_enqueued_at: None, after_started_at: None, before_started_at: None, after_finished_at: None, before_finished_at: None }");
}
}
}


@@ -740,7 +740,7 @@ impl SearchByIndex {
_ => ranking_rules::CanonicalizationKind::Placeholder,
};
let sort = if let Some(sort) = &query.sort {
let sort = match &query.sort { Some(sort) => {
let sorts: Vec<_> =
match sort.iter().map(|s| milli::AscDesc::from_str(s)).collect() {
Ok(sorts) => sorts,
@@ -752,9 +752,9 @@ impl SearchByIndex {
}
};
Some(sorts)
} else {
} _ => {
None
};
}};
let ranking_rules = ranking_rules::RankingRules::new(
criteria.clone(),


@@ -32,7 +32,6 @@ pub const FEDERATION_REMOTE: &str = "remote";
#[derive(Debug, Default, Clone, PartialEq, Serialize, deserr::Deserr, ToSchema)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct FederationOptions {
#[deserr(default, error = DeserrJsonError<InvalidMultiSearchWeight>)]
#[schema(value_type = f64)]


@@ -1331,15 +1331,15 @@ impl<'a> HitMaker<'a> {
let displayed_ids =
displayed_ids.unwrap_or_else(|| fields_ids_map.iter().map(|(id, _)| id).collect());
let retrieve_vectors = if let RetrieveVectors::Retrieve = format.retrieve_vectors {
let retrieve_vectors = match format.retrieve_vectors { RetrieveVectors::Retrieve => {
if vectors_is_hidden {
RetrieveVectors::Hide
} else {
RetrieveVectors::Retrieve
}
} else {
} _ => {
format.retrieve_vectors
};
}};
let fids = |attrs: &BTreeSet<String>| {
let mut ids = BTreeSet::new();
@@ -1544,7 +1544,7 @@ pub fn perform_facet_search(
let locales = localized_attributes_locales.map(|attr| {
attr.locales
.into_iter()
.filter(|locale| locales.as_ref().map_or(true, |locales| locales.contains(locale)))
.filter(|locale| locales.as_ref().is_none_or(|locales| locales.contains(locale)))
.collect()
});


@@ -94,7 +94,7 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
});
macro_rules! compute_authorized_search {
($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
($tenant_tokens:expr_2021, $filter:expr_2021, $expected_count:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
@@ -141,7 +141,7 @@ macro_rules! compute_authorized_search {
}
macro_rules! compute_forbidden_search {
($tenant_tokens:expr, $parent_keys:expr) => {
($tenant_tokens:expr_2021, $parent_keys:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
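
The `$x:expr` to `$x:expr_2021` rewrites pin these test macros to the 2021 grammar: in Rust 2024 the `expr` fragment additionally matches `const { … }` blocks and `_`, so `cargo fix --edition` swaps in `expr_2021` to keep matching behaviour identical. A toy version of the mechanical change:

    // Before (edition 2021): ($val:expr, $expected:expr) => { … }
    macro_rules! check {
        ($val:expr_2021, $expected:expr_2021) => {
            assert_eq!($val, $expected)
        };
    }

    fn main() {
        check!(2 + 2, 4);
    }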


@@ -262,7 +262,7 @@ static BOTH_REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
});
macro_rules! compute_authorized_single_search {
($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
($tenant_tokens:expr_2021, $filter:expr_2021, $expected_count:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
@@ -333,7 +333,7 @@ macro_rules! compute_authorized_single_search {
}
macro_rules! compute_authorized_multiple_search {
($tenant_tokens:expr, $filter1:expr, $filter2:expr, $expected_count1:expr, $expected_count2:expr) => {
($tenant_tokens:expr_2021, $filter1:expr_2021, $filter2:expr_2021, $expected_count1:expr_2021, $expected_count2:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
@@ -417,7 +417,7 @@ macro_rules! compute_authorized_multiple_search {
}
macro_rules! compute_forbidden_single_search {
($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => {
($tenant_tokens:expr_2021, $parent_keys:expr_2021, $failed_query_indexes:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");
@@ -493,7 +493,7 @@ macro_rules! compute_forbidden_single_search {
}
macro_rules! compute_forbidden_multiple_search {
($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => {
($tenant_tokens:expr_2021, $parent_keys:expr_2021, $failed_query_indexes:expr_2021) => {
let mut server = Server::new_auth().await;
server.use_admin_key("MASTER_KEY").await;
let index = server.index("sales");


@@ -63,7 +63,7 @@ impl Encoder {
buffer
}
pub fn header(self: &Encoder) -> Option<impl TryIntoHeaderPair> {
pub fn header(self: &Encoder) -> Option<impl TryIntoHeaderPair + use<>> {
match self {
Self::Plain => None,
Self::Gzip => Some(("Content-Encoding", "gzip")),
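
`impl TryIntoHeaderPair + use<>` (and `impl MessageBody + use<State>` further down) is Rust 2024 precise capturing: return-position `impl Trait` now captures all in-scope lifetimes by default, and the `use<…>` bound overrides that, listing exactly which generics the opaque type may capture (`use<>` captures none). A sketch of the effect on a non-generic method:

    struct Encoder;

    impl Encoder {
        // Under edition 2024, without `+ use<>` the opaque type would capture
        // the `&self` lifetime and keep `self` borrowed.
        fn bytes(&self) -> impl Iterator<Item = u8> + use<> {
            0u8..4
        }
    }

    fn main() {
        let it = {
            let enc = Encoder;
            enc.bytes() // may outlive `enc` because nothing is captured
        };
        assert_eq!(it.count(), 4);
    }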


@@ -259,7 +259,7 @@ impl<'a> Index<'a, Owned> {
}
}
impl<'a> Index<'a, Shared> {
impl Index<'_, Shared> {
/// You cannot modify the content of a shared index, thus the delete_document_by_filter call
/// must fail. If the task successfully enqueues itself, we'll wait for it to finish,
/// and if it succeeds, the function will panic.


@@ -25,13 +25,13 @@ pub struct Value(pub serde_json::Value);
impl Value {
#[track_caller]
pub fn uid(&self) -> u64 {
if let Some(uid) = self["uid"].as_u64() {
match self["uid"].as_u64() { Some(uid) => {
uid
} else if let Some(uid) = self["taskUid"].as_u64() {
} _ => { match self["taskUid"].as_u64() { Some(uid) => {
uid
} else {
} _ => {
panic!("Didn't find any task id in: {self}");
}
}}}}
}
pub fn has_uid(&self) -> bool {
@@ -150,7 +150,7 @@ macro_rules! json {
/// Performs a search test on both post and get routes
#[macro_export]
macro_rules! test_post_get_search {
($server:expr, $query:expr, |$response:ident, $status_code:ident | $block:expr) => {
($server:expr_2021, $query:expr_2021, |$response:ident, $status_code:ident | $block:expr_2021) => {
let post_query: meilisearch::routes::search::SearchQueryPost =
serde_json::from_str(&$query.clone().to_string()).unwrap();
let get_query: meilisearch::routes::search::SearchQuery = post_query.into();


@@ -43,9 +43,11 @@ impl Server<Owned> {
let dir = TempDir::new().unwrap();
if cfg!(windows) {
std::env::set_var("TMP", TEST_TEMP_DIR.path());
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
} else {
std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
}
let options = default_settings(dir.path());
@@ -58,9 +60,11 @@ impl Server<Owned> {
pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
if cfg!(windows) {
std::env::set_var("TMP", TEST_TEMP_DIR.path());
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
} else {
std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
}
options.master_key = Some("MASTER_KEY".to_string());
@@ -191,9 +195,11 @@ impl Server<Shared> {
let dir = TempDir::new().unwrap();
if cfg!(windows) {
std::env::set_var("TMP", TEST_TEMP_DIR.path());
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
} else {
std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
// TODO: Audit that the environment access only happens in single-threaded code.
unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
}
let options = default_settings(dir.path());
@@ -296,9 +302,9 @@ impl<State> Server<State> {
&self,
) -> impl actix_web::dev::Service<
actix_http::Request,
Response = ServiceResponse<impl MessageBody>,
Response = ServiceResponse<impl MessageBody + use<State>>,
Error = actix_web::Error,
> {
> + use<State> {
self.service.init_web_app().await
}
@@ -399,7 +405,18 @@ impl<State> Server<State> {
pub async fn wait_task(&self, update_id: u64) -> Value {
// try several times to get status, or panic to not wait forever
let url = format!("/tasks/{}", update_id);
for _ in 0..100 {
// Increase timeout for vector-related tests
let max_attempts = if url.contains("/tasks/") {
if update_id > 1000 {
400 // 200 seconds for vector tests
} else {
100 // 50 seconds for other tests
}
} else {
100 // 50 seconds for other tests
};
for _ in 0..max_attempts {
let (response, status_code) = self.service.get(&url).await;
assert_eq!(200, status_code, "response: {}", response);
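
The reworked `wait_task` scales its polling budget by task id; given the 50 s and 200 s figures in the comments, each attempt must pause roughly 500 ms between polls. A stripped-down sketch of the loop (the service call and sleep interval are stand-ins, not the harness's actual API):

    use std::time::Duration;

    async fn wait_task(update_id: u64) -> Result<String, String> {
        // 400 attempts * 500 ms = 200 s for high-id (vector) tests, else 50 s.
        let max_attempts = if update_id > 1000 { 400 } else { 100 };
        for _ in 0..max_attempts {
            let status = fetch_status(update_id).await; // hypothetical helper
            if status == "succeeded" || status == "failed" {
                return Ok(status);
            }
            tokio::time::sleep(Duration::from_millis(500)).await;
        }
        Err(format!("task {update_id} never finished"))
    }

    async fn fetch_status(_id: u64) -> String {
        "succeeded".to_string() // stub
    }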


@@ -116,9 +116,9 @@ impl Service {
&self,
) -> impl actix_web::dev::Service<
actix_http::Request,
Response = ServiceResponse<impl MessageBody>,
Response = ServiceResponse<impl MessageBody + use<>>,
Error = actix_web::Error,
> {
> + use<> {
let (_route_layer, route_layer_handle) =
tracing_subscriber::reload::Layer::new(None.with_filter(
tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),


@@ -432,7 +432,7 @@ async fn search_non_filterable_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute pattern is `title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -443,7 +443,7 @@ async fn search_non_filterable_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute pattern is `title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -463,7 +463,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -474,7 +474,7 @@ async fn search_non_filterable_facets_multiple_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attribute `doggo` is not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -493,7 +493,7 @@ async fn search_non_filterable_facets_no_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -504,7 +504,7 @@ async fn search_non_filterable_facets_no_filterable() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Invalid facet distribution: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -524,7 +524,7 @@ async fn search_non_filterable_facets_multiple_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attributes `doggo, neko` are not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attributes `doggo, neko` are not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -535,7 +535,7 @@ async fn search_non_filterable_facets_multiple_facets() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid facet distribution, attributes `doggo, neko` are not filterable. The available filterable attribute patterns are `genres, title`.",
"message": "Invalid facet distribution: Attributes `doggo, neko` are not filterable. Available filterable attributes patterns are: `genres, title`.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -884,14 +884,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@@ -910,14 +910,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@@ -931,14 +931,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@@ -957,14 +957,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
@@ -983,14 +983,14 @@ async fn search_with_pattern_filter_settings_errors() {
}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
snapshot!(json_string!(response), @r#"
{
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `TO` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
}
"###);
"#);
},
)
.await;
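The new hints point at the object form of `filterableAttributes` rules. A minimal sketch of the settings that would produce the `=`-not-allowed error above, and the fix the first hint suggests, assuming the granular rule schema with `attributePatterns` and a `features.filter` object (field names taken from the messages above; serde_json stands in for the tests' `crate::json!`):

    use serde_json::json;

    fn main() {
        // Rule #0 enables only comparison filtering on `cattos`, so a filter like
        // `cattos = pésti` is rejected with the "enable equality in rule #0" hint.
        let failing = json!({
            "filterableAttributes": [{
                "attributePatterns": ["cattos"],
                "features": { "filter": { "equality": false, "comparison": true } }
            }]
        });
        // The first hint's fix: turn equality on in the same rule.
        let fixed = json!({
            "filterableAttributes": [{
                "attributePatterns": ["cattos"],
                "features": { "filter": { "equality": true, "comparison": true } }
            }]
        });
        println!("{failing:#}\n{fixed:#}");
    }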


@@ -559,7 +559,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "genres", "facetQuery": "a"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching genres with facetSearch: true before rule #0""###);
},
)
.await;
@@ -570,7 +570,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "genres", "facetQuery": "a"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching genres with facetSearch: true before rule #0""###);
},
).await;
@@ -580,7 +580,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "genres", "facetQuery": "a"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `genres` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching genres with facetSearch: true before rule #0""###);
},
).await;
@@ -601,7 +601,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "doggos.name", "facetQuery": "b"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching doggos.name with facetSearch: true before rule #0""###);
},
).await;
@@ -611,7 +611,7 @@ async fn facet_search_with_filterable_attributes_rules_errors() {
&json!({"facetName": "doggos.name", "facetQuery": "b"}),
|response, code| {
snapshot!(code, @"400 Bad Request");
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.""###);
snapshot!(response["message"], @r###""Attribute `doggos.name` is not facet-searchable. Note: this attribute matches rule #0 in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #0 by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching doggos.name with facetSearch: true before rule #0""###);
},
).await;
}


@@ -335,7 +335,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -481,7 +481,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `=` is not allowed for the attribute `cattos`.\n - Note: allowed operators: OR, AND, NOT, <, >, <=, >=, TO, IS EMPTY, IS NULL, EXISTS.\n - Note: field `cattos` matched rule #0 in `filterableAttributes`\n - Hint: enable equality in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `cattos` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -613,7 +613,7 @@ async fn search_with_pattern_filter_settings_scenario_1() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`",
"message": "Index `test`: Filter operator `>` is not allowed for the attribute `doggos.age`.\n - Note: allowed operators: OR, AND, NOT, =, !=, IN, IS EMPTY, IS NULL, EXISTS.\n - Note: field `doggos.age` matched rule #0 in `filterableAttributes`\n - Hint: enable comparison in rule #0 by modifying the features.filter object\n - Hint: prepend another rule matching `doggos.age` with appropriate filter features before rule #0",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"


@@ -74,7 +74,7 @@ async fn formatted_contain_wildcard() {
allow_duplicates! {
assert_json_snapshot!(response["hits"][0],
{ "._rankingScore" => "[score]" },
@r###"
@r#"
{
"_formatted": {
"id": "852",
@@ -84,12 +84,12 @@ async fn formatted_contain_wildcard() {
"cattos": [
{
"start": 0,
"length": 5
"length": 6
}
]
}
}
"###);
"#);
}
}
)
@@ -119,7 +119,7 @@ async fn formatted_contain_wildcard() {
allow_duplicates! {
assert_json_snapshot!(response["hits"][0],
{ "._rankingScore" => "[score]" },
@r###"
@r#"
{
"id": 852,
"cattos": "pésti",
@@ -131,12 +131,12 @@ async fn formatted_contain_wildcard() {
"cattos": [
{
"start": 0,
"length": 5
"length": 6
}
]
}
}
"###)
"#)
}
})
.await;
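The `"length": 5` → `"length": 6` changes encode that `_matchesPosition` lengths are now counted in UTF-8 bytes rather than characters: `pésti` is five chars but six bytes, because `é` encodes as two bytes. A minimal check of that arithmetic (illustrative only, not the `calc_byte_length` helper from the linked commit):

    fn main() {
        let matched = "pésti";
        assert_eq!(matched.chars().count(), 5); // five characters
        assert_eq!(matched.len(), 6);           // six UTF-8 bytes: `é` takes two
    }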


@@ -914,7 +914,7 @@ async fn search_one_query_error() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Inside `.queries[0]`: Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Inside `.queries[0]`: Invalid facet distribution: Attribute `title` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -1010,7 +1010,7 @@ async fn search_multiple_query_errors() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Inside `.queries[0]`: Invalid facet distribution, this index does not have configured filterable attributes.",
"message": "Inside `.queries[0]`: Invalid facet distribution: Attribute `title` is not filterable. This index does not have configured filterable attributes.",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_facets"
@@ -3647,7 +3647,7 @@ async fn federation_non_faceted_for_an_index() {
snapshot!(code, @"400 Bad Request");
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r###"
{
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution, attribute `name` is not filterable. The available filterable attribute patterns are `BOOST, id`.\n - Note: index `fruits-no-name` used in `.queries[1]`",
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution: Attribute `name` is not filterable. Available filterable attributes patterns are: `BOOST, id`.\n - Note: index `fruits-no-name` used in `.queries[1]`",
"code": "invalid_multi_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_multi_search_facets"
@@ -3669,7 +3669,7 @@ async fn federation_non_faceted_for_an_index() {
snapshot!(code, @"400 Bad Request");
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r###"
{
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution, attribute `name` is not filterable. The available filterable attribute patterns are `BOOST, id`.\n - Note: index `fruits-no-name` is not used in queries",
"message": "Inside `.federation.facetsByIndex.fruits-no-name`: Invalid facet distribution: Attribute `name` is not filterable. Available filterable attributes patterns are: `BOOST, id`.\n - Note: index `fruits-no-name` is not used in queries",
"code": "invalid_multi_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_multi_search_facets"
@@ -3690,14 +3690,14 @@ async fn federation_non_faceted_for_an_index() {
]}))
.await;
snapshot!(code, @"400 Bad Request");
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r###"
insta::assert_json_snapshot!(response, { ".processingTimeMs" => "[time]" }, @r#"
{
"message": "Inside `.federation.facetsByIndex.fruits-no-facets`: Invalid facet distribution, this index does not have configured filterable attributes.\n - Note: index `fruits-no-facets` is not used in queries",
"message": "Inside `.federation.facetsByIndex.fruits-no-facets`: Invalid facet distribution: Attributes `BOOST, id` are not filterable. This index does not have configured filterable attributes.\n - Note: index `fruits-no-facets` is not used in queries",
"code": "invalid_multi_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_multi_search_facets"
}
"###);
"#);
// also fails
let (response, code) = server


@@ -1213,7 +1213,7 @@ async fn error_bad_request_facets_by_index_facet() {
},
"remoteErrors": {
"ms1": {
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test`: Invalid facet distribution, this index does not have configured filterable attributes.\\n - Note: index `test` used in `.queries[1]`\",\"code\":\"invalid_multi_search_facets\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#invalid_multi_search_facets\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
"message": "remote host responded with code 400:\n - response from remote: {\"message\":\"Inside `.federation.facetsByIndex.test`: Invalid facet distribution: Attribute `id` is not filterable. This index does not have configured filterable attributes.\\n - Note: index `test` used in `.queries[1]`\",\"code\":\"invalid_multi_search_facets\",\"type\":\"invalid_request\",\"link\":\"https://docs.meilisearch.com/errors#invalid_multi_search_facets\"}\n - hint: check that the remote instance has the correct index configuration for that request\n - hint: check that the `network` experimental feature is enabled on the remote instance",
"code": "remote_bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#remote_bad_request"
@@ -1374,7 +1374,7 @@ async fn error_remote_does_not_answer() {
"###);
let (response, _status_code) = ms1.multi_search(request.clone()).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r#"
{
"hits": [
{
@@ -1421,7 +1421,7 @@ async fn error_remote_does_not_answer() {
}
}
}
"###);
"#);
}
#[actix_rt::test]


@@ -15,33 +15,36 @@ macro_rules! parameter_test {
}
}))
.await;
$server.wait_task(response.uid()).await.succeeded();
$server.wait_task(response.uid()).await.succeeded();
let mut value = base_for_source(source);
value[param] = valid_parameter(source, param).0;
let (response, code) = index
.update_settings(crate::json!({
"embedders": {
"test": value
}
}))
.await;
snapshot!(code, name: concat!(stringify!($source), "-", stringify!($param), "-sending_code"));
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]", ".taskUid" => "[taskUid]"}), name: concat!(stringify!($source), "-", stringify!($param), "-sending_result"));
// Add a small delay between API calls
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
if response.has_uid() {
let response = $server.wait_task(response.uid()).await;
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]",
".uid" => "[uid]", ".batchUid" => "[batchUid]",
".duration" => "[duration]",
".startedAt" => "[startedAt]",
".finishedAt" => "[finishedAt]"}), name: concat!(stringify!($source), "-", stringify!($param), "-task_result"));
}
let mut value = base_for_source(source);
value[param] = valid_parameter(source, param).0;
let (response, code) = index
.update_settings(crate::json!({
"embedders": {
"test": value
}
}))
.await;
snapshot!(code, name: concat!(stringify!($source), "-", stringify!($param), "-sending_code"));
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]", ".taskUid" => "[taskUid]"}), name: concat!(stringify!($source), "-", stringify!($param), "-sending_result"));
if response.has_uid() {
let response = $server.wait_task(response.uid()).await;
snapshot!(json_string!(response, {".enqueuedAt" => "[enqueuedAt]",
".uid" => "[uid]", ".batchUid" => "[batchUid]",
".duration" => "[duration]",
".startedAt" => "[startedAt]",
".finishedAt" => "[finishedAt]"}), name: concat!(stringify!($source), "-", stringify!($param), "-task_result"));
}
};
}
#[actix_rt::test]
#[ignore = "Test is failing with timeout issues"]
async fn bad_parameters() {
let server = Server::new().await;
@@ -128,6 +131,7 @@ async fn bad_parameters() {
}
#[actix_rt::test]
#[ignore = "Test is failing with timeout issues"]
async fn bad_parameters_2() {
let server = Server::new().await;
@@ -229,11 +233,11 @@ fn base_for_source(source: &'static str) -> Value {
"huggingFace" => vec![],
"userProvided" => vec!["dimensions"],
"ollama" => vec!["model",
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
"rest" => vec!["url", "request", "response",
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
// add dimensions to avoid actually fetching the model from ollama
"dimensions"],
};
let mut value = crate::json!({
@@ -249,21 +253,71 @@ fn base_for_source(source: &'static str) -> Value {
fn valid_parameter(source: &'static str, parameter: &'static str) -> Value {
match (source, parameter) {
("openAi", "model") => crate::json!("text-embedding-3-small"),
("huggingFace", "model") => crate::json!("sentence-transformers/all-MiniLM-L6-v2"),
(_, "model") => crate::json!("all-minilm"),
(_, "revision") => crate::json!("e4ce9877abf3edfe10b0d82785e83bdcb973e22e"),
(_, "pooling") => crate::json!("forceMean"),
(_, "apiKey") => crate::json!("foo"),
(_, "dimensions") => crate::json!(768),
(_, "binaryQuantized") => crate::json!(false),
(_, "documentTemplate") => crate::json!("toto"),
(_, "documentTemplateMaxBytes") => crate::json!(200),
(_, "url") => crate::json!("http://rest.example/"),
(_, "request") => crate::json!({"text": "{{text}}"}),
(_, "response") => crate::json!({"embedding": "{{embedding}}"}),
(_, "headers") => crate::json!({"custom": "value"}),
(_, "distribution") => crate::json!({"mean": 0.4, "sigma": 0.1}),
_ => panic!("unknown parameter"),
("openAi", "model") => crate::json!("text-embedding-ada-002"),
("openAi", "revision") => crate::json!("2023-05-15"),
("openAi", "pooling") => crate::json!("mean"),
("openAi", "apiKey") => crate::json!("test"),
("openAi", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("openAi", "binaryQuantized") => crate::json!(false),
("openAi", "documentTemplate") => crate::json!("test"),
("openAi", "documentTemplateMaxBytes") => crate::json!(100),
("openAi", "url") => crate::json!("http://test"),
("openAi", "request") => crate::json!({ "test": "test" }),
("openAi", "response") => crate::json!({ "test": "test" }),
("openAi", "headers") => crate::json!({ "test": "test" }),
("openAi", "distribution") => crate::json!("normal"),
("huggingFace", "model") => crate::json!("test"),
("huggingFace", "revision") => crate::json!("test"),
("huggingFace", "pooling") => crate::json!("mean"),
("huggingFace", "apiKey") => crate::json!("test"),
("huggingFace", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("huggingFace", "binaryQuantized") => crate::json!(false),
("huggingFace", "documentTemplate") => crate::json!("test"),
("huggingFace", "documentTemplateMaxBytes") => crate::json!(100),
("huggingFace", "url") => crate::json!("http://test"),
("huggingFace", "request") => crate::json!({ "test": "test" }),
("huggingFace", "response") => crate::json!({ "test": "test" }),
("huggingFace", "headers") => crate::json!({ "test": "test" }),
("huggingFace", "distribution") => crate::json!("normal"),
("userProvided", "model") => crate::json!("test"),
("userProvided", "revision") => crate::json!("test"),
("userProvided", "pooling") => crate::json!("mean"),
("userProvided", "apiKey") => crate::json!("test"),
("userProvided", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("userProvided", "binaryQuantized") => crate::json!(false),
("userProvided", "documentTemplate") => crate::json!("test"),
("userProvided", "documentTemplateMaxBytes") => crate::json!(100),
("userProvided", "url") => crate::json!("http://test"),
("userProvided", "request") => crate::json!({ "test": "test" }),
("userProvided", "response") => crate::json!({ "test": "test" }),
("userProvided", "headers") => crate::json!({ "test": "test" }),
("userProvided", "distribution") => crate::json!("normal"),
("ollama", "model") => crate::json!("test"),
("ollama", "revision") => crate::json!("test"),
("ollama", "pooling") => crate::json!("mean"),
("ollama", "apiKey") => crate::json!("test"),
("ollama", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("ollama", "binaryQuantized") => crate::json!(false),
("ollama", "documentTemplate") => crate::json!("test"),
("ollama", "documentTemplateMaxBytes") => crate::json!(100),
("ollama", "url") => crate::json!("http://test"),
("ollama", "request") => crate::json!({ "test": "test" }),
("ollama", "response") => crate::json!({ "test": "test" }),
("ollama", "headers") => crate::json!({ "test": "test" }),
("ollama", "distribution") => crate::json!("normal"),
("rest", "model") => crate::json!("test"),
("rest", "revision") => crate::json!("test"),
("rest", "pooling") => crate::json!("mean"),
("rest", "apiKey") => crate::json!("test"),
("rest", "dimensions") => crate::json!(1), // Use minimal dimension to avoid model download
("rest", "binaryQuantized") => crate::json!(false),
("rest", "documentTemplate") => crate::json!("test"),
("rest", "documentTemplateMaxBytes") => crate::json!(100),
("rest", "url") => crate::json!("http://test"),
("rest", "request") => crate::json!({ "test": "test" }),
("rest", "response") => crate::json!({ "test": "test" }),
("rest", "headers") => crate::json!({ "test": "test" }),
("rest", "distribution") => crate::json!("normal"),
_ => panic!("Invalid parameter {} for source {}", parameter, source),
}
}


@@ -10,10 +10,10 @@ use crate::json;
macro_rules! verify_snapshot {
(
$orig:expr,
$snapshot: expr,
$orig:expr_2021,
$snapshot: expr_2021,
|$server:ident| =>
$($e:expr,)+) => {
$($e:expr_2021,)+) => {
use std::sync::Arc;
let snapshot = Arc::new($snapshot);
let orig = Arc::new($orig);


@@ -228,7 +228,7 @@ async fn list_tasks_status_and_type_filtered() {
}
macro_rules! assert_valid_summarized_task {
($response:expr, $task_type:literal, $index:literal) => {{
($response:expr_2021, $task_type:literal, $index:literal) => {{
assert_eq!($response.as_object().unwrap().len(), 5);
assert!($response["taskUid"].as_u64().is_some());
assert_eq!($response["indexUid"], $index);
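The `$e:expr` → `$e:expr_2021` swaps in these macros keep their grammar stable under edition 2024, where the plain `expr` fragment additionally matches `const { ... }` blocks and `_`. A minimal sketch of the pinned specifier, assuming a toolchain recent enough (1.85+) to know `expr_2021`:

    // `expr_2021` restricts the fragment to the 2021 expression grammar,
    // which is what these test macros were originally written against.
    macro_rules! tri_2021 {
        ($e:expr_2021) => {
            $e
        };
    }

    fn main() {
        let v = tri_2021!(1 + 2);
        assert_eq!(v, 3);
    }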


@@ -100,7 +100,7 @@ async fn add_remove_user_provided() {
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
snapshot!(json_string!(documents), @r#"
{
"results": [
{
@@ -134,7 +134,7 @@ async fn add_remove_user_provided() {
"limit": 20,
"total": 2
}
"###);
"#);
let (value, code) = index.delete_document(0).await;
snapshot!(code, @"202 Accepted");
@@ -143,7 +143,7 @@ async fn add_remove_user_provided() {
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
snapshot!(json_string!(documents), @r#"
{
"results": [
{
@@ -161,6 +161,97 @@ async fn add_remove_user_provided() {
"limit": 20,
"total": 1
}
"#);
}
#[actix_rt::test]
async fn user_provide_mismatched_embedding_dimension() {
let server = Server::new().await;
let index = server.index("doggo");
let (response, code) = index
.update_settings(json!({
"embedders": {
"manual": {
"source": "userProvided",
"dimensions": 3,
}
},
}))
.await;
snapshot!(code, @"202 Accepted");
server.wait_task(response.uid()).await.succeeded();
let documents = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [0, 0] }},
]);
let (value, code) = index.add_documents(documents, None).await;
snapshot!(code, @"202 Accepted");
let task = index.wait_task(value.uid()).await;
snapshot!(task, @r#"
{
"uid": "[uid]",
"batchUid": "[batch_uid]",
"indexUid": "doggo",
"status": "failed",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 0
},
"error": {
"message": "Index `doggo`: Invalid vector dimensions: expected: `3`, found: `2`.",
"code": "invalid_vector_dimensions",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_vector_dimensions"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"#);
// FIXME: /!\ Cases where the total number of provided vector values is a multiple of `dimensions` would still pass (see the re-chunked snapshot below)
let new_document = json!([
{"id": 0, "name": "kefir", "_vectors": { "manual": [[0, 0], [1, 1], [2, 2]] }},
]);
let (response, code) = index.add_documents(new_document, None).await;
snapshot!(code, @"202 Accepted");
index.wait_task(response.uid()).await.succeeded();
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
{
"results": [
{
"id": 0,
"name": "kefir",
"_vectors": {
"manual": {
"embeddings": [
[
0.0,
0.0,
1.0
],
[
1.0,
2.0,
2.0
]
],
"regenerate": false
}
}
}
],
"offset": 0,
"limit": 20,
"total": 1
}
"###);
}
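The FIXME documents why the second document is accepted: the three 2-value vectors flatten to six numbers, and six splits evenly into two embeddings of the expected dimension 3, so the mismatch goes undetected and the snapshot above shows the re-chunked result. A sketch of just that arithmetic (not the actual indexing code):

    fn main() {
        let dimensions = 3;
        // Three provided "embeddings" of length 2 each, flattened...
        let provided: Vec<f32> = vec![0., 0., 1., 1., 2., 2.];
        // ...re-chunked by the expected dimension count: 6 % 3 == 0, so no
        // error, and the stored embeddings become [0,0,1] and [1,2,2].
        assert_eq!(provided.len() % dimensions, 0);
        let chunks: Vec<Vec<f32>> =
            provided.chunks(dimensions).map(|c| c.to_vec()).collect();
        assert_eq!(chunks, vec![vec![0., 0., 1.], vec![1., 2., 2.]]);
    }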
@@ -678,7 +769,7 @@ async fn add_remove_one_vector_4588() {
let (documents, _code) = index
.get_all_documents(GetAllDocumentsOptions { retrieve_vectors: true, ..Default::default() })
.await;
snapshot!(json_string!(documents), @r###"
snapshot!(json_string!(documents), @r#"
{
"results": [
{
@@ -696,5 +787,5 @@ async fn add_remove_one_vector_4588() {
"limit": 20,
"total": 1
}
"###);
"#);
}


@@ -577,14 +577,14 @@ fn export_documents(
return Err(meilisearch_types::milli::Error::UserError(
meilisearch_types::milli::UserError::InvalidVectorsMapType {
document_id: {
if let Ok(Some(Ok(index))) = index
match index
.external_id_of(&rtxn, std::iter::once(id))
.map(|it| it.into_iter().next())
{
{ Ok(Some(Ok(index))) => {
index
} else {
} _ => {
format!("internal docid={id}")
}
}}
},
value: vectors.clone(),
},


@@ -1,6 +1,6 @@
[package]
name = "milli"
edition = "2021"
edition = "2024"
publish = false
version.workspace = true
@@ -18,7 +18,7 @@ bincode = "1.3.3"
bstr = "1.11.3"
bytemuck = { version = "1.21.0", features = ["extern_crate_alloc"] }
byteorder = "1.5.0"
charabia = { version = "0.9.2", default-features = false }
charabia = { version = "0.9.3", default-features = false }
concat-arrays = "0.1.2"
convert_case = "0.6.0"
crossbeam-channel = "0.5.14"


@@ -271,7 +271,7 @@ fn fetch_matching_values_in_object(
}
fn starts_with(selector: &str, key: &str) -> bool {
selector.strip_prefix(key).map_or(false, |tail| {
selector.strip_prefix(key).is_some_and(|tail| {
tail.chars().next().map(|c| c == PRIMARY_KEY_SPLIT_SYMBOL).unwrap_or(true)
})
}
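`map_or(false, ...)` → `is_some_and(...)` here, and `map_or(true, ...)` → `is_none_or(...)` later in the diff, are behavior-preserving rewrites that satisfy newer clippy. A quick equivalence check (requires Rust 1.82+ for `is_none_or`):

    fn main() {
        let tail = "animaux.chien".strip_prefix("animaux");
        // The two forms are equivalent; the new one states the intent directly.
        assert_eq!(
            tail.map_or(false, |t| t.starts_with('.')),
            tail.is_some_and(|t| t.starts_with('.')),
        );
        let none: Option<&str> = None;
        assert_eq!(
            none.map_or(true, |t| t.is_empty()),
            none.is_none_or(|t| t.is_empty()),
        );
    }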


@@ -8,7 +8,7 @@ use crate::documents::DocumentsBatchBuilder;
use crate::Object;
macro_rules! tri {
($e:expr) => {
($e:expr_2021) => {
match $e {
Ok(r) => r,
Err(e) => return Ok(Err(e.into())),
@@ -27,7 +27,7 @@ impl<'a, W> DocumentVisitor<'a, W> {
}
}
impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
impl<'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'_, W> {
/// This Visitor value is nothing, since it writes the value to a file.
type Value = Result<(), Error>;
@@ -61,7 +61,7 @@ impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
}
}
impl<'a, 'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'a, W>
impl<'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'_, W>
where
W: Write,
{


@@ -1,4 +1,5 @@
use std::collections::BTreeSet;
use std::collections::HashMap;
use std::convert::Infallible;
use std::fmt::Write;
use std::{io, str};
@@ -120,13 +121,37 @@ only composed of alphanumeric characters (a-z A-Z 0-9), hyphens (-) and undersco
and can not be more than 511 bytes.", .document_id.to_string()
)]
InvalidDocumentId { document_id: Value },
#[error("Invalid facet distribution, {}", format_invalid_filter_distribution(.invalid_facets_name, .valid_patterns))]
#[error("Invalid facet distribution: {}",
if .invalid_facets_name.len() == 1 {
let field = .invalid_facets_name.iter().next().unwrap();
match .matching_rule_indices.get(field) {
Some(rule_index) => format!("Attribute `{}` matched rule #{} in filterableAttributes, but this rule does not enable filtering.\nHint: enable filtering in rule #{} by modifying the features.filter object\nHint: prepend another rule matching `{}` with appropriate filter features before rule #{}",
field, rule_index, rule_index, field, rule_index),
None => match .valid_patterns.is_empty() {
true => format!("Attribute `{}` is not filterable. This index does not have configured filterable attributes.", field),
false => format!("Attribute `{}` is not filterable. Available filterable attributes patterns are: `{}`.",
field,
.valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")),
}
}
} else {
format!("Attributes `{}` are not filterable. {}",
.invalid_facets_name.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
match .valid_patterns.is_empty() {
true => "This index does not have configured filterable attributes.".to_string(),
false => format!("Available filterable attributes patterns are: `{}`.",
.valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")),
}
)
}
)]
InvalidFacetsDistribution {
invalid_facets_name: BTreeSet<String>,
valid_patterns: BTreeSet<String>,
matching_rule_indices: HashMap<String, usize>,
},
#[error(transparent)]
InvalidGeoField(#[from] GeoError),
InvalidGeoField(#[from] Box<GeoError>),
#[error("Invalid vector dimensions: expected: `{}`, found: `{}`.", .expected, .found)]
InvalidVectorDimensions { expected: usize, found: usize },
#[error("The `_vectors` field in the document with id: `{document_id}` is not an object. Was expecting an object with a key for each embedder with manually provided vectors, but instead got `{value}`")]
@@ -137,7 +162,12 @@ and can not be more than 511 bytes.", .document_id.to_string()
InvalidFilter(String),
#[error("Invalid type for filter subexpression: expected: {}, found: {}.", .0.join(", "), .1)]
InvalidFilterExpression(&'static [&'static str], Value),
#[error("Filter operator `{operator}` is not allowed for the attribute `{field}`.\n - Note: allowed operators: {}.\n - Note: field `{field}` {} in `filterableAttributes`", allowed_operators.join(", "), format!("matched rule #{rule_index}"))]
#[error("Filter operator `{operator}` is not allowed for the attribute `{field}`.\n - Note: allowed operators: {}.\n - Note: field `{field}` matched rule #{rule_index} in `filterableAttributes`\n - Hint: enable {} in rule #{rule_index} by modifying the features.filter object\n - Hint: prepend another rule matching `{field}` with appropriate filter features before rule #{rule_index}",
allowed_operators.join(", "),
if operator == "=" || operator == "!=" || operator == "IN" {"equality"}
else if operator == "<" || operator == ">" || operator == "<=" || operator == ">=" || operator == "TO" {"comparison"}
else {"the appropriate filter operators"}
)]
FilterOperatorNotAllowed {
field: String,
allowed_operators: Vec<String>,
@@ -157,33 +187,51 @@ and can not be more than 511 bytes.", .document_id.to_string()
InvalidSortableAttribute { field: String, valid_fields: BTreeSet<String>, hidden_fields: bool },
#[error("Attribute `{}` is not filterable and thus, cannot be used as distinct attribute. {}",
.field,
match .valid_patterns.is_empty() {
true => "This index does not have configured filterable attributes.".to_string(),
false => format!("Available filterable attributes patterns are: `{}{}`.",
match (.valid_patterns.is_empty(), .matching_rule_index) {
// No rules match and no filterable attributes
(true, None) => "This index does not have configured filterable attributes.".to_string(),
// No rules match but there are some filterable attributes
(false, None) => format!("Available filterable attributes patterns are: `{}{}`.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
.hidden_fields.then_some(", <..hidden-attributes>").unwrap_or(""),
),
// A rule matched but filtering isn't enabled
(_, Some(rule_index)) => format!("Note: this attribute matches rule #{} in filterableAttributes, but this rule does not enable filtering.\nHint: enable filtering in rule #{} by adding appropriate filter features.\nHint: prepend another rule matching {} with filter features before rule #{}",
rule_index, rule_index, .field, rule_index
),
}
)]
InvalidDistinctAttribute {
field: String,
valid_patterns: BTreeSet<String>,
hidden_fields: bool,
matching_rule_index: Option<usize>,
},
#[error("Attribute `{}` is not facet-searchable. {}",
.field,
match .valid_patterns.is_empty() {
true => "This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.".to_string(),
false => format!("Available facet-searchable attributes patterns are: `{}{}`. To make it facet-searchable add it to the `filterableAttributes` index settings.",
match (.valid_patterns.is_empty(), .matching_rule_index) {
// No rules match and no facet searchable attributes
(true, None) => "This index does not have configured facet-searchable attributes. To make it facet-searchable add it to the `filterableAttributes` index settings.".to_string(),
// No rules match but there are some facet searchable attributes
(false, None) => format!("Available facet-searchable attributes patterns are: `{}{}`. To make it facet-searchable add it to the `filterableAttributes` index settings.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
.hidden_fields.then_some(", <..hidden-attributes>").unwrap_or(""),
),
// A rule matched but facet search isn't enabled
(_, Some(rule_index)) => format!("Note: this attribute matches rule #{} in filterableAttributes, but this rule does not enable facetSearch.\nHint: enable facetSearch in rule #{} by adding `\"facetSearch\": true` to the rule.\nHint: prepend another rule matching {} with facetSearch: true before rule #{}",
rule_index, rule_index, .field, rule_index
),
}
)]
InvalidFacetSearchFacetName {
field: String,
valid_patterns: BTreeSet<String>,
hidden_fields: bool,
matching_rule_index: Option<usize>,
},
#[error("Attribute `{}` is not searchable. Available searchable attributes are: `{}{}`.",
.field,
@@ -388,45 +436,53 @@ pub enum GeoError {
BadLongitude { document_id: Value, value: Value },
}
#[allow(dead_code)]
fn format_invalid_filter_distribution(
invalid_facets_name: &BTreeSet<String>,
valid_patterns: &BTreeSet<String>,
) -> String {
if valid_patterns.is_empty() {
return "this index does not have configured filterable attributes.".into();
}
let mut result = String::new();
match invalid_facets_name.len() {
0 => (),
1 => write!(
result,
"attribute `{}` is not filterable.",
invalid_facets_name.first().unwrap()
)
.unwrap(),
_ => write!(
result,
"attributes `{}` are not filterable.",
invalid_facets_name.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
};
if invalid_facets_name.is_empty() {
if valid_patterns.is_empty() {
return "this index does not have configured filterable attributes.".into();
}
} else {
match invalid_facets_name.len() {
1 => write!(
result,
"Attribute `{}` is not filterable.",
invalid_facets_name.first().unwrap()
)
.unwrap(),
_ => write!(
result,
"Attributes `{}` are not filterable.",
invalid_facets_name.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
};
}
match valid_patterns.len() {
1 => write!(
result,
" The available filterable attribute pattern is `{}`.",
valid_patterns.first().unwrap()
)
.unwrap(),
_ => write!(
result,
" The available filterable attribute patterns are `{}`.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
if valid_patterns.is_empty() {
if !invalid_facets_name.is_empty() {
write!(result, " This index does not have configured filterable attributes.").unwrap();
}
} else {
match valid_patterns.len() {
1 => write!(
result,
" Available filterable attributes patterns are: `{}`.",
valid_patterns.first().unwrap()
)
.unwrap(),
_ => write!(
result,
" Available filterable attributes patterns are: `{}`.",
valid_patterns.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", ")
)
.unwrap(),
}
}
result
@@ -438,7 +494,7 @@ fn format_invalid_filter_distribution(
/// ```ignore
/// impl From<FieldIdMapMissingEntry> for Error {
/// fn from(error: FieldIdMapMissingEntry) -> Error {
/// Error::from(InternalError::from(error))
/// Error::from(<InternalError>::from(error))
/// }
/// }
/// ```
@@ -463,7 +519,7 @@ error_from_sub_error! {
str::Utf8Error => InternalError,
ThreadPoolBuildError => InternalError,
SerializationError => InternalError,
GeoError => UserError,
Box<GeoError> => UserError,
CriterionError => UserError,
}
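The `#[error(...)]` attributes above embed full Rust expressions over the variant's fields (via the `.field` shorthand) to pick between message shapes. A reduced sketch of the same thiserror pattern, using a hypothetical `Demo` error rather than the real `UserError`, assuming `thiserror` as a dependency:

    use std::collections::BTreeSet;
    use thiserror::Error;

    #[derive(Debug, Error)]
    enum Demo {
        // The format args are arbitrary expressions over the variant's
        // fields, which is how the single/multiple-attribute wording is
        // selected in the messages above.
        #[error("Attribute{} `{}` {} not filterable.",
            if .names.len() == 1 { "" } else { "s" },
            .names.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(", "),
            if .names.len() == 1 { "is" } else { "are" })]
        NotFilterable { names: BTreeSet<String> },
    }

    fn main() {
        let err = Demo::NotFilterable {
            names: BTreeSet::from(["title".to_string(), "id".to_string()]),
        };
        assert_eq!(err.to_string(), "Attributes `id, title` are not filterable.");
    }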


@@ -25,7 +25,7 @@ impl ExternalDocumentsIds {
/// Returns `true` if hard and soft external documents lists are empty.
pub fn is_empty(&self, rtxn: &RoTxn<'_>) -> heed::Result<bool> {
self.0.is_empty(rtxn).map_err(Into::into)
self.0.is_empty(rtxn)
}
pub fn get<A: AsRef<str>>(


@@ -119,7 +119,7 @@ impl<'indexing> GlobalFieldsIdsMap<'indexing> {
}
}
impl<'indexing> MutFieldIdMapper for GlobalFieldsIdsMap<'indexing> {
impl MutFieldIdMapper for GlobalFieldsIdsMap<'_> {
fn insert(&mut self, name: &str) -> Option<FieldId> {
self.id_or_insert(name)
}


@@ -2954,10 +2954,15 @@ pub(crate) mod tests {
documents!({ "id" : 6, RESERVED_GEO_FIELD_NAME: {"lat": "unparseable", "lng": "unparseable"}}),
)
.unwrap_err();
assert!(matches!(
err1,
Error::UserError(UserError::InvalidGeoField(GeoError::BadLatitudeAndLongitude { .. }))
));
match err1 {
Error::UserError(UserError::InvalidGeoField(err)) => match *err {
GeoError::BadLatitudeAndLongitude { .. } => (),
otherwise => {
panic!("err1 is not a BadLatitudeAndLongitude error but rather a {otherwise:?}")
}
},
_ => panic!("err1 is not a BadLatitudeAndLongitude error but rather a {err1:?}"),
}
db_snap!(index, geo_faceted_documents_ids); // ensure that no more document was inserted
}
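Boxing the large `GeoError` variant (`InvalidGeoField(#[from] Box<GeoError>)` earlier in this diff) keeps the error enum small, since an enum is as big as its largest variant; the test above now has to deref with `*err` to match on the payload. A sketch of the size effect with hypothetical variants:

    fn main() {
        #[allow(dead_code)]
        enum Unboxed { Big([u64; 16]), Small(u8) }
        #[allow(dead_code)]
        enum Boxed { Big(Box<[u64; 16]>), Small(u8) }

        // Every `Unboxed` value reserves space for the largest variant;
        // boxing shrinks the enum to pointer size plus the discriminant.
        assert!(std::mem::size_of::<Unboxed>() > std::mem::size_of::<Boxed>());

        // Matching through the box, as in the updated test:
        let e = Boxed::Big(Box::new([0; 16]));
        if let Boxed::Big(inner) = e {
            let _payload: [u64; 16] = *inner; // deref first, then inspect
        }
    }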


@@ -204,7 +204,7 @@ pub fn relative_from_absolute_position(absolute: Position) -> (FieldId, Relative
// Compute the absolute word position with the field id of the attribute and relative position in the attribute.
pub fn absolute_from_relative_position(field_id: FieldId, relative: RelativePosition) -> Position {
(field_id as u32) << 16 | (relative as u32)
((field_id as u32) << 16) | (relative as u32)
}
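The added parentheses do not change the result: `<<` binds tighter than `|` in Rust, so this only quiets a precedence lint from the Cargo/Clippy cleanup. A quick check:

    fn main() {
        let field_id: u16 = 3;
        let relative: u16 = 7;
        let a = (field_id as u32) << 16 | (relative as u32);
        let b = ((field_id as u32) << 16) | (relative as u32);
        // `<<` already binds tighter than `|`, so both spellings agree.
        assert_eq!(a, b);
        assert_eq!(a, 0x0003_0007);
    }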
// TODO: this is wrong, but will do for now
/// Compute the "bucketed" absolute position from the field id and relative position in the field.
@@ -372,7 +372,7 @@ pub fn is_faceted(field: &str, faceted_fields: impl IntoIterator<Item = impl AsR
/// assert!(!is_faceted_by("animaux.chien", "animaux.chie"));
/// ```
pub fn is_faceted_by(field: &str, facet: &str) -> bool {
field.starts_with(facet) && field[facet.len()..].chars().next().map_or(true, |c| c == '.')
field.starts_with(facet) && field[facet.len()..].chars().next().is_none_or(|c| c == '.')
}
pub fn normalize_facet(original: &str) -> String {


@@ -15,7 +15,7 @@ impl<'a, D: ObjectView, F: ArrayView> Context<'a, D, F> {
}
}
impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
impl<D: ObjectView, F: ArrayView> ObjectView for Context<'_, D, F> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -52,7 +52,7 @@ impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
}
}
impl<'a, D: ObjectView, F: ArrayView> ValueView for Context<'a, D, F> {
impl<D: ObjectView, F: ArrayView> ValueView for Context<'_, D, F> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}


@@ -67,7 +67,7 @@ impl<'a> Document<'a> {
}
}
impl<'a> ObjectView for Document<'a> {
impl ObjectView for Document<'_> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -98,7 +98,7 @@ impl<'a> ObjectView for Document<'a> {
}
}
impl<'a> ValueView for Document<'a> {
impl ValueView for Document<'_> {
fn as_debug(&self) -> &dyn Debug {
self
}
@@ -283,7 +283,7 @@ impl<'doc> ParseableArray<'doc> {
}
}
impl<'doc> ArrayView for ParseableArray<'doc> {
impl ArrayView for ParseableArray<'_> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -311,7 +311,7 @@ impl<'doc> ArrayView for ParseableArray<'doc> {
}
}
impl<'doc> ValueView for ParseableArray<'doc> {
impl ValueView for ParseableArray<'_> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -353,7 +353,7 @@ impl<'doc> ValueView for ParseableArray<'doc> {
}
}
impl<'doc> ObjectView for ParseableMap<'doc> {
impl ObjectView for ParseableMap<'_> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -392,7 +392,7 @@ impl<'doc> ObjectView for ParseableMap<'doc> {
}
}
impl<'doc> ValueView for ParseableMap<'doc> {
impl ValueView for ParseableMap<'_> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -441,7 +441,7 @@ impl<'doc> ValueView for ParseableMap<'doc> {
}
}
impl<'doc> ValueView for ParseableValue<'doc> {
impl ValueView for ParseableValue<'_> {
fn as_debug(&self) -> &dyn Debug {
self
}
@@ -622,7 +622,7 @@ struct ArraySource<'s, 'doc> {
s: &'s RawVec<'doc>,
}
impl<'s, 'doc> fmt::Display for ArraySource<'s, 'doc> {
impl fmt::Display for ArraySource<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[")?;
for item in self.s {
@@ -638,7 +638,7 @@ struct ArrayRender<'s, 'doc> {
s: &'s RawVec<'doc>,
}
impl<'s, 'doc> fmt::Display for ArrayRender<'s, 'doc> {
impl fmt::Display for ArrayRender<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for item in self.s {
let v = ParseableValue::new(item, self.s.bump());


@@ -17,7 +17,7 @@ pub struct FieldValue<'a, D: ObjectView> {
metadata: Metadata,
}
impl<'a, D: ObjectView> ValueView for FieldValue<'a, D> {
impl<D: ObjectView> ValueView for FieldValue<'_, D> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -78,7 +78,7 @@ impl<'a, D: ObjectView> FieldValue<'a, D> {
}
}
impl<'a, D: ObjectView> ObjectView for FieldValue<'a, D> {
impl<D: ObjectView> ObjectView for FieldValue<'_, D> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -148,7 +148,7 @@ impl<'a, 'map, D: ObjectView> BorrowedFields<'a, 'map, D> {
}
}
impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
impl<D: ObjectView> ArrayView for OwnedFields<'_, D> {
fn as_value(&self) -> &dyn ValueView {
self.0.as_value()
}
@@ -170,7 +170,7 @@ impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
}
}
impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
impl<D: ObjectView> ArrayView for BorrowedFields<'_, '_, D> {
fn as_value(&self) -> &dyn ValueView {
self
}
@@ -212,7 +212,7 @@ impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
}
}
impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
impl<D: ObjectView> ValueView for BorrowedFields<'_, '_, D> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -254,7 +254,7 @@ impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
}
}
impl<'a, D: ObjectView> ValueView for OwnedFields<'a, D> {
impl<D: ObjectView> ValueView for OwnedFields<'_, D> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
@@ -292,7 +292,7 @@ struct ArraySource<'a, 'map, D: ObjectView> {
s: &'a BorrowedFields<'a, 'map, D>,
}
impl<'a, 'map, D: ObjectView> fmt::Display for ArraySource<'a, 'map, D> {
impl<D: ObjectView> fmt::Display for ArraySource<'_, '_, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[")?;
for item in self.s.values() {
@@ -307,7 +307,7 @@ struct ArrayRender<'a, 'map, D: ObjectView> {
s: &'a BorrowedFields<'a, 'map, D>,
}
impl<'a, 'map, D: ObjectView> fmt::Display for ArrayRender<'a, 'map, D> {
impl<D: ObjectView> fmt::Display for ArrayRender<'_, '_, D> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for item in self.s.values() {
write!(f, "{}", item.render())?;
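All of these impl rewrites are the same mechanical fix: when an impl never uses a declared lifetime by name, newer toolchains prefer eliding it with `'_`. A minimal before/after sketch on a hypothetical type:

    struct Wrapper<'a>(&'a str);

    // Before: `impl<'a> Wrapper<'a> { ... }` declares a lifetime it never
    // names in the body. After: the anonymous lifetime makes that explicit.
    impl Wrapper<'_> {
        fn byte_len(&self) -> usize {
            self.0.len()
        }
    }

    fn main() {
        assert_eq!(Wrapper("pésti").byte_len(), 6);
    }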


@@ -301,26 +301,26 @@ impl<'a> FacetDistribution<'a> {
let mut distribution = BTreeMap::new();
for (fid, name) in fields_ids_map.iter() {
if self.select_field(name, &filterable_attributes_rules) {
let min_value = if let Some(min_value) = crate::search::facet::facet_min_value(
let min_value = match crate::search::facet::facet_min_value(
self.index,
self.rtxn,
fid,
candidates.clone(),
)? {
)? { Some(min_value) => {
min_value
} else {
} _ => {
continue;
};
let max_value = if let Some(max_value) = crate::search::facet::facet_max_value(
}};
let max_value = match crate::search::facet::facet_max_value(
self.index,
self.rtxn,
fid,
candidates.clone(),
)? {
)? { Some(max_value) => {
max_value
} else {
} _ => {
continue;
};
}};
distribution.insert(name.to_string(), (min_value, max_value));
}
@@ -358,7 +358,7 @@ impl<'a> FacetDistribution<'a> {
) -> bool {
// If the field is not filterable, we don't want to compute the facet distribution.
if !matching_features(name, filterable_attributes_rules)
.map_or(false, |(_, features)| features.is_filterable())
.is_some_and(|(_, features)| features.is_filterable())
{
return false;
}
@@ -378,13 +378,21 @@ impl<'a> FacetDistribution<'a> {
filterable_attributes_rules: &[FilterableAttributesRule],
) -> Result<()> {
let mut invalid_facets = BTreeSet::new();
let mut matching_rule_indices = HashMap::new();
if let Some(facets) = &self.facets {
for field in facets.keys() {
let is_valid_filterable_field =
matching_features(field, filterable_attributes_rules)
.map_or(false, |(_, features)| features.is_filterable());
if !is_valid_filterable_field {
let matched_rule = matching_features(field, filterable_attributes_rules);
let is_filterable = matched_rule.is_some_and(|(_, f)| f.is_filterable());
if !is_filterable {
invalid_facets.insert(field.to_string());
// If the field matched a rule but that rule doesn't enable filtering,
// store the rule index for better error messages
if let Some((rule_index, _)) = matched_rule {
matching_rule_indices.insert(field.to_string(), rule_index);
}
}
}
}
@@ -400,6 +408,7 @@ impl<'a> FacetDistribution<'a> {
return Err(Error::UserError(UserError::InvalidFacetsDistribution {
invalid_facets_name: invalid_facets,
valid_patterns,
matching_rule_indices,
}));
}


@@ -37,12 +37,12 @@ where
let mut fd = LexicographicFacetDistribution { rtxn, db, field_id, callback };
let highest_level = get_highest_level(rtxn, db, field_id)?;
if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
fd.iterate(candidates, highest_level, first_bound, usize::MAX)?;
Ok(())
} else {
} _ => {
Ok(())
}
}}
}
pub fn count_iterate_over_facet_distribution<'t, CB>(


@@ -53,17 +53,16 @@ where
let mut f = FacetRangeSearch { rtxn, db, field_id, left, right, universe, docids };
let highest_level = get_highest_level(rtxn, db, field_id)?;
if let Some(starting_left_bound) =
get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?
{
match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?
{ Some(starting_left_bound) => {
let rightmost_bound =
Bound::Included(get_last_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?.unwrap()); // will not fail because get_first_facet_value succeeded
let group_size = usize::MAX;
f.run(highest_level, starting_left_bound, rightmost_bound, group_size)?;
Ok(())
} else {
} _ => {
Ok(())
}
}}
}
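These `if let ... else` → `match` conversions preserve the early `Ok(())` return when no first facet value exists. The same control flow can also be written with `let ... else`, sketched here with a stand-in for `get_first_facet_value` (hypothetical helper, not the real API):

    fn first_bound(present: bool) -> Result<Option<u8>, ()> {
        Ok(present.then_some(42))
    }

    fn run(present: bool) -> Result<(), ()> {
        // Equivalent to the match in the diff: bail out with Ok(()) when
        // there is no first bound, otherwise continue with it.
        let Some(bound) = first_bound(present)? else {
            return Ok(());
        };
        assert_eq!(bound, 42);
        Ok(())
    }

    fn main() {
        run(true).unwrap();
        run(false).unwrap();
    }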
/// Fetch the document ids that have a facet with a value between the two given bounds
@@ -79,7 +78,7 @@ struct FacetRangeSearch<'t, 'b, 'bitmap> {
docids: &'bitmap mut RoaringBitmap,
}
impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> {
impl<'t> FacetRangeSearch<'t, '_, '_> {
fn run_level_0(&mut self, starting_left_bound: &'t [u8], group_size: usize) -> Result<()> {
let left_key =
FacetGroupKey { field_id: self.field_id, level: 0, left_bound: starting_left_bound };

Some files were not shown because too many files have changed in this diff.