Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-13 16:07:00 +00:00)

Compare commits: better-gre ... bump-to-ed (8 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | aa87064a13 |  |
|  | 2762d5a32a |  |
|  | a0bfcf8872 |  |
|  | 64477aac60 |  |
|  | 4d90e3d2ec |  |
|  | 249da5846c |  |
|  | ee15d4fe77 |  |
|  | f0f6c3000f |  |
.github/workflows/bench-manual.yml (vendored, 39 lines changed)

@@ -1,28 +1,27 @@
 name: Bench (manual)

 on:
   workflow_dispatch:
     inputs:
       workload:
-        description: 'The path to the workloads to execute (workloads/...)'
+        description: "The path to the workloads to execute (workloads/...)"
         required: true
-        default: 'workloads/movies.json'
+        default: "workloads/movies.json"

 env:
   WORKLOAD_NAME: ${{ github.event.inputs.workload }}

 jobs:
   benchmarks:
     name: Run and upload benchmarks
     runs-on: benchmarks
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

-      - name: Run benchmarks - workload ${WORKLOAD_NAME} - branch ${{ github.ref }} - commit ${{ github.sha }}
-        run: |
-          cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Manual [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- ${WORKLOAD_NAME}
-
+      - name: Run benchmarks - workload ${WORKLOAD_NAME} - branch ${{ github.ref }} - commit ${{ github.sha }}
+        run: |
+          cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Manual [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- ${WORKLOAD_NAME}

.github/workflows/bench-pr.yml (vendored, 136 lines changed)

@@ -1,82 +1,82 @@
 name: Bench (PR)
 on:
   issue_comment:
     types: [created]

 permissions:
   issues: write

 env:
   GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}

 jobs:
   run-benchmarks-on-comment:
     if: startsWith(github.event.comment.body, '/bench')
     name: Run and upload benchmarks
     runs-on: benchmarks
     timeout-minutes: 180 # 3h
     steps:
       - name: Check permissions
         id: permission
         env:
           PR_AUTHOR: ${{github.event.issue.user.login }}
           COMMENT_AUTHOR: ${{github.event.comment.user.login }}
           REPOSITORY: ${{github.repository}}
           PR_ID: ${{github.event.issue.number}}
         run: |
           PR_REPOSITORY=$(gh api /repos/"$REPOSITORY"/pulls/"$PR_ID" --jq .head.repo.full_name)
           if $(gh api /repos/"$REPOSITORY"/collaborators/"$PR_AUTHOR"/permission --jq .user.permissions.push)
           then
             echo "::notice title=Authentication success::PR author authenticated"
           else
             echo "::error title=Authentication error::PR author doesn't have push permission on this repository"
             exit 1
           fi
           if $(gh api /repos/"$REPOSITORY"/collaborators/"$COMMENT_AUTHOR"/permission --jq .user.permissions.push)
           then
             echo "::notice title=Authentication success::Comment author authenticated"
           else
             echo "::error title=Authentication error::Comment author doesn't have push permission on this repository"
             exit 1
           fi
           if [ "$PR_REPOSITORY" = "$REPOSITORY" ]
           then
             echo "::notice title=Authentication success::PR started from main repository"
           else
             echo "::error title=Authentication error::PR started from a fork"
             exit 1
           fi

       - name: Check for Command
         id: command
         uses: xt0rted/slash-command-action@v2
         with:
           command: bench
           reaction-type: "rocket"
           repo-token: ${{ env.GH_TOKEN }}

       - uses: xt0rted/pull-request-comment-branch@v3
         id: comment-branch
         with:
           repo_token: ${{ env.GH_TOKEN }}

       - uses: actions/checkout@v3
         if: success()
         with:
           fetch-depth: 0 # fetch full history to be able to get main commit sha
           ref: ${{ steps.comment-branch.outputs.head_ref }}

-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

       - name: Run benchmarks on PR ${{ github.event.issue.id }}
         run: |
           cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" \
             --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" \
             --reason "[Comment](${{ github.event.comment.html_url }}) on [#${{ github.event.issue.number }}](${{ github.event.issue.html_url }})" \
             -- ${{ steps.command.outputs.command-arguments }} > benchlinks.txt

       - name: Send comment in PR
         run: |
           gh pr comment ${{github.event.issue.number}} --body-file benchlinks.txt

.github/workflows/bench-push-indexing.yml (vendored, 33 lines changed)

@@ -1,23 +1,22 @@
 name: Indexing bench (push)

 on:
   push:
     branches:
       - main

 jobs:
   benchmarks:
     name: Run and upload benchmarks
     runs-on: benchmarks
     timeout-minutes: 180 # 3h
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

-      # Run benchmarks
-      - name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
-        run: |
-          cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Push on `main` [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- workloads/*.json
-
+      # Run benchmarks
+      - name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
+        run: |
+          cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Push on `main` [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- workloads/*.json

.github/workflows/benchmarks-manual.yml (vendored, 8 lines changed)

@@ -4,9 +4,9 @@ on:
   workflow_dispatch:
     inputs:
       dataset_name:
-        description: 'The name of the dataset used to benchmark (search_songs, search_wiki, search_geo or indexing)'
+        description: "The name of the dataset used to benchmark (search_songs, search_wiki, search_geo or indexing)"
         required: false
-        default: 'search_songs'
+        default: "search_songs"

 env:
   BENCH_NAME: ${{ github.event.inputs.dataset_name }}
@@ -18,7 +18,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

@@ -67,7 +67,7 @@ jobs:
           out_dir: critcmp_results

       # Helper
-      - name: 'README: compare with another benchmark'
+      - name: "README: compare with another benchmark"
         run: |
           echo "${{ steps.file.outputs.basename }}.json has just been pushed."
           echo 'How to compare this benchmark with another one?'

.github/workflows/benchmarks-pr.yml (vendored, 2 lines changed)

@@ -44,7 +44,7 @@ jobs:
             exit 1
           fi

-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

@@ -16,7 +16,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

@@ -69,7 +69,7 @@ jobs:
         run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug

       # Helper
-      - name: 'README: compare with another benchmark'
+      - name: "README: compare with another benchmark"
         run: |
           echo "${{ steps.file.outputs.basename }}.json has just been pushed."
           echo 'How to compare this benchmark with another one?'

@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

@@ -68,7 +68,7 @@ jobs:
         run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug

       # Helper
-      - name: 'README: compare with another benchmark'
+      - name: "README: compare with another benchmark"
         run: |
           echo "${{ steps.file.outputs.basename }}.json has just been pushed."
           echo 'How to compare this benchmark with another one?'

@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

@@ -68,7 +68,7 @@ jobs:
         run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug

       # Helper
-      - name: 'README: compare with another benchmark'
+      - name: "README: compare with another benchmark"
         run: |
           echo "${{ steps.file.outputs.basename }}.json has just been pushed."
           echo 'How to compare this benchmark with another one?'

@@ -15,7 +15,7 @@ jobs:
     runs-on: benchmarks
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

@@ -68,7 +68,7 @@ jobs:
         run: telegraf --config https://eu-central-1-1.aws.cloud2.influxdata.com/api/v2/telegrafs/08b52e34a370b000 --once --debug

       # Helper
-      - name: 'README: compare with another benchmark'
+      - name: "README: compare with another benchmark"
         run: |
           echo "${{ steps.file.outputs.basename }}.json has just been pushed."
           echo 'How to compare this benchmark with another one?'
.github/workflows/flaky-tests.yml (vendored, 2 lines changed)

@@ -17,7 +17,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
       - name: Install cargo-flaky
         run: cargo install cargo-flaky
       - name: Run cargo flaky in the dumps

.github/workflows/fuzzer-indexing.yml (vendored, 2 lines changed)

@@ -12,7 +12,7 @@ jobs:
     timeout-minutes: 4320 # 72h
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal

.github/workflows/publish-apt-brew-pkg.yml (vendored, 2 lines changed)

@@ -25,7 +25,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
       - name: Install cargo-deb
         run: cargo install cargo-deb
       - uses: actions/checkout@v3

.github/workflows/publish-binaries.yml (vendored, 8 lines changed)

@@ -45,7 +45,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
       - name: Build
         run: cargo build --release --locked
       # No need to upload binaries for dry run (cron)
@@ -75,7 +75,7 @@ jobs:
           asset_name: meilisearch-windows-amd64.exe
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
       - name: Build
         run: cargo build --release --locked
       # No need to upload binaries for dry run (cron)
@@ -101,7 +101,7 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v3
       - name: Installing Rust toolchain
-        uses: dtolnay/rust-toolchain@1.81
+        uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal
           target: ${{ matrix.target }}
@@ -148,7 +148,7 @@ jobs:
         add-apt-repository "deb [arch=$(dpkg --print-architecture)] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
         apt-get update -y && apt-get install -y docker-ce
       - name: Installing Rust toolchain
-        uses: dtolnay/rust-toolchain@1.81
+        uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal
           target: ${{ matrix.target }}

.github/workflows/test-suite.yml (vendored, 14 lines changed)

@@ -27,7 +27,7 @@ jobs:
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
       - name: Setup test with Rust stable
-        uses: dtolnay/rust-toolchain@1.81
+        uses: dtolnay/rust-toolchain@1.85
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.7
       - name: Run cargo check without any default features
@@ -52,7 +52,7 @@ jobs:
       - uses: actions/checkout@v3
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.7
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -77,7 +77,7 @@ jobs:
         run: |
           apt-get update
           apt-get install --assume-yes build-essential curl
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
       - name: Run cargo build with almost all features
         run: |
           cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda,test-ollama)"
@@ -129,7 +129,7 @@ jobs:
         run: |
           apt-get update
           apt-get install --assume-yes build-essential curl
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
       - name: Run cargo tree without default features and check lindera is not present
         run: |
           if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -qz lindera; then
@@ -153,7 +153,7 @@ jobs:
         run: |
           apt-get update && apt-get install -y curl
           apt-get install build-essential -y
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.7
       - name: Run tests in debug
@@ -167,7 +167,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal
           components: clippy
@@ -184,7 +184,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal
           toolchain: nightly-2024-07-09

@@ -4,7 +4,7 @@ on:
   workflow_dispatch:
     inputs:
       new_version:
-        description: 'The new version (vX.Y.Z)'
+        description: "The new version (vX.Y.Z)"
         required: true

 env:
@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: dtolnay/rust-toolchain@1.81
+      - uses: dtolnay/rust-toolchain@1.85
         with:
           profile: minimal
       - name: Install sd
Cargo.lock (generated, 2165 lines changed)

File diff suppressed because it is too large.
@@ -30,7 +30,7 @@ authors = [
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"
 readme = "README.md"
-edition = "2021"
+edition = "2024"
 license = "MIT"

 [profile.release]
@@ -1,5 +1,5 @@
 # Compile
-FROM rust:1.81.0-alpine3.20 AS compiler
+FROM rust:1.85-alpine3.20 AS compiler

 RUN apk add -q --no-cache build-base openssl-dev

@@ -108,7 +108,7 @@ where
 /// not supported on untagged enums.
 struct StarOrVisitor<T>(PhantomData<T>);

-impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
+impl<T, FE> Visitor<'_> for StarOrVisitor<T>
 where
     T: FromStr<Err = FE>,
     FE: Display,
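The hunk above (and several later ones) replaces an impl-level lifetime that is only ever mentioned once with the anonymous lifetime `'_`, the form suggested by lints such as clippy's `needless_lifetimes`. A minimal sketch of the pattern on a made-up `Wrapper` type, not taken from the diff:

```rust
use std::fmt;

struct Wrapper<'a>(&'a str);

// Before: `impl<'a> fmt::Display for Wrapper<'a> { ... }` declares a lifetime that
// carries no extra information. After: the anonymous lifetime says "some lifetime,
// no need to name it".
impl fmt::Display for Wrapper<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

fn main() {
    println!("{}", Wrapper("hello"));
}
```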
@@ -99,7 +99,7 @@ impl Task {
     /// Return true when a task is finished.
     /// A task is finished when its last state is either `Succeeded` or `Failed`.
     pub fn is_finished(&self) -> bool {
-        self.events.last().map_or(false, |event| {
+        self.events.last().is_some_and(|event| {
             matches!(event, TaskEvent::Succeded { .. } | TaskEvent::Failed { .. })
         })
     }
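`Option::is_some_and` (stable since Rust 1.70) is the idiomatic form of `map_or(false, ...)`, and recent clippy versions flag the old spelling. A small equivalence sketch, independent of the Meilisearch task types:

```rust
fn last_is_even(xs: &[u32]) -> bool {
    // Both formulations return false for an empty slice.
    let old = xs.last().map_or(false, |x| x % 2 == 0);
    let new = xs.last().is_some_and(|x| x % 2 == 0);
    assert_eq!(old, new);
    new
}

fn main() {
    assert!(last_is_even(&[1, 4]));
    assert!(!last_is_even(&[7]));
    assert!(!last_is_even(&[]));
}
```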
@@ -108,7 +108,7 @@ where
 /// not supported on untagged enums.
 struct StarOrVisitor<T>(PhantomData<T>);

-impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
+impl<T, FE> Visitor<'_> for StarOrVisitor<T>
 where
     T: FromStr<Err = FE>,
     FE: Display,
@@ -114,7 +114,7 @@ impl Task {
     /// Return true when a task is finished.
     /// A task is finished when its last state is either `Succeeded` or `Failed`.
     pub fn is_finished(&self) -> bool {
-        self.events.last().map_or(false, |event| {
+        self.events.last().is_some_and(|event| {
             matches!(event, TaskEvent::Succeeded { .. } | TaskEvent::Failed { .. })
         })
     }
@@ -275,19 +275,19 @@ impl From<Task> for TaskView {
         match (result, &mut details) {
             (
                 TaskResult::DocumentAddition { indexed_documents: num, .. },
-                Some(TaskDetails::DocumentAddition { ref mut indexed_documents, .. }),
+                Some(TaskDetails::DocumentAddition { indexed_documents, .. }),
             ) => {
                 indexed_documents.replace(*num);
             }
             (
                 TaskResult::DocumentDeletion { deleted_documents: docs, .. },
-                Some(TaskDetails::DocumentDeletion { ref mut deleted_documents, .. }),
+                Some(TaskDetails::DocumentDeletion { deleted_documents, .. }),
             ) => {
                 deleted_documents.replace(*docs);
             }
             (
                 TaskResult::ClearAll { deleted_documents: docs },
-                Some(TaskDetails::ClearAll { ref mut deleted_documents }),
+                Some(TaskDetails::ClearAll { deleted_documents }),
             ) => {
                 deleted_documents.replace(*docs);
             }
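The `ref mut` modifiers dropped in the second hunk are already implied: the scrutinee is matched through a mutable reference, so match ergonomics bind those fields as `&mut` by default, and under the 2024 edition rules the redundant explicit modifier is no longer accepted there (that reading is an assumption based on the edition-migration context of this change set). A sketch with invented types:

```rust
struct Details {
    indexed_documents: Option<u64>,
}

fn set_count(details: &mut Option<Details>, num: u64) {
    // Matching through `&mut Option<Details>`: `indexed_documents` already binds as
    // `&mut Option<u64>`, so writing `ref mut indexed_documents` adds nothing.
    if let Some(Details { indexed_documents }) = details {
        indexed_documents.replace(num);
    }
}

fn main() {
    let mut details = Some(Details { indexed_documents: None });
    set_count(&mut details, 42);
    assert_eq!(details.unwrap().indexed_documents, Some(42));
}
```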
@@ -170,14 +170,14 @@ impl UpdateFile {
     }

     pub fn push_document(&mut self, document: &Document) -> Result<()> {
-        if let Some(mut writer) = self.writer.as_mut() {
+        match self.writer.as_mut() { Some(mut writer) => {
             serde_json::to_writer(&mut writer, &document)?;
             writer.write_all(b"\n")?;
-        } else {
+        } _ => {
             let file = File::create(&self.path).unwrap();
             self.writer = Some(BufWriter::new(file));
             self.push_document(document)?;
-        }
+        }}
         Ok(())
     }

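This is the first of many hunks that mechanically rewrite `if let ... else` into a `match` with a `_` arm, which is consistent with what `cargo fix --edition` produces for the Rust 2024 `if let` temporary-scope change (an assumption; the change set itself does not say which tool was used): in 2024 the scrutinee's temporaries drop before the `else` branch runs, and the `match` form keeps the 2021 drop order. A sketch of the kind of code the conservative rewrite protects, with hypothetical types not taken from the diff:

```rust
use std::sync::Mutex;

fn read_or_default(m: &Mutex<Option<i32>>) -> i32 {
    // With `if let Some(v) = m.lock().unwrap().as_ref() { .. } else { .. }`, the
    // MutexGuard temporary lives to the end of the whole statement in Rust 2021 but
    // is dropped before the else branch in Rust 2024. The match form keeps the guard
    // alive for both arms, i.e. the 2021 behaviour, in every edition.
    match m.lock().unwrap().as_ref() {
        Some(v) => *v,
        _ => 0,
    }
}

fn main() {
    let m = Mutex::new(Some(7));
    assert_eq!(read_or_default(&m), 7);
}
```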
@@ -111,7 +111,7 @@ impl FileStore {
     }

     /// List the Uuids of the files in the FileStore
-    pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>>> {
+    pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>> + use<>> {
         Ok(self.path.read_dir()?.filter_map(|entry| {
             let file_name = match entry {
                 Ok(entry) => entry.file_name(),
@@ -158,19 +158,19 @@ impl File {

 impl Write for File {
     fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
-        if let Some(file) = self.file.as_mut() {
+        match self.file.as_mut() { Some(file) => {
             file.write(buf)
-        } else {
+        } _ => {
             Ok(buf.len())
-        }
+        }}
     }

     fn flush(&mut self) -> std::io::Result<()> {
-        if let Some(file) = self.file.as_mut() {
+        match self.file.as_mut() { Some(file) => {
             file.flush()
-        } else {
+        } _ => {
             Ok(())
-        }
+        }}
     }
 }

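The `+ use<>` added to `all_uuids` is precise capturing: in the 2024 edition, return-position `impl Trait` captures all in-scope lifetimes by default, including the one behind `&self`, and `+ use<>` opts back out so the returned iterator is not tied to that borrow. A self-contained sketch with an invented `Store` type:

```rust
struct Store {
    items: Vec<u32>,
}

impl Store {
    // Under the 2024 edition rules this return type would otherwise capture the
    // lifetime of `&self`; `+ use<>` declares that it captures nothing.
    fn doubled(&self) -> impl Iterator<Item = u32> + use<> {
        // Collect first so the returned iterator owns its data.
        self.items.iter().map(|x| x * 2).collect::<Vec<_>>().into_iter()
    }
}

fn main() {
    let s = Store { items: vec![1, 2, 3] };
    let it = s.doubled();
    drop(s); // fine: the iterator borrows nothing from `s`
    assert_eq!(it.collect::<Vec<_>>(), vec![2, 4, 6]);
}
```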
@@ -3,7 +3,7 @@ name = "filter-parser-fuzz"
 version = "0.0.0"
 authors = ["Automatically generated"]
 publish = false
-edition = "2018"
+edition = "2024"

 [package.metadata]
 cargo-fuzz = true
@@ -35,7 +35,7 @@ impl<E> NomErrorExt<E> for nom::Err<E> {
 pub fn cut_with_err<'a, O>(
     mut parser: impl FnMut(Span<'a>) -> IResult<'a, O>,
     mut with: impl FnMut(Error<'a>) -> Error<'a>,
-) -> impl FnMut(Span<'a>) -> IResult<O> {
+) -> impl FnMut(Span<'a>) -> IResult<'a, O> {
     move |input| match parser.parse(input) {
         Err(nom::Err::Error(e)) => Err(nom::Err::Failure(with(e))),
         rest => rest,
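Here (and in the `word_not_keyword` hunk further down) the elided lifetime argument on the `IResult` alias is written out as `IResult<'a, O>`. Spelling out hidden lifetime arguments is what rustc's `elided_lifetimes_in_paths` lint asks for, and newer toolchains warn more about mixing elided and named lifetimes in one signature; the exact motivation here is an assumption. A small sketch with a made-up alias standing in for `IResult`:

```rust
// `Parsed` plays the role of the crate's `IResult` alias.
type Parsed<'a, T> = (&'a str, T);

// `-> Parsed<u32>` would hide the fact that the result borrows from `input`;
// `Parsed<'_, u32>` (or a named lifetime) makes the borrow visible in the signature.
fn take_len(input: &str) -> Parsed<'_, u32> {
    (input, input.len() as u32)
}

fn main() {
    assert_eq!(take_len("abc"), ("abc", 3));
}
```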
@@ -121,7 +121,7 @@ impl<'a> ParseError<Span<'a>> for Error<'a> {
     }
 }

-impl<'a> Display for Error<'a> {
+impl Display for Error<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         let input = self.context.fragment();
         // When printing our error message we want to escape all `\n` to be sure we keep our format with the
@@ -198,7 +198,7 @@ impl<'a> Display for Error<'a> {
                 f,
                 "Encountered an internal `{:?}` error while parsing your filter. Please fill an issue", kind
             )?,
-            ErrorKind::External(ref error) => writeln!(f, "{}", error)?,
+            ErrorKind::External(error) => writeln!(f, "{}", error)?,
         }
         let base_column = self.context.get_utf8_column();
         let size = self.context.fragment().chars().count();
@@ -80,7 +80,7 @@ pub struct Token<'a> {
     value: Option<String>,
 }

-impl<'a> PartialEq for Token<'a> {
+impl PartialEq for Token<'_> {
     fn eq(&self, other: &Self) -> bool {
         self.span.fragment() == other.span.fragment()
     }
@@ -226,7 +226,7 @@ impl<'a> FilterCondition<'a> {
         }
     }

-    pub fn parse(input: &'a str) -> Result<Option<Self>, Error> {
+    pub fn parse(input: &'a str) -> Result<Option<Self>, Error<'a>> {
         if input.trim().is_empty() {
             return Ok(None);
         }
@@ -527,7 +527,7 @@ pub fn parse_filter(input: Span) -> IResult<FilterCondition> {
     terminated(|input| parse_expression(input, 0), eof)(input)
 }

-impl<'a> std::fmt::Display for FilterCondition<'a> {
+impl std::fmt::Display for FilterCondition<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             FilterCondition::Not(filter) => {
@@ -576,7 +576,8 @@ impl<'a> std::fmt::Display for FilterCondition<'a> {
         }
     }
 }
-impl<'a> std::fmt::Display for Condition<'a> {
+
+impl std::fmt::Display for Condition<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Condition::GreaterThan(token) => write!(f, "> {token}"),
@@ -594,7 +595,8 @@ impl<'a> std::fmt::Display for Condition<'a> {
         }
     }
 }
-impl<'a> std::fmt::Display for Token<'a> {
+
+impl std::fmt::Display for Token<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "{{{}}}", self.value())
     }
@@ -52,7 +52,7 @@ fn quoted_by(quote: char, input: Span) -> IResult<Token> {
 }

 // word = (alphanumeric | _ | - | .)+ except for reserved keywords
-pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<Token<'a>> {
+pub fn word_not_keyword<'a>(input: Span<'a>) -> IResult<'a, Token<'a>> {
     let (input, word): (_, Token<'a>) =
         take_while1(is_value_component)(input).map(|(s, t)| (s, t.into()))?;
     if is_keyword(word.value()) {
@@ -3,7 +3,7 @@ name = "flatten-serde-json-fuzz"
 version = "0.0.0"
 authors = ["Automatically generated"]
 publish = false
-edition = "2018"
+edition = "2024"

 [package.metadata]
 cargo-fuzz = true
@@ -272,11 +272,11 @@ impl IndexMapper {
             if tries >= 100 {
                 panic!("Too many attempts to close index {name} prior to deletion.")
             }
-            let reopen = if let Some(reopen) = reopen.wait_timeout(Duration::from_secs(6)) {
+            let reopen = match reopen.wait_timeout(Duration::from_secs(6)) { Some(reopen) => {
                 reopen
-            } else {
+            } _ => {
                 continue;
-            };
+            }};
             reopen.close(&mut self.index_map.write().unwrap());
             continue;
         }
@@ -382,11 +382,11 @@ impl IndexMapper {
                 Available(index) => break index,
                 Closing(reopen) => {
                     // Avoiding deadlocks: no lock taken while doing this operation.
-                    let reopen = if let Some(reopen) = reopen.wait_timeout(Duration::from_secs(6)) {
+                    let reopen = match reopen.wait_timeout(Duration::from_secs(6)) { Some(reopen) => {
                         reopen
-                    } else {
+                    } _ => {
                         continue;
-                    };
+                    }};
                     let index_path = self.base_path.join(uuid.to_string());
                     // take the lock to reopen the environment.
                     reopen
@@ -355,19 +355,19 @@ impl IndexScheduler {
     }

     fn is_good_heed(tasks_path: &Path, map_size: usize) -> bool {
-        if let Ok(env) = unsafe {
+        match unsafe {
             heed::EnvOpenOptions::new().map_size(clamp_to_page_size(map_size)).open(tasks_path)
-        } {
+        } { Ok(env) => {
             env.prepare_for_closing().wait();
             true
-        } else {
+        } _ => {
             // We're treating all errors equally here, not only allocation errors.
             // This means there's a possiblity for the budget to lower due to errors different from allocation errors.
             // For persistent errors, this is OK as long as the task db is then reopened normally without ignoring the error this time.
             // For transient errors, this could lead to an instance with too low a budget.
             // However transient errors are: 1) less likely than persistent errors 2) likely to cause other issues down the line anyway.
             false
-        }
+        }}
     }

     pub fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
@@ -696,7 +696,7 @@ impl IndexScheduler {
             written: usize,
         }

-        impl<'a, 'b> Read for TaskReader<'a, 'b> {
+        impl Read for TaskReader<'_, '_> {
             fn read(&mut self, mut buf: &mut [u8]) -> std::io::Result<usize> {
                 if self.buffer.is_empty() {
                     match self.tasks.next() {
@@ -315,7 +315,7 @@ impl Queue {
         if let Some(batch_uids) = batch_uids {
             let mut batch_tasks = RoaringBitmap::new();
             for batch_uid in batch_uids {
-                if processing_batch.as_ref().map_or(false, |batch| batch.uid == *batch_uid) {
+                if processing_batch.as_ref().is_some_and(|batch| batch.uid == *batch_uid) {
                     batch_tasks |= &**processing_tasks;
                 } else {
                     batch_tasks |= self.tasks_in_batch(rtxn, *batch_uid)?;
@@ -219,7 +219,7 @@ impl BatchKind {
                 primary_key.is_some() &&
                 // 2.1.1 If the task we're trying to accumulate have a pk it must be equal to our primary key
                 // 2.1.2 If the task don't have a primary-key -> we can continue
-                kind.primary_key().map_or(true, |pk| pk == primary_key)
+                kind.primary_key().is_none_or(|pk| pk == primary_key)
             ) ||
             // 2.2 If we don't have a primary-key ->
             (
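`Option::is_none_or` (stable since Rust 1.82) is the counterpart of `is_some_and` for the `map_or(true, ...)` pattern used here: the predicate must hold if a value is present, and `None` counts as acceptable. A small equivalence sketch with invented names:

```rust
fn pk_compatible(incoming: Option<&str>, current: &str) -> bool {
    // Both forms are true when `incoming` is None ("no opinion" is compatible).
    let old = incoming.map_or(true, |pk| pk == current);
    let new = incoming.is_none_or(|pk| pk == current);
    assert_eq!(old, new);
    new
}

fn main() {
    assert!(pk_compatible(None, "id"));
    assert!(pk_compatible(Some("id"), "id"));
    assert!(!pk_compatible(Some("uuid"), "id"));
}
```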
@@ -10,7 +10,7 @@ use crate::TaskId;

 #[macro_export]
 macro_rules! debug_snapshot {
-    ($value:expr, @$snapshot:literal) => {{
+    ($value:expr_2021, @$snapshot:literal) => {{
         let value = format!("{:?}", $value);
         meili_snap::snapshot!(value, @$snapshot);
     }};
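`expr_2021` pins a `macro_rules!` fragment to what `expr` matched before the 2024 edition; in 2024 the plain `expr` fragment also matches `_` and `const { ... }` expressions, so the edition migration conservatively rewrites `expr` to `expr_2021` to keep macro behaviour identical (the same change appears in the meili-snap macros below). A minimal sketch:

```rust
// Matches exactly what `$value:expr` matched in the 2021 edition.
macro_rules! dbg_text {
    ($value:expr_2021) => {
        format!("{:?}", $value)
    };
}

fn main() {
    assert_eq!(dbg_text!(1 + 1), "2");
    assert_eq!(dbg_text!("hi"), "\"hi\"");
}
```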
@@ -499,13 +499,13 @@ impl IndexScheduler {
         // create the batch directly. Otherwise, get the index name associated with the task
         // and use the autobatcher to batch the enqueued tasks associated with it

-        let index_name = if let Some(&index_name) = task.indexes().first() {
+        let index_name = match task.indexes().first() { Some(&index_name) => {
             index_name
-        } else {
+        } _ => {
             assert!(matches!(&task.kind, KindWithContent::IndexSwap { swaps } if swaps.is_empty()));
             current_batch.processing(Some(&mut task));
             return Ok(Some((Batch::IndexSwap { task }, current_batch)));
-        };
+        }};

         let index_already_exists = self.index_mapper.exists(rtxn, index_name)?;
         let mut primary_key = None;
@@ -47,11 +47,11 @@ impl IndexScheduler {
             Batch::TaskCancelation { mut task } => {
                 // 1. Retrieve the tasks that matched the query at enqueue-time.
                 let matched_tasks =
-                    if let KindWithContent::TaskCancelation { tasks, query: _ } = &task.kind {
+                    match &task.kind { KindWithContent::TaskCancelation { tasks, query: _ } => {
                         tasks
-                    } else {
+                    } _ => {
                         unreachable!()
-                    };
+                    }};

                 let rtxn = self.env.read_txn()?;
                 let mut canceled_tasks = self.cancel_matched_tasks(
@@ -83,11 +83,11 @@ impl IndexScheduler {
                 let mut matched_tasks = RoaringBitmap::new();

                 for task in tasks.iter() {
-                    if let KindWithContent::TaskDeletion { tasks, query: _ } = &task.kind {
+                    match &task.kind { KindWithContent::TaskDeletion { tasks, query: _ } => {
                         matched_tasks |= tasks;
-                    } else {
+                    } _ => {
                         unreachable!()
-                    }
+                    }}
                 }

                 let mut wtxn = self.env.write_txn()?;
@@ -279,11 +279,11 @@ impl IndexScheduler {
                 progress.update_progress(SwappingTheIndexes::EnsuringCorrectnessOfTheSwap);

                 let mut wtxn = self.env.write_txn()?;
-                let swaps = if let KindWithContent::IndexSwap { swaps } = &task.kind {
+                let swaps = match &task.kind { KindWithContent::IndexSwap { swaps } => {
                     swaps
-                } else {
+                } _ => {
                     unreachable!()
-                };
+                }};
                 let mut not_found_indexes = BTreeSet::new();
                 for IndexSwap { indexes: (lhs, rhs) } in swaps {
                     for index in [lhs, rhs] {
@@ -532,7 +532,7 @@ impl IndexScheduler {
         // We must remove the batch entirely
         if tasks.is_empty() {
             if let Some(batch) = self.queue.batches.get_batch(wtxn, batch_id)? {
-                if let Some(BatchEnqueuedAt { earliest, oldest }) = batch.enqueued_at {
+                match batch.enqueued_at { Some(BatchEnqueuedAt { earliest, oldest }) => {
                     remove_task_datetime(
                         wtxn,
                         self.queue.batches.enqueued_at,
@@ -545,7 +545,7 @@ impl IndexScheduler {
                         oldest,
                         batch_id,
                     )?;
-                } else {
+                } _ => {
                     // If we don't have the enqueued at in the batch it means the database comes from the v1.12
                     // and we still need to find the date by scrolling the database
                     remove_n_tasks_datetime_earlier_than(
@@ -555,7 +555,7 @@ impl IndexScheduler {
                         batch.stats.total_nb_tasks.clamp(1, 2) as usize,
                         batch_id,
                     )?;
-                }
+                }}
                 remove_task_datetime(
                     wtxn,
                     self.queue.batches.started_at,
@@ -26,11 +26,11 @@ impl IndexScheduler {
         progress.update_progress(DumpCreationProgress::StartTheDumpCreation);
         let started_at = OffsetDateTime::now_utc();
         let (keys, instance_uid) =
-            if let KindWithContent::DumpCreation { keys, instance_uid } = &task.kind {
+            match &task.kind { KindWithContent::DumpCreation { keys, instance_uid } => {
                 (keys, instance_uid)
-            } else {
+            } _ => {
                 unreachable!();
-            };
+            }};
         let dump = dump::DumpWriter::new(*instance_uid)?;

         // 1. dump the keys
@@ -206,14 +206,14 @@ impl IndexScheduler {
                             let user_err =
                                 milli::Error::UserError(milli::UserError::InvalidVectorsMapType {
                                     document_id: {
-                                        if let Ok(Some(Ok(index))) = index
+                                        match index
                                             .external_id_of(&rtxn, std::iter::once(id))
                                             .map(|it| it.into_iter().next())
-                                        {
+                                        { Ok(Some(Ok(index))) => {
                                             index
-                                        } else {
+                                        } _ => {
                                             format!("internal docid={id}")
-                                        }
+                                        }}
                                     },
                                     value: vectors.clone(),
                                 });
@@ -206,17 +206,17 @@ impl IndexScheduler {
             IndexOperation::DocumentEdition { index_uid, mut task } => {
                 progress.update_progress(DocumentEditionProgress::RetrievingConfig);

-                let (filter, code) = if let KindWithContent::DocumentEdition {
+                let (filter, code) = match &task.kind
+                { KindWithContent::DocumentEdition {
                     filter_expr,
                     context: _,
                     function,
                     ..
-                } = &task.kind
-                {
+                } => {
                     (filter_expr, function)
-                } else {
+                } _ => {
                     unreachable!()
-                };
+                }};

                 let candidates = match filter.as_ref().map(Filter::from_json) {
                     Some(Ok(Some(filter))) => filter
@@ -226,18 +226,18 @@ impl IndexScheduler {
                     Some(Err(e)) => return Err(Error::from_milli(e, Some(index_uid.clone()))),
                 };

-                let (original_filter, context, function) = if let Some(Details::DocumentEdition {
+                let (original_filter, context, function) = match task.details
+                { Some(Details::DocumentEdition {
                     original_filter,
                     context,
                     function,
                     ..
-                }) = task.details
-                {
+                }) => {
                     (original_filter, context, function)
-                } else {
+                } _ => {
                     // In the case of a `documentEdition` the details MUST be set
                     unreachable!();
-                };
+                }};

                 if candidates.is_empty() {
                     task.status = Status::Succeeded;
@@ -397,16 +397,16 @@ impl IndexScheduler {
                         };
                     }
                     let will_be_removed = to_delete.len() - before;
-                    if let Some(Details::DocumentDeletionByFilter {
+                    match &mut task.details
+                    { Some(Details::DocumentDeletionByFilter {
                         original_filter: _,
                         deleted_documents,
-                    }) = &mut task.details
-                    {
+                    }) => {
                         *deleted_documents = Some(will_be_removed);
-                    } else {
+                    } _ => {
                         // In the case of a `documentDeleteByFilter` the details MUST be set
                         unreachable!()
-                    }
+                    }}
                 }
                 _ => unreachable!(),
             }
@@ -307,7 +307,7 @@ pub(crate) fn filter_out_references_to_newer_tasks(task: &mut Task) {

 pub(crate) fn check_index_swap_validity(task: &Task) -> Result<()> {
     let swaps =
-        if let KindWithContent::IndexSwap { swaps } = &task.kind { swaps } else { return Ok(()) };
+        match &task.kind { KindWithContent::IndexSwap { swaps } => { swaps } _ => { return Ok(()) }};
     let mut all_indexes = HashSet::new();
     let mut duplicate_indexes = BTreeSet::new();
     for IndexSwap { indexes: (lhs, rhs) } in swaps {
@@ -501,15 +501,15 @@ impl crate::IndexScheduler {
             } => {
                 assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
                 let (index_uid, documents_ids) =
-                    if let KindWithContent::DocumentDeletion {
+                    match kind
+                    { KindWithContent::DocumentDeletion {
                         ref index_uid,
                         ref documents_ids,
-                    } = kind
-                    {
+                    } => {
                         (index_uid, documents_ids)
-                    } else {
+                    } _ => {
                         unreachable!()
-                    };
+                    }};
                 assert_eq!(&task_index_uid.unwrap(), index_uid);

                 match status {
@@ -526,15 +526,15 @@ impl crate::IndexScheduler {
             }
             Details::DocumentDeletionByFilter { deleted_documents, original_filter: _ } => {
                 assert_eq!(kind.as_kind(), Kind::DocumentDeletion);
-                let (index_uid, _) = if let KindWithContent::DocumentDeletionByFilter {
+                let (index_uid, _) = match kind
+                { KindWithContent::DocumentDeletionByFilter {
                     ref index_uid,
                     ref filter_expr,
-                } = kind
-                {
+                } => {
                     (index_uid, filter_expr)
-                } else {
+                } _ => {
                     unreachable!()
-                };
+                }};
                 assert_eq!(&task_index_uid.unwrap(), index_uid);

                 match status {
@@ -3,7 +3,7 @@ name = "json-depth-checker"
 version = "0.0.0"
 authors = ["Automatically generated"]
 publish = false
-edition = "2018"
+edition = "2024"

 [package.metadata]
 cargo-fuzz = true
@@ -77,7 +77,7 @@ snapshot_hash!("hello world", name: "snap_name", @"5f93f983524def3dca464469d2cf9
 */
 #[macro_export]
 macro_rules! snapshot_hash {
-    ($value:expr, @$inline:literal) => {
+    ($value:expr_2021, @$inline:literal) => {
         let test_name = {
             fn f() {}
             fn type_name_of_val<T>(_: T) -> &'static str {
@@ -99,7 +99,7 @@ macro_rules! snapshot_hash {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
($value:expr, name: $name:expr, @$inline:literal) => {
|
($value:expr_2021, name: $name:expr_2021, @$inline:literal) => {
|
||||||
let test_name = {
|
let test_name = {
|
||||||
fn f() {}
|
fn f() {}
|
||||||
fn type_name_of_val<T>(_: T) -> &'static str {
|
fn type_name_of_val<T>(_: T) -> &'static str {
|
||||||
@@ -151,7 +151,7 @@ snapshot!(format!("{:?}", vec![1, 2]), @"[1, 2]");
|
|||||||
*/
|
*/
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! snapshot {
|
macro_rules! snapshot {
|
||||||
($value:expr, name: $name:expr) => {
|
($value:expr_2021, name: $name:expr_2021) => {
|
||||||
let test_name = {
|
let test_name = {
|
||||||
fn f() {}
|
fn f() {}
|
||||||
fn type_name_of_val<T>(_: T) -> &'static str {
|
fn type_name_of_val<T>(_: T) -> &'static str {
|
||||||
@@ -172,7 +172,7 @@ macro_rules! snapshot {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
($value:expr, @$inline:literal) => {
|
($value:expr_2021, @$inline:literal) => {
|
||||||
// Note that the name given as argument does not matter since it is only an inline snapshot
|
// Note that the name given as argument does not matter since it is only an inline snapshot
|
||||||
// We don't pass None because otherwise `meili-snap` will try to assign it a unique identifier
|
// We don't pass None because otherwise `meili-snap` will try to assign it a unique identifier
|
||||||
let (settings, _, _) = $crate::default_snapshot_settings_for_test("", Some("_dummy_argument"));
|
let (settings, _, _) = $crate::default_snapshot_settings_for_test("", Some("_dummy_argument"));
|
||||||
@@ -183,7 +183,7 @@ macro_rules! snapshot {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
($value:expr) => {
|
($value:expr_2021) => {
|
||||||
let test_name = {
|
let test_name = {
|
||||||
fn f() {}
|
fn f() {}
|
||||||
fn type_name_of_val<T>(_: T) -> &'static str {
|
fn type_name_of_val<T>(_: T) -> &'static str {
|
||||||
@@ -213,13 +213,13 @@ macro_rules! snapshot {
 /// refer to the redactions feature in the `insta` guide.
 #[macro_export]
 macro_rules! json_string {
-    ($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
+    ($value:expr_2021, {$($k:expr_2021 => $v:expr_2021),*$(,)?}) => {
         {
             let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
             snap
         }
     };
-    ($value:expr) => {{
+    ($value:expr_2021) => {{
        let value = meili_snap::insta::_macro_support::serialize_value(
            &$value,
            meili_snap::insta::_macro_support::SerializationFormat::Json,
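Note: every `$x:expr` matcher in these macros becomes `$x:expr_2021`. In Rust 2024 the `expr` fragment specifier also matches `const { … }` blocks and `_` expressions, so the edition migration pins existing macros to the old grammar with `expr_2021` to avoid any change in what they accept. A minimal sketch on a sufficiently recent toolchain (the `double!` macro is hypothetical):

```rust
// `expr_2021` keeps the Rust 2021 meaning of the `expr` fragment, so this macro
// matches exactly what it matched before the edition bump.
macro_rules! double {
    ($value:expr_2021) => {
        $value * 2
    };
}

fn main() {
    assert_eq!(double!(21), 42);
}
```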
@@ -403,7 +403,7 @@ impl ErrorCode for milli::Error {
         match self {
             Error::InternalError(_) => Code::Internal,
             Error::IoError(e) => e.error_code(),
-            Error::UserError(ref error) => {
+            Error::UserError(error) => {
                 match error {
                     // TODO: wait for spec for new error codes.
                     UserError::SerdeJson(_)
@@ -33,7 +33,7 @@ impl From<LocalizedAttributesRuleView> for LocalizedAttributesRule {
 ///
 /// this enum implements `Deserr` in order to be used in the API.
 macro_rules! make_locale {
-    ($(($iso_639_1:ident, $iso_639_1_str:expr) => ($iso_639_3:ident, $iso_639_3_str:expr),)+) => {
+    ($(($iso_639_1:ident, $iso_639_1_str:expr_2021) => ($iso_639_3:ident, $iso_639_3_str:expr_2021),)+) => {
         #[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr, Serialize, Deserialize, Ord, PartialOrd, ToSchema)]
         #[deserr(rename_all = camelCase)]
         #[serde(rename_all = "camelCase")]
@@ -572,19 +572,19 @@ pub fn apply_settings_to_builder(
     } = settings;

     match searchable_attributes.deref() {
-        Setting::Set(ref names) => builder.set_searchable_fields(names.clone()),
+        Setting::Set(names) => builder.set_searchable_fields(names.clone()),
         Setting::Reset => builder.reset_searchable_fields(),
         Setting::NotSet => (),
     }

     match displayed_attributes.deref() {
-        Setting::Set(ref names) => builder.set_displayed_fields(names.clone()),
+        Setting::Set(names) => builder.set_displayed_fields(names.clone()),
         Setting::Reset => builder.reset_displayed_fields(),
         Setting::NotSet => (),
     }

     match filterable_attributes {
-        Setting::Set(ref facets) => {
+        Setting::Set(facets) => {
             builder.set_filterable_fields(facets.clone().into_iter().collect())
         }
         Setting::Reset => builder.reset_filterable_fields(),
@@ -592,13 +592,13 @@ pub fn apply_settings_to_builder(
|
|||||||
}
|
}
|
||||||
|
|
||||||
match sortable_attributes {
|
match sortable_attributes {
|
||||||
Setting::Set(ref fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
|
Setting::Set(fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
|
||||||
Setting::Reset => builder.reset_sortable_fields(),
|
Setting::Reset => builder.reset_sortable_fields(),
|
||||||
Setting::NotSet => (),
|
Setting::NotSet => (),
|
||||||
}
|
}
|
||||||
|
|
||||||
match ranking_rules {
|
match ranking_rules {
|
||||||
Setting::Set(ref criteria) => {
|
Setting::Set(criteria) => {
|
||||||
builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
|
builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
|
||||||
}
|
}
|
||||||
Setting::Reset => builder.reset_criteria(),
|
Setting::Reset => builder.reset_criteria(),
|
||||||
@@ -606,13 +606,13 @@ pub fn apply_settings_to_builder(
|
|||||||
}
|
}
|
||||||
|
|
||||||
match stop_words {
|
match stop_words {
|
||||||
Setting::Set(ref stop_words) => builder.set_stop_words(stop_words.clone()),
|
Setting::Set(stop_words) => builder.set_stop_words(stop_words.clone()),
|
||||||
Setting::Reset => builder.reset_stop_words(),
|
Setting::Reset => builder.reset_stop_words(),
|
||||||
Setting::NotSet => (),
|
Setting::NotSet => (),
|
||||||
}
|
}
|
||||||
|
|
||||||
match non_separator_tokens {
|
match non_separator_tokens {
|
||||||
Setting::Set(ref non_separator_tokens) => {
|
Setting::Set(non_separator_tokens) => {
|
||||||
builder.set_non_separator_tokens(non_separator_tokens.clone())
|
builder.set_non_separator_tokens(non_separator_tokens.clone())
|
||||||
}
|
}
|
||||||
Setting::Reset => builder.reset_non_separator_tokens(),
|
Setting::Reset => builder.reset_non_separator_tokens(),
|
||||||
@@ -620,7 +620,7 @@ pub fn apply_settings_to_builder(
|
|||||||
}
|
}
|
||||||
|
|
||||||
match separator_tokens {
|
match separator_tokens {
|
||||||
Setting::Set(ref separator_tokens) => {
|
Setting::Set(separator_tokens) => {
|
||||||
builder.set_separator_tokens(separator_tokens.clone())
|
builder.set_separator_tokens(separator_tokens.clone())
|
||||||
}
|
}
|
||||||
Setting::Reset => builder.reset_separator_tokens(),
|
Setting::Reset => builder.reset_separator_tokens(),
|
||||||
@@ -628,38 +628,38 @@ pub fn apply_settings_to_builder(
|
|||||||
}
|
}
|
||||||
|
|
||||||
match dictionary {
|
match dictionary {
|
||||||
Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
|
Setting::Set(dictionary) => builder.set_dictionary(dictionary.clone()),
|
||||||
Setting::Reset => builder.reset_dictionary(),
|
Setting::Reset => builder.reset_dictionary(),
|
||||||
Setting::NotSet => (),
|
Setting::NotSet => (),
|
||||||
}
|
}
|
||||||
|
|
||||||
match synonyms {
|
match synonyms {
|
||||||
Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
|
Setting::Set(synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
|
||||||
Setting::Reset => builder.reset_synonyms(),
|
Setting::Reset => builder.reset_synonyms(),
|
||||||
Setting::NotSet => (),
|
Setting::NotSet => (),
|
||||||
}
|
}
|
||||||
|
|
||||||
match distinct_attribute {
|
match distinct_attribute {
|
||||||
Setting::Set(ref attr) => builder.set_distinct_field(attr.clone()),
|
Setting::Set(attr) => builder.set_distinct_field(attr.clone()),
|
||||||
Setting::Reset => builder.reset_distinct_field(),
|
Setting::Reset => builder.reset_distinct_field(),
|
||||||
Setting::NotSet => (),
|
Setting::NotSet => (),
|
||||||
}
|
}
|
||||||
|
|
||||||
match proximity_precision {
|
match proximity_precision {
|
||||||
Setting::Set(ref precision) => builder.set_proximity_precision((*precision).into()),
|
Setting::Set(precision) => builder.set_proximity_precision((*precision).into()),
|
||||||
Setting::Reset => builder.reset_proximity_precision(),
|
Setting::Reset => builder.reset_proximity_precision(),
|
||||||
Setting::NotSet => (),
|
Setting::NotSet => (),
|
||||||
}
|
}
|
||||||
|
|
||||||
match localized_attributes_rules {
|
match localized_attributes_rules {
|
||||||
Setting::Set(ref rules) => builder
|
Setting::Set(rules) => builder
|
||||||
.set_localized_attributes_rules(rules.iter().cloned().map(|r| r.into()).collect()),
|
.set_localized_attributes_rules(rules.iter().cloned().map(|r| r.into()).collect()),
|
||||||
Setting::Reset => builder.reset_localized_attributes_rules(),
|
Setting::Reset => builder.reset_localized_attributes_rules(),
|
||||||
Setting::NotSet => (),
|
Setting::NotSet => (),
|
||||||
}
|
}
|
||||||
|
|
||||||
match typo_tolerance {
|
match typo_tolerance {
|
||||||
Setting::Set(ref value) => {
|
Setting::Set(value) => {
|
||||||
match value.enabled {
|
match value.enabled {
|
||||||
Setting::Set(val) => builder.set_autorize_typos(val),
|
Setting::Set(val) => builder.set_autorize_typos(val),
|
||||||
Setting::Reset => builder.reset_authorize_typos(),
|
Setting::Reset => builder.reset_authorize_typos(),
|
||||||
@@ -736,7 +736,7 @@ pub fn apply_settings_to_builder(
|
|||||||
}
|
}
|
||||||
|
|
||||||
match pagination {
|
match pagination {
|
||||||
Setting::Set(ref value) => match value.max_total_hits {
|
Setting::Set(value) => match value.max_total_hits {
|
||||||
Setting::Set(val) => builder.set_pagination_max_total_hits(val),
|
Setting::Set(val) => builder.set_pagination_max_total_hits(val),
|
||||||
Setting::Reset => builder.reset_pagination_max_total_hits(),
|
Setting::Reset => builder.reset_pagination_max_total_hits(),
|
||||||
Setting::NotSet => (),
|
Setting::NotSet => (),
|
||||||
@@ -960,7 +960,7 @@ impl<'de> Deserialize<'de> for RankingRuleView {
         D: serde::Deserializer<'de>,
     {
         struct Visitor;
-        impl<'de> serde::de::Visitor<'de> for Visitor {
+        impl serde::de::Visitor<'_> for Visitor {
             type Value = RankingRuleView;
             fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                 write!(formatter, "the name of a valid ranking rule (string)")
@@ -66,7 +66,7 @@ where
 /// not supported on untagged enums.
 struct StarOrVisitor<T>(PhantomData<T>);

-impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
+impl<T, FE> Visitor<'_> for StarOrVisitor<T>
 where
     T: FromStr<Err = FE>,
     FE: fmt::Display,
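Note: several impls in this diff drop a named lifetime parameter in favour of the anonymous `'_` form, both on the trait side (`Visitor<'_>`) and on the implementing type (`Document<'_>`, `GlobalFieldsIdsMap<'_>`, and so on). The lifetime is only mentioned once in each header, so nothing changes semantically; newer compiler and Clippy lints (`needless_lifetimes` and friends, as far as I can tell) nudge toward the elided spelling. A minimal sketch of the simpler, type-side case with hypothetical names:

```rust
trait Describe {
    fn describe(&self) -> String;
}

struct Window<'a> {
    title: &'a str,
}

// Previously this would be written `impl<'a> Describe for Window<'a>`; the
// named lifetime adds nothing, so the elided form is what the lint suggests.
impl Describe for Window<'_> {
    fn describe(&self) -> String {
        format!("window: {}", self.title)
    }
}

fn main() {
    let w = Window { title: "logs" };
    assert_eq!(w.describe(), "window: logs");
}
```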
@@ -89,11 +89,11 @@ fn is_empty_db(db_path: impl AsRef<Path>) -> bool {
     if !db_path.exists() {
         true
     // if we encounter an error or if the db is a file we consider the db non empty
-    } else if let Ok(dir) = db_path.read_dir() {
+    } else { match db_path.read_dir() { Ok(dir) => {
         dir.count() == 0
-    } else {
+    } _ => {
         true
-    }
+    }}}
 }

 /// The handle used to update the logs at runtime. Must be accessible from the `main.rs` and the `route/logs.rs`.
@@ -346,7 +346,7 @@ fn open_or_create_database_unchecked(
|
|||||||
match (
|
match (
|
||||||
index_scheduler_builder(),
|
index_scheduler_builder(),
|
||||||
auth_controller.map_err(anyhow::Error::from),
|
auth_controller.map_err(anyhow::Error::from),
|
||||||
create_current_version_file(&opt.db_path).map_err(anyhow::Error::from),
|
create_current_version_file(&opt.db_path),
|
||||||
) {
|
) {
|
||||||
(Ok(i), Ok(a), Ok(())) => Ok((i, a)),
|
(Ok(i), Ok(a), Ok(())) => Ok((i, a)),
|
||||||
(Err(e), _, _) | (_, Err(e), _) | (_, _, Err(e)) => {
|
(Err(e), _, _) | (_, Err(e), _) | (_, _, Err(e)) => {
|
||||||
@@ -466,18 +466,18 @@ fn import_dump(
|
|||||||
let reader = File::open(dump_path)?;
|
let reader = File::open(dump_path)?;
|
||||||
let mut dump_reader = dump::DumpReader::open(reader)?;
|
let mut dump_reader = dump::DumpReader::open(reader)?;
|
||||||
|
|
||||||
if let Some(date) = dump_reader.date() {
|
match dump_reader.date() { Some(date) => {
|
||||||
tracing::info!(
|
tracing::info!(
|
||||||
version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
|
version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
|
||||||
%date,
|
%date,
|
||||||
"Importing a dump of meilisearch"
|
"Importing a dump of meilisearch"
|
||||||
);
|
);
|
||||||
} else {
|
} _ => {
|
||||||
tracing::info!(
|
tracing::info!(
|
||||||
version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
|
version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
|
||||||
"Importing a dump of meilisearch",
|
"Importing a dump of meilisearch",
|
||||||
);
|
);
|
||||||
}
|
}}
|
||||||
|
|
||||||
let instance_uid = dump_reader.instance_uid()?;
|
let instance_uid = dump_reader.instance_uid()?;
|
||||||
|
|
||||||
|
|||||||
@@ -69,7 +69,7 @@ fn setup(opt: &Opt) -> anyhow::Result<(LogRouteHandle, LogStderrHandle)> {
     Ok((route_layer_handle, stderr_layer_handle))
 }

-fn on_panic(info: &std::panic::PanicInfo) {
+fn on_panic(info: &std::panic::PanicHookInfo) {
     let info = info.to_string().replace('\n', " ");
     tracing::error!(%info);
 }
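Note: the panic handler now takes `std::panic::PanicHookInfo`. A recent Rust release renamed the type passed to `std::panic::set_hook` from `PanicInfo` to `PanicHookInfo` (the old name remains as a deprecated alias for hook usage), so this hunk is simply chasing that rename. A minimal sketch, with the hook body mirroring the function above:

```rust
fn on_panic(info: &std::panic::PanicHookInfo) {
    // Flatten the panic message onto one line, as the handler above does.
    let info = info.to_string().replace('\n', " ");
    eprintln!("{info}");
}

fn main() {
    // Hooks registered with `set_hook` now receive a `PanicHookInfo`; a plain
    // `fn` with the right signature works through a small closure.
    std::panic::set_hook(Box::new(|info| on_panic(info)));
    // Any panic after this point is routed through `on_panic`.
}
```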
@@ -178,11 +178,11 @@ async fn run_http(
         .disable_signals()
         .keep_alive(KeepAlive::Os);

-    if let Some(config) = opt_clone.get_ssl_config()? {
+    match opt_clone.get_ssl_config()? { Some(config) => {
         http_server.bind_rustls_0_23(opt_clone.http_addr, config)?.run().await?;
-    } else {
+    } _ => {
         http_server.bind(&opt_clone.http_addr)?.run().await?;
-    }
+    }}
     Ok(())
 }

@@ -907,7 +907,7 @@ fn load_private_key(
 fn load_ocsp(filename: &Option<PathBuf>) -> anyhow::Result<Vec<u8>> {
     let mut ret = Vec::new();

-    if let Some(ref name) = filename {
+    if let Some(name) = filename {
         fs::File::open(name)
             .map_err(|_| anyhow::anyhow!("cannot open ocsp file"))?
             .read_to_end(&mut ret)
@@ -924,12 +924,12 @@ where
|
|||||||
T: AsRef<OsStr>,
|
T: AsRef<OsStr>,
|
||||||
{
|
{
|
||||||
if let Err(VarError::NotPresent) = std::env::var(key) {
|
if let Err(VarError::NotPresent) = std::env::var(key) {
|
||||||
std::env::set_var(key, value);
|
// TODO: Audit that the environment access only happens in single-threaded code.
|
||||||
|
unsafe { std::env::set_var(key, value) };
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Functions used to get default value for `Opt` fields, needs to be function because of serde's default attribute.
|
/// Functions used to get default value for `Opt` fields, needs to be function because of serde's default attribute.
|
||||||
|
|
||||||
fn default_db_path() -> PathBuf {
|
fn default_db_path() -> PathBuf {
|
||||||
PathBuf::from(DEFAULT_DB_PATH)
|
PathBuf::from(DEFAULT_DB_PATH)
|
||||||
}
|
}
|
||||||
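Note: the hunk above wraps `std::env::set_var` in an `unsafe` block. In Rust 2024, `std::env::set_var` and `std::env::remove_var` are `unsafe fn`s because mutating the process environment is not thread-safe on every platform; the automated migration adds the `unsafe` block plus the "Audit that the environment access only happens in single-threaded code" TODO seen here and in the test helpers later in the diff. A minimal sketch (the environment key is illustrative, and single-threaded startup is an assumption):

```rust
use std::env::{self, VarError};
use std::ffi::OsStr;

fn export_if_not_present<T: AsRef<OsStr>>(key: &str, value: T) {
    if let Err(VarError::NotPresent) = env::var(key) {
        // SAFETY: assumed to run during single-threaded startup, before any
        // other thread can read the environment concurrently.
        unsafe { env::set_var(key, value) };
    }
}

fn main() {
    export_if_not_present("MY_HYPOTHETICAL_FLAG", "1");
}
```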
@@ -1037,7 +1037,7 @@ where
|
|||||||
{
|
{
|
||||||
struct BoolOrInt;
|
struct BoolOrInt;
|
||||||
|
|
||||||
impl<'de> serde::de::Visitor<'de> for BoolOrInt {
|
impl serde::de::Visitor<'_> for BoolOrInt {
|
||||||
type Value = ScheduleSnapshot;
|
type Value = ScheduleSnapshot;
|
||||||
|
|
||||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||||
|
|||||||
@@ -97,12 +97,12 @@ async fn get_batch(
|
|||||||
let filters = index_scheduler.filters();
|
let filters = index_scheduler.filters();
|
||||||
let (batches, _) = index_scheduler.get_batches_from_authorized_indexes(&query, filters)?;
|
let (batches, _) = index_scheduler.get_batches_from_authorized_indexes(&query, filters)?;
|
||||||
|
|
||||||
if let Some(batch) = batches.first() {
|
match batches.first() { Some(batch) => {
|
||||||
let batch_view = BatchView::from_batch(batch);
|
let batch_view = BatchView::from_batch(batch);
|
||||||
Ok(HttpResponse::Ok().json(batch_view))
|
Ok(HttpResponse::Ok().json(batch_view))
|
||||||
} else {
|
} _ => {
|
||||||
Err(index_scheduler::Error::BatchNotFound(batch_uid).into())
|
Err(index_scheduler::Error::BatchNotFound(batch_uid).into())
|
||||||
}
|
}}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, ToSchema)]
|
#[derive(Debug, Serialize, ToSchema)]
|
||||||
|
|||||||
@@ -619,7 +619,7 @@ fn documents_by_query(
|
|||||||
|
|
||||||
let retrieve_vectors = RetrieveVectors::new(retrieve_vectors);
|
let retrieve_vectors = RetrieveVectors::new(retrieve_vectors);
|
||||||
|
|
||||||
let ids = if let Some(ids) = ids {
|
let ids = match ids { Some(ids) => {
|
||||||
let mut parsed_ids = Vec::with_capacity(ids.len());
|
let mut parsed_ids = Vec::with_capacity(ids.len());
|
||||||
for (index, id) in ids.into_iter().enumerate() {
|
for (index, id) in ids.into_iter().enumerate() {
|
||||||
let id = id.try_into().map_err(|error| {
|
let id = id.try_into().map_err(|error| {
|
||||||
@@ -629,9 +629,9 @@ fn documents_by_query(
|
|||||||
parsed_ids.push(id)
|
parsed_ids.push(id)
|
||||||
}
|
}
|
||||||
Some(parsed_ids)
|
Some(parsed_ids)
|
||||||
} else {
|
} _ => {
|
||||||
None
|
None
|
||||||
};
|
}};
|
||||||
|
|
||||||
let index = index_scheduler.index(&index_uid)?;
|
let index = index_scheduler.index(&index_uid)?;
|
||||||
let (total, documents) = retrieve_documents(
|
let (total, documents) = retrieve_documents(
|
||||||
|
|||||||
@@ -302,7 +302,7 @@ impl From<FacetSearchQuery> for SearchQuery {

         // If exhaustive_facet_count is true, we need to set the page to 0
         // because the facet search is not exhaustive by default.
-        let page = if exhaustive_facet_count.map_or(false, |exhaustive| exhaustive) {
+        let page = if exhaustive_facet_count.is_some_and(|exhaustive| exhaustive) {
            // setting the page to 0 will force the search to be exhaustive when computing the number of hits,
            // but it will skip the bucket sort saving time.
            Some(0)
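Note: the `map_or(false, …)` and `map_or(true, …)` calls throughout this diff become `is_some_and(…)` and `is_none_or(…)`. Both are plain `Option` methods in std, and newer Clippy releases flag the old spelling, which is presumably what drove the substitutions. A minimal sketch of the equivalence:

```rust
fn main() {
    let exhaustive: Option<bool> = Some(true);

    // Two ways of asking "is it Some and does the value satisfy the predicate?";
    // the second form is what the diff switches to.
    assert_eq!(exhaustive.map_or(false, |e| e), exhaustive.is_some_and(|e| e));

    // And the dual: "is it None, or does the value satisfy the predicate?".
    let tail: Option<char> = Some('.');
    assert!(tail.is_none_or(|c| c == '.'));
}
```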
@@ -131,7 +131,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
|
|||||||
|
|
||||||
ret.total_received = 1;
|
ret.total_received = 1;
|
||||||
|
|
||||||
if let Some(ref sort) = sort {
|
if let Some(sort) = sort {
|
||||||
ret.sort_total_number_of_criteria = 1;
|
ret.sort_total_number_of_criteria = 1;
|
||||||
ret.sort_with_geo_point = sort.iter().any(|s| s.contains("_geoPoint("));
|
ret.sort_with_geo_point = sort.iter().any(|s| s.contains("_geoPoint("));
|
||||||
ret.sort_sum_of_criteria_terms = sort.len();
|
ret.sort_sum_of_criteria_terms = sort.len();
|
||||||
@@ -139,7 +139,7 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
|
|||||||
|
|
||||||
ret.distinct = distinct.is_some();
|
ret.distinct = distinct.is_some();
|
||||||
|
|
||||||
if let Some(ref filter) = filter {
|
if let Some(filter) = filter {
|
||||||
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
|
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
|
||||||
ret.filter_total_number_of_criteria = 1;
|
ret.filter_total_number_of_criteria = 1;
|
||||||
|
|
||||||
@@ -168,11 +168,11 @@ impl<Method: AggregateMethod> SearchAggregator<Method> {
|
|||||||
ret.attributes_to_search_on_total_number_of_uses = 1;
|
ret.attributes_to_search_on_total_number_of_uses = 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(ref q) = q {
|
if let Some(q) = q {
|
||||||
ret.max_terms_number = q.split_whitespace().count();
|
ret.max_terms_number = q.split_whitespace().count();
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(ref vector) = vector {
|
if let Some(vector) = vector {
|
||||||
ret.max_vector_size = vector.len();
|
ret.max_vector_size = vector.len();
|
||||||
}
|
}
|
||||||
ret.retrieve_vectors |= retrieve_vectors;
|
ret.retrieve_vectors |= retrieve_vectors;
|
||||||
|
|||||||
@@ -67,7 +67,7 @@ impl<Method: AggregateMethod> SimilarAggregator<Method> {
|
|||||||
|
|
||||||
ret.total_received = 1;
|
ret.total_received = 1;
|
||||||
|
|
||||||
if let Some(ref filter) = filter {
|
if let Some(filter) = filter {
|
||||||
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
|
static RE: Lazy<Regex> = Lazy::new(|| Regex::new("AND | OR").unwrap());
|
||||||
ret.filter_total_number_of_criteria = 1;
|
ret.filter_total_number_of_criteria = 1;
|
||||||
|
|
||||||
|
|||||||
@@ -341,11 +341,11 @@ pub async fn get_logs(
|
|||||||
})
|
})
|
||||||
.unwrap();
|
.unwrap();
|
||||||
|
|
||||||
if let Some(stream) = stream {
|
match stream { Some(stream) => {
|
||||||
Ok(HttpResponse::Ok().streaming(stream))
|
Ok(HttpResponse::Ok().streaming(stream))
|
||||||
} else {
|
} _ => {
|
||||||
Err(MeilisearchHttpError::AlreadyUsedLogRoute.into())
|
Err(MeilisearchHttpError::AlreadyUsedLogRoute.into())
|
||||||
}
|
}}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Stop retrieving logs
|
/// Stop retrieving logs
|
||||||
|
|||||||
@@ -170,7 +170,7 @@ pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
|
|||||||
})
|
})
|
||||||
})
|
})
|
||||||
.transpose()?
|
.transpose()?
|
||||||
.map_or(false, |s| s.to_lowercase() == "true"))
|
.is_some_and(|s| s.to_lowercase() == "true"))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, ToSchema)]
|
#[derive(Debug, Serialize, ToSchema)]
|
||||||
|
|||||||
@@ -638,12 +638,12 @@ async fn get_task(
|
|||||||
let filters = index_scheduler.filters();
|
let filters = index_scheduler.filters();
|
||||||
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
|
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
|
||||||
|
|
||||||
if let Some(task) = tasks.first() {
|
match tasks.first() { Some(task) => {
|
||||||
let task_view = TaskView::from_task(task);
|
let task_view = TaskView::from_task(task);
|
||||||
Ok(HttpResponse::Ok().json(task_view))
|
Ok(HttpResponse::Ok().json(task_view))
|
||||||
} else {
|
} _ => {
|
||||||
Err(index_scheduler::Error::TaskNotFound(task_uid).into())
|
Err(index_scheduler::Error::TaskNotFound(task_uid).into())
|
||||||
}
|
}}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get a task's documents.
|
/// Get a task's documents.
|
||||||
@@ -693,7 +693,7 @@ async fn get_task_documents_file(
|
|||||||
let filters = index_scheduler.filters();
|
let filters = index_scheduler.filters();
|
||||||
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
|
let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(&query, filters)?;
|
||||||
|
|
||||||
if let Some(task) = tasks.first() {
|
match tasks.first() { Some(task) => {
|
||||||
match task.content_uuid() {
|
match task.content_uuid() {
|
||||||
Some(uuid) => {
|
Some(uuid) => {
|
||||||
let mut tfile = match index_scheduler.queue.update_file(uuid) {
|
let mut tfile = match index_scheduler.queue.update_file(uuid) {
|
||||||
@@ -711,9 +711,9 @@ async fn get_task_documents_file(
|
|||||||
}
|
}
|
||||||
None => Err(index_scheduler::Error::TaskFileNotFound(task_uid).into()),
|
None => Err(index_scheduler::Error::TaskFileNotFound(task_uid).into()),
|
||||||
}
|
}
|
||||||
} else {
|
} _ => {
|
||||||
Err(index_scheduler::Error::TaskNotFound(task_uid).into())
|
Err(index_scheduler::Error::TaskNotFound(task_uid).into())
|
||||||
}
|
}}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum DeserializeDateOption {
|
pub enum DeserializeDateOption {
|
||||||
|
|||||||
@@ -740,7 +740,7 @@ impl SearchByIndex {
|
|||||||
_ => ranking_rules::CanonicalizationKind::Placeholder,
|
_ => ranking_rules::CanonicalizationKind::Placeholder,
|
||||||
};
|
};
|
||||||
|
|
||||||
let sort = if let Some(sort) = &query.sort {
|
let sort = match &query.sort { Some(sort) => {
|
||||||
let sorts: Vec<_> =
|
let sorts: Vec<_> =
|
||||||
match sort.iter().map(|s| milli::AscDesc::from_str(s)).collect() {
|
match sort.iter().map(|s| milli::AscDesc::from_str(s)).collect() {
|
||||||
Ok(sorts) => sorts,
|
Ok(sorts) => sorts,
|
||||||
@@ -752,9 +752,9 @@ impl SearchByIndex {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
Some(sorts)
|
Some(sorts)
|
||||||
} else {
|
} _ => {
|
||||||
None
|
None
|
||||||
};
|
}};
|
||||||
|
|
||||||
let ranking_rules = ranking_rules::RankingRules::new(
|
let ranking_rules = ranking_rules::RankingRules::new(
|
||||||
criteria.clone(),
|
criteria.clone(),
|
||||||
|
|||||||
@@ -32,7 +32,6 @@ pub const FEDERATION_REMOTE: &str = "remote";
|
|||||||
#[derive(Debug, Default, Clone, PartialEq, Serialize, deserr::Deserr, ToSchema)]
|
#[derive(Debug, Default, Clone, PartialEq, Serialize, deserr::Deserr, ToSchema)]
|
||||||
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
|
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
|
||||||
#[serde(rename_all = "camelCase")]
|
#[serde(rename_all = "camelCase")]
|
||||||
|
|
||||||
pub struct FederationOptions {
|
pub struct FederationOptions {
|
||||||
#[deserr(default, error = DeserrJsonError<InvalidMultiSearchWeight>)]
|
#[deserr(default, error = DeserrJsonError<InvalidMultiSearchWeight>)]
|
||||||
#[schema(value_type = f64)]
|
#[schema(value_type = f64)]
|
||||||
|
|||||||
@@ -1331,15 +1331,15 @@ impl<'a> HitMaker<'a> {
|
|||||||
let displayed_ids =
|
let displayed_ids =
|
||||||
displayed_ids.unwrap_or_else(|| fields_ids_map.iter().map(|(id, _)| id).collect());
|
displayed_ids.unwrap_or_else(|| fields_ids_map.iter().map(|(id, _)| id).collect());
|
||||||
|
|
||||||
let retrieve_vectors = if let RetrieveVectors::Retrieve = format.retrieve_vectors {
|
let retrieve_vectors = match format.retrieve_vectors { RetrieveVectors::Retrieve => {
|
||||||
if vectors_is_hidden {
|
if vectors_is_hidden {
|
||||||
RetrieveVectors::Hide
|
RetrieveVectors::Hide
|
||||||
} else {
|
} else {
|
||||||
RetrieveVectors::Retrieve
|
RetrieveVectors::Retrieve
|
||||||
}
|
}
|
||||||
} else {
|
} _ => {
|
||||||
format.retrieve_vectors
|
format.retrieve_vectors
|
||||||
};
|
}};
|
||||||
|
|
||||||
let fids = |attrs: &BTreeSet<String>| {
|
let fids = |attrs: &BTreeSet<String>| {
|
||||||
let mut ids = BTreeSet::new();
|
let mut ids = BTreeSet::new();
|
||||||
@@ -1544,7 +1544,7 @@ pub fn perform_facet_search(
|
|||||||
let locales = localized_attributes_locales.map(|attr| {
|
let locales = localized_attributes_locales.map(|attr| {
|
||||||
attr.locales
|
attr.locales
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter(|locale| locales.as_ref().map_or(true, |locales| locales.contains(locale)))
|
.filter(|locale| locales.as_ref().is_none_or(|locales| locales.contains(locale)))
|
||||||
.collect()
|
.collect()
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -94,7 +94,7 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
|
|||||||
});
|
});
|
||||||
|
|
||||||
macro_rules! compute_authorized_search {
|
macro_rules! compute_authorized_search {
|
||||||
($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
|
($tenant_tokens:expr_2021, $filter:expr_2021, $expected_count:expr_2021) => {
|
||||||
let mut server = Server::new_auth().await;
|
let mut server = Server::new_auth().await;
|
||||||
server.use_admin_key("MASTER_KEY").await;
|
server.use_admin_key("MASTER_KEY").await;
|
||||||
let index = server.index("sales");
|
let index = server.index("sales");
|
||||||
@@ -141,7 +141,7 @@ macro_rules! compute_authorized_search {
|
|||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! compute_forbidden_search {
|
macro_rules! compute_forbidden_search {
|
||||||
($tenant_tokens:expr, $parent_keys:expr) => {
|
($tenant_tokens:expr_2021, $parent_keys:expr_2021) => {
|
||||||
let mut server = Server::new_auth().await;
|
let mut server = Server::new_auth().await;
|
||||||
server.use_admin_key("MASTER_KEY").await;
|
server.use_admin_key("MASTER_KEY").await;
|
||||||
let index = server.index("sales");
|
let index = server.index("sales");
|
||||||
|
|||||||
@@ -262,7 +262,7 @@ static BOTH_REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
|
|||||||
});
|
});
|
||||||
|
|
||||||
macro_rules! compute_authorized_single_search {
|
macro_rules! compute_authorized_single_search {
|
||||||
($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
|
($tenant_tokens:expr_2021, $filter:expr_2021, $expected_count:expr_2021) => {
|
||||||
let mut server = Server::new_auth().await;
|
let mut server = Server::new_auth().await;
|
||||||
server.use_admin_key("MASTER_KEY").await;
|
server.use_admin_key("MASTER_KEY").await;
|
||||||
let index = server.index("sales");
|
let index = server.index("sales");
|
||||||
@@ -333,7 +333,7 @@ macro_rules! compute_authorized_single_search {
|
|||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! compute_authorized_multiple_search {
|
macro_rules! compute_authorized_multiple_search {
|
||||||
($tenant_tokens:expr, $filter1:expr, $filter2:expr, $expected_count1:expr, $expected_count2:expr) => {
|
($tenant_tokens:expr_2021, $filter1:expr_2021, $filter2:expr_2021, $expected_count1:expr_2021, $expected_count2:expr_2021) => {
|
||||||
let mut server = Server::new_auth().await;
|
let mut server = Server::new_auth().await;
|
||||||
server.use_admin_key("MASTER_KEY").await;
|
server.use_admin_key("MASTER_KEY").await;
|
||||||
let index = server.index("sales");
|
let index = server.index("sales");
|
||||||
@@ -417,7 +417,7 @@ macro_rules! compute_authorized_multiple_search {
|
|||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! compute_forbidden_single_search {
|
macro_rules! compute_forbidden_single_search {
|
||||||
($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => {
|
($tenant_tokens:expr_2021, $parent_keys:expr_2021, $failed_query_indexes:expr_2021) => {
|
||||||
let mut server = Server::new_auth().await;
|
let mut server = Server::new_auth().await;
|
||||||
server.use_admin_key("MASTER_KEY").await;
|
server.use_admin_key("MASTER_KEY").await;
|
||||||
let index = server.index("sales");
|
let index = server.index("sales");
|
||||||
@@ -493,7 +493,7 @@ macro_rules! compute_forbidden_single_search {
|
|||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! compute_forbidden_multiple_search {
|
macro_rules! compute_forbidden_multiple_search {
|
||||||
($tenant_tokens:expr, $parent_keys:expr, $failed_query_indexes:expr) => {
|
($tenant_tokens:expr_2021, $parent_keys:expr_2021, $failed_query_indexes:expr_2021) => {
|
||||||
let mut server = Server::new_auth().await;
|
let mut server = Server::new_auth().await;
|
||||||
server.use_admin_key("MASTER_KEY").await;
|
server.use_admin_key("MASTER_KEY").await;
|
||||||
let index = server.index("sales");
|
let index = server.index("sales");
|
||||||
|
|||||||
@@ -63,7 +63,7 @@ impl Encoder {
         buffer
     }

-    pub fn header(self: &Encoder) -> Option<impl TryIntoHeaderPair> {
+    pub fn header(self: &Encoder) -> Option<impl TryIntoHeaderPair + use<>> {
        match self {
            Self::Plain => None,
            Self::Gzip => Some(("Content-Encoding", "gzip")),
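Note: the return type gains `+ use<>` (and `+ use<State>` in the test server helpers further down). Under the Rust 2024 capture rules, a return-position `impl Trait` captures all in-scope lifetimes by default, so an opaque type returned from a `&self` method could start borrowing from `self`; the precise-capturing `use<…>` syntax opts back out, with an empty `use<>` capturing nothing and `use<State>` capturing only that type parameter. A minimal sketch with a hypothetical type, under the assumption of a toolchain that supports precise capturing:

```rust
struct Encoder {
    label: String,
}

impl Encoder {
    // Without `+ use<>`, the 2024 rules would let the opaque type capture the
    // `&self` lifetime and keep the borrow alive as long as the returned value;
    // `use<>` declares that nothing is captured.
    fn header(&self) -> Option<impl Iterator<Item = u8> + use<>> {
        let first = self.label.bytes().next()?;
        Some(std::iter::once(first))
    }
}

fn main() {
    let encoder = Encoder { label: "gzip".to_string() };
    let header = encoder.header();
    drop(encoder); // fine: the returned iterator does not borrow from `encoder`
    assert_eq!(header.and_then(|mut it| it.next()), Some(b'g'));
}
```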
@@ -259,7 +259,7 @@ impl<'a> Index<'a, Owned> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Index<'a, Shared> {
|
impl Index<'_, Shared> {
|
||||||
/// You cannot modify the content of a shared index, thus the delete_document_by_filter call
|
/// You cannot modify the content of a shared index, thus the delete_document_by_filter call
|
||||||
/// must fail. If the task successfully enqueue itself, we'll wait for the task to finishes,
|
/// must fail. If the task successfully enqueue itself, we'll wait for the task to finishes,
|
||||||
/// and if it succeed the function will panic.
|
/// and if it succeed the function will panic.
|
||||||
|
|||||||
@@ -25,13 +25,13 @@ pub struct Value(pub serde_json::Value);
|
|||||||
impl Value {
|
impl Value {
|
||||||
#[track_caller]
|
#[track_caller]
|
||||||
pub fn uid(&self) -> u64 {
|
pub fn uid(&self) -> u64 {
|
||||||
if let Some(uid) = self["uid"].as_u64() {
|
match self["uid"].as_u64() { Some(uid) => {
|
||||||
uid
|
uid
|
||||||
} else if let Some(uid) = self["taskUid"].as_u64() {
|
} _ => { match self["taskUid"].as_u64() { Some(uid) => {
|
||||||
uid
|
uid
|
||||||
} else {
|
} _ => {
|
||||||
panic!("Didn't find any task id in: {self}");
|
panic!("Didn't find any task id in: {self}");
|
||||||
}
|
}}}}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn has_uid(&self) -> bool {
|
pub fn has_uid(&self) -> bool {
|
||||||
@@ -150,7 +150,7 @@ macro_rules! json {
|
|||||||
/// Performs a search test on both post and get routes
|
/// Performs a search test on both post and get routes
|
||||||
#[macro_export]
|
#[macro_export]
|
||||||
macro_rules! test_post_get_search {
|
macro_rules! test_post_get_search {
|
||||||
($server:expr, $query:expr, |$response:ident, $status_code:ident | $block:expr) => {
|
($server:expr_2021, $query:expr_2021, |$response:ident, $status_code:ident | $block:expr_2021) => {
|
||||||
let post_query: meilisearch::routes::search::SearchQueryPost =
|
let post_query: meilisearch::routes::search::SearchQueryPost =
|
||||||
serde_json::from_str(&$query.clone().to_string()).unwrap();
|
serde_json::from_str(&$query.clone().to_string()).unwrap();
|
||||||
let get_query: meilisearch::routes::search::SearchQuery = post_query.into();
|
let get_query: meilisearch::routes::search::SearchQuery = post_query.into();
|
||||||
|
|||||||
@@ -43,9 +43,11 @@ impl Server<Owned> {
|
|||||||
let dir = TempDir::new().unwrap();
|
let dir = TempDir::new().unwrap();
|
||||||
|
|
||||||
if cfg!(windows) {
|
if cfg!(windows) {
|
||||||
std::env::set_var("TMP", TEST_TEMP_DIR.path());
|
// TODO: Audit that the environment access only happens in single-threaded code.
|
||||||
|
unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
|
||||||
} else {
|
} else {
|
||||||
std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
|
// TODO: Audit that the environment access only happens in single-threaded code.
|
||||||
|
unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
|
||||||
}
|
}
|
||||||
|
|
||||||
let options = default_settings(dir.path());
|
let options = default_settings(dir.path());
|
||||||
@@ -58,9 +60,11 @@ impl Server<Owned> {
|
|||||||
|
|
||||||
pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
|
pub async fn new_auth_with_options(mut options: Opt, dir: TempDir) -> Self {
|
||||||
if cfg!(windows) {
|
if cfg!(windows) {
|
||||||
std::env::set_var("TMP", TEST_TEMP_DIR.path());
|
// TODO: Audit that the environment access only happens in single-threaded code.
|
||||||
|
unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
|
||||||
} else {
|
} else {
|
||||||
std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
|
// TODO: Audit that the environment access only happens in single-threaded code.
|
||||||
|
unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
|
||||||
}
|
}
|
||||||
|
|
||||||
options.master_key = Some("MASTER_KEY".to_string());
|
options.master_key = Some("MASTER_KEY".to_string());
|
||||||
@@ -191,9 +195,11 @@ impl Server<Shared> {
|
|||||||
let dir = TempDir::new().unwrap();
|
let dir = TempDir::new().unwrap();
|
||||||
|
|
||||||
if cfg!(windows) {
|
if cfg!(windows) {
|
||||||
std::env::set_var("TMP", TEST_TEMP_DIR.path());
|
// TODO: Audit that the environment access only happens in single-threaded code.
|
||||||
|
unsafe { std::env::set_var("TMP", TEST_TEMP_DIR.path()) };
|
||||||
} else {
|
} else {
|
||||||
std::env::set_var("TMPDIR", TEST_TEMP_DIR.path());
|
// TODO: Audit that the environment access only happens in single-threaded code.
|
||||||
|
unsafe { std::env::set_var("TMPDIR", TEST_TEMP_DIR.path()) };
|
||||||
}
|
}
|
||||||
|
|
||||||
let options = default_settings(dir.path());
|
let options = default_settings(dir.path());
|
||||||
@@ -296,9 +302,9 @@ impl<State> Server<State> {
|
|||||||
&self,
|
&self,
|
||||||
) -> impl actix_web::dev::Service<
|
) -> impl actix_web::dev::Service<
|
||||||
actix_http::Request,
|
actix_http::Request,
|
||||||
Response = ServiceResponse<impl MessageBody>,
|
Response = ServiceResponse<impl MessageBody + use<State>>,
|
||||||
Error = actix_web::Error,
|
Error = actix_web::Error,
|
||||||
> {
|
> + use<State> {
|
||||||
self.service.init_web_app().await
|
self.service.init_web_app().await
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -116,9 +116,9 @@ impl Service {
|
|||||||
&self,
|
&self,
|
||||||
) -> impl actix_web::dev::Service<
|
) -> impl actix_web::dev::Service<
|
||||||
actix_http::Request,
|
actix_http::Request,
|
||||||
Response = ServiceResponse<impl MessageBody>,
|
Response = ServiceResponse<impl MessageBody + use<>>,
|
||||||
Error = actix_web::Error,
|
Error = actix_web::Error,
|
||||||
> {
|
> + use<> {
|
||||||
let (_route_layer, route_layer_handle) =
|
let (_route_layer, route_layer_handle) =
|
||||||
tracing_subscriber::reload::Layer::new(None.with_filter(
|
tracing_subscriber::reload::Layer::new(None.with_filter(
|
||||||
tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),
|
tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),
|
||||||
|
|||||||
@@ -10,10 +10,10 @@ use crate::json;
|
|||||||
|
|
||||||
macro_rules! verify_snapshot {
|
macro_rules! verify_snapshot {
|
||||||
(
|
(
|
||||||
$orig:expr,
|
$orig:expr_2021,
|
||||||
$snapshot: expr,
|
$snapshot: expr_2021,
|
||||||
|$server:ident| =>
|
|$server:ident| =>
|
||||||
$($e:expr,)+) => {
|
$($e:expr_2021,)+) => {
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
let snapshot = Arc::new($snapshot);
|
let snapshot = Arc::new($snapshot);
|
||||||
let orig = Arc::new($orig);
|
let orig = Arc::new($orig);
|
||||||
|
|||||||
@@ -228,7 +228,7 @@ async fn list_tasks_status_and_type_filtered() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
macro_rules! assert_valid_summarized_task {
|
macro_rules! assert_valid_summarized_task {
|
||||||
($response:expr, $task_type:literal, $index:literal) => {{
|
($response:expr_2021, $task_type:literal, $index:literal) => {{
|
||||||
assert_eq!($response.as_object().unwrap().len(), 5);
|
assert_eq!($response.as_object().unwrap().len(), 5);
|
||||||
assert!($response["taskUid"].as_u64().is_some());
|
assert!($response["taskUid"].as_u64().is_some());
|
||||||
assert_eq!($response["indexUid"], $index);
|
assert_eq!($response["indexUid"], $index);
|
||||||
|
|||||||
@@ -577,14 +577,14 @@ fn export_documents(
|
|||||||
return Err(meilisearch_types::milli::Error::UserError(
|
return Err(meilisearch_types::milli::Error::UserError(
|
||||||
meilisearch_types::milli::UserError::InvalidVectorsMapType {
|
meilisearch_types::milli::UserError::InvalidVectorsMapType {
|
||||||
document_id: {
|
document_id: {
|
||||||
if let Ok(Some(Ok(index))) = index
|
match index
|
||||||
.external_id_of(&rtxn, std::iter::once(id))
|
.external_id_of(&rtxn, std::iter::once(id))
|
||||||
.map(|it| it.into_iter().next())
|
.map(|it| it.into_iter().next())
|
||||||
{
|
{ Ok(Some(Ok(index))) => {
|
||||||
index
|
index
|
||||||
} else {
|
} _ => {
|
||||||
format!("internal docid={id}")
|
format!("internal docid={id}")
|
||||||
}
|
}}
|
||||||
},
|
},
|
||||||
value: vectors.clone(),
|
value: vectors.clone(),
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
 [package]
 name = "milli"
-edition = "2021"
+edition = "2024"
 publish = false

 version.workspace = true
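Note: the crate manifests move from `edition = "2021"` (or `"2018"` for the fuzz crate) to `edition = "2024"`, which is what drives most of the source changes in this comparison; the 2024 edition shipped with Rust 1.85, so the toolchain must be at least that new. A minimal sketch of how such a migration is typically staged (lint levels and steps here are illustrative, not a description of this repository's process):

```rust
// Step 1 (still on the old edition): surface what the new edition will change.
// `rust_2024_compatibility` is the migration lint group `cargo fix --edition`
// works from.
#![warn(rust_2024_compatibility)]

fn main() {
    // Step 2: `cargo fix --edition` applies the mechanical rewrites (the
    // `match`, `expr_2021`, and `unsafe { env::set_var }` changes in this diff).
    // Step 3: flip `edition = "2024"` in Cargo.toml and re-run the test suite.
    println!("migration checklist, not production code");
}
```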
@@ -271,7 +271,7 @@ fn fetch_matching_values_in_object(
 }

 fn starts_with(selector: &str, key: &str) -> bool {
-    selector.strip_prefix(key).map_or(false, |tail| {
+    selector.strip_prefix(key).is_some_and(|tail| {
         tail.chars().next().map(|c| c == PRIMARY_KEY_SPLIT_SYMBOL).unwrap_or(true)
     })
 }
@@ -8,7 +8,7 @@ use crate::documents::DocumentsBatchBuilder;
|
|||||||
use crate::Object;
|
use crate::Object;
|
||||||
|
|
||||||
macro_rules! tri {
|
macro_rules! tri {
|
||||||
($e:expr) => {
|
($e:expr_2021) => {
|
||||||
match $e {
|
match $e {
|
||||||
Ok(r) => r,
|
Ok(r) => r,
|
||||||
Err(e) => return Ok(Err(e.into())),
|
Err(e) => return Ok(Err(e.into())),
|
||||||
@@ -27,7 +27,7 @@ impl<'a, W> DocumentVisitor<'a, W> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
|
impl<'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'_, W> {
|
||||||
/// This Visitor value is nothing, since it write the value to a file.
|
/// This Visitor value is nothing, since it write the value to a file.
|
||||||
type Value = Result<(), Error>;
|
type Value = Result<(), Error>;
|
||||||
|
|
||||||
@@ -61,7 +61,7 @@ impl<'a, 'de, W: Write> Visitor<'de> for &mut DocumentVisitor<'a, W> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, 'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'a, W>
|
impl<'de, W> DeserializeSeed<'de> for &mut DocumentVisitor<'_, W>
|
||||||
where
|
where
|
||||||
W: Write,
|
W: Write,
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -151,7 +151,7 @@ and can not be more than 511 bytes.", .document_id.to_string()
|
|||||||
matching_rule_indices: HashMap<String, usize>,
|
matching_rule_indices: HashMap<String, usize>,
|
||||||
},
|
},
|
||||||
#[error(transparent)]
|
#[error(transparent)]
|
||||||
InvalidGeoField(#[from] GeoError),
|
InvalidGeoField(#[from] Box<GeoError>),
|
||||||
#[error("Invalid vector dimensions: expected: `{}`, found: `{}`.", .expected, .found)]
|
#[error("Invalid vector dimensions: expected: `{}`, found: `{}`.", .expected, .found)]
|
||||||
InvalidVectorDimensions { expected: usize, found: usize },
|
InvalidVectorDimensions { expected: usize, found: usize },
|
||||||
#[error("The `_vectors` field in the document with id: `{document_id}` is not an object. Was expecting an object with a key for each embedder with manually provided vectors, but instead got `{value}`")]
|
#[error("The `_vectors` field in the document with id: `{document_id}` is not an object. Was expecting an object with a key for each embedder with manually provided vectors, but instead got `{value}`")]
|
||||||
@@ -519,7 +519,7 @@ error_from_sub_error! {
|
|||||||
str::Utf8Error => InternalError,
|
str::Utf8Error => InternalError,
|
||||||
ThreadPoolBuildError => InternalError,
|
ThreadPoolBuildError => InternalError,
|
||||||
SerializationError => InternalError,
|
SerializationError => InternalError,
|
||||||
GeoError => UserError,
|
Box<GeoError> => UserError,
|
||||||
CriterionError => UserError,
|
CriterionError => UserError,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
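Note: the hunks above change `InvalidGeoField(#[from] GeoError)` to `InvalidGeoField(#[from] Box<GeoError>)` and update the `error_from_sub_error!` mapping accordingly. Boxing the largest variant keeps the whole error enum, and every `Result` that carries it, pointer-sized for that arm; this is the usual answer to a lint such as Clippy's `result_large_err`, though the exact motivation is not stated in the diff. A minimal sketch with hypothetical stand-in types:

```rust
// A deliberately large payload standing in for the geo error details.
#[derive(Debug)]
struct GeoError {
    raw_document: [u8; 256],
}

#[derive(Debug)]
enum UserError {
    // Boxing stores a pointer instead of embedding the 256-byte payload
    // directly in every `UserError` value.
    InvalidGeoField(Box<GeoError>),
}

impl From<GeoError> for UserError {
    fn from(err: GeoError) -> Self {
        UserError::InvalidGeoField(Box::new(err))
    }
}

fn main() {
    println!("size of UserError: {} bytes", std::mem::size_of::<UserError>());
    let err: UserError = GeoError { raw_document: [0; 256] }.into();
    let UserError::InvalidGeoField(geo) = err;
    assert_eq!(geo.raw_document.len(), 256);
}
```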
@@ -25,7 +25,7 @@ impl ExternalDocumentsIds {
|
|||||||
|
|
||||||
/// Returns `true` if hard and soft external documents lists are empty.
|
/// Returns `true` if hard and soft external documents lists are empty.
|
||||||
pub fn is_empty(&self, rtxn: &RoTxn<'_>) -> heed::Result<bool> {
|
pub fn is_empty(&self, rtxn: &RoTxn<'_>) -> heed::Result<bool> {
|
||||||
self.0.is_empty(rtxn).map_err(Into::into)
|
self.0.is_empty(rtxn)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get<A: AsRef<str>>(
|
pub fn get<A: AsRef<str>>(
|
||||||
|
|||||||
@@ -119,7 +119,7 @@ impl<'indexing> GlobalFieldsIdsMap<'indexing> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'indexing> MutFieldIdMapper for GlobalFieldsIdsMap<'indexing> {
|
impl MutFieldIdMapper for GlobalFieldsIdsMap<'_> {
|
||||||
fn insert(&mut self, name: &str) -> Option<FieldId> {
|
fn insert(&mut self, name: &str) -> Option<FieldId> {
|
||||||
self.id_or_insert(name)
|
self.id_or_insert(name)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2954,10 +2954,15 @@ pub(crate) mod tests {
|
|||||||
documents!({ "id" : 6, RESERVED_GEO_FIELD_NAME: {"lat": "unparseable", "lng": "unparseable"}}),
|
documents!({ "id" : 6, RESERVED_GEO_FIELD_NAME: {"lat": "unparseable", "lng": "unparseable"}}),
|
||||||
)
|
)
|
||||||
.unwrap_err();
|
.unwrap_err();
|
||||||
assert!(matches!(
|
match err1 {
|
||||||
err1,
|
Error::UserError(UserError::InvalidGeoField(err)) => match *err {
|
||||||
Error::UserError(UserError::InvalidGeoField(GeoError::BadLatitudeAndLongitude { .. }))
|
GeoError::BadLatitudeAndLongitude { .. } => (),
|
||||||
));
|
otherwise => {
|
||||||
|
panic!("err1 is not a BadLatitudeAndLongitude error but rather a {otherwise:?}")
|
||||||
|
}
|
||||||
|
},
|
||||||
|
_ => panic!("err1 is not a BadLatitudeAndLongitude error but rather a {err1:?}"),
|
||||||
|
}
|
||||||
|
|
||||||
db_snap!(index, geo_faceted_documents_ids); // ensure that no more document was inserted
|
db_snap!(index, geo_faceted_documents_ids); // ensure that no more document was inserted
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -204,7 +204,7 @@ pub fn relative_from_absolute_position(absolute: Position) -> (FieldId, Relative

 // Compute the absolute word position with the field id of the attribute and relative position in the attribute.
 pub fn absolute_from_relative_position(field_id: FieldId, relative: RelativePosition) -> Position {
-    (field_id as u32) << 16 | (relative as u32)
+    ((field_id as u32) << 16) | (relative as u32)
 }
 // TODO: this is wrong, but will do for now
 /// Compute the "bucketed" absolute position from the field id and relative position in the field.
@@ -372,7 +372,7 @@ pub fn is_faceted(field: &str, faceted_fields: impl IntoIterator<Item = impl AsR
 /// assert!(!is_faceted_by("animaux.chien", "animaux.chie"));
 /// ```
 pub fn is_faceted_by(field: &str, facet: &str) -> bool {
-    field.starts_with(facet) && field[facet.len()..].chars().next().map_or(true, |c| c == '.')
+    field.starts_with(facet) && field[facet.len()..].chars().next().is_none_or(|c| c == '.')
 }

 pub fn normalize_facet(original: &str) -> String {
@@ -15,7 +15,7 @@ impl<'a, D: ObjectView, F: ArrayView> Context<'a, D, F> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
|
impl<D: ObjectView, F: ArrayView> ObjectView for Context<'_, D, F> {
|
||||||
fn as_value(&self) -> &dyn ValueView {
|
fn as_value(&self) -> &dyn ValueView {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@@ -52,7 +52,7 @@ impl<'a, D: ObjectView, F: ArrayView> ObjectView for Context<'a, D, F> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a, D: ObjectView, F: ArrayView> ValueView for Context<'a, D, F> {
|
impl<D: ObjectView, F: ArrayView> ValueView for Context<'_, D, F> {
|
||||||
fn as_debug(&self) -> &dyn std::fmt::Debug {
|
fn as_debug(&self) -> &dyn std::fmt::Debug {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -67,7 +67,7 @@ impl<'a> Document<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> ObjectView for Document<'a> {
|
impl ObjectView for Document<'_> {
|
||||||
fn as_value(&self) -> &dyn ValueView {
|
fn as_value(&self) -> &dyn ValueView {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@@ -98,7 +98,7 @@ impl<'a> ObjectView for Document<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> ValueView for Document<'a> {
|
impl ValueView for Document<'_> {
|
||||||
fn as_debug(&self) -> &dyn Debug {
|
fn as_debug(&self) -> &dyn Debug {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@@ -283,7 +283,7 @@ impl<'doc> ParseableArray<'doc> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'doc> ArrayView for ParseableArray<'doc> {
|
impl ArrayView for ParseableArray<'_> {
|
||||||
fn as_value(&self) -> &dyn ValueView {
|
fn as_value(&self) -> &dyn ValueView {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@@ -311,7 +311,7 @@ impl<'doc> ArrayView for ParseableArray<'doc> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'doc> ValueView for ParseableArray<'doc> {
|
impl ValueView for ParseableArray<'_> {
|
||||||
fn as_debug(&self) -> &dyn std::fmt::Debug {
|
fn as_debug(&self) -> &dyn std::fmt::Debug {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@@ -353,7 +353,7 @@ impl<'doc> ValueView for ParseableArray<'doc> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'doc> ObjectView for ParseableMap<'doc> {
|
impl ObjectView for ParseableMap<'_> {
|
||||||
fn as_value(&self) -> &dyn ValueView {
|
fn as_value(&self) -> &dyn ValueView {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@@ -392,7 +392,7 @@ impl<'doc> ObjectView for ParseableMap<'doc> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'doc> ValueView for ParseableMap<'doc> {
|
impl ValueView for ParseableMap<'_> {
|
||||||
fn as_debug(&self) -> &dyn std::fmt::Debug {
|
fn as_debug(&self) -> &dyn std::fmt::Debug {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@@ -441,7 +441,7 @@ impl<'doc> ValueView for ParseableMap<'doc> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'doc> ValueView for ParseableValue<'doc> {
|
impl ValueView for ParseableValue<'_> {
|
||||||
fn as_debug(&self) -> &dyn Debug {
|
fn as_debug(&self) -> &dyn Debug {
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@@ -622,7 +622,7 @@ struct ArraySource<'s, 'doc> {
|
|||||||
s: &'s RawVec<'doc>,
|
s: &'s RawVec<'doc>,
|
||||||
}
|
}
|
||||||
|
|
||||||
-impl<'s, 'doc> fmt::Display for ArraySource<'s, 'doc> {
+impl fmt::Display for ArraySource<'_, '_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 write!(f, "[")?;
 for item in self.s {

@@ -638,7 +638,7 @@ struct ArrayRender<'s, 'doc> {
 s: &'s RawVec<'doc>,
 }

-impl<'s, 'doc> fmt::Display for ArrayRender<'s, 'doc> {
+impl fmt::Display for ArrayRender<'_, '_> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 for item in self.s {
 let v = ParseableValue::new(item, self.s.bump());

@@ -17,7 +17,7 @@ pub struct FieldValue<'a, D: ObjectView> {
 metadata: Metadata,
 }

-impl<'a, D: ObjectView> ValueView for FieldValue<'a, D> {
+impl<D: ObjectView> ValueView for FieldValue<'_, D> {
 fn as_debug(&self) -> &dyn std::fmt::Debug {
 self
 }

@@ -78,7 +78,7 @@ impl<'a, D: ObjectView> FieldValue<'a, D> {
 }
 }

-impl<'a, D: ObjectView> ObjectView for FieldValue<'a, D> {
+impl<D: ObjectView> ObjectView for FieldValue<'_, D> {
 fn as_value(&self) -> &dyn ValueView {
 self
 }

@@ -148,7 +148,7 @@ impl<'a, 'map, D: ObjectView> BorrowedFields<'a, 'map, D> {
 }
 }

-impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
+impl<D: ObjectView> ArrayView for OwnedFields<'_, D> {
 fn as_value(&self) -> &dyn ValueView {
 self.0.as_value()
 }

@@ -170,7 +170,7 @@ impl<'a, D: ObjectView> ArrayView for OwnedFields<'a, D> {
 }
 }

-impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
+impl<D: ObjectView> ArrayView for BorrowedFields<'_, '_, D> {
 fn as_value(&self) -> &dyn ValueView {
 self
 }

@@ -212,7 +212,7 @@ impl<'a, 'map, D: ObjectView> ArrayView for BorrowedFields<'a, 'map, D> {
 }
 }

-impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
+impl<D: ObjectView> ValueView for BorrowedFields<'_, '_, D> {
 fn as_debug(&self) -> &dyn std::fmt::Debug {
 self
 }

@@ -254,7 +254,7 @@ impl<'a, 'map, D: ObjectView> ValueView for BorrowedFields<'a, 'map, D> {
 }
 }

-impl<'a, D: ObjectView> ValueView for OwnedFields<'a, D> {
+impl<D: ObjectView> ValueView for OwnedFields<'_, D> {
 fn as_debug(&self) -> &dyn std::fmt::Debug {
 self
 }

@@ -292,7 +292,7 @@ struct ArraySource<'a, 'map, D: ObjectView> {
 s: &'a BorrowedFields<'a, 'map, D>,
 }

-impl<'a, 'map, D: ObjectView> fmt::Display for ArraySource<'a, 'map, D> {
+impl<D: ObjectView> fmt::Display for ArraySource<'_, '_, D> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 write!(f, "[")?;
 for item in self.s.values() {

@@ -307,7 +307,7 @@ struct ArrayRender<'a, 'map, D: ObjectView> {
 s: &'a BorrowedFields<'a, 'map, D>,
 }

-impl<'a, 'map, D: ObjectView> fmt::Display for ArrayRender<'a, 'map, D> {
+impl<D: ObjectView> fmt::Display for ArrayRender<'_, '_, D> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 for item in self.s.values() {
 write!(f, "{}", item.render())?;
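The hunks above all apply the same mechanical change: lifetime parameters that are only forwarded from the impl header to the type are elided with '_ instead of being declared on the impl, which is what lints such as clippy's needless_lifetimes nudge toward. A minimal sketch of the idiom on a made-up type (Wrapper is illustrative, not from the repository):

    use std::fmt;

    // Illustrative example, not code from this repository.
    struct Wrapper<'a>(&'a str);

    // Declaring the lifetime on the impl works but adds noise when it is
    // only threaded through:
    //
    //     impl<'a> fmt::Display for Wrapper<'a> { ... }
    //
    // The anonymous-lifetime form below means exactly the same thing.
    impl fmt::Display for Wrapper<'_> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "{}", self.0)
        }
    }

    fn main() {
        assert_eq!(Wrapper("hello").to_string(), "hello");
    }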
@@ -301,26 +301,26 @@ impl<'a> FacetDistribution<'a> {
 let mut distribution = BTreeMap::new();
 for (fid, name) in fields_ids_map.iter() {
 if self.select_field(name, &filterable_attributes_rules) {
-let min_value = if let Some(min_value) = crate::search::facet::facet_min_value(
+let min_value = match crate::search::facet::facet_min_value(
 self.index,
 self.rtxn,
 fid,
 candidates.clone(),
-)? {
+)? { Some(min_value) => {
 min_value
-} else {
+} _ => {
 continue;
-};
+}};
-let max_value = if let Some(max_value) = crate::search::facet::facet_max_value(
+let max_value = match crate::search::facet::facet_max_value(
 self.index,
 self.rtxn,
 fid,
 candidates.clone(),
-)? {
+)? { Some(max_value) => {
 max_value
-} else {
+} _ => {
 continue;
-};
+}};

 distribution.insert(name.to_string(), (min_value, max_value));
 }
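This hunk, and the many similar ones below, rewrite if let ... else expressions into equivalent match expressions. That shape is characteristic of the automated Rust 2024 edition migration (cargo fix --edition), which performs the rewrite to preserve the pre-2024 drop order of temporaries in the scrutinee; behaviour does not change. A reduced sketch of the transformation on a hypothetical helper (facet_min and describe are illustrative names):

    // Illustrative example, not code from this repository.
    fn facet_min(values: &[i64]) -> Option<i64> {
        values.iter().copied().min()
    }

    fn describe(values: &[i64]) -> String {
        // Pre-migration form:
        //
        //     let min = if let Some(min) = facet_min(values) { min } else { return "empty".into() };
        //
        // Post-migration form, equivalent but spelled as a `match`:
        let min = match facet_min(values) {
            Some(min) => min,
            _ => return "empty".into(),
        };
        format!("min = {min}")
    }

    fn main() {
        assert_eq!(describe(&[3, 1, 2]), "min = 1");
        assert_eq!(describe(&[]), "empty");
    }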
@@ -358,7 +358,7 @@ impl<'a> FacetDistribution<'a> {
 ) -> bool {
 // If the field is not filterable, we don't want to compute the facet distribution.
 if !matching_features(name, filterable_attributes_rules)
-.map_or(false, |(_, features)| features.is_filterable())
+.is_some_and(|(_, features)| features.is_filterable())
 {
 return false;
 }
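Here and in several later hunks, Option::map_or(false, f) becomes Option::is_some_and(f) and Option::map_or(true, f) becomes Option::is_none_or(f). The replacements are equivalent but state the intent directly, and recent clippy releases flag the map_or form. A small sketch:

    // Illustrative example, not code from this repository.
    fn main() {
        let present: Option<u32> = Some(3);
        let absent: Option<u32> = None;

        // `map_or(false, f)` asks "is there a value and does it satisfy f?"
        assert_eq!(present.map_or(false, |v| v > 2), present.is_some_and(|v| v > 2));

        // `map_or(true, f)` asks "is the value absent, or does it satisfy f?"
        assert_eq!(absent.map_or(true, |v| v > 2), absent.is_none_or(|v| v > 2));
    }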
@@ -383,8 +383,7 @@ impl<'a> FacetDistribution<'a> {
 if let Some(facets) = &self.facets {
 for field in facets.keys() {
 let matched_rule = matching_features(field, filterable_attributes_rules);
-let is_filterable =
-matched_rule.map_or(false, |(_, features)| features.is_filterable());
+let is_filterable = matched_rule.is_some_and(|(_, f)| f.is_filterable());

 if !is_filterable {
 invalid_facets.insert(field.to_string());

@@ -37,12 +37,12 @@ where
 let mut fd = LexicographicFacetDistribution { rtxn, db, field_id, callback };
 let highest_level = get_highest_level(rtxn, db, field_id)?;

-if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
+match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
 fd.iterate(candidates, highest_level, first_bound, usize::MAX)?;
 Ok(())
-} else {
+} _ => {
 Ok(())
-}
+}}
 }

 pub fn count_iterate_over_facet_distribution<'t, CB>(

@@ -53,17 +53,16 @@ where
 let mut f = FacetRangeSearch { rtxn, db, field_id, left, right, universe, docids };
 let highest_level = get_highest_level(rtxn, db, field_id)?;

-if let Some(starting_left_bound) =
-get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?
-{
+match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?
+{ Some(starting_left_bound) => {
 let rightmost_bound =
 Bound::Included(get_last_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?.unwrap()); // will not fail because get_first_facet_value succeeded
 let group_size = usize::MAX;
 f.run(highest_level, starting_left_bound, rightmost_bound, group_size)?;
 Ok(())
-} else {
+} _ => {
 Ok(())
-}
+}}
 }

 /// Fetch the document ids that have a facet with a value between the two given bounds

@@ -79,7 +78,7 @@ struct FacetRangeSearch<'t, 'b, 'bitmap> {
 docids: &'bitmap mut RoaringBitmap,
 }

-impl<'t, 'b, 'bitmap> FacetRangeSearch<'t, 'b, 'bitmap> {
+impl<'t> FacetRangeSearch<'t, '_, '_> {
 fn run_level_0(&mut self, starting_left_bound: &'t [u8], group_size: usize) -> Result<()> {
 let left_key =
 FacetGroupKey { field_id: self.field_id, level: 0, left_bound: starting_left_bound };

@@ -36,7 +36,7 @@ pub fn ascending_facet_sort<'t>(
 candidates: RoaringBitmap,
 ) -> Result<impl Iterator<Item = Result<(RoaringBitmap, &'t [u8])>> + 't> {
 let highest_level = get_highest_level(rtxn, db, field_id)?;
-if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
+match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
 let first_key = FacetGroupKey { field_id, level: highest_level, left_bound: first_bound };
 let iter = db.range(rtxn, &(first_key..)).unwrap().take(usize::MAX);

@@ -46,9 +46,9 @@ pub fn ascending_facet_sort<'t>(
 field_id,
 stack: vec![(candidates, iter)],
 }))
-} else {
+} _ => {
 Ok(itertools::Either::Right(std::iter::empty()))
-}
+}}
 }

 struct AscendingFacetSort<'t, 'e> {

@@ -62,7 +62,7 @@ struct AscendingFacetSort<'t, 'e> {
 )>,
 }

-impl<'t, 'e> Iterator for AscendingFacetSort<'t, 'e> {
+impl<'t> Iterator for AscendingFacetSort<'t, '_> {
 type Item = Result<(RoaringBitmap, &'t [u8])>;

 fn next(&mut self) -> Option<Self::Item> {

@@ -19,7 +19,7 @@ pub fn descending_facet_sort<'t>(
 candidates: RoaringBitmap,
 ) -> Result<impl Iterator<Item = Result<(RoaringBitmap, &'t [u8])>> + 't> {
 let highest_level = get_highest_level(rtxn, db, field_id)?;
-if let Some(first_bound) = get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? {
+match get_first_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)? { Some(first_bound) => {
 let first_key = FacetGroupKey { field_id, level: highest_level, left_bound: first_bound };
 let last_bound = get_last_facet_value::<BytesRefCodec, _>(rtxn, db, field_id)?.unwrap();
 let last_key = FacetGroupKey { field_id, level: highest_level, left_bound: last_bound };

@@ -30,9 +30,9 @@ pub fn descending_facet_sort<'t>(
 field_id,
 stack: vec![(candidates, iter, Bound::Included(last_bound))],
 }))
-} else {
+} _ => {
 Ok(itertools::Either::Right(std::iter::empty()))
-}
+}}
 }

 struct DescendingFacetSort<'t> {

@@ -66,15 +66,15 @@ enum FilterError<'a> {
 ParseGeoError(BadGeoError),
 TooDeep,
 }
-impl<'a> std::error::Error for FilterError<'a> {}
+impl std::error::Error for FilterError<'_> {}

-impl<'a> From<BadGeoError> for FilterError<'a> {
+impl From<BadGeoError> for FilterError<'_> {
 fn from(geo_error: BadGeoError) -> Self {
 FilterError::ParseGeoError(geo_error)
 }
 }

-impl<'a> Display for FilterError<'a> {
+impl Display for FilterError<'_> {
 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 match self {
 Self::AttributeNotFilterable { attribute, filterable_patterns } => {

@@ -237,7 +237,7 @@ impl<'a> Filter<'a> {
 for fid in self.condition.fids(MAX_FILTER_DEPTH) {
 let attribute = fid.value();
 if matching_features(attribute, &filterable_attributes_rules)
-.map_or(false, |(_, features)| features.is_filterable())
+.is_some_and(|(_, features)| features.is_filterable())
 {
 continue;
 }

@@ -461,7 +461,7 @@ impl<'a> Filter<'a> {
 filterable_attribute_rules: &[FilterableAttributesRule],
 universe: Option<&RoaringBitmap>,
 ) -> Result<RoaringBitmap> {
-if universe.map_or(false, |u| u.is_empty()) {
+if universe.is_some_and(|u| u.is_empty()) {
 return Ok(RoaringBitmap::new());
 }

@@ -23,7 +23,7 @@ fn facet_extreme_value<'t>(
 mut extreme_it: impl Iterator<Item = heed::Result<(RoaringBitmap, &'t [u8])>> + 't,
 ) -> Result<Option<f64>> {
 let extreme_value =
-if let Some(extreme_value) = extreme_it.next() { extreme_value } else { return Ok(None) };
+match extreme_it.next() { Some(extreme_value) => { extreme_value } _ => { return Ok(None) }};
 let (_, extreme_value) = extreme_value?;
 OrderedF64Codec::bytes_decode(extreme_value)
 .map(Some)

@@ -67,14 +67,14 @@ where
 level0prefix.push(0);
 let mut level0_iter_forward =
 db.remap_types::<Bytes, DecodeIgnore>().prefix_iter(txn, level0prefix.as_slice())?;
-if let Some(first) = level0_iter_forward.next() {
+match level0_iter_forward.next() { Some(first) => {
 let (first_key, _) = first?;
 let first_key = FacetGroupKeyCodec::<BoundCodec>::bytes_decode(first_key)
 .map_err(heed::Error::Decoding)?;
 Ok(Some(first_key.left_bound))
-} else {
+} _ => {
 Ok(None)
-}
+}}
 }

 /// Get the last facet value in the facet database

@@ -91,14 +91,14 @@ where
 level0prefix.push(0);
 let mut level0_iter_backward =
 db.remap_types::<Bytes, DecodeIgnore>().rev_prefix_iter(txn, level0prefix.as_slice())?;
-if let Some(last) = level0_iter_backward.next() {
+match level0_iter_backward.next() { Some(last) => {
 let (last_key, _) = last?;
 let last_key = FacetGroupKeyCodec::<BoundCodec>::bytes_decode(last_key)
 .map_err(heed::Error::Decoding)?;
 Ok(Some(last_key.left_bound))
-} else {
+} _ => {
 Ok(None)
-}
+}}
 }

 /// Get the height of the highest level in the facet database

@@ -77,7 +77,7 @@ impl<'a> SearchForFacetValues<'a> {
 let filterable_attributes_rules = index.filterable_attributes_rules(rtxn)?;
 let matched_rule = matching_features(&self.facet, &filterable_attributes_rules);
 let is_facet_searchable =
-matched_rule.map_or(false, |(_, features)| features.is_facet_searchable());
+matched_rule.is_some_and(|(_, features)| features.is_facet_searchable());

 if !is_facet_searchable {
 let matching_field_names =

@@ -135,7 +135,7 @@ impl<'a> SearchForFacetValues<'a> {

 if authorize_typos && field_authorizes_typos {
 let exact_words_fst = self.search_query.index.exact_words(rtxn)?;
-if exact_words_fst.map_or(false, |fst| fst.contains(query)) {
+if exact_words_fst.is_some_and(|fst| fst.contains(query)) {
 if fst.contains(query) {
 self.fetch_original_facets_using_normalized(
 fid,

@@ -151,7 +151,7 @@ impl ScoreWithRatioResult {
 }
 }

-impl<'a> Search<'a> {
+impl Search<'_> {
 #[tracing::instrument(level = "trace", skip_all, target = "search::hybrid")]
 pub fn execute_hybrid(&self, semantic_ratio: f32) -> Result<(SearchResult, Option<u32>)> {
 // TODO: find classier way to achieve that than to reset vector and query params

@@ -191,8 +191,7 @@ impl<'a> Search<'a> {
 let filterable_fields = ctx.index.filterable_attributes_rules(ctx.txn)?;
 // check if the distinct field is in the filterable fields
 let matched_rule = matching_features(distinct, &filterable_fields);
-let is_filterable =
-matched_rule.map_or(false, |(_, features)| features.is_filterable());
+let is_filterable = matched_rule.is_some_and(|(_, features)| features.is_filterable());

 if !is_filterable {
 // if not, remove the hidden fields from the filterable fields to generate the error message
@@ -146,7 +146,7 @@ pub fn bucket_sort<'ctx, Q: RankingRuleQueryTrait>(
 let mut cur_offset = 0usize;

 macro_rules! maybe_add_to_results {
-($candidates:expr) => {
+($candidates:expr_2021) => {
 maybe_add_to_results(
 ctx,
 from,
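The expr to expr_2021 change in the macro above is another edition-migration artifact: under the 2024 edition the expr fragment specifier also matches _ and const { ... } expressions, so the migration substitutes expr_2021 to keep the macro matching exactly what it matched before. A stand-alone sketch (log_value is an illustrative macro, not the one from the repository):

    // Illustrative example, not code from this repository.
    // `expr_2021` matches the pre-2024 expression grammar; plain `expr` on
    // the 2024 edition additionally accepts `_` and `const { ... }`.
    macro_rules! log_value {
        ($value:expr_2021) => {
            println!("value = {}", $value)
        };
    }

    fn main() {
        log_value!(2 + 2);
    }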
@@ -537,7 +537,7 @@ impl<'ctx> SearchContext<'ctx> {
 fid: u16,
 ) -> Result<Option<RoaringBitmap>> {
 // if the requested fid isn't in the restricted list, return None.
-if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
 return Ok(None);
 }

@@ -558,7 +558,7 @@ impl<'ctx> SearchContext<'ctx> {
 fid: u16,
 ) -> Result<Option<RoaringBitmap>> {
 // if the requested fid isn't in the restricted list, return None.
-if self.restricted_fids.as_ref().map_or(false, |fids| !fids.contains(&fid)) {
+if self.restricted_fids.as_ref().is_some_and(|fids| !fids.contains(&fid)) {
 return Ok(None);
 }

@@ -54,15 +54,15 @@ where
 /// Insert the given value into the dedup-interner, and return
 /// its index.
 pub fn insert(&mut self, s: T) -> Interned<T> {
-if let Some(interned) = self.lookup.get(&s) {
+match self.lookup.get(&s) { Some(interned) => {
 *interned
-} else {
+} _ => {
 assert!(self.stable_store.len() < u16::MAX as usize);
 self.stable_store.push(s.clone());
 let interned = Interned::from_raw(self.stable_store.len() as u16 - 1);
 self.lookup.insert(s, interned);
 interned
-}
+}}
 }
 /// Get a reference to the interned value.
 pub fn get(&self, interned: Interned<T>) -> &T {

@@ -117,7 +117,7 @@ impl<T> FixedSizeInterner<T> {
 pub fn map_indexes<U>(&self, map_f: impl Fn(Interned<T>) -> U) -> MappedInterner<T, U> {
 MappedInterner { stable_store: self.indexes().map(map_f).collect(), _phantom: PhantomData }
 }
-pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> {
+pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> + use<T> {
 (0..self.stable_store.len()).map(|i| Interned::from_raw(i as u16))
 }
 pub fn iter(&self) -> impl Iterator<Item = (Interned<T>, &T)> {

@@ -167,7 +167,7 @@ impl<T> Interner<T> {
 pub fn map_indexes<U>(&self, map_f: impl Fn(Interned<T>) -> U) -> MappedInterner<T, U> {
 MappedInterner { stable_store: self.indexes().map(map_f).collect(), _phantom: PhantomData }
 }
-pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> {
+pub fn indexes(&self) -> impl Iterator<Item = Interned<T>> + use<T> {
 (0..self.stable_store.len()).map(|i| Interned::from_raw(i as u16))
 }
 pub fn iter(&self) -> impl Iterator<Item = (Interned<T>, &T)> {
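The + use<T> additions above (and the + use<'ctx> one near the end of this diff) use precise capturing for impl Trait return types. Under the 2024 edition a return-position impl Trait captures the elided &self lifetime by default, which would needlessly tie the returned iterator to the borrow; listing only the generics that are actually needed presumably keeps the looser pre-2024 signature. A minimal sketch on a made-up container type (FixedStore is illustrative, not from the repository):

    // Illustrative example, not code from this repository.
    struct FixedStore<T> {
        items: Vec<T>,
    }

    impl<T> FixedStore<T> {
        // Without `use<T>`, the 2024 edition would also capture the anonymous
        // `&self` lifetime in the opaque return type, forcing callers to keep
        // the borrow of `self` alive while the iterator exists. The length is
        // read eagerly, so nothing here actually borrows `self`.
        fn indexes(&self) -> impl Iterator<Item = usize> + use<T> {
            0..self.items.len()
        }
    }

    fn main() {
        let store = FixedStore { items: vec!['a', 'b', 'c'] };
        let indexes = store.indexes();
        drop(store); // fine: the iterator holds no borrow of `store`
        assert_eq!(indexes.collect::<Vec<_>>(), vec![0, 1, 2]);
    }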
@@ -206,11 +206,11 @@ struct DetailedLoggerFinish<'ctx> {

 impl<'ctx> DetailedLoggerFinish<'ctx> {
 fn cur_file(&mut self) -> &mut BufWriter<File> {
-if let Some(file) = self.file_for_internal_state.as_mut() {
+match self.file_for_internal_state.as_mut() { Some(file) => {
 file
-} else {
+} _ => {
 &mut self.index_file
-}
+}}
 }
 fn pop_rr_action(&mut self) {
 self.file_for_internal_state = None;

@@ -531,11 +531,11 @@ fill: \"#B6E2D3\"
 paths: Vec<Vec<Interned<R::Condition>>>,
 ) -> Result<()> {
 self.make_new_file_for_internal_state_if_needed()?;
-let file = if let Some(file) = self.file_for_internal_state.as_mut() {
+let file = match self.file_for_internal_state.as_mut() { Some(file) => {
 file
-} else {
+} _ => {
 &mut self.index_file
-};
+}};
 writeln!(file, "Path {{")?;
 for (path_idx, condition_indexes) in paths.iter().enumerate() {
 writeln!(file, "{path_idx} {{")?;

@@ -72,7 +72,7 @@ pub fn find_best_match_interval(matches: &[Match], crop_size: usize) -> [&Match;
 let interval_score = get_interval_score(&matches[interval_first..=interval_last]);
 let is_interval_score_better = &best_interval
 .as_ref()
-.map_or(true, |MatchIntervalWithScore { score, .. }| interval_score > *score);
+.is_none_or(|MatchIntervalWithScore { score, .. }| interval_score > *score);

 if *is_interval_score_better {
 best_interval = Some(MatchIntervalWithScore {

@@ -123,7 +123,7 @@ pub struct Matcher<'t, 'tokenizer, 'b, 'lang> {
 matches: Option<(Vec<Token<'t>>, Vec<Match>)>,
 }

-impl<'t, 'tokenizer> Matcher<'t, 'tokenizer, '_, '_> {
+impl<'t> Matcher<'t, '_, '_, '_> {
 /// Iterates over tokens and save any of them that matches the query.
 fn compute_matches(&mut self) -> &mut Self {
 /// some words are counted as matches only if they are close together and in the good order,

@@ -853,7 +853,7 @@ fn check_sort_criteria(
 let sortable_fields = ctx.index.sortable_fields(ctx.txn)?;
 for asc_desc in sort_criteria {
 match asc_desc.member() {
-Member::Field(ref field) if !crate::is_faceted(field, &sortable_fields) => {
+Member::Field(field) if !crate::is_faceted(field, &sortable_fields) => {
 let (valid_fields, hidden_fields) =
 ctx.index.remove_hidden_fields(ctx.txn, sortable_fields)?;

@@ -327,7 +327,7 @@ impl QueryGraph {
 let mut peekable = term_with_frequency.into_iter().peekable();
 while let Some((idx, frequency)) = peekable.next() {
 term_weight.insert(idx, weight);
-if peekable.peek().map_or(false, |(_, f)| frequency != *f) {
+if peekable.peek().is_some_and(|(_, f)| frequency != *f) {
 weight += 1;
 }
 }

@@ -266,11 +266,11 @@ pub fn partially_initialized_term_from_word(
 }

 fn find_split_words(ctx: &mut SearchContext<'_>, word: &str) -> Result<Option<Interned<Phrase>>> {
-if let Some((l, r)) = split_best_frequency(ctx, word)? {
+match split_best_frequency(ctx, word)? { Some((l, r)) => {
 Ok(Some(ctx.phrase_interner.insert(Phrase { words: vec![Some(l), Some(r)] })))
-} else {
+} _ => {
 Ok(None)
-}
+}}
 }

 impl Interned<QueryTerm> {

@@ -418,7 +418,7 @@ fn split_best_frequency(
 let right = ctx.word_interner.insert(right.to_owned());

 if let Some(frequency) = ctx.get_db_word_pair_proximity_docids_len(None, left, right, 1)? {
-if best.map_or(true, |(old, _, _)| frequency > old) {
+if best.is_none_or(|(old, _, _)| frequency > old) {
 best = Some((frequency, left, right));
 }
 }

@@ -110,7 +110,7 @@ impl ExactTerm {
 pub fn interned_words<'ctx>(
 &self,
 ctx: &'ctx SearchContext<'ctx>,
-) -> impl Iterator<Item = Option<Interned<String>>> + 'ctx {
+) -> impl Iterator<Item = Option<Interned<String>>> + 'ctx + use<'ctx> {
 match *self {
 ExactTerm::Phrase(phrase) => {
 let phrase = ctx.phrase_interner.get(phrase);
Some files were not shown because too many files have changed in this diff.