Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-01 10:15:50 +00:00)

Comparing commits: base v1.6.0-rc. ... head reduce-max, 1 commit (d6868dbd47). In the diffs below, `-` lines belong to the base tag and `+` lines to the reduce-max branch.

.github/ISSUE_TEMPLATE/sprint_issue.md (vendored, 8 lines changed)

@@ -7,17 +7,19 @@ assignees: ''
 ---
 
-Related product team resources: [PRD]() (_internal only_)
+Related product team resources: [roadmap card]() (_internal only_) and [PRD]() (_internal only_)
 Related product discussion:
 Related spec: WIP
 
 ## Motivation
 
-<!---Copy/paste the information in PRD or briefly detail the product motivation. Ask product team if any hesitation.-->
+<!---Copy/paste the information in the roadmap resources or briefly detail the product motivation. Ask product team if any hesitation.-->
 
 ## Usage
 
-<!---Link to the public part of the PRD, or to the related product discussion for experimental features-->
+<!---Write a quick description of the usage if the usage has already been defined-->
+
+Refer to the final spec to know the details and the final decisions about the usage.
 
 ## TODO
 

.github/scripts/check-release.sh (vendored, 47 lines changed)

@@ -1,41 +1,24 @@
-#!/usr/bin/env bash
-set -eu -o pipefail
-
-check_tag() {
-    local expected=$1
-    local actual=$2
-    local filename=$3
-
-    if [[ $actual != $expected ]]; then
-        echo >&2 "Error: the current tag does not match the version in $filename: found $actual, expected $expected"
-        return 1
-    fi
-}
-
-read_version() {
-    grep '^version = ' | cut -d \" -f 2
-}
-
-if [[ -z "${GITHUB_REF:-}" ]]; then
-    echo >&2 "Error: GITHUB_REF is not set"
-    exit 1
-fi
-
-if [[ ! "$GITHUB_REF" =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+(-[a-z0-9]+)?$ ]]; then
-    echo >&2 "Error: GITHUB_REF is not a valid tag: $GITHUB_REF"
-    exit 1
-fi
-
-current_tag=${GITHUB_REF#refs/tags/v}
+#!/bin/bash
+
+# check_tag $current_tag $file_tag $file_name
+function check_tag {
+    if [[ "$1" != "$2" ]]; then
+        echo "Error: the current tag does not match the version in Cargo.toml: found $2 - expected $1"
+        ret=1
+    fi
+}
+
 ret=0
+current_tag=${GITHUB_REF#'refs/tags/v'}
 
-toml_tag="$(cat Cargo.toml | read_version)"
-check_tag "$current_tag" "$toml_tag" Cargo.toml || ret=1
+file_tag="$(grep '^version = ' Cargo.toml | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
+check_tag $current_tag $file_tag
 
-lock_tag=$(grep -A 1 '^name = "meilisearch-auth"' Cargo.lock | read_version)
-check_tag "$current_tag" "$lock_tag" Cargo.lock || ret=1
+lock_file='Cargo.lock'
+lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')
+check_tag $current_tag $lock_tag $lock_file
 
-if (( ret == 0 )); then
+if [[ "$ret" -eq 0 ]] ; then
     echo 'OK'
 fi
 exit $ret

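The `-` side of the script validates `GITHUB_REF` before using it, so it can also be exercised outside CI. A minimal sketch, assuming it is run from the repository root (the tag value is just an example):

```bash
# GITHUB_REF is normally injected by GitHub Actions; here we fake it.
# Prints 'OK' and exits 0 when Cargo.toml and Cargo.lock both declare 1.6.0.
GITHUB_REF='refs/tags/v1.6.0' bash .github/scripts/check-release.sh
```
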
.github/workflows/benchmarks-manual.yml (vendored, 2 lines changed)

@@ -74,4 +74,4 @@ jobs:
           echo "${{ steps.file.outputs.basename }}.json has just been pushed."
           echo 'How to compare this benchmark with another one?'
           echo ' - Check the available files with: ./benchmarks/scripts/list.sh'
-          echo " - Run the following command: ./benchmaks/scripts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"
+          echo " - Run the following command: ./benchmaks/scipts/compare.sh <file-to-compare-with> ${{ steps.file.outputs.basename }}.json"

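Note that `benchmaks` (and, on the `+` side, `scipts`) are typos inside the echoed hint only; the `list.sh` line shows the real location under `benchmarks/scripts/`. The suggested flow, with placeholder file names following the `dataset_branch_commitSHA` convention used by these workflows:

```bash
# List the available baseline files:
./benchmarks/scripts/list.sh
# Compare two of them (both names below are placeholders):
./benchmarks/scripts/compare.sh songs_main_abcd1234.json songs_mybranch_ef567890.json
```
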
.github/workflows/benchmarks-pr.yml (vendored, 98 lines changed, file removed on the head side)

@@ -1,98 +0,0 @@
-name: Benchmarks (PR)
-on: issue_comment
-permissions:
-  issues: write
-
-env:
-  GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
-
-jobs:
-  run-benchmarks-on-comment:
-    if: startsWith(github.event.comment.body, '/benchmark')
-    name: Run and upload benchmarks
-    runs-on: benchmarks
-    timeout-minutes: 4320 # 72h
-    steps:
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Check for Command
-        id: command
-        uses: xt0rted/slash-command-action@v2
-        with:
-          command: benchmark
-          reaction-type: "eyes"
-          repo-token: ${{ env.GH_TOKEN }}
-
-      - uses: xt0rted/pull-request-comment-branch@v2
-        id: comment-branch
-        with:
-          repo_token: ${{ env.GH_TOKEN }}
-
-      - uses: actions/checkout@v3
-        if: success()
-        with:
-          fetch-depth: 0 # fetch full history to be able to get main commit sha
-          ref: ${{ steps.comment-branch.outputs.head_ref }}
-
-      # Set variables
-      - name: Set current branch name
-        shell: bash
-        run: echo "name=$(git rev-parse --abbrev-ref HEAD)" >> $GITHUB_OUTPUT
-        id: current_branch
-      - name: Set normalized current branch name # Replace `/` by `_` in branch name to avoid issues when pushing to S3
-        shell: bash
-        run: echo "name=$(git rev-parse --abbrev-ref HEAD | tr '/' '_')" >> $GITHUB_OUTPUT
-        id: normalized_current_branch
-      - name: Set shorter commit SHA
-        shell: bash
-        run: echo "short=$(echo $GITHUB_SHA | cut -c1-8)" >> $GITHUB_OUTPUT
-        id: commit_sha
-      - name: Set file basename with format "dataset_branch_commitSHA"
-        shell: bash
-        run: echo "basename=$(echo ${{ steps.command.outputs.command-arguments }}_${{ steps.normalized_current_branch.outputs.name }}_${{ steps.commit_sha.outputs.short }})" >> $GITHUB_OUTPUT
-        id: file
-
-      # Run benchmarks
-      - name: Run benchmarks - Dataset ${{ steps.command.outputs.command-arguments }} - Branch ${{ steps.current_branch.outputs.name }} - Commit ${{ steps.commit_sha.outputs.short }}
-        run: |
-          cd benchmarks
-          cargo bench --bench ${{ steps.command.outputs.command-arguments }} -- --save-baseline ${{ steps.file.outputs.basename }}
-
-      # Generate critcmp files
-      - name: Install critcmp
-        uses: taiki-e/install-action@v2
-        with:
-          tool: critcmp
-      - name: Export cripcmp file
-        run: |
-          critcmp --export ${{ steps.file.outputs.basename }} > ${{ steps.file.outputs.basename }}.json
-
-      # Upload benchmarks
-      - name: Upload ${{ steps.file.outputs.basename }}.json to DO Spaces # DigitalOcean Spaces = S3
-        uses: BetaHuhn/do-spaces-action@v2
-        with:
-          access_key: ${{ secrets.DO_SPACES_ACCESS_KEY }}
-          secret_key: ${{ secrets.DO_SPACES_SECRET_KEY }}
-          space_name: ${{ secrets.DO_SPACES_SPACE_NAME }}
-          space_region: ${{ secrets.DO_SPACES_SPACE_REGION }}
-          source: ${{ steps.file.outputs.basename }}.json
-          out_dir: critcmp_results
-
-      # Compute the diff of the benchmarks and send a message on the GitHub PR
-      - name: Compute and send a message in the PR
-        env:
-          GITHUB_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
-        run: |
-          set -x
-          export base_ref=$(git merge-base origin/main ${{ steps.comment-branch.outputs.head_ref }} | head -c8)
-          export base_filename=$(echo ${{ steps.command.outputs.command-arguments }}_main_${base_ref}.json)
-          export bench_name=$(echo ${{ steps.command.outputs.command-arguments }})
-          echo "Here are your $bench_name benchmarks diff 👊" >> body.txt
-          echo '```' >> body.txt
-          ./benchmarks/scripts/compare.sh $base_filename ${{ steps.file.outputs.basename }}.json >> body.txt
-          echo '```' >> body.txt
-          gh pr comment ${{ steps.current_branch.outputs.name }} --body-file body.txt

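On the base side this workflow is driven entirely by a PR comment: the slash-command action extracts the dataset name and passes it to `cargo bench`. A hypothetical trigger via the GitHub CLI (the PR number and dataset name are placeholders):

```bash
# Commenting '/benchmark <dataset>' on a PR makes the job run
# `cargo bench --bench <dataset>` on the PR's branch.
gh pr comment 1234 --body '/benchmark songs'
```
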
.github/workflows/dependency-issue.yml (vendored, 4 lines changed)

@@ -2,8 +2,8 @@ name: Create issue to upgrade dependencies
 
 on:
   schedule:
-    # Run the first of the month, every 6 month
-    - cron: '0 0 1 */6 *'
+    # Run the first of the month, every 3 month
+    - cron: '0 0 1 */3 *'
   workflow_dispatch:
 
 jobs:

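For readers less used to cron syntax, the five fields are minute, hour, day-of-month, month, day-of-week:

```bash
# '0 0 1 */6 *' -> 00:00 UTC on the 1st of every 6th month (the - side)
# '0 0 1 */3 *' -> 00:00 UTC on the 1st of every 3rd month (the + side)
```
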
.github/workflows/fuzzer-indexing.yml (vendored, 24 lines changed, file removed on the head side)

@@ -1,24 +0,0 @@
-name: Run the indexing fuzzer
-
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  fuzz:
-    name: Setup the action
-    runs-on: ubuntu-latest
-    timeout-minutes: 4320 # 72h
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      # Run benchmarks
-      - name: Run the fuzzer
-        run: |
-          cargo run --release --bin fuzz-indexing

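The removed job boils down to a single cargo invocation, so the same fuzzer can still be launched by hand from the repository root:

```bash
# Identical to the workflow's run step; expect it to run for a long time.
cargo run --release --bin fuzz-indexing
```
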
.github/workflows/publish-apt-brew-pkg.yml (vendored, 5 lines changed)

@@ -35,7 +35,7 @@ jobs:
       - name: Build deb package
         run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/debian/meilisearch.deb
@@ -50,9 +50,8 @@ jobs:
     needs: check-version
     steps:
       - name: Create PR to Homebrew
-        uses: mislav/bump-homebrew-formula-action@v3
+        uses: mislav/bump-homebrew-formula-action@v2
         with:
           formula-name: meilisearch
-          formula-path: Formula/m/meilisearch.rb
         env:
           COMMITTER_TOKEN: ${{ secrets.HOMEBREW_COMMITTER_TOKEN }}

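A rough sketch of reproducing the deb step locally, assuming the cargo-deb helper the workflow invokes is not yet installed:

```bash
cargo install cargo-deb
# Same invocation as the workflow's 'Build deb package' step:
cargo deb -p meilisearch -o target/debian/meilisearch.deb
```
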
.github/workflows/publish-binaries.yml (vendored, 8 lines changed)

@@ -54,7 +54,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/meilisearch
@@ -87,7 +87,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -121,7 +121,7 @@ jobs:
       - name: Upload the binary to release
        # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -183,7 +183,7 @@ jobs:
       - name: Upload the binary to release
        # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.6.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch

.github/workflows/publish-docker-images.yml (vendored, 10 lines changed)

@@ -57,20 +57,20 @@ jobs:
           echo "date=$commit_date" >> $GITHUB_OUTPUT
 
       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@v2
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v2
 
       - name: Login to Docker Hub
-        uses: docker/login-action@v3
+        uses: docker/login-action@v2
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Docker meta
         id: meta
-        uses: docker/metadata-action@v5
+        uses: docker/metadata-action@v4
         with:
           images: getmeili/meilisearch
           # Prevent `latest` to be updated for each new tag pushed.
@@ -83,7 +83,7 @@ jobs:
             type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }}
 
       - name: Build and push
-        uses: docker/build-push-action@v5
+        uses: docker/build-push-action@v4
         with:
           push: true
          platforms: linux/amd64,linux/arm64

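A rough local equivalent of the Build and push step, under the assumption that a buildx builder is already active; the tag is a placeholder, since in CI the tags come from docker/metadata-action:

```bash
# Assumes a buildx builder is active (`docker buildx create --use` once).
# Add --push to publish the multi-arch manifest to a registry.
docker buildx build --platform linux/amd64,linux/arm64 -t example/meilisearch:local .
```
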
.github/workflows/sdks-tests.yml (vendored, 387 lines changed)

@@ -14,61 +14,15 @@ on:
 env:
   MEILI_MASTER_KEY: 'masterKey'
   MEILI_NO_ANALYTICS: 'true'
-  DISABLE_COVERAGE: 'true'
 
 jobs:
-  define-docker-image:
-    runs-on: ubuntu-latest
-    outputs:
-      docker-image: ${{ steps.define-image.outputs.docker-image }}
-    steps:
-      - uses: actions/checkout@v3
-      - name: Define the Docker image we need to use
-        id: define-image
-        run: |
-          event=${{ github.event_name }}
-          echo "docker-image=nightly" >> $GITHUB_OUTPUT
-          if [[ $event == 'workflow_dispatch' ]]; then
-            echo "docker-image=${{ github.event.inputs.docker_image }}" >> $GITHUB_OUTPUT
-          fi
-      - name: Docker image is ${{ steps.define-image.outputs.docker-image }}
-        run: echo "Docker image is ${{ steps.define-image.outputs.docker-image }}"
 
-  ##########
-  ## SDKs ##
-  ##########
-
-  meilisearch-dotnet-tests:
-    needs: define-docker-image
-    name: .NET SDK tests
-    runs-on: ubuntu-latest
-    env:
-      MEILISEARCH_VERSION: ${{ needs.define-docker-image.outputs.docker-image }}
-
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-dotnet
-      - name: Setup .NET Core
-        uses: actions/setup-dotnet@v3
-        with:
-          dotnet-version: "6.0.x"
-      - name: Install dependencies
-        run: dotnet restore
-      - name: Build
-        run: dotnet build --configuration Release --no-restore
-      - name: Meilisearch (latest version) setup with Docker
-        run: docker compose up -d
-      - name: Run tests
-        run: dotnet test --no-restore --verbosity normal
-
-  meilisearch-dart-tests:
-    needs: define-docker-image
-    name: Dart SDK tests
+  meilisearch-js-tests:
+    name: JS SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -77,22 +31,112 @@ jobs:
     steps:
       - uses: actions/checkout@v3
         with:
-          repository: meilisearch/meilisearch-dart
-      - uses: dart-lang/setup-dart@v1
+          repository: meilisearch/meilisearch-js
+      - name: Setup node
+        uses: actions/setup-node@v3
         with:
-          sdk: 3.1.1
+          cache: 'yarn'
       - name: Install dependencies
-        run: dart pub get
-      - name: Run integration tests
-        run: dart test --concurrency=4
+        run: yarn --dev
+      - name: Run tests
+        run: yarn test
+      - name: Build project
+        run: yarn build
+      - name: Run ESM env
+        run: yarn test:env:esm
+      - name: Run Node.js env
+        run: yarn test:env:nodejs
+      - name: Run node typescript env
+        run: yarn test:env:node-ts
+      - name: Run Browser env
+        run: yarn test:env:browser
+
+  instant-meilisearch-tests:
+    name: instant-meilisearch tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/instant-meilisearch
+      - name: Setup node
+        uses: actions/setup-node@v3
+        with:
+          cache: yarn
+      - name: Install dependencies
+        run: yarn install
+      - name: Run tests
+        run: yarn test
+      - name: Build all the playgrounds and the packages
+        run: yarn build
+
+  meilisearch-php-tests:
+    name: PHP SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-php
+      - name: Install PHP
+        uses: shivammathur/setup-php@v2
+        with:
+          coverage: none
+      - name: Validate composer.json and composer.lock
+        run: composer validate
+      - name: Install dependencies
+        run: |
+          composer remove --dev friendsofphp/php-cs-fixer --no-update --no-interaction
+          composer update --prefer-dist --no-progress
+      - name: Run test suite - default HTTP client (Guzzle 7)
+        run: |
+          sh scripts/tests.sh
+          composer remove --dev guzzlehttp/guzzle http-interop/http-factory-guzzle
+
+  meilisearch-python-tests:
+    name: Python SDK tests
+    runs-on: ubuntu-latest
+    services:
+      meilisearch:
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
+        env:
+          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
+          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
+        ports:
+          - '7700:7700'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          repository: meilisearch/meilisearch-python
+      - name: Set up Python
+        uses: actions/setup-python@v4
+      - name: Install pipenv
+        uses: dschep/install-pipenv-action@v1
+      - name: Install dependencies
+        run: pipenv install --dev --python=${{ matrix.python-version }}
+      - name: Test with pytest
+        run: pipenv run pytest
 
   meilisearch-go-tests:
-    needs: define-docker-image
     name: Go SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -116,129 +160,12 @@ jobs:
       - name: Run integration tests
         run: go test -v ./...
 
-  meilisearch-java-tests:
-    needs: define-docker-image
-    name: Java SDK tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-java
-      - name: Set up Java
-        uses: actions/setup-java@v3
-        with:
-          java-version: 8
-          distribution: 'zulu'
-          cache: gradle
-      - name: Grant execute permission for gradlew
-        run: chmod +x gradlew
-      - name: Build and run unit and integration tests
-        run: ./gradlew build integrationTest
-
-  meilisearch-js-tests:
-    needs: define-docker-image
-    name: JS SDK tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-js
-      - name: Setup node
-        uses: actions/setup-node@v4
-        with:
-          cache: 'yarn'
-      - name: Install dependencies
-        run: yarn --dev
-      - name: Run tests
-        run: yarn test
-      - name: Build project
-        run: yarn build
-      - name: Run ESM env
-        run: yarn test:env:esm
-      - name: Run Node.js env
-        run: yarn test:env:nodejs
-      - name: Run node typescript env
-        run: yarn test:env:node-ts
-      - name: Run Browser env
-        run: yarn test:env:browser
-
-  meilisearch-php-tests:
-    needs: define-docker-image
-    name: PHP SDK tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-php
-      - name: Install PHP
-        uses: shivammathur/setup-php@v2
-      - name: Validate composer.json and composer.lock
-        run: composer validate
-      - name: Install dependencies
-        run: |
-          composer remove --dev friendsofphp/php-cs-fixer --no-update --no-interaction
-          composer update --prefer-dist --no-progress
-      - name: Run test suite - default HTTP client (Guzzle 7)
-        run: |
-          sh scripts/tests.sh
-          composer remove --dev guzzlehttp/guzzle http-interop/http-factory-guzzle
-
-  meilisearch-python-tests:
-    needs: define-docker-image
-    name: Python SDK tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-python
-      - name: Set up Python
-        uses: actions/setup-python@v4
-      - name: Install pipenv
-        uses: dschep/install-pipenv-action@v1
-      - name: Install dependencies
-        run: pipenv install --dev --python=${{ matrix.python-version }}
-      - name: Test with pytest
-        run: pipenv run pytest
-
   meilisearch-ruby-tests:
-    needs: define-docker-image
     name: Ruby SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
         env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -258,12 +185,11 @@ jobs:
       run: bundle exec rspec
 
   meilisearch-rust-tests:
-    needs: define-docker-image
     name: Rust SDK tests
     runs-on: ubuntu-latest
     services:
       meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
+        image: getmeili/meilisearch:${{ github.event.inputs.docker_image }}
        env:
           MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
           MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
@@ -277,110 +203,3 @@ jobs:
         run: cargo build --verbose
       - name: Run tests
         run: cargo test --verbose
-
-  meilisearch-swift-tests:
-    needs: define-docker-image
-    name: Swift SDK tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-swift
-      - name: Run tests
-        run: swift test
-
-  ########################
-  ## FRONT-END PLUGINS ##
-  ########################
-
-  meilisearch-js-plugins-tests:
-    needs: define-docker-image
-    name: meilisearch-js-plugins tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-js-plugins
-      - name: Setup node
-        uses: actions/setup-node@v4
-        with:
-          cache: yarn
-      - name: Install dependencies
-        run: yarn install
-      - name: Run tests
-        run: yarn test
-      - name: Build all the playgrounds and the packages
-        run: yarn build
-
-  ########################
-  ## BACK-END PLUGINS ###
-  ########################
-
-  meilisearch-rails-tests:
-    needs: define-docker-image
-    name: meilisearch-rails tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-rails
-      - name: Set up Ruby 3
-        uses: ruby/setup-ruby@v1
-        with:
-          ruby-version: 3
-          bundler-cache: true
-      - name: Run tests
-        run: bundle exec rspec
-
-  meilisearch-symfony-tests:
-    needs: define-docker-image
-    name: meilisearch-symfony tests
-    runs-on: ubuntu-latest
-    services:
-      meilisearch:
-        image: getmeili/meilisearch:${{ needs.define-docker-image.outputs.docker-image }}
-        env:
-          MEILI_MASTER_KEY: ${{ env.MEILI_MASTER_KEY }}
-          MEILI_NO_ANALYTICS: ${{ env.MEILI_NO_ANALYTICS }}
-        ports:
-          - '7700:7700'
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: meilisearch/meilisearch-symfony
-      - name: Install PHP
-        uses: shivammathur/setup-php@v2
-        with:
-          tools: composer:v2, flex
-      - name: Validate composer.json and composer.lock
-        run: composer validate
-      - name: Install dependencies
-        run: composer install --prefer-dist --no-progress --quiet
-      - name: Remove doctrine/annotations
-        run: composer remove --dev doctrine/annotations
-      - name: Run test suite
-        run: composer test:unit

.github/workflows/test-suite.yml
vendored
29
.github/workflows/test-suite.yml
vendored
@@ -30,20 +30,20 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
apt-get update && apt-get install -y curl
|
apt-get update && apt-get install -y curl
|
||||||
apt-get install build-essential -y
|
apt-get install build-essential -y
|
||||||
- name: Setup test with Rust stable
|
- name: Run test with Rust stable
|
||||||
if: github.event_name != 'schedule'
|
if: github.event_name != 'schedule'
|
||||||
uses: actions-rs/toolchain@v1
|
uses: actions-rs/toolchain@v1
|
||||||
with:
|
with:
|
||||||
toolchain: stable
|
toolchain: stable
|
||||||
override: true
|
override: true
|
||||||
- name: Setup test with Rust nightly
|
- name: Run test with Rust nightly
|
||||||
if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
|
if: github.event_name == 'schedule'
|
||||||
uses: actions-rs/toolchain@v1
|
uses: actions-rs/toolchain@v1
|
||||||
with:
|
with:
|
||||||
toolchain: nightly
|
toolchain: nightly
|
||||||
override: true
|
override: true
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: Swatinem/rust-cache@v2.7.1
|
uses: Swatinem/rust-cache@v2.4.0
|
||||||
- name: Run cargo check without any default features
|
- name: Run cargo check without any default features
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
@@ -65,7 +65,7 @@ jobs:
|
|||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: Swatinem/rust-cache@v2.7.1
|
uses: Swatinem/rust-cache@v2.4.0
|
||||||
- name: Run cargo check without any default features
|
- name: Run cargo check without any default features
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
@@ -78,12 +78,12 @@ jobs:
|
|||||||
args: --locked --release --all
|
args: --locked --release --all
|
||||||
|
|
||||||
test-all-features:
|
test-all-features:
|
||||||
name: Tests all features
|
name: Tests all features on cron schedule only
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container:
|
container:
|
||||||
# Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
|
# Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
|
||||||
image: ubuntu:18.04
|
image: ubuntu:18.04
|
||||||
if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
|
if: github.event_name == 'schedule'
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- name: Install needed dependencies
|
- name: Install needed dependencies
|
||||||
@@ -110,7 +110,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
container:
|
container:
|
||||||
image: ubuntu:18.04
|
image: ubuntu:18.04
|
||||||
if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
|
if: github.event_name == 'schedule'
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v3
|
||||||
- name: Install needed dependencies
|
- name: Install needed dependencies
|
||||||
@@ -123,10 +123,7 @@ jobs:
|
|||||||
override: true
|
override: true
|
||||||
- name: Run cargo tree without default features and check lindera is not present
|
- name: Run cargo tree without default features and check lindera is not present
|
||||||
run: |
|
run: |
|
||||||
if cargo tree -f '{p} {f}' -e normal --no-default-features | grep -vqz lindera; then
|
cargo tree -f '{p} {f}' -e normal --no-default-features | grep lindera -vqz
|
||||||
echo "lindera has been found in the sources and it shouldn't"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
- name: Run cargo tree with default features and check lindera is pressent
|
- name: Run cargo tree with default features and check lindera is pressent
|
||||||
run: |
|
run: |
|
||||||
cargo tree -f '{p} {f}' -e normal | grep lindera -qz
|
cargo tree -f '{p} {f}' -e normal | grep lindera -qz
|
||||||
@@ -149,7 +146,7 @@ jobs:
|
|||||||
toolchain: stable
|
toolchain: stable
|
||||||
override: true
|
override: true
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: Swatinem/rust-cache@v2.7.1
|
uses: Swatinem/rust-cache@v2.4.0
|
||||||
- name: Run tests in debug
|
- name: Run tests in debug
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
@@ -164,11 +161,11 @@ jobs:
|
|||||||
- uses: actions-rs/toolchain@v1
|
- uses: actions-rs/toolchain@v1
|
||||||
with:
|
with:
|
||||||
profile: minimal
|
profile: minimal
|
||||||
toolchain: 1.71.1
|
toolchain: 1.69.0
|
||||||
override: true
|
override: true
|
||||||
components: clippy
|
components: clippy
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: Swatinem/rust-cache@v2.7.1
|
uses: Swatinem/rust-cache@v2.4.0
|
||||||
- name: Run cargo clippy
|
- name: Run cargo clippy
|
||||||
uses: actions-rs/cargo@v1
|
uses: actions-rs/cargo@v1
|
||||||
with:
|
with:
|
||||||
@@ -187,7 +184,7 @@ jobs:
|
|||||||
override: true
|
override: true
|
||||||
components: rustfmt
|
components: rustfmt
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: Swatinem/rust-cache@v2.7.1
|
uses: Swatinem/rust-cache@v2.4.0
|
||||||
- name: Run cargo fmt
|
- name: Run cargo fmt
|
||||||
# Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
|
# Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
|
||||||
# Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
|
# Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
|
||||||
|
|||||||
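Both lindera checks pipe `cargo tree` into grep with `-q` (quiet) and `-z` (NUL-separated records). When investigating a failure, the same pipeline can be run by hand without those flags so the matches are visible:

```bash
# Prints any dependency line mentioning lindera; exits 1 if there is none.
cargo tree -f '{p} {f}' -e normal --no-default-features | grep lindera
```
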
Cargo.lock (generated, 3137 lines changed)
File diff suppressed because it is too large.

Cargo.toml

@@ -2,7 +2,6 @@
 resolver = "2"
 members = [
     "meilisearch",
-    "meilitool",
     "meilisearch-types",
     "meilisearch-auth",
     "meili-snap",
@@ -14,12 +13,11 @@ members = [
     "filter-parser",
     "flatten-serde-json",
     "json-depth-checker",
-    "benchmarks",
-    "fuzzers",
+    "benchmarks"
 ]
 
 [workspace.package]
-version = "1.6.0"
+version = "1.2.0"
 authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"

Dockerfile (13 lines changed)

@@ -1,9 +1,9 @@
 # Compile
-FROM rust:1.71.1-alpine3.18 AS compiler
+FROM rust:alpine3.16 AS compiler
 
 RUN apk add -q --update-cache --no-cache build-base openssl-dev
 
-WORKDIR /
+WORKDIR /meilisearch
 
 ARG COMMIT_SHA
 ARG COMMIT_DATE
@@ -17,7 +17,7 @@ RUN set -eux; \
     if [ "$apkArch" = "aarch64" ]; then \
         export JEMALLOC_SYS_WITH_LG_PAGE=16; \
     fi && \
-    cargo build --release -p meilisearch -p meilitool
+    cargo build --release
 
 # Run
 FROM alpine:3.16
@@ -28,10 +28,9 @@ ENV MEILI_SERVER_PROVIDER docker
 RUN apk update --quiet \
     && apk add -q --no-cache libgcc tini curl
 
-# add meilisearch and meilitool to the `/bin` so you can run it from anywhere
-# and it's easy to find.
-COPY --from=compiler /target/release/meilisearch /bin/meilisearch
-COPY --from=compiler /target/release/meilitool /bin/meilitool
+# add meilisearch to the `/bin` so you can run it from anywhere and it's easy
+# to find.
+COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch
 # To stay compatible with the older version of the container (pre v0.27.0) we're
 # going to symlink the meilisearch binary in the path to `/meilisearch`
 RUN ln -s /bin/meilisearch /meilisearch

PROFILING.md
19
PROFILING.md
@@ -1,19 +0,0 @@
|
|||||||
# Profiling Meilisearch
|
|
||||||
|
|
||||||
Search engine technologies are complex pieces of software that require thorough profiling tools. We chose to use [Puffin](https://github.com/EmbarkStudios/puffin), which the Rust gaming industry uses extensively. You can export and import the profiling reports using the top bar's _File_ menu options [in Puffin Viewer](https://github.com/embarkstudios/puffin#ui).
|
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
## Profiling the Indexing Process
|
|
||||||
|
|
||||||
When you enable [the `exportPuffinReports` experimental feature](https://www.meilisearch.com/docs/learn/experimental/overview) of Meilisearch, Puffin reports with the `.puffin` extension will be automatically exported to disk. When this option is enabled, the engine will automatically create a "frame" whenever it executes the `IndexScheduler::tick` method.
|
|
||||||
|
|
||||||
[Puffin Viewer](https://github.com/EmbarkStudios/puffin/tree/main/puffin_viewer) is used to analyze the reports. Those reports show areas where Meilisearch spent time during indexing.
|
|
||||||
|
|
||||||
Another piece of advice on the Puffin viewer UI interface is to consider the _Merge children with same ID_ option. It can hide the exact actual timings at which events were sent. Please turn it off when you see strange gaps on the Flamegraph. It can help.
|
|
||||||
|
|
||||||
## Profiling the Search Process
|
|
||||||
|
|
||||||
We still need to take the time to profile the search side of the engine with Puffin. It would require time to profile the filtering phase, query parsing, creation, and execution. We could even profile the Actix HTTP server.
|
|
||||||
|
|
||||||
The only issue we see is the framing system. Puffin requires a global frame-based profiling phase, which collides with Meilisearch's ability to accept and answer multiple requests on different threads simultaneously.
|
|
||||||
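The removed guide refers to the `exportPuffinReports` experimental feature. A sketch of how it would be toggled, assuming a local instance protected by `masterKey` and the usual experimental-features route (this command is not taken from the deleted file):

```bash
curl -X PATCH 'http://localhost:7700/experimental-features' \
    -H 'Authorization: Bearer masterKey' \
    -H 'Content-Type: application/json' \
    --data '{"exportPuffinReports": true}'
```
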
README.md (71 lines changed)

@@ -1,20 +1,15 @@
 <p align="center">
-  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=logo#gh-light-mode-only" target="_blank">
-    <img src="assets/meilisearch-logo-light.svg?sanitize=true#gh-light-mode-only">
-  </a>
-  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=logo#gh-dark-mode-only" target="_blank">
-    <img src="assets/meilisearch-logo-dark.svg?sanitize=true#gh-dark-mode-only">
-  </a>
+  <img src="assets/meilisearch-logo-light.svg?sanitize=true#gh-light-mode-only">
+  <img src="assets/meilisearch-logo-dark.svg?sanitize=true#gh-dark-mode-only">
 </p>
 
 <h4 align="center">
-  <a href="https://www.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Website</a> |
+  <a href="https://www.meilisearch.com">Website</a> |
   <a href="https://roadmap.meilisearch.com/tabs/1-under-consideration">Roadmap</a> |
-  <a href="https://www.meilisearch.com/pricing?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Meilisearch Cloud</a> |
-  <a href="https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Blog</a> |
-  <a href="https://www.meilisearch.com/docs?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Documentation</a> |
-  <a href="https://www.meilisearch.com/docs/faq?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">FAQ</a> |
-  <a href="https://discord.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=nav">Discord</a>
+  <a href="https://blog.meilisearch.com">Blog</a> |
+  <a href="https://www.meilisearch.com/docs">Documentation</a> |
+  <a href="https://www.meilisearch.com/docs/faq">FAQ</a> |
+  <a href="https://discord.meilisearch.com">Discord</a>
 </h4>
 
 <p align="center">
@@ -28,72 +23,72 @@
 Meilisearch helps you shape a delightful search experience in a snap, offering features that work out-of-the-box to speed up your workflow.
 
 <p align="center" name="demo">
-  <a href="https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-gif#gh-light-mode-only" target="_blank">
+  <a href="https://where2watch.meilisearch.com/#gh-light-mode-only" target="_blank">
     <img src="assets/demo-light.gif#gh-light-mode-only" alt="A bright colored application for finding movies screening near the user">
   </a>
-  <a href="https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-gif#gh-dark-mode-only" target="_blank">
+  <a href="https://where2watch.meilisearch.com/#gh-dark-mode-only" target="_blank">
     <img src="assets/demo-dark.gif#gh-dark-mode-only" alt="A dark colored application for finding movies screening near the user">
   </a>
 </p>
 
-🔥 [**Try it!**](https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demo-link) 🔥
+🔥 [**Try it!**](https://where2watch.meilisearch.com/) 🔥
 
 ## ✨ Features
 
 - **Search-as-you-type:** find search results in less than 50 milliseconds
-- **[Typo tolerance](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
+- **[Typo tolerance](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
-- **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
+- **[Filtering](https://www.meilisearch.com/docs/learn/advanced/filtering) and [faceted search](https://www.meilisearch.com/docs/learn/advanced/faceted_search):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
-- **[Sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** sort results based on price, date, or pretty much anything else your users need
+- **[Sorting](https://www.meilisearch.com/docs/learn/advanced/sorting):** sort results based on price, date, or pretty much anything else your users need
-- **[Synonym support](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features#synonyms):** configure synonyms to include more relevant content in your search results
+- **[Synonym support](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy#synonyms):** configure synonyms to include more relevant content in your search results
-- **[Geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** filter and sort documents based on geographic data
+- **[Geosearch](https://www.meilisearch.com/docs/learn/advanced/geosearch):** filter and sort documents based on geographic data
-- **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
+- **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
-- **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** control which users can access what data with API keys that allow fine-grained permissions handling
+- **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys):** control which users can access what data with API keys that allow fine-grained permissions handling
-- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** personalize search results for any number of application tenants
+- **[Multi-Tenancy](https://www.meilisearch.com/docs/learn/security/tenant_tokens):** personalize search results for any number of application tenants
 - **Highly Customizable:** customize Meilisearch to your specific needs or use our out-of-the-box and hassle-free presets
-- **[RESTful API](https://www.meilisearch.com/docs/reference/api/overview?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** integrate Meilisearch in your technical stack with our plugins and SDKs
+- **[RESTful API](https://www.meilisearch.com/docs/reference/api/overview):** integrate Meilisearch in your technical stack with our plugins and SDKs
 - **Easy to install, deploy, and maintain**
 
 ## 📖 Documentation
 
-You can consult Meilisearch's documentation at [https://www.meilisearch.com/docs](https://www.meilisearch.com/docs/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=docs).
+You can consult Meilisearch's documentation at [https://www.meilisearch.com/docs](https://www.meilisearch.com/docs/).
 
 ## 🚀 Getting started
 
-For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://www.meilisearch.com/docs/learn/getting_started/quick_start?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) guide.
+For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://www.meilisearch.com/docs/learn/getting_started/quick_start) guide.
 
-You may also want to check out [Meilisearch 101](https://www.meilisearch.com/docs/learn/getting_started/filtering_and_sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) for an introduction to some of Meilisearch's most popular features.
+You may also want to check out [Meilisearch 101](https://www.meilisearch.com/docs/learn/getting_started/filtering_and_sorting) for an introduction to some of Meilisearch's most popular features.
 
-## ⚡ Supercharge your Meilisearch experience
+## ☁️ Meilisearch cloud
 
-Say goodbye to server deployment and manual updates with [Meilisearch Cloud](https://www.meilisearch.com/cloud?utm_campaign=oss&utm_source=github&utm_medium=meilisearch). No credit card required.
+Let us manage your infrastructure so you can focus on integrating a great search experience. Try [Meilisearch Cloud](https://meilisearch.com/pricing) today.
 
 ## 🧰 SDKs & integration tools
 
 Install one of our SDKs in your project for seamless integration between Meilisearch and your favorite language or framework!
 
-Take a look at the complete [Meilisearch integration list](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=sdks-link).
+Take a look at the complete [Meilisearch integration list](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks).
 
-[](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=sdks-logos)
+[](https://www.meilisearch.com/docs/learn/what_is_meilisearch/sdks)
 
 ## ⚙️ Advanced usage
 
-Experienced users will want to keep our [API Reference](https://www.meilisearch.com/docs/reference/api/overview?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced) close at hand.
+Experienced users will want to keep our [API Reference](https://www.meilisearch.com/docs/reference/api/overview) close at hand.
 
-We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), [sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), [geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), [API keys](https://www.meilisearch.com/docs/learn/security/master_api_keys?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced), and [tenant tokens](https://www.meilisearch.com/docs/learn/security/tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced).
+We also offer a wide range of dedicated guides to all Meilisearch features, such as [filtering](https://www.meilisearch.com/docs/learn/advanced/filtering), [sorting](https://www.meilisearch.com/docs/learn/advanced/sorting), [geosearch](https://www.meilisearch.com/docs/learn/advanced/geosearch), [API keys](https://www.meilisearch.com/docs/learn/security/master_api_keys), and [tenant tokens](https://www.meilisearch.com/docs/learn/security/tenant_tokens).
 
-Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced) and [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=advanced).
+Finally, for more in-depth information, refer to our articles explaining fundamental Meilisearch concepts such as [documents](https://www.meilisearch.com/docs/learn/core_concepts/documents) and [indexes](https://www.meilisearch.com/docs/learn/core_concepts/indexes).
 
 ## 📊 Telemetry
 
-Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) whenever you want.
+Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry#how-to-disable-data-collection) whenever you want.
 
 To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.
 
-If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) of our documentation.
+If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry) of our documentation.
 
 ## 📫 Get in touch!
 
-Meilisearch is a search engine created by [Meili](https://www.welcometothejungle.com/en/companies/meilisearch), a software development company based in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=contact)
+Meilisearch is a search engine created by [Meili](https://www.welcometothejungle.com/en/companies/meilisearch), a software development company based in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/)

🗞 [Subscribe to our newsletter](https://meilisearch.us2.list-manage.com/subscribe?u=27870f7b71c908a8b359599fb&id=79582d828e) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
|
🗞 [Subscribe to our newsletter](https://meilisearch.us2.list-manage.com/subscribe?u=27870f7b71c908a8b359599fb&id=79582d828e) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.
|
||||||
|
|
||||||
|
|||||||
@@ -98,7 +98,7 @@
"showThresholdMarkers": true,
"text": {}
},
-"pluginVersion": "10.0.1",
+"pluginVersion": "9.5.2",
"targets": [
{
"datasource": {
@@ -158,7 +158,7 @@
"showThresholdMarkers": true,
"text": {}
},
-"pluginVersion": "10.0.1",
+"pluginVersion": "9.5.2",
"targets": [
{
"datasource": {
@@ -176,7 +176,8 @@
},
{
"datasource": {
-"type": "prometheus"
+"type": "prometheus",
+"uid": "c4085c47-f6d3-45dd-b761-6809055bb749"
},
"fieldConfig": {
"defaults": {
@@ -220,7 +221,7 @@
"showThresholdMarkers": true,
"text": {}
},
-"pluginVersion": "10.0.1",
+"pluginVersion": "9.5.2",
"targets": [
{
"datasource": {
@@ -240,7 +241,8 @@
},
{
"datasource": {
-"type": "prometheus"
+"type": "prometheus",
+"uid": "c4085c47-f6d3-45dd-b761-6809055bb749"
},
"fieldConfig": {
"defaults": {
@@ -280,7 +282,7 @@
"showThresholdMarkers": true,
"text": {}
},
-"pluginVersion": "10.0.1",
+"pluginVersion": "9.5.2",
"targets": [
{
"datasource": {
@@ -300,7 +302,8 @@
},
{
"datasource": {
-"type": "prometheus"
+"type": "prometheus",
+"uid": "c4085c47-f6d3-45dd-b761-6809055bb749"
},
"fieldConfig": {
"defaults": {
@@ -340,7 +343,7 @@
"showThresholdMarkers": true,
"text": {}
},
-"pluginVersion": "10.0.1",
+"pluginVersion": "9.5.2",
"targets": [
{
"datasource": {
@@ -360,7 +363,8 @@
},
{
"datasource": {
-"type": "prometheus"
+"type": "prometheus",
+"uid": "c4085c47-f6d3-45dd-b761-6809055bb749"
},
"description": "",
"fieldConfig": {
@@ -407,7 +411,8 @@
"mode": "absolute",
"steps": [
{
-"color": "green"
+"color": "green",
+"value": null
},
{
"color": "red",
@@ -455,7 +460,8 @@
},
{
"datasource": {
-"type": "prometheus"
+"type": "prometheus",
+"uid": "c4085c47-f6d3-45dd-b761-6809055bb749"
},
"editorMode": "builder",
"expr": "meilisearch_used_db_size_bytes{job=\"meilisearch\", instance=\"$instance\"}",
@@ -553,7 +559,7 @@
},
"editorMode": "builder",
"exemplar": true,
-"expr": "rate(meilisearch_http_response_time_seconds_sum{instance=\"$instance\", job=\"meilisearch\"}[5m]) / rate(meilisearch_http_response_time_seconds_count[5m])",
+"expr": "rate(http_response_time_seconds_sum{instance=\"$instance\", job=\"meilisearch\"}[5m]) / rate(http_response_time_seconds_count[5m])",
"interval": "",
"legendFormat": "{{method}} {{path}}",
"range": true,
@@ -565,7 +571,8 @@
},
{
"datasource": {
-"type": "prometheus"
+"type": "prometheus",
+"uid": "c4085c47-f6d3-45dd-b761-6809055bb749"
},
"fieldConfig": {
"defaults": {
@@ -608,7 +615,8 @@
"mode": "absolute",
"steps": [
{
-"color": "green"
+"color": "green",
+"value": null
},
{
"color": "red",
@@ -735,7 +743,7 @@
"unit": "s"
}
},
-"pluginVersion": "10.0.1",
+"pluginVersion": "9.5.2",
"reverseYBuckets": false,
"targets": [
{
@@ -744,7 +752,7 @@
},
"editorMode": "builder",
"exemplar": true,
-"expr": "sum by(le) (increase(meilisearch_http_response_time_seconds_bucket{path=\"/indexes/$Index/search\", instance=\"$instance\", job=\"meilisearch\"}[30s]))",
+"expr": "sum by(le) (increase(http_response_time_seconds_bucket{path=\"/indexes/$Index/search\", instance=\"$instance\", job=\"meilisearch\"}[30s]))",
"format": "heatmap",
"interval": "",
"legendFormat": "{{le}}",
@@ -1298,7 +1306,8 @@
"value": "localhost:7700"
},
"datasource": {
-"type": "prometheus"
+"type": "prometheus",
+"uid": "bb3298a4-9acf-4da1-b86a-813f29f50888"
},
"definition": "label_values(instance)",
"hide": 0,
@@ -1320,11 +1329,12 @@
{
"current": {
"selected": false,
-"text": "index-word-count-10-count",
-"value": "index-word-count-10-count"
+"text": "mieli",
+"value": "mieli"
},
"datasource": {
-"type": "prometheus"
+"type": "prometheus",
+"uid": "bb3298a4-9acf-4da1-b86a-813f29f50888"
},
"definition": "label_values(index)",
"hide": 0,
@@ -1361,6 +1371,6 @@
"timezone": "",
"title": "Meilisearch",
"uid": "7wcZ94dnz",
-"version": 5,
+"version": 6,
"weekStart": ""
}

Binary file not shown (image removed; it was 1.2 MiB before).
@@ -14,11 +14,11 @@ license.workspace = true
anyhow = "1.0.70"
csv = "1.2.1"
milli = { path = "../milli" }
-mimalloc = { version = "0.1.37", default-features = false }
+mimalloc = { version = "0.1.36", default-features = false }
serde_json = { version = "1.0.95", features = ["preserve_order"] }

[dev-dependencies]
-criterion = { version = "0.5.1", features = ["html_reports"] }
+criterion = { version = "0.4.0", features = ["html_reports"] }
rand = "0.8.5"
rand_chacha = "0.3.1"
roaring = "0.10.1"
@@ -6,7 +6,9 @@ use std::path::Path;

use criterion::{criterion_group, criterion_main, Criterion};
use milli::heed::{EnvOpenOptions, RwTxn};
-use milli::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings};
+use milli::update::{
+    DeleteDocuments, IndexDocuments, IndexDocumentsConfig, IndexerConfig, Settings,
+};
use milli::Index;
use rand::seq::SliceRandom;
use rand_chacha::rand_core::SeedableRng;
@@ -36,7 +38,7 @@ fn setup_index() -> Index {
}

fn setup_settings<'t>(
-    wtxn: &mut RwTxn<'t>,
+    wtxn: &mut RwTxn<'t, '_>,
    index: &'t Index,
    primary_key: &str,
    searchable_fields: &[&str],
@@ -264,7 +266,17 @@ fn deleting_songs_in_batches_default(c: &mut Criterion) {
                (index, document_ids_to_delete)
            },
            move |(index, document_ids_to_delete)| {
-                delete_documents_from_ids(index, document_ids_to_delete)
+                let mut wtxn = index.write_txn().unwrap();
+
+                for ids in document_ids_to_delete {
+                    let mut builder = DeleteDocuments::new(&mut wtxn, &index).unwrap();
+                    builder.delete_documents(&ids);
+                    builder.execute().unwrap();
+                }
+
+                wtxn.commit().unwrap();
+
+                index.prepare_for_closing().wait();
            },
        )
    });
@@ -601,7 +613,17 @@ fn deleting_wiki_in_batches_default(c: &mut Criterion) {
                (index, document_ids_to_delete)
            },
            move |(index, document_ids_to_delete)| {
-                delete_documents_from_ids(index, document_ids_to_delete)
+                let mut wtxn = index.write_txn().unwrap();
+
+                for ids in document_ids_to_delete {
+                    let mut builder = DeleteDocuments::new(&mut wtxn, &index).unwrap();
+                    builder.delete_documents(&ids);
+                    builder.execute().unwrap();
+                }
+
+                wtxn.commit().unwrap();
+
+                index.prepare_for_closing().wait();
            },
        )
    });
@@ -853,31 +875,22 @@ fn deleting_movies_in_batches_default(c: &mut Criterion) {
                (index, document_ids_to_delete)
            },
            move |(index, document_ids_to_delete)| {
-                delete_documents_from_ids(index, document_ids_to_delete)
+                let mut wtxn = index.write_txn().unwrap();
+
+                for ids in document_ids_to_delete {
+                    let mut builder = DeleteDocuments::new(&mut wtxn, &index).unwrap();
+                    builder.delete_documents(&ids);
+                    builder.execute().unwrap();
+                }
+
+                wtxn.commit().unwrap();
+
+                index.prepare_for_closing().wait();
            },
        )
    });
}

-fn delete_documents_from_ids(index: Index, document_ids_to_delete: Vec<RoaringBitmap>) {
-    let mut wtxn = index.write_txn().unwrap();
-
-    let indexer_config = IndexerConfig::default();
-    for ids in document_ids_to_delete {
-        let config = IndexDocumentsConfig::default();
-
-        let mut builder =
-            IndexDocuments::new(&mut wtxn, &index, &indexer_config, config, |_| (), || false)
-                .unwrap();
-        (builder, _) = builder.remove_documents_from_db_no_batch(&ids).unwrap();
-        builder.execute().unwrap();
-    }
-
-    wtxn.commit().unwrap();
-
-    index.prepare_for_closing().wait();
-}
-
fn indexing_movies_in_three_batches(c: &mut Criterion) {
    let mut group = c.benchmark_group("indexing");
    group.sample_size(BENCHMARK_ITERATION);
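These deletion benchmarks pair an untimed setup closure returning `(index, document_ids_to_delete)` with a timed routine that consumes it — the `-` side calls the shared `delete_documents_from_ids` helper while the `+` side inlines the transaction loop — which is the shape of Criterion's batched iteration. Below is a minimal, self-contained sketch of that measurement pattern; the `Vec`-based workload is a stand-in, since building a real milli `Index` is out of scope for an illustration.

```rust
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};

fn deleting_in_batches(c: &mut Criterion) {
    c.bench_function("deleting-in-batches", |b| {
        b.iter_batched(
            // Untimed setup: rebuild the data to delete from on every batch.
            || (0..10_000u32).collect::<Vec<u32>>(),
            // Timed routine: the deletion itself, analogous to the
            // write-transaction loop in the hunks above.
            |mut ids| {
                ids.retain(|id| id % 2 == 0);
                ids
            },
            BatchSize::LargeInput,
        )
    });
}

criterion_group!(benches, deleting_in_batches);
criterion_main!(benches);
```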
@@ -1099,7 +1112,17 @@ fn deleting_nested_movies_in_batches_default(c: &mut Criterion) {
                (index, document_ids_to_delete)
            },
            move |(index, document_ids_to_delete)| {
-                delete_documents_from_ids(index, document_ids_to_delete)
+                let mut wtxn = index.write_txn().unwrap();
+
+                for ids in document_ids_to_delete {
+                    let mut builder = DeleteDocuments::new(&mut wtxn, &index).unwrap();
+                    builder.delete_documents(&ids);
+                    builder.execute().unwrap();
+                }
+
+                wtxn.commit().unwrap();
+
+                index.prepare_for_closing().wait();
            },
        )
    });
@@ -1315,7 +1338,17 @@ fn deleting_geo_in_batches_default(c: &mut Criterion) {
                (index, document_ids_to_delete)
            },
            move |(index, document_ids_to_delete)| {
-                delete_documents_from_ids(index, document_ids_to_delete)
+                let mut wtxn = index.write_txn().unwrap();
+
+                for ids in document_ids_to_delete {
+                    let mut builder = DeleteDocuments::new(&mut wtxn, &index).unwrap();
+                    builder.delete_documents(&ids);
+                    builder.execute().unwrap();
+                }
+
+                wtxn.commit().unwrap();
+
+                index.prepare_for_closing().wait();
            },
        )
    });
@@ -129,6 +129,3 @@ experimental_enable_metrics = false

# Experimental RAM reduction during indexing, do not use in production, see: <https://github.com/meilisearch/product/discussions/652>
experimental_reduce_indexing_memory_usage = false
-
-# Experimentally reduces the maximum number of tasks that will be processed at once, see: <https://github.com/orgs/meilisearch/discussions/713>
-# experimental_max_number_of_batched_tasks = 100
@@ -208,14 +208,12 @@ pub(crate) mod test {
    use std::str::FromStr;

    use big_s::S;
-    use maplit::{btreemap, btreeset};
+    use maplit::btreeset;
-    use meilisearch_types::facet_values_sort::FacetValuesSort;
-    use meilisearch_types::features::RuntimeTogglableFeatures;
    use meilisearch_types::index_uid_pattern::IndexUidPattern;
    use meilisearch_types::keys::{Action, Key};
-    use meilisearch_types::milli;
    use meilisearch_types::milli::update::Setting;
-    use meilisearch_types::settings::{Checked, FacetingSettings, Settings};
+    use meilisearch_types::milli::{self};
+    use meilisearch_types::settings::{Checked, Settings};
    use meilisearch_types::tasks::{Details, Status};
    use serde_json::{json, Map, Value};
    use time::macros::datetime;
@@ -262,21 +260,11 @@ pub(crate) mod test {
            sortable_attributes: Setting::Set(btreeset! { S("age") }),
            ranking_rules: Setting::NotSet,
            stop_words: Setting::NotSet,
-            non_separator_tokens: Setting::NotSet,
-            separator_tokens: Setting::NotSet,
-            dictionary: Setting::NotSet,
            synonyms: Setting::NotSet,
            distinct_attribute: Setting::NotSet,
-            proximity_precision: Setting::NotSet,
            typo_tolerance: Setting::NotSet,
-            faceting: Setting::Set(FacetingSettings {
-                max_values_per_facet: Setting::Set(111),
-                sort_facet_values_by: Setting::Set(
-                    btreemap! { S("age") => FacetValuesSort::Count },
-                ),
-            }),
+            faceting: Setting::NotSet,
            pagination: Setting::NotSet,
-            embedders: Setting::NotSet,
            _kind: std::marker::PhantomData,
        };
        settings.check()
@@ -424,11 +412,6 @@ pub(crate) mod test {
        }
        keys.flush().unwrap();

-        // ========== experimental features
-        let features = create_test_features();
-
-        dump.create_experimental_features(features).unwrap();
-
        // create the dump
        let mut file = tempfile::tempfile().unwrap();
        dump.persist_to(&mut file).unwrap();
@@ -437,10 +420,6 @@ pub(crate) mod test {
        file
    }

-    fn create_test_features() -> RuntimeTogglableFeatures {
-        RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
-    }
-
    #[test]
    fn test_creating_and_read_dump() {
        let mut file = create_test_dump();
@@ -485,9 +464,5 @@ pub(crate) mod test {
        for (key, expected) in dump.keys().unwrap().zip(create_test_api_keys()) {
            assert_eq!(key.unwrap(), expected);
        }
-
-        // ==== checking the features
-        let expected = create_test_features();
-        assert_eq!(dump.features().unwrap().unwrap(), expected);
    }
}
@@ -191,10 +191,6 @@ impl CompatV5ToV6 {
            })
        })))
    }
-
-    pub fn features(&self) -> Result<Option<v6::RuntimeTogglableFeatures>> {
-        Ok(None)
-    }
}

pub enum CompatIndexV5ToV6 {
@@ -340,12 +336,8 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
            }
        },
        stop_words: settings.stop_words.into(),
-        non_separator_tokens: v6::Setting::NotSet,
-        separator_tokens: v6::Setting::NotSet,
-        dictionary: v6::Setting::NotSet,
        synonyms: settings.synonyms.into(),
        distinct_attribute: settings.distinct_attribute.into(),
-        proximity_precision: v6::Setting::NotSet,
        typo_tolerance: match settings.typo_tolerance {
            v5::Setting::Set(typo) => v6::Setting::Set(v6::TypoTolerance {
                enabled: typo.enabled.into(),
@@ -366,7 +358,6 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
        faceting: match settings.faceting {
            v5::Setting::Set(faceting) => v6::Setting::Set(v6::FacetingSettings {
                max_values_per_facet: faceting.max_values_per_facet.into(),
-                sort_facet_values_by: v6::Setting::NotSet,
            }),
            v5::Setting::Reset => v6::Setting::Reset,
            v5::Setting::NotSet => v6::Setting::NotSet,
@@ -378,7 +369,6 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
            v5::Setting::Reset => v6::Setting::Reset,
            v5::Setting::NotSet => v6::Setting::NotSet,
        },
-        embedders: v6::Setting::NotSet,
        _kind: std::marker::PhantomData,
    }
}
@@ -13,12 +13,12 @@ use crate::{Result, Version};

mod compat;

-mod v1;
-mod v2;
-mod v3;
-mod v4;
-mod v5;
-mod v6;
+pub(self) mod v1;
+pub(self) mod v2;
+pub(self) mod v3;
+pub(self) mod v4;
+pub(self) mod v5;
+pub(self) mod v6;

pub type Document = serde_json::Map<String, serde_json::Value>;
pub type UpdateFile = dyn Iterator<Item = Result<Document>>;
@@ -107,13 +107,6 @@ impl DumpReader {
            DumpReader::Compat(compat) => compat.keys(),
        }
    }
-
-    pub fn features(&self) -> Result<Option<v6::RuntimeTogglableFeatures>> {
-        match self {
-            DumpReader::Current(current) => Ok(current.features()),
-            DumpReader::Compat(compat) => compat.features(),
-        }
-    }
}

impl From<V6Reader> for DumpReader {
@@ -195,53 +188,6 @@ pub(crate) mod test {
    use meili_snap::insta;

    use super::*;
-    use crate::reader::v6::RuntimeTogglableFeatures;
-
-    #[test]
-    fn import_dump_v6_experimental() {
-        let dump = File::open("tests/assets/v6-with-experimental.dump").unwrap();
-        let mut dump = DumpReader::open(dump).unwrap();
-
-        // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-07-06 7:10:27.21958 +00:00:00");
-        insta::assert_debug_snapshot!(dump.instance_uid().unwrap(), @"None");
-
-        // tasks
-        let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"d45cd8571703e58ae53c7bd7ce3f5c22");
-        assert_eq!(update_files.len(), 2);
-        assert!(update_files[0].is_none()); // the dump creation
-        assert!(update_files[1].is_none()); // the processed document addition
-
-        // keys
-        let keys = dump.keys().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(keys), @"13c2da155e9729c2344688cab29af71d");
-
-        // indexes
-        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        // the index are not ordered in any way by default
-        indexes.sort_by_key(|index| index.metadata().uid.to_string());
-
-        let mut test = indexes.pop().unwrap();
-        assert!(indexes.is_empty());
-
-        insta::assert_json_snapshot!(test.metadata(), @r###"
-        {
-          "uid": "test",
-          "primaryKey": "id",
-          "createdAt": "2023-07-06T07:07:41.364694Z",
-          "updatedAt": "2023-07-06T07:07:41.396114Z"
-        }
-        "###);
-
-        assert_eq!(test.documents().unwrap().count(), 1);
-
-        assert_eq!(
-            dump.features().unwrap().unwrap(),
-            RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
-        );
-    }
-
    #[test]
    fn import_dump_v5() {
@@ -319,8 +265,6 @@ pub(crate) mod test {
        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
        assert_eq!(documents.len(), 10);
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
-
-        assert_eq!(dump.features().unwrap(), None);
    }

    #[test]
@@ -526,12 +470,12 @@ pub(crate) mod test {
        assert!(indexes.is_empty());

        // products
-        insta::assert_json_snapshot!(products.metadata(), @r###"
+        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "products",
          "primaryKey": "sku",
-          "createdAt": "2022-10-09T20:27:22.688964637Z",
-          "updatedAt": "2022-10-09T20:27:23.951017769Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -541,12 +485,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");

        // movies
-        insta::assert_json_snapshot!(movies.metadata(), @r###"
+        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "movies",
          "primaryKey": "id",
-          "createdAt": "2022-10-09T20:27:22.197788495Z",
-          "updatedAt": "2022-10-09T20:28:01.93111053Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -571,12 +515,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");

        // spells
-        insta::assert_json_snapshot!(spells.metadata(), @r###"
+        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "dnd_spells",
          "primaryKey": "index",
-          "createdAt": "2022-10-09T20:27:24.242683494Z",
-          "updatedAt": "2022-10-09T20:27:24.312809641Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
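The recurring change in these snapshot tests is that the `+` side passes insta inline redactions, so the volatile `createdAt`/`updatedAt` fields compare as the literal `"[now]"` instead of a real timestamp. A self-contained illustration of the mechanism (the JSON value here is made up for the example):

```rust
#[test]
fn timestamps_are_redacted() {
    let metadata = serde_json::json!({
        "uid": "products",
        "createdAt": "2023-01-30T16:25:56.595257Z",
    });
    // The selector => replacement map rewrites matching fields before the
    // snapshot comparison, so the volatile value never reaches the snapshot.
    insta::assert_json_snapshot!(metadata, { ".createdAt" => "[now]" }, @r###"
    {
      "createdAt": "[now]",
      "uid": "products"
    }
    "###);
}
```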
@@ -617,12 +561,12 @@ pub(crate) mod test {
        assert!(indexes.is_empty());

        // products
-        insta::assert_json_snapshot!(products.metadata(), @r###"
+        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "products",
          "primaryKey": "sku",
-          "createdAt": "2023-01-30T16:25:56.595257Z",
-          "updatedAt": "2023-01-30T16:25:58.70348Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -632,12 +576,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");

        // movies
-        insta::assert_json_snapshot!(movies.metadata(), @r###"
+        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "movies",
          "primaryKey": "id",
-          "createdAt": "2023-01-30T16:25:56.192178Z",
-          "updatedAt": "2023-01-30T16:25:56.455714Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -647,12 +591,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");

        // spells
-        insta::assert_json_snapshot!(spells.metadata(), @r###"
+        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "dnd_spells",
          "primaryKey": "index",
-          "createdAt": "2023-01-30T16:25:58.876405Z",
-          "updatedAt": "2023-01-30T16:25:59.079906Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -56,7 +56,8 @@ pub enum RankingRule {
    Desc(String),
}

-static ASC_DESC_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"(asc|desc)\(([\w_-]+)\)").unwrap());
+static ASC_DESC_REGEX: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r#"(asc|desc)\(([\w_-]+)\)"#).unwrap());

impl FromStr for RankingRule {
    type Err = ();
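Both sides of this hunk compile the same pattern; only the raw-string syntax changes (`r"…"` vs `r#"…"#`), which has no effect on the regex itself. For reference, what the legacy ranking-rule pattern accepts (`release_date` is just an example field name):

```rust
use once_cell::sync::Lazy;
use regex::Regex;

static ASC_DESC_REGEX: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"(asc|desc)\(([\w_-]+)\)").unwrap());

fn main() {
    // `asc(field)` / `desc(field)` where the field is word characters,
    // underscores, or hyphens.
    let caps = ASC_DESC_REGEX.captures("asc(release_date)").unwrap();
    assert_eq!(&caps[1], "asc");
    assert_eq!(&caps[2], "release_date");
    // Dotted paths are rejected: `.` is outside the captured class.
    assert!(ASC_DESC_REGEX.captures("asc(a.b)").is_none());
}
```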
@@ -46,7 +47,6 @@ pub type Checked = settings::Checked;
pub type Unchecked = settings::Unchecked;

pub type Task = updates::UpdateEntry;
-pub type Kind = updates::UpdateMeta;

// everything related to the errors
pub type ResponseError = errors::ResponseError;
@@ -108,11 +107,8 @@ impl V2Reader {
    pub fn indexes(&self) -> Result<impl Iterator<Item = Result<V2IndexReader>> + '_> {
        Ok(self.index_uuid.iter().map(|index| -> Result<_> {
            V2IndexReader::new(
+                index.uid.clone(),
                &self.dump.path().join("indexes").join(format!("index-{}", index.uuid)),
-                index,
-                BufReader::new(
-                    File::open(self.dump.path().join("updates").join("data.jsonl")).unwrap(),
-                ),
            )
        }))
    }
@@ -147,41 +143,16 @@ pub struct V2IndexReader {
}

impl V2IndexReader {
-    pub fn new(path: &Path, index_uuid: &IndexUuid, tasks: BufReader<File>) -> Result<Self> {
+    pub fn new(name: String, path: &Path) -> Result<Self> {
        let meta = File::open(path.join("meta.json"))?;
        let meta: DumpMeta = serde_json::from_reader(meta)?;

-        let mut created_at = None;
-        let mut updated_at = None;
-
-        for line in tasks.lines() {
-            let task: Task = serde_json::from_str(&line?)?;
-            if !(task.uuid == index_uuid.uuid && task.is_finished()) {
-                continue;
-            }
-
-            let new_created_at = match task.update.meta() {
-                Kind::DocumentsAddition { .. } | Kind::Settings(_) => task.update.finished_at(),
-                _ => None,
-            };
-            let new_updated_at = task.update.finished_at();
-
-            if created_at.is_none() || created_at > new_created_at {
-                created_at = new_created_at;
-            }
-
-            if updated_at.is_none() || updated_at < new_updated_at {
-                updated_at = new_updated_at;
-            }
-        }
-
-        let current_time = OffsetDateTime::now_utc();
-
        let metadata = IndexMetadata {
-            uid: index_uuid.uid.clone(),
+            uid: name,
            primary_key: meta.primary_key,
-            created_at: created_at.unwrap_or(current_time),
-            updated_at: updated_at.unwrap_or(current_time),
+            // FIXME: Iterate over the whole task queue to find the creation and last update date.
+            created_at: OffsetDateTime::now_utc(),
+            updated_at: OffsetDateTime::now_utc(),
        };

        let ret = V2IndexReader {
@@ -277,12 +248,12 @@ pub(crate) mod test {
        assert!(indexes.is_empty());

        // products
-        insta::assert_json_snapshot!(products.metadata(), @r###"
+        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "products",
          "primaryKey": "sku",
-          "createdAt": "2022-10-09T20:27:22.688964637Z",
-          "updatedAt": "2022-10-09T20:27:23.951017769Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -292,12 +263,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");

        // movies
-        insta::assert_json_snapshot!(movies.metadata(), @r###"
+        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "movies",
          "primaryKey": "id",
-          "createdAt": "2022-10-09T20:27:22.197788495Z",
-          "updatedAt": "2022-10-09T20:28:01.93111053Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -322,12 +293,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"d751713988987e9331980363e24189ce");

        // spells
-        insta::assert_json_snapshot!(spells.metadata(), @r###"
+        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "dnd_spells",
          "primaryKey": "index",
-          "createdAt": "2022-10-09T20:27:24.242683494Z",
-          "updatedAt": "2022-10-09T20:27:24.312809641Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -369,12 +340,12 @@ pub(crate) mod test {
        assert!(indexes.is_empty());

        // products
-        insta::assert_json_snapshot!(products.metadata(), @r###"
+        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "products",
          "primaryKey": "sku",
-          "createdAt": "2023-01-30T16:25:56.595257Z",
-          "updatedAt": "2023-01-30T16:25:58.70348Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -384,12 +355,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");

        // movies
-        insta::assert_json_snapshot!(movies.metadata(), @r###"
+        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "movies",
          "primaryKey": "id",
-          "createdAt": "2023-01-30T16:25:56.192178Z",
-          "updatedAt": "2023-01-30T16:25:56.455714Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -399,12 +370,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");

        // spells
-        insta::assert_json_snapshot!(spells.metadata(), @r###"
+        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "dnd_spells",
          "primaryKey": "index",
-          "createdAt": "2023-01-30T16:25:58.876405Z",
-          "updatedAt": "2023-01-30T16:25:59.079906Z"
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
        }
        "###);
@@ -227,14 +227,4 @@ impl UpdateStatus {
            _ => None,
        }
    }
-
-    pub fn finished_at(&self) -> Option<OffsetDateTime> {
-        match self {
-            UpdateStatus::Processing(_) => None,
-            UpdateStatus::Enqueued(_) => None,
-            UpdateStatus::Processed(u) => Some(u.processed_at),
-            UpdateStatus::Aborted(_) => None,
-            UpdateStatus::Failed(u) => Some(u.failed_at),
-        }
-    }
}
@@ -2,7 +2,6 @@ use std::fs::{self, File};
use std::io::{BufRead, BufReader, ErrorKind};
use std::path::Path;

-use log::debug;
pub use meilisearch_types::milli;
use tempfile::TempDir;
use time::OffsetDateTime;
@@ -19,7 +18,6 @@ pub type Unchecked = meilisearch_types::settings::Unchecked;

pub type Task = crate::TaskDump;
pub type Key = meilisearch_types::keys::Key;
-pub type RuntimeTogglableFeatures = meilisearch_types::features::RuntimeTogglableFeatures;

// ===== Other types to clarify the code of the compat module
// everything related to the tasks
@@ -49,7 +47,6 @@ pub struct V6Reader {
    metadata: Metadata,
    tasks: BufReader<File>,
    keys: BufReader<File>,
-    features: Option<RuntimeTogglableFeatures>,
}

impl V6Reader {
@@ -61,29 +58,11 @@ impl V6Reader {
            Err(e) => return Err(e.into()),
        };

-        let feature_file = match fs::read(dump.path().join("experimental-features.json")) {
-            Ok(feature_file) => Some(feature_file),
-            Err(error) => match error.kind() {
-                // Allows the file to be missing, this will only result in all experimental features disabled.
-                ErrorKind::NotFound => {
-                    debug!("`experimental-features.json` not found in dump");
-                    None
-                }
-                _ => return Err(error.into()),
-            },
-        };
-        let features = if let Some(feature_file) = feature_file {
-            Some(serde_json::from_reader(&*feature_file)?)
-        } else {
-            None
-        };
-
        Ok(V6Reader {
            metadata: serde_json::from_reader(&*meta_file)?,
            instance_uid,
            tasks: BufReader::new(File::open(dump.path().join("tasks").join("queue.jsonl"))?),
            keys: BufReader::new(File::open(dump.path().join("keys.jsonl"))?),
-            features,
            dump,
        })
    }
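The `-` side of this hunk loads the dump's optional `experimental-features.json`, treating a missing file as "all experimental features disabled" rather than as an error. Abstracted away from the dump layout, that read-optional pattern looks like this (a sketch, not the crate's API):

```rust
use std::io::ErrorKind;
use std::path::Path;

/// Returns Ok(None) when the file does not exist and propagates every
/// other I/O error, mirroring the NotFound handling removed above.
fn read_optional(path: &Path) -> std::io::Result<Option<Vec<u8>>> {
    match std::fs::read(path) {
        Ok(bytes) => Ok(Some(bytes)),
        Err(e) if e.kind() == ErrorKind::NotFound => Ok(None),
        Err(e) => Err(e),
    }
}
```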
@@ -150,10 +129,6 @@ impl V6Reader {
            (&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
        )
    }
-
-    pub fn features(&self) -> Option<RuntimeTogglableFeatures> {
-        self.features
-    }
}

pub struct UpdateFile {
@@ -4,7 +4,6 @@ use std::path::PathBuf;

use flate2::write::GzEncoder;
use flate2::Compression;
-use meilisearch_types::features::RuntimeTogglableFeatures;
use meilisearch_types::keys::Key;
use meilisearch_types::settings::{Checked, Settings};
use serde_json::{Map, Value};
@@ -54,13 +53,6 @@ impl DumpWriter {
        TaskWriter::new(self.dir.path().join("tasks"))
    }

-    pub fn create_experimental_features(&self, features: RuntimeTogglableFeatures) -> Result<()> {
-        Ok(std::fs::write(
-            self.dir.path().join("experimental-features.json"),
-            serde_json::to_string(&features)?,
-        )?)
-    }
-
    pub fn persist_to(self, mut writer: impl Write) -> Result<()> {
        let gz_encoder = GzEncoder::new(&mut writer, Compression::default());
        let mut tar_encoder = tar::Builder::new(gz_encoder);
@@ -292,7 +284,6 @@ pub(crate) mod test {
        │ ├---- update_files/
        │ │ └---- 1.jsonl
        │ └---- queue.jsonl
-        ├---- experimental-features.json
        ├---- instance_uid.uuid
        ├---- keys.jsonl
        └---- metadata.json
Binary file not shown.
@@ -14,7 +14,6 @@ license.workspace = true
[dependencies]
nom = "7.1.3"
nom_locate = "4.1.0"
-unescaper = "0.1.2"

[dev-dependencies]
insta = "1.29.0"
@@ -62,7 +62,6 @@ pub enum ErrorKind<'a> {
    MisusedGeoRadius,
    MisusedGeoBoundingBox,
    InvalidPrimary,
-    InvalidEscapedNumber,
    ExpectedEof,
    ExpectedValue(ExpectedValueKind),
    MalformedValue,
@@ -148,9 +147,6 @@ impl<'a> Display for Error<'a> {
            let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
            writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
        }
-        ErrorKind::InvalidEscapedNumber => {
-            writeln!(f, "Found an invalid escaped sequence number: `{}`.", escaped_input)?
-        }
        ErrorKind::ExpectedEof => {
            writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
        }
@@ -472,81 +472,8 @@ pub fn parse_filter(input: Span) -> IResult<FilterCondition> {
    terminated(|input| parse_expression(input, 0), eof)(input)
}

-impl<'a> std::fmt::Display for FilterCondition<'a> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            FilterCondition::Not(filter) => {
-                write!(f, "NOT ({filter})")
-            }
-            FilterCondition::Condition { fid, op } => {
-                write!(f, "{fid} {op}")
-            }
-            FilterCondition::In { fid, els } => {
-                write!(f, "{fid} IN[")?;
-                for el in els {
-                    write!(f, "{el}, ")?;
-                }
-                write!(f, "]")
-            }
-            FilterCondition::Or(els) => {
-                write!(f, "OR[")?;
-                for el in els {
-                    write!(f, "{el}, ")?;
-                }
-                write!(f, "]")
-            }
-            FilterCondition::And(els) => {
-                write!(f, "AND[")?;
-                for el in els {
-                    write!(f, "{el}, ")?;
-                }
-                write!(f, "]")
-            }
-            FilterCondition::GeoLowerThan { point, radius } => {
-                write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
-            }
-            FilterCondition::GeoBoundingBox {
-                top_right_point: top_left_point,
-                bottom_left_point: bottom_right_point,
-            } => {
-                write!(
-                    f,
-                    "_geoBoundingBox([{}, {}], [{}, {}])",
-                    top_left_point[0],
-                    top_left_point[1],
-                    bottom_right_point[0],
-                    bottom_right_point[1]
-                )
-            }
-        }
-    }
-}
-impl<'a> std::fmt::Display for Condition<'a> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Condition::GreaterThan(token) => write!(f, "> {token}"),
-            Condition::GreaterThanOrEqual(token) => write!(f, ">= {token}"),
-            Condition::Equal(token) => write!(f, "= {token}"),
-            Condition::NotEqual(token) => write!(f, "!= {token}"),
-            Condition::Null => write!(f, "IS NULL"),
-            Condition::Empty => write!(f, "IS EMPTY"),
-            Condition::Exists => write!(f, "EXISTS"),
-            Condition::LowerThan(token) => write!(f, "< {token}"),
-            Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"),
-            Condition::Between { from, to } => write!(f, "{from} TO {to}"),
-        }
-    }
-}
-impl<'a> std::fmt::Display for Token<'a> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{{{}}}", self.value())
-    }
-}
-
#[cfg(test)]
pub mod tests {
-    use FilterCondition as Fc;
-
    use super::*;

    /// Create a raw [Token]. You must specify the string that appear BEFORE your element followed by your element
@@ -558,22 +485,14 @@ pub mod tests {
        unsafe { Span::new_from_raw_offset(offset, lines as u32, value, "") }.into()
    }

-    fn p(s: &str) -> impl std::fmt::Display + '_ {
-        Fc::parse(s).unwrap().unwrap()
-    }
-
-    #[test]
-    fn parse_escaped() {
-        insta::assert_display_snapshot!(p(r"title = 'foo\\'"), @r#"{title} = {foo\}"#);
-        insta::assert_display_snapshot!(p(r"title = 'foo\\\\'"), @r#"{title} = {foo\\}"#);
-        insta::assert_display_snapshot!(p(r"title = 'foo\\\\\\'"), @r#"{title} = {foo\\\}"#);
-        insta::assert_display_snapshot!(p(r"title = 'foo\\\\\\\\'"), @r#"{title} = {foo\\\\}"#);
-        // but it also works with other sequencies
-        insta::assert_display_snapshot!(p(r#"title = 'foo\x20\n\t\"\'"'"#), @"{title} = {foo \n\t\"\'\"}");
-    }
-
    #[test]
    fn parse() {
+        use FilterCondition as Fc;
+
+        fn p(s: &str) -> impl std::fmt::Display + '_ {
+            Fc::parse(s).unwrap().unwrap()
+        }
+
        // Test equal
        insta::assert_display_snapshot!(p("channel = Ponce"), @"{channel} = {Ponce}");
        insta::assert_display_snapshot!(p("subscribers = 12"), @"{subscribers} = {12}");
@@ -933,3 +852,74 @@ pub mod tests {
         assert_eq!(token.value(), s);
     }
 }
+
+impl<'a> std::fmt::Display for FilterCondition<'a> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            FilterCondition::Not(filter) => {
+                write!(f, "NOT ({filter})")
+            }
+            FilterCondition::Condition { fid, op } => {
+                write!(f, "{fid} {op}")
+            }
+            FilterCondition::In { fid, els } => {
+                write!(f, "{fid} IN[")?;
+                for el in els {
+                    write!(f, "{el}, ")?;
+                }
+                write!(f, "]")
+            }
+            FilterCondition::Or(els) => {
+                write!(f, "OR[")?;
+                for el in els {
+                    write!(f, "{el}, ")?;
+                }
+                write!(f, "]")
+            }
+            FilterCondition::And(els) => {
+                write!(f, "AND[")?;
+                for el in els {
+                    write!(f, "{el}, ")?;
+                }
+                write!(f, "]")
+            }
+            FilterCondition::GeoLowerThan { point, radius } => {
+                write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
+            }
+            FilterCondition::GeoBoundingBox {
+                top_right_point: top_left_point,
+                bottom_left_point: bottom_right_point,
+            } => {
+                write!(
+                    f,
+                    "_geoBoundingBox([{}, {}], [{}, {}])",
+                    top_left_point[0],
+                    top_left_point[1],
+                    bottom_right_point[0],
+                    bottom_right_point[1]
+                )
+            }
+        }
+    }
+}
+impl<'a> std::fmt::Display for Condition<'a> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Condition::GreaterThan(token) => write!(f, "> {token}"),
+            Condition::GreaterThanOrEqual(token) => write!(f, ">= {token}"),
+            Condition::Equal(token) => write!(f, "= {token}"),
+            Condition::NotEqual(token) => write!(f, "!= {token}"),
+            Condition::Null => write!(f, "IS NULL"),
+            Condition::Empty => write!(f, "IS EMPTY"),
+            Condition::Exists => write!(f, "EXISTS"),
+            Condition::LowerThan(token) => write!(f, "< {token}"),
+            Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"),
+            Condition::Between { from, to } => write!(f, "{from} TO {to}"),
+        }
+    }
+}
+impl<'a> std::fmt::Display for Token<'a> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{{{}}}", self.value())
+    }
+}
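Note: the `Display` implementations added on the right-hand side are exactly what the insta snapshots in `parse()` exercise — every `Token` prints inside braces and nested conditions are wrapped in `OR[...]`/`AND[...]` markers. A minimal sketch of that round-trip, reusing the `p` helper from the tests above (it only compiles inside that test module; the expected strings are copied verbatim from the snapshots):

    // Parse a filter string, then render it back through the Display impls above.
    assert_eq!(p("channel = Ponce").to_string(), "{channel} = {Ponce}");
    assert_eq!(p("subscribers = 12").to_string(), "{subscribers} = {12}");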
@@ -171,24 +171,7 @@ pub fn parse_value(input: Span) -> IResult<Token> {
         })
     })?;

-    match unescaper::unescape(value.value()) {
-        Ok(content) => {
-            if content.len() != value.value().len() {
-                Ok((input, Token::new(value.original_span(), Some(content))))
-            } else {
-                Ok((input, value))
-            }
-        }
-        Err(unescaper::Error::IncompleteStr(_)) => Err(nom::Err::Incomplete(nom::Needed::Unknown)),
-        Err(unescaper::Error::ParseIntError { .. }) => Err(nom::Err::Error(Error::new_from_kind(
-            value.original_span(),
-            ErrorKind::InvalidEscapedNumber,
-        ))),
-        Err(unescaper::Error::InvalidChar { .. }) => Err(nom::Err::Error(Error::new_from_kind(
-            value.original_span(),
-            ErrorKind::MalformedValue,
-        ))),
-    }
+    Ok((input, value))
 }

 fn is_value_component(c: char) -> bool {
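The removed branch is subtle: a new `Token` is only allocated when unescaping actually changed the value, which the code detects by comparing lengths. A standalone sketch of that decision, assuming the `unescaper` crate used above (`needs_owned_copy` is a hypothetical helper name, not part of the crate):

    // Returns Some(unescaped) only when unescaping shortened the raw input,
    // mirroring the `content.len() != value.value().len()` check removed above;
    // errors propagate to the caller exactly as in the removed match.
    fn needs_owned_copy(raw: &str) -> Result<Option<String>, unescaper::Error> {
        let content = unescaper::unescape(raw)?;
        Ok((content.len() != raw.len()).then_some(content))
    }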
@@ -270,8 +253,8 @@ pub mod test {
             ("aaaa", "", rtok("", "aaaa"), "aaaa"),
             (r#"aa"aa"#, r#""aa"#, rtok("", "aa"), "aa"),
             (r#"aa\"aa"#, r#""#, rtok("", r#"aa\"aa"#), r#"aa"aa"#),
-            (r"aa\\\aa", r#""#, rtok("", r"aa\\\aa"), r"aa\\\aa"),
-            (r#"aa\\"\aa"#, r#""\aa"#, rtok("", r"aa\\"), r"aa\\"),
+            (r#"aa\\\aa"#, r#""#, rtok("", r#"aa\\\aa"#), r#"aa\\\aa"#),
+            (r#"aa\\"\aa"#, r#""\aa"#, rtok("", r#"aa\\"#), r#"aa\\"#),
             (r#"aa\\\"\aa"#, r#""#, rtok("", r#"aa\\\"\aa"#), r#"aa\\"\aa"#),
             (r#"\"\""#, r#""#, rtok("", r#"\"\""#), r#""""#),
         ];
@@ -301,12 +284,12 @@ pub mod test {
         );
         // simple quote
         assert_eq!(
-            unescape(Span::new_extra(r"Hello \'World\'", ""), '\''),
+            unescape(Span::new_extra(r#"Hello \'World\'"#, ""), '\''),
             r#"Hello 'World'"#.to_string()
         );
         assert_eq!(
-            unescape(Span::new_extra(r"Hello \\\'World\\\'", ""), '\''),
-            r"Hello \\'World\\'".to_string()
+            unescape(Span::new_extra(r#"Hello \\\'World\\\'"#, ""), '\''),
+            r#"Hello \\'World\\'"#.to_string()
         );
     }

@@ -335,19 +318,19 @@ pub mod test {
             ("\"cha'nnel\"", "cha'nnel", false),
             ("I'm tamo", "I", false),
             // escaped thing but not quote
-            (r#""\\""#, r"\", true),
-            (r#""\\\\\\""#, r"\\\", true),
-            (r#""aa\\aa""#, r"aa\aa", true),
+            (r#""\\""#, r#"\\"#, false),
+            (r#""\\\\\\""#, r#"\\\\\\"#, false),
+            (r#""aa\\aa""#, r#"aa\\aa"#, false),
             // with double quote
             (r#""Hello \"world\"""#, r#"Hello "world""#, true),
-            (r#""Hello \\\"world\\\"""#, r#"Hello \"world\""#, true),
+            (r#""Hello \\\"world\\\"""#, r#"Hello \\"world\\""#, true),
             (r#""I'm \"super\" tamo""#, r#"I'm "super" tamo"#, true),
             (r#""\"\"""#, r#""""#, true),
             // with simple quote
-            (r"'Hello \'world\''", r#"Hello 'world'"#, true),
-            (r"'Hello \\\'world\\\''", r"Hello \'world\'", true),
+            (r#"'Hello \'world\''"#, r#"Hello 'world'"#, true),
+            (r#"'Hello \\\'world\\\''"#, r#"Hello \\'world\\'"#, true),
             (r#"'I\'m "super" tamo'"#, r#"I'm "super" tamo"#, true),
-            (r"'\'\''", r#"''"#, true),
+            (r#"'\'\''"#, r#"''"#, true),
         ];

         for (input, expected, escaped) in test_case {
@@ -367,14 +350,7 @@ pub mod test {
                 "Filter `{}` was not supposed to be escaped",
                 input
             );
-            assert_eq!(
-                token.value(),
-                expected,
-                "Filter `{}` failed by giving `{}` instead of `{}`.",
-                input,
-                token.value(),
-                expected
-            );
+            assert_eq!(token.value(), expected, "Filter `{}` failed.", input);
         }
     }

@@ -16,7 +16,7 @@ license.workspace = true
 serde_json = "1.0"

 [dev-dependencies]
-criterion = { version = "0.5.1", features = ["html_reports"] }
+criterion = { version = "0.4.0", features = ["html_reports"] }

 [[bench]]
 name = "benchmarks"
@@ -1,20 +0,0 @@
|
|||||||
[package]
|
|
||||||
name = "fuzzers"
|
|
||||||
publish = false
|
|
||||||
|
|
||||||
version.workspace = true
|
|
||||||
authors.workspace = true
|
|
||||||
description.workspace = true
|
|
||||||
homepage.workspace = true
|
|
||||||
readme.workspace = true
|
|
||||||
edition.workspace = true
|
|
||||||
license.workspace = true
|
|
||||||
|
|
||||||
[dependencies]
|
|
||||||
arbitrary = { version = "1.3.0", features = ["derive"] }
|
|
||||||
clap = { version = "4.3.0", features = ["derive"] }
|
|
||||||
fastrand = "2.0.0"
|
|
||||||
milli = { path = "../milli" }
|
|
||||||
serde = { version = "1.0.160", features = ["derive"] }
|
|
||||||
serde_json = { version = "1.0.95", features = ["preserve_order"] }
|
|
||||||
tempfile = "3.5.0"
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
# Fuzzers
|
|
||||||
|
|
||||||
The purpose of this crate is to contains all the handmade "fuzzer" we may need.
|
|
||||||
@@ -1,152 +0,0 @@
|
|||||||
use std::num::NonZeroUsize;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
|
|
||||||
use std::time::Duration;
|
|
||||||
|
|
||||||
use arbitrary::{Arbitrary, Unstructured};
|
|
||||||
use clap::Parser;
|
|
||||||
use fuzzers::Operation;
|
|
||||||
use milli::heed::EnvOpenOptions;
|
|
||||||
use milli::update::{IndexDocuments, IndexDocumentsConfig, IndexerConfig};
|
|
||||||
use milli::Index;
|
|
||||||
use tempfile::TempDir;
|
|
||||||
|
|
||||||
#[derive(Debug, Arbitrary)]
|
|
||||||
struct Batch([Operation; 5]);
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Parser)]
|
|
||||||
struct Opt {
|
|
||||||
/// The number of fuzzer to run in parallel.
|
|
||||||
#[clap(long)]
|
|
||||||
par: Option<NonZeroUsize>,
|
|
||||||
// We need to put a lot of newlines in the following documentation or else everything gets collapsed on one line
|
|
||||||
/// The path in which the databases will be created.
|
|
||||||
/// Using a ramdisk is recommended.
|
|
||||||
///
|
|
||||||
/// Linux:
|
|
||||||
///
|
|
||||||
/// sudo mount -t tmpfs -o size=2g tmpfs ramdisk # to create it
|
|
||||||
///
|
|
||||||
/// sudo umount ramdisk # to remove it
|
|
||||||
///
|
|
||||||
/// MacOS:
|
|
||||||
///
|
|
||||||
/// diskutil erasevolume HFS+ 'RAM Disk' `hdiutil attach -nobrowse -nomount ram://4194304 # create it
|
|
||||||
///
|
|
||||||
/// hdiutil detach /dev/:the_disk
|
|
||||||
#[clap(long)]
|
|
||||||
path: Option<PathBuf>,
|
|
||||||
}
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
let opt = Opt::parse();
|
|
||||||
let progression: &'static AtomicUsize = Box::leak(Box::new(AtomicUsize::new(0)));
|
|
||||||
let stop: &'static AtomicBool = Box::leak(Box::new(AtomicBool::new(false)));
|
|
||||||
|
|
||||||
let par = opt.par.unwrap_or_else(|| std::thread::available_parallelism().unwrap()).get();
|
|
||||||
let mut handles = Vec::with_capacity(par);
|
|
||||||
|
|
||||||
for _ in 0..par {
|
|
||||||
let opt = opt.clone();
|
|
||||||
|
|
||||||
let handle = std::thread::spawn(move || {
|
|
||||||
let mut options = EnvOpenOptions::new();
|
|
||||||
options.map_size(1024 * 1024 * 1024 * 1024);
|
|
||||||
let tempdir = match opt.path {
|
|
||||||
Some(path) => TempDir::new_in(path).unwrap(),
|
|
||||||
None => TempDir::new().unwrap(),
|
|
||||||
};
|
|
||||||
let index = Index::new(options, tempdir.path()).unwrap();
|
|
||||||
let indexer_config = IndexerConfig::default();
|
|
||||||
let index_documents_config = IndexDocumentsConfig::default();
|
|
||||||
|
|
||||||
std::thread::scope(|s| {
|
|
||||||
loop {
|
|
||||||
if stop.load(Ordering::Relaxed) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let v: Vec<u8> =
|
|
||||||
std::iter::repeat_with(|| fastrand::u8(..)).take(1000).collect();
|
|
||||||
|
|
||||||
let mut data = Unstructured::new(&v);
|
|
||||||
let batches = <[Batch; 5]>::arbitrary(&mut data).unwrap();
|
|
||||||
// will be used to display the error once a thread crashes
|
|
||||||
let dbg_input = format!("{:#?}", batches);
|
|
||||||
|
|
||||||
let handle = s.spawn(|| {
|
|
||||||
let mut wtxn = index.write_txn().unwrap();
|
|
||||||
|
|
||||||
for batch in batches {
|
|
||||||
let mut builder = IndexDocuments::new(
|
|
||||||
&mut wtxn,
|
|
||||||
&index,
|
|
||||||
&indexer_config,
|
|
||||||
index_documents_config.clone(),
|
|
||||||
|_| (),
|
|
||||||
|| false,
|
|
||||||
)
|
|
||||||
.unwrap();
|
|
||||||
|
|
||||||
for op in batch.0 {
|
|
||||||
match op {
|
|
||||||
Operation::AddDoc(doc) => {
|
|
||||||
let documents =
|
|
||||||
milli::documents::objects_from_json_value(doc.to_d());
|
|
||||||
let documents =
|
|
||||||
milli::documents::documents_batch_reader_from_objects(
|
|
||||||
documents,
|
|
||||||
);
|
|
||||||
let (b, _added) = builder.add_documents(documents).unwrap();
|
|
||||||
builder = b;
|
|
||||||
}
|
|
||||||
Operation::DeleteDoc(id) => {
|
|
||||||
let (b, _removed) =
|
|
||||||
builder.remove_documents(vec![id.to_s()]).unwrap();
|
|
||||||
builder = b;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
builder.execute().unwrap();
|
|
||||||
|
|
||||||
// after executing a batch we check if the database is corrupted
|
|
||||||
let res = index.search(&wtxn).execute().unwrap();
|
|
||||||
index.documents(&wtxn, res.documents_ids).unwrap();
|
|
||||||
progression.fetch_add(1, Ordering::Relaxed);
|
|
||||||
}
|
|
||||||
wtxn.abort();
|
|
||||||
});
|
|
||||||
if let err @ Err(_) = handle.join() {
|
|
||||||
stop.store(true, Ordering::Relaxed);
|
|
||||||
err.expect(&dbg_input);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
handles.push(handle);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::thread::spawn(|| {
|
|
||||||
let mut last_value = 0;
|
|
||||||
let start = std::time::Instant::now();
|
|
||||||
loop {
|
|
||||||
let total = progression.load(Ordering::Relaxed);
|
|
||||||
let elapsed = start.elapsed().as_secs();
|
|
||||||
if elapsed > 3600 {
|
|
||||||
// after 1 hour, stop the fuzzer, success
|
|
||||||
std::process::exit(0);
|
|
||||||
}
|
|
||||||
println!(
|
|
||||||
"Has been running for {:?} seconds. Tested {} new values for a total of {}.",
|
|
||||||
elapsed,
|
|
||||||
total - last_value,
|
|
||||||
total
|
|
||||||
);
|
|
||||||
last_value = total;
|
|
||||||
std::thread::sleep(Duration::from_secs(1));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
for handle in handles {
|
|
||||||
handle.join().unwrap();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,46 +0,0 @@
|
|||||||
use arbitrary::Arbitrary;
|
|
||||||
use serde_json::{json, Value};
|
|
||||||
|
|
||||||
#[derive(Debug, Arbitrary)]
|
|
||||||
pub enum Document {
|
|
||||||
One,
|
|
||||||
Two,
|
|
||||||
Three,
|
|
||||||
Four,
|
|
||||||
Five,
|
|
||||||
Six,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Document {
|
|
||||||
pub fn to_d(&self) -> Value {
|
|
||||||
match self {
|
|
||||||
Document::One => json!({ "id": 0, "doggo": "bernese" }),
|
|
||||||
Document::Two => json!({ "id": 0, "doggo": "golden" }),
|
|
||||||
Document::Three => json!({ "id": 0, "catto": "jorts" }),
|
|
||||||
Document::Four => json!({ "id": 1, "doggo": "bernese" }),
|
|
||||||
Document::Five => json!({ "id": 1, "doggo": "golden" }),
|
|
||||||
Document::Six => json!({ "id": 1, "catto": "jorts" }),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Arbitrary)]
|
|
||||||
pub enum DocId {
|
|
||||||
Zero,
|
|
||||||
One,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl DocId {
|
|
||||||
pub fn to_s(&self) -> String {
|
|
||||||
match self {
|
|
||||||
DocId::Zero => "0".to_string(),
|
|
||||||
DocId::One => "1".to_string(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Arbitrary)]
|
|
||||||
pub enum Operation {
|
|
||||||
AddDoc(Document),
|
|
||||||
DeleteDoc(DocId),
|
|
||||||
}
|
|
||||||
@@ -22,7 +22,6 @@ log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 page_size = "0.5.0"
-puffin = { version = "0.16.0", features = ["serialization"] }
 roaring = { version = "0.10.1", features = ["serde"] }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
@@ -160,7 +160,7 @@ impl BatchKind {
 impl BatchKind {
     /// Returns a `ControlFlow::Break` if you must stop right now.
     /// The boolean tell you if an index has been created by the batched task.
-    /// To ease the writing of the code. `true` can be returned when you don't need to create an index
+    /// To ease the writting of the code. `true` can be returned when you don't need to create an index
     /// but false can't be returned if you needs to create an index.
     // TODO use an AutoBatchKind as input
     pub fn new(
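Both versions of this doc comment describe the same contract: accumulation is one step of a fold over `ControlFlow`, where `Continue` carries the grown batch and `Break` returns a batch that must be closed as-is. A generic sketch of the driver loop implied by that contract (illustrative names only, not the scheduler's actual code):

    use std::ops::ControlFlow;

    // Fold items into an accumulator until the step function signals Break;
    // `step` plays the role of BatchKind::accumulate in the code above.
    fn fold_until_break<A, T>(
        init: A,
        items: impl IntoIterator<Item = T>,
        mut step: impl FnMut(A, T) -> ControlFlow<A, A>,
    ) -> A {
        let mut acc = init;
        for item in items {
            match step(acc, item) {
                ControlFlow::Continue(next) => acc = next,
                ControlFlow::Break(done) => return done,
            }
        }
        acc
    }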
@@ -214,7 +214,7 @@ impl BatchKind {

     /// Returns a `ControlFlow::Break` if you must stop right now.
     /// The boolean tell you if an index has been created by the batched task.
-    /// To ease the writing of the code. `true` can be returned when you don't need to create an index
+    /// To ease the writting of the code. `true` can be returned when you don't need to create an index
     /// but false can't be returned if you needs to create an index.
     #[rustfmt::skip]
     fn accumulate(self, id: TaskId, kind: AutobatchKind, index_already_exists: bool, primary_key: Option<&str>) -> ControlFlow<BatchKind, BatchKind> {
@@ -321,18 +321,9 @@ impl BatchKind {
                 })
             }
             (
-                BatchKind::DocumentOperation { method, allow_index_creation, primary_key, mut operation_ids },
+                this @ BatchKind::DocumentOperation { .. },
                 K::DocumentDeletion,
-            ) => {
-                operation_ids.push(id);
-
-                Continue(BatchKind::DocumentOperation {
-                    method,
-                    allow_index_creation,
-                    primary_key,
-                    operation_ids,
-                })
-            }
+            ) => Break(this),
             // but we can't autobatch documents if it's not the same kind
             // this match branch MUST be AFTER the previous one
             (
@@ -355,35 +346,7 @@ impl BatchKind {
                 deletion_ids.push(id);
                 Continue(BatchKind::DocumentClear { ids: deletion_ids })
             }
-            // we can autobatch the deletion and import if the index already exists
-            (
-                BatchKind::DocumentDeletion { mut deletion_ids },
-                K::DocumentImport { method, allow_index_creation, primary_key }
-            ) if index_already_exists => {
-                deletion_ids.push(id);
-
-                Continue(BatchKind::DocumentOperation {
-                    method,
-                    allow_index_creation,
-                    primary_key,
-                    operation_ids: deletion_ids,
-                })
-            }
-            // we can autobatch the deletion and import if both can't create an index
-            (
-                BatchKind::DocumentDeletion { mut deletion_ids },
-                K::DocumentImport { method, allow_index_creation, primary_key }
-            ) if !allow_index_creation => {
-                deletion_ids.push(id);
-
-                Continue(BatchKind::DocumentOperation {
-                    method,
-                    allow_index_creation,
-                    primary_key,
-                    operation_ids: deletion_ids,
-                })
-            }
-            // we can't autobatch a deletion and an import if the index does not exists but would be created by an addition
+            // we can't autobatch a deletion and an import
             (
                 this @ BatchKind::DocumentDeletion { .. },
                 K::DocumentImport { .. }
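The removed arms encode a small decision table for a deletion followed by an import: batching is allowed when the index already exists, or when the import may not create it; otherwise the deletion batch is closed. A compact restatement of those guards (a sketch, not code from either side of the diff):

    // Per the removed guards: a pending DocumentDeletion batch may absorb a
    // following DocumentImport iff the index already exists or the import is
    // not allowed to create it.
    fn deletion_then_import_batchable(index_already_exists: bool, allow_index_creation: bool) -> bool {
        index_already_exists || !allow_index_creation
    }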
@@ -685,36 +648,36 @@ mod tests {
        debug_snapshot!(autobatch_from(false,None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
        debug_snapshot!(autobatch_from(false,None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");

-        // We can autobatch document addition with document deletion
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
-        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
-        debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
-        debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
-        debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
-        debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
-        // And the other way around
-        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
-        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
-        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
-        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
-        debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
-        debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
+        // We can't autobatch document addition with document deletion
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
+        debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
+        debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0] }, true))"###);
+        debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
+        debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0] }, false))"###);
+        // we also can't do the only way around
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
    }

    #[test]
@@ -19,20 +19,20 @@ one indexing operation.

 use std::collections::{BTreeSet, HashSet};
 use std::ffi::OsStr;
-use std::fmt;
 use std::fs::{self, File};
 use std::io::BufWriter;

 use dump::IndexMetadata;
-use log::{debug, error, info, trace};
+use log::{debug, error, info};
 use meilisearch_types::error::Code;
 use meilisearch_types::heed::{RoTxn, RwTxn};
 use meilisearch_types::milli::documents::{obkv_to_object, DocumentsBatchReader};
 use meilisearch_types::milli::heed::CompactionOption;
 use meilisearch_types::milli::update::{
-    IndexDocumentsConfig, IndexDocumentsMethod, IndexerConfig, Settings as MilliSettings,
+    DeleteDocuments, DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod,
+    Settings as MilliSettings,
 };
-use meilisearch_types::milli::{self, Filter};
+use meilisearch_types::milli::{self, Filter, BEU32};
 use meilisearch_types::settings::{apply_settings_to_builder, Settings, Unchecked};
 use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status, Task};
 use meilisearch_types::{compression, Index, VERSION_FILE_NAME};
@@ -43,7 +43,7 @@ use uuid::Uuid;

 use crate::autobatcher::{self, BatchKind};
 use crate::utils::{self, swap_index_uid_in_task};
-use crate::{Error, IndexScheduler, MustStopProcessing, ProcessingTasks, Result, TaskId};
+use crate::{Error, IndexScheduler, ProcessingTasks, Result, TaskId};

 /// Represents a combination of tasks that can all be processed at the same time.
 ///
@@ -67,6 +67,10 @@ pub(crate) enum Batch {
         op: IndexOperation,
         must_create_index: bool,
     },
+    IndexDocumentDeletionByFilter {
+        index_uid: String,
+        task: Task,
+    },
     IndexCreation {
         index_uid: String,
         primary_key: Option<String>,
@@ -104,9 +108,11 @@ pub(crate) enum IndexOperation {
         operations: Vec<DocumentOperation>,
         tasks: Vec<Task>,
     },
-    IndexDocumentDeletionByFilter {
+    DocumentDeletion {
         index_uid: String,
-        task: Task,
+        // The vec associated with each document deletion tasks.
+        documents: Vec<Vec<String>>,
+        tasks: Vec<Task>,
     },
     DocumentClear {
         index_uid: String,
@@ -149,17 +155,18 @@ impl Batch {
             | Batch::TaskDeletion(task)
             | Batch::Dump(task)
             | Batch::IndexCreation { task, .. }
+            | Batch::IndexDocumentDeletionByFilter { task, .. }
             | Batch::IndexUpdate { task, .. } => vec![task.uid],
             Batch::SnapshotCreation(tasks) | Batch::IndexDeletion { tasks, .. } => {
                 tasks.iter().map(|task| task.uid).collect()
             }
             Batch::IndexOperation { op, .. } => match op {
                 IndexOperation::DocumentOperation { tasks, .. }
+                | IndexOperation::DocumentDeletion { tasks, .. }
                 | IndexOperation::Settings { tasks, .. }
                 | IndexOperation::DocumentClear { tasks, .. } => {
                     tasks.iter().map(|task| task.uid).collect()
                 }
-                IndexOperation::IndexDocumentDeletionByFilter { task, .. } => vec![task.uid],
                 IndexOperation::SettingsAndDocumentOperation {
                     document_import_tasks: tasks,
                     settings_tasks: other,
@@ -187,30 +194,8 @@ impl Batch {
             IndexOperation { op, .. } => Some(op.index_uid()),
             IndexCreation { index_uid, .. }
             | IndexUpdate { index_uid, .. }
-            | IndexDeletion { index_uid, .. } => Some(index_uid),
-        }
-    }
-}
-
-impl fmt::Display for Batch {
-    /// A text used when we debug the profiling reports.
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        let index_uid = self.index_uid();
-        let tasks = self.ids();
-        match self {
-            Batch::TaskCancelation { .. } => f.write_str("TaskCancelation")?,
-            Batch::TaskDeletion(_) => f.write_str("TaskDeletion")?,
-            Batch::SnapshotCreation(_) => f.write_str("SnapshotCreation")?,
-            Batch::Dump(_) => f.write_str("Dump")?,
-            Batch::IndexOperation { op, .. } => write!(f, "{op}")?,
-            Batch::IndexCreation { .. } => f.write_str("IndexCreation")?,
-            Batch::IndexUpdate { .. } => f.write_str("IndexUpdate")?,
-            Batch::IndexDeletion { .. } => f.write_str("IndexDeletion")?,
-            Batch::IndexSwap { .. } => f.write_str("IndexSwap")?,
-        };
-        match index_uid {
-            Some(name) => f.write_fmt(format_args!(" on {name:?} from tasks: {tasks:?}")),
-            None => f.write_fmt(format_args!(" from tasks: {tasks:?}")),
+            | IndexDeletion { index_uid, .. }
+            | IndexDocumentDeletionByFilter { index_uid, .. } => Some(index_uid),
         }
     }
 }
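The removed `Display` impl exists only to label puffin profiling spans (see the `puffin::profile_function!(batch.to_string())` call removed later in this diff). A small sketch of the label shape it produced, grounded in the `write!` calls above (`batch_label` is a hypothetical name, not part of the codebase):

    // batch_label("IndexOperation::DocumentOperation", Some("movies"), &[0, 1])
    // yields: IndexOperation::DocumentOperation on "movies" from tasks: [0, 1]
    fn batch_label(kind: &str, index_uid: Option<&str>, tasks: &[u32]) -> String {
        match index_uid {
            Some(name) => format!("{kind} on {name:?} from tasks: {tasks:?}"),
            None => format!("{kind} from tasks: {tasks:?}"),
        }
    }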
@@ -219,7 +204,7 @@ impl IndexOperation {
     pub fn index_uid(&self) -> &str {
         match self {
             IndexOperation::DocumentOperation { index_uid, .. }
-            | IndexOperation::IndexDocumentDeletionByFilter { index_uid, .. }
+            | IndexOperation::DocumentDeletion { index_uid, .. }
             | IndexOperation::DocumentClear { index_uid, .. }
             | IndexOperation::Settings { index_uid, .. }
             | IndexOperation::DocumentClearAndSetting { index_uid, .. }
@@ -228,27 +213,6 @@ impl IndexOperation {
     }
 }

-impl fmt::Display for IndexOperation {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            IndexOperation::DocumentOperation { .. } => {
-                f.write_str("IndexOperation::DocumentOperation")
-            }
-            IndexOperation::IndexDocumentDeletionByFilter { .. } => {
-                f.write_str("IndexOperation::IndexDocumentDeletionByFilter")
-            }
-            IndexOperation::DocumentClear { .. } => f.write_str("IndexOperation::DocumentClear"),
-            IndexOperation::Settings { .. } => f.write_str("IndexOperation::Settings"),
-            IndexOperation::DocumentClearAndSetting { .. } => {
-                f.write_str("IndexOperation::DocumentClearAndSetting")
-            }
-            IndexOperation::SettingsAndDocumentOperation { .. } => {
-                f.write_str("IndexOperation::SettingsAndDocumentOperation")
-            }
-        }
-    }
-}
-
 impl IndexScheduler {
     /// Convert an [`BatchKind`](crate::autobatcher::BatchKind) into a [`Batch`].
     ///
@@ -275,12 +239,9 @@ impl IndexScheduler {
                 let task = self.get_task(rtxn, id)?.ok_or(Error::CorruptedTaskQueue)?;
                 match &task.kind {
                     KindWithContent::DocumentDeletionByFilter { index_uid, .. } => {
-                        Ok(Some(Batch::IndexOperation {
-                            op: IndexOperation::IndexDocumentDeletionByFilter {
-                                index_uid: index_uid.clone(),
-                                task,
-                            },
-                            must_create_index: false,
+                        Ok(Some(Batch::IndexDocumentDeletionByFilter {
+                            index_uid: index_uid.clone(),
+                            task,
                         }))
                     }
                     _ => unreachable!(),
@@ -336,27 +297,18 @@ impl IndexScheduler {
             BatchKind::DocumentDeletion { deletion_ids } => {
                 let tasks = self.get_existing_tasks(rtxn, deletion_ids)?;

-                let mut operations = Vec::with_capacity(tasks.len());
-                let mut documents_counts = Vec::with_capacity(tasks.len());
+                let mut documents = Vec::new();
                 for task in &tasks {
                     match task.kind {
                         KindWithContent::DocumentDeletion { ref documents_ids, .. } => {
-                            operations.push(DocumentOperation::Delete(documents_ids.clone()));
-                            documents_counts.push(documents_ids.len() as u64);
+                            documents.push(documents_ids.clone())
                         }
                         _ => unreachable!(),
                     }
                 }

                 Ok(Some(Batch::IndexOperation {
-                    op: IndexOperation::DocumentOperation {
-                        index_uid,
-                        primary_key: None,
-                        method: IndexDocumentsMethod::ReplaceDocuments,
-                        documents_counts,
-                        operations,
-                        tasks,
-                    },
+                    op: IndexOperation::DocumentDeletion { index_uid, documents, tasks },
                     must_create_index,
                 }))
             }
@@ -519,8 +471,6 @@ impl IndexScheduler {
         #[cfg(test)]
         self.maybe_fail(crate::tests::FailureLocation::InsideCreateBatch)?;

-        puffin::profile_function!();
-
         let enqueued = &self.get_status(rtxn, Status::Enqueued)?;
         let to_cancel = self.get_kind(rtxn, Kind::TaskCancelation)? & enqueued;

@@ -584,9 +534,7 @@ impl IndexScheduler {
                 let index_tasks = self.index_tasks(rtxn, index_name)? & enqueued;

                 // If autobatching is disabled we only take one task at a time.
-                // Otherwise, we take only a maximum of tasks to create batches.
-                let tasks_limit =
-                    if self.autobatching_enabled { self.max_number_of_batched_tasks } else { 1 };
+                let tasks_limit = if self.autobatching_enabled { usize::MAX } else { 1 };

                 let enqueued = index_tasks
                     .into_iter()
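This hunk is the heart of the compared branches: one side caps a scheduler tick at `self.max_number_of_batched_tasks`, the other lets a batch absorb every enqueued task (`usize::MAX`). A minimal sketch of how such a cap bounds the drain (illustrative types, not the scheduler's own):

    // Take at most `limit` enqueued task ids into the current batch; a limit
    // of 1 reproduces the "autobatching disabled" branch above.
    fn drain_for_tick(enqueued: impl Iterator<Item = u32>, limit: usize) -> Vec<u32> {
        enqueued.take(limit).collect()
    }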
@@ -627,9 +575,6 @@ impl IndexScheduler {
             self.maybe_fail(crate::tests::FailureLocation::PanicInsideProcessBatch)?;
             self.breakpoint(crate::Breakpoint::InsideProcessBatch);
         }
-
-        puffin::profile_function!(batch.to_string());

         match batch {
             Batch::TaskCancelation { mut task, previous_started_at, previous_processing_tasks } => {
                 // 1. Retrieve the tasks that matched the query at enqueue-time.
@@ -717,7 +662,7 @@ impl IndexScheduler {

                 // 2. Snapshot the index-scheduler LMDB env
                 //
-                // When we call copy_to_file, LMDB opens a read transaction by itself,
+                // When we call copy_to_path, LMDB opens a read transaction by itself,
                 // we can't provide our own. It is an issue as we would like to know
                 // the update files to copy but new ones can be enqueued between the copy
                 // of the env and the new transaction we open to retrieve the enqueued tasks.
@@ -730,7 +675,7 @@ impl IndexScheduler {
                 // 2.1 First copy the LMDB env of the index-scheduler
                 let dst = temp_snapshot_dir.path().join("tasks");
                 fs::create_dir_all(&dst)?;
-                self.env.copy_to_file(dst.join("data.mdb"), CompactionOption::Enabled)?;
+                self.env.copy_to_path(dst.join("data.mdb"), CompactionOption::Enabled)?;

                 // 2.2 Create a read transaction on the index-scheduler
                 let rtxn = self.env.read_txn()?;
@@ -755,7 +700,7 @@ impl IndexScheduler {
                     let index = self.index_mapper.index(&rtxn, name)?;
                     let dst = temp_snapshot_dir.path().join("indexes").join(uuid.to_string());
                     fs::create_dir_all(&dst)?;
-                    index.copy_to_file(dst.join("data.mdb"), CompactionOption::Enabled)?;
+                    index.copy_to_path(dst.join("data.mdb"), CompactionOption::Enabled)?;
                 }

                 drop(rtxn);
@@ -768,7 +713,7 @@ impl IndexScheduler {
                     .map_size(1024 * 1024 * 1024) // 1 GiB
                     .max_dbs(2)
                     .open(&self.auth_path)?;
-                auth.copy_to_file(dst.join("data.mdb"), CompactionOption::Enabled)?;
+                auth.copy_to_path(dst.join("data.mdb"), CompactionOption::Enabled)?;

                 // 5. Copy and tarball the flat snapshot
                 // 5.1 Find the original name of the database
@@ -824,10 +769,6 @@ impl IndexScheduler {
                 // 2. dump the tasks
                 let mut dump_tasks = dump.create_tasks_queue()?;
                 for ret in self.all_tasks.iter(&rtxn)? {
-                    if self.must_stop_processing.get() {
-                        return Err(Error::AbortedTask);
-                    }
-
                     let (_, mut t) = ret?;
                     let status = t.status;
                     let content_file = t.content_uuid();
@@ -848,9 +789,6 @@ impl IndexScheduler {

                     // 2.1. Dump the `content_file` associated with the task if there is one and the task is not finished yet.
                     if let Some(content_file) = content_file {
-                        if self.must_stop_processing.get() {
-                            return Err(Error::AbortedTask);
-                        }
                         if status == Status::Enqueued {
                             let content_file = self.file_store.get_update(content_file)?;

@@ -890,9 +828,6 @@ impl IndexScheduler {

                     // 3.1. Dump the documents
                     for ret in index.all_documents(&rtxn)? {
-                        if self.must_stop_processing.get() {
-                            return Err(Error::AbortedTask);
-                        }
                         let (_id, doc) = ret?;
                         let document = milli::obkv_to_json(&all_fields, &fields_ids_map, doc)?;
                         index_dumper.push_document(&document)?;
@@ -904,17 +839,10 @@ impl IndexScheduler {
                     Ok(())
                 })?;

-                // 4. Dump experimental feature settings
-                let features = self.features().runtime_features();
-                dump.create_experimental_features(features)?;
-
                 let dump_uid = started_at.format(format_description!(
                     "[year repr:full][month repr:numerical][day padding:zero]-[hour padding:zero][minute padding:zero][second padding:zero][subsecond digits:3]"
                 )).unwrap();

-                if self.must_stop_processing.get() {
-                    return Err(Error::AbortedTask);
-                }
                 let path = self.dumps_path.join(format!("{}.dump", dump_uid));
                 let file = File::create(path)?;
                 dump.persist_to(BufWriter::new(file))?;
@@ -935,10 +863,6 @@ impl IndexScheduler {
                     self.index_mapper.index(&rtxn, &index_uid)?
                 };
-
-                // the index operation can take a long time, so save this handle to make it available to the search for the duration of the tick
-                *self.currently_updating_index.write().unwrap() =
-                    Some((index_uid.clone(), index.clone()));

                 let mut index_wtxn = index.write_txn()?;
                 let tasks = self.apply_index_operation(&mut index_wtxn, &index, op)?;
                 index_wtxn.commit()?;
@@ -963,6 +887,51 @@ impl IndexScheduler {

                 Ok(tasks)
             }
+            Batch::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
+                let (index_uid, filter) =
+                    if let KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr } =
+                        &task.kind
+                    {
+                        (index_uid, filter_expr)
+                    } else {
+                        unreachable!()
+                    };
+                let index = {
+                    let rtxn = self.env.read_txn()?;
+                    self.index_mapper.index(&rtxn, index_uid)?
+                };
+                let deleted_documents = delete_document_by_filter(filter, index);
+                let original_filter = if let Some(Details::DocumentDeletionByFilter {
+                    original_filter,
+                    deleted_documents: _,
+                }) = task.details
+                {
+                    original_filter
+                } else {
+                    // In the case of a `documentDeleteByFilter` the details MUST be set
+                    unreachable!();
+                };
+
+                match deleted_documents {
+                    Ok(deleted_documents) => {
+                        task.status = Status::Succeeded;
+                        task.details = Some(Details::DocumentDeletionByFilter {
+                            original_filter,
+                            deleted_documents: Some(deleted_documents),
+                        });
+                    }
+                    Err(e) => {
+                        task.status = Status::Failed;
+                        task.details = Some(Details::DocumentDeletionByFilter {
+                            original_filter,
+                            deleted_documents: Some(0),
+                        });
+                        task.error = Some(e.into());
+                    }
+                }
+
+                Ok(vec![task])
+            }
             Batch::IndexCreation { index_uid, primary_key, task } => {
                 let wtxn = self.env.write_txn()?;
                 if self.index_mapper.exists(&wtxn, &index_uid)? {
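`delete_document_by_filter` is called above but defined outside the lines shown here. From the imports this same patch adds (`Filter`, `DeleteDocuments`, `DocumentDeletionResult`), a plausible reconstruction is sketched below; treat the signature and error handling as approximations rather than the branch's exact code:

    // Approximate shape: evaluate the stored filter expression to a set of
    // candidate documents, then feed them to milli's DeleteDocuments pipeline.
    fn delete_document_by_filter(filter: &serde_json::Value, index: Index) -> milli::Result<u64> {
        let filter = Filter::from_json(filter)?;
        Ok(if let Some(filter) = filter {
            let mut wtxn = index.write_txn()?;
            let candidates = filter.evaluate(&wtxn, &index)?;
            let mut delete_operation = DeleteDocuments::new(&mut wtxn, &index)?;
            delete_operation.delete_documents(&candidates);
            let deleted_documents = delete_operation.execute()?.deleted_documents;
            wtxn.commit()?;
            deleted_documents
        } else {
            0
        })
    }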
@@ -1029,7 +998,7 @@ impl IndexScheduler {
                 }()
                 .unwrap_or_default();

-                // The write transaction is directly owned and committed inside.
+                // The write transaction is directly owned and commited inside.
                 match self.index_mapper.delete_index(wtxn, &index_uid) {
                     Ok(()) => (),
                     Err(Error::IndexNotFound(_)) if index_has_been_created => (),
@@ -1108,7 +1077,7 @@ impl IndexScheduler {
                 for task_id in &index_lhs_task_ids | &index_rhs_task_ids {
                     let mut task = self.get_task(wtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
                     swap_index_uid_in_task(&mut task, (lhs, rhs));
-                    self.all_tasks.put(wtxn, &task_id, &task)?;
+                    self.all_tasks.put(wtxn, &BEU32::new(task_id), &task)?;
                 }

                 // 4. remove the task from indexuid = before_name
@@ -1134,12 +1103,10 @@ impl IndexScheduler {
    /// The list of processed tasks.
    fn apply_index_operation<'i>(
        &self,
-        index_wtxn: &mut RwTxn<'i>,
+        index_wtxn: &mut RwTxn<'i, '_>,
        index: &'i Index,
        operation: IndexOperation,
    ) -> Result<Vec<Task>> {
-        puffin::profile_function!();
-
        match operation {
            IndexOperation::DocumentClear { mut tasks, .. } => {
                let count = milli::update::ClearDocuments::new(index_wtxn, index).execute()?;
@@ -1202,16 +1169,12 @@ impl IndexScheduler {

                let config = IndexDocumentsConfig { update_method: method, ..Default::default() };

-                let embedder_configs = index.embedding_configs(index_wtxn)?;
-                // TODO: consider Arc'ing the map too (we only need read access + we'll be cloning it multiple times, so really makes sense)
-                let embedders = self.embedders(embedder_configs)?;
-
                let mut builder = milli::update::IndexDocuments::new(
                    index_wtxn,
                    index,
                    indexer_config,
                    config,
-                    |indexing_step| trace!("update: {:?}", indexing_step),
+                    |indexing_step| debug!("update: {:?}", indexing_step),
                    || must_stop_processing.get(),
                )?;

@@ -1224,8 +1187,6 @@ impl IndexScheduler {
|
|||||||
let (new_builder, user_result) = builder.add_documents(reader)?;
|
let (new_builder, user_result) = builder.add_documents(reader)?;
|
||||||
builder = new_builder;
|
builder = new_builder;
|
||||||
|
|
||||||
builder = builder.with_embedders(embedders.clone());
|
|
||||||
|
|
||||||
let received_documents =
|
let received_documents =
|
||||||
if let Some(Details::DocumentAdditionOrUpdate {
|
if let Some(Details::DocumentAdditionOrUpdate {
|
||||||
received_documents,
|
received_documents,
|
||||||
@@ -1260,8 +1221,7 @@ impl IndexScheduler {
|
|||||||
let (new_builder, user_result) =
|
let (new_builder, user_result) =
|
||||||
builder.remove_documents(document_ids)?;
|
builder.remove_documents(document_ids)?;
|
||||||
builder = new_builder;
|
builder = new_builder;
|
||||||
// Uses Invariant: remove documents actually always returns Ok for the inner result
|
|
||||||
let count = user_result.unwrap();
|
|
||||||
let provided_ids =
|
let provided_ids =
|
||||||
if let Some(Details::DocumentDeletion { provided_ids, .. }) =
|
if let Some(Details::DocumentDeletion { provided_ids, .. }) =
|
||||||
task.details
|
task.details
|
||||||
@@ -1272,11 +1232,23 @@ impl IndexScheduler {
|
|||||||
unreachable!();
|
unreachable!();
|
||||||
};
|
};
|
||||||
|
|
||||||
task.status = Status::Succeeded;
|
match user_result {
|
||||||
task.details = Some(Details::DocumentDeletion {
|
Ok(count) => {
|
||||||
provided_ids,
|
task.status = Status::Succeeded;
|
||||||
deleted_documents: Some(count),
|
task.details = Some(Details::DocumentDeletion {
|
||||||
});
|
provided_ids,
|
||||||
|
deleted_documents: Some(count),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Err(e) => {
|
||||||
|
task.status = Status::Failed;
|
||||||
|
task.details = Some(Details::DocumentDeletion {
|
||||||
|
provided_ids,
|
||||||
|
deleted_documents: Some(0),
|
||||||
|
});
|
||||||
|
task.error = Some(milli::Error::from(e).into());
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1291,59 +1263,30 @@ impl IndexScheduler {
                     milli::update::Settings::new(index_wtxn, index, indexer_config);
                 builder.reset_primary_key();
                 builder.execute(
-                    |indexing_step| trace!("update: {:?}", indexing_step),
+                    |indexing_step| debug!("update: {:?}", indexing_step),
                     || must_stop_processing.clone().get(),
                 )?;
             }

             Ok(tasks)
         }
-            IndexOperation::IndexDocumentDeletionByFilter { mut task, index_uid: _ } => {
-                let filter =
-                    if let KindWithContent::DocumentDeletionByFilter { filter_expr, .. } =
-                        &task.kind
-                    {
-                        filter_expr
-                    } else {
-                        unreachable!()
-                    };
-                let deleted_documents = delete_document_by_filter(
-                    index_wtxn,
-                    filter,
-                    self.index_mapper.indexer_config(),
-                    self.must_stop_processing.clone(),
-                    index,
-                );
-                let original_filter = if let Some(Details::DocumentDeletionByFilter {
-                    original_filter,
-                    deleted_documents: _,
-                }) = task.details
-                {
-                    original_filter
-                } else {
-                    // In the case of a `documentDeleteByFilter` the details MUST be set
-                    unreachable!();
-                };
+            IndexOperation::DocumentDeletion { index_uid: _, documents, mut tasks } => {
+                let mut builder = milli::update::DeleteDocuments::new(index_wtxn, index)?;
+                documents.iter().flatten().for_each(|id| {
+                    builder.delete_external_id(id);
+                });

-                match deleted_documents {
-                    Ok(deleted_documents) => {
-                        task.status = Status::Succeeded;
-                        task.details = Some(Details::DocumentDeletionByFilter {
-                            original_filter,
-                            deleted_documents: Some(deleted_documents),
-                        });
-                    }
-                    Err(e) => {
-                        task.status = Status::Failed;
-                        task.details = Some(Details::DocumentDeletionByFilter {
-                            original_filter,
-                            deleted_documents: Some(0),
-                        });
-                        task.error = Some(e.into());
-                    }
+                let DocumentDeletionResult { deleted_documents, .. } = builder.execute()?;
+
+                for (task, documents) in tasks.iter_mut().zip(documents) {
+                    task.status = Status::Succeeded;
+                    task.details = Some(Details::DocumentDeletion {
+                        provided_ids: documents.len(),
+                        deleted_documents: Some(deleted_documents.min(documents.len() as u64)),
+                    });
                 }

-                Ok(vec![task])
+                Ok(tasks)
             }
             IndexOperation::Settings { index_uid: _, settings, mut tasks } => {
                 let indexer_config = self.index_mapper.indexer_config();
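In the added `DocumentDeletion` branch above, a single batched `execute()` deletes the documents of several tasks at once, so each task's reported count is capped by the number of ids that task provided. A small std-only sketch of that capping (`per_task_deleted` is a hypothetical helper, not a function from the codebase):

```rust
// One batched delete runs for several tasks, so each task reports at most
// the number of ids it provided, via `min`.
fn per_task_deleted(total_deleted: u64, provided_ids: usize) -> u64 {
    total_deleted.min(provided_ids as u64)
}

fn main() {
    // The batch removed 5 documents overall, but this task only asked for 2.
    assert_eq!(per_task_deleted(5, 2), 2);
    // A task can also see fewer deletions than it requested.
    assert_eq!(per_task_deleted(1, 2), 1);
}
```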
@@ -1351,9 +1294,6 @@ impl IndexScheduler {

                 for (task, (_, settings)) in tasks.iter_mut().zip(settings) {
                     let checked_settings = settings.clone().check();
-                    if matches!(checked_settings.embedders, milli::update::Setting::Set(_)) {
-                        self.features().check_vector("Passing `embedders` in settings")?
-                    }
                     task.details = Some(Details::SettingsUpdate { settings: Box::new(settings) });
                     apply_settings_to_builder(&checked_settings, &mut builder);

@@ -1490,9 +1430,10 @@ impl IndexScheduler {
         }

         for task in to_delete_tasks.iter() {
-            self.all_tasks.delete(wtxn, &task)?;
+            self.all_tasks.delete(wtxn, &BEU32::new(task))?;
         }
         for canceled_by in affected_canceled_by {
+            let canceled_by = BEU32::new(canceled_by);
             if let Some(mut tasks) = self.canceled_by.get(wtxn, &canceled_by)? {
                 tasks -= &to_delete_tasks;
                 if tasks.is_empty() {
@@ -1540,47 +1481,29 @@ impl IndexScheduler {
             task.details = task.details.map(|d| d.to_failed());
             self.update_task(wtxn, &task)?;
         }
-        self.canceled_by.put(wtxn, &cancel_task_id, &tasks_to_cancel)?;
+        self.canceled_by.put(wtxn, &BEU32::new(cancel_task_id), &tasks_to_cancel)?;

         Ok(content_files_to_delete)
     }
 }

-fn delete_document_by_filter<'a>(
-    wtxn: &mut RwTxn<'a>,
-    filter: &serde_json::Value,
-    indexer_config: &IndexerConfig,
-    must_stop_processing: MustStopProcessing,
-    index: &'a Index,
-) -> Result<u64> {
+fn delete_document_by_filter(filter: &serde_json::Value, index: Index) -> Result<u64> {
     let filter = Filter::from_json(filter)?;
     Ok(if let Some(filter) = filter {
-        let candidates = filter.evaluate(wtxn, index).map_err(|err| match err {
+        let mut wtxn = index.write_txn()?;
+
+        let candidates = filter.evaluate(&wtxn, &index).map_err(|err| match err {
             milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
                 Error::from(err).with_custom_error_code(Code::InvalidDocumentFilter)
             }
             e => e.into(),
         })?;
-
-        let config = IndexDocumentsConfig {
-            update_method: IndexDocumentsMethod::ReplaceDocuments,
-            ..Default::default()
-        };
-
-        let mut builder = milli::update::IndexDocuments::new(
-            wtxn,
-            index,
-            indexer_config,
-            config,
-            |indexing_step| debug!("update: {:?}", indexing_step),
-            || must_stop_processing.get(),
-        )?;
-
-        let (new_builder, count) = builder.remove_documents_from_db_no_batch(&candidates)?;
-        builder = new_builder;
-
-        let _ = builder.execute()?;
-
-        count
+        let mut delete_operation = DeleteDocuments::new(&mut wtxn, &index)?;
+        delete_operation.delete_documents(&candidates);
+        let deleted_documents =
+            delete_operation.execute().map(|result| result.deleted_documents)?;
+        wtxn.commit()?;
+        deleted_documents
     } else {
         0
     })
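Both versions of `delete_document_by_filter` share the same shape: evaluate an optional filter, collect the matching ids, delete them, and report how many were removed. A hedged, std-only sketch of that control flow, with a `BTreeMap` standing in for the index and a closure for the parsed filter; none of this is the real milli or heed API:

```rust
use std::collections::BTreeMap;

// Sketch: evaluate a filter, collect candidates, delete, return the count.
fn delete_by_filter<F: Fn(&str) -> bool>(
    docs: &mut BTreeMap<u64, String>,
    filter: Option<F>,
) -> u64 {
    let Some(filter) = filter else {
        return 0; // no filter means nothing to delete, mirroring the `else { 0 }` arm
    };
    // First evaluate the filter against a read view of the documents...
    let candidates: Vec<u64> = docs
        .iter()
        .filter(|(_, doc)| filter(doc.as_str()))
        .map(|(id, _)| *id)
        .collect();
    // ...then delete the candidates and report how many were removed.
    let deleted = candidates.len() as u64;
    for id in candidates {
        docs.remove(&id);
    }
    deleted
}

fn main() {
    let mut docs = BTreeMap::from([(1, "doggo".to_string()), (2, "catto".to_string())]);
    assert_eq!(delete_by_filter(&mut docs, Some(|d: &str| d.starts_with("dog"))), 1);
    assert_eq!(docs.len(), 1);
}
```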
@@ -108,8 +108,6 @@ pub enum Error {
     TaskDeletionWithEmptyQuery,
     #[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
     TaskCancelationWithEmptyQuery,
-    #[error("Aborted task")]
-    AbortedTask,

     #[error(transparent)]
     Dump(#[from] dump::Error),
@@ -125,8 +123,6 @@ pub enum Error {
     IoError(#[from] std::io::Error),
     #[error(transparent)]
     Persist(#[from] tempfile::PersistError),
-    #[error(transparent)]
-    FeatureNotEnabled(#[from] FeatureNotEnabledError),

     #[error(transparent)]
     Anyhow(#[from] anyhow::Error),
@@ -146,16 +142,6 @@ pub enum Error {
     PlannedFailure,
 }

-#[derive(Debug, thiserror::Error)]
-#[error(
-    "{disabled_action} requires enabling the `{feature}` experimental feature. See {issue_link}"
-)]
-pub struct FeatureNotEnabledError {
-    pub disabled_action: &'static str,
-    pub feature: &'static str,
-    pub issue_link: &'static str,
-}
-
 impl Error {
     pub fn is_recoverable(&self) -> bool {
         match self {
@@ -177,7 +163,6 @@ impl Error {
             | Error::TaskNotFound(_)
             | Error::TaskDeletionWithEmptyQuery
             | Error::TaskCancelationWithEmptyQuery
-            | Error::AbortedTask
             | Error::Dump(_)
             | Error::Heed(_)
             | Error::Milli(_)
@@ -185,7 +170,6 @@ impl Error {
             | Error::FileStore(_)
             | Error::IoError(_)
             | Error::Persist(_)
-            | Error::FeatureNotEnabled(_)
             | Error::Anyhow(_) => true,
             Error::CreateBatch(_)
             | Error::CorruptedTaskQueue
@@ -230,7 +214,6 @@ impl ErrorCode for Error {
             Error::FileStore(e) => e.error_code(),
             Error::IoError(e) => e.error_code(),
             Error::Persist(e) => e.error_code(),
-            Error::FeatureNotEnabled(_) => Code::FeatureNotEnabled,

             // Irrecoverable errors
             Error::Anyhow(_) => Code::Internal,
@@ -239,9 +222,6 @@ impl ErrorCode for Error {
             Error::TaskDatabaseUpdate(_) => Code::Internal,
             Error::CreateBatch(_) => Code::Internal,

-            // This one should never be seen by the end user
-            Error::AbortedTask => Code::Internal,
-
             #[cfg(test)]
             Error::PlannedFailure => Code::Internal,
         }
@@ -1,129 +0,0 @@
-use std::sync::{Arc, RwLock};
-
-use meilisearch_types::features::{InstanceTogglableFeatures, RuntimeTogglableFeatures};
-use meilisearch_types::heed::types::{SerdeJson, Str};
-use meilisearch_types::heed::{Database, Env, RwTxn};
-
-use crate::error::FeatureNotEnabledError;
-use crate::Result;
-
-const EXPERIMENTAL_FEATURES: &str = "experimental-features";
-
-#[derive(Clone)]
-pub(crate) struct FeatureData {
-    persisted: Database<Str, SerdeJson<RuntimeTogglableFeatures>>,
-    runtime: Arc<RwLock<RuntimeTogglableFeatures>>,
-}
-
-#[derive(Debug, Clone, Copy)]
-pub struct RoFeatures {
-    runtime: RuntimeTogglableFeatures,
-}
-
-impl RoFeatures {
-    fn new(data: &FeatureData) -> Self {
-        let runtime = data.runtime_features();
-        Self { runtime }
-    }
-
-    pub fn runtime_features(&self) -> RuntimeTogglableFeatures {
-        self.runtime
-    }
-
-    pub fn check_score_details(&self) -> Result<()> {
-        if self.runtime.score_details {
-            Ok(())
-        } else {
-            Err(FeatureNotEnabledError {
-                disabled_action: "Computing score details",
-                feature: "score details",
-                issue_link: "https://github.com/meilisearch/product/discussions/674",
-            }
-            .into())
-        }
-    }
-
-    pub fn check_metrics(&self) -> Result<()> {
-        if self.runtime.metrics {
-            Ok(())
-        } else {
-            Err(FeatureNotEnabledError {
-                disabled_action: "Getting metrics",
-                feature: "metrics",
-                issue_link: "https://github.com/meilisearch/product/discussions/625",
-            }
-            .into())
-        }
-    }
-
-    pub fn check_vector(&self, disabled_action: &'static str) -> Result<()> {
-        if self.runtime.vector_store {
-            Ok(())
-        } else {
-            Err(FeatureNotEnabledError {
-                disabled_action,
-                feature: "vector store",
-                issue_link: "https://github.com/meilisearch/product/discussions/677",
-            }
-            .into())
-        }
-    }
-
-    pub fn check_puffin(&self) -> Result<()> {
-        if self.runtime.export_puffin_reports {
-            Ok(())
-        } else {
-            Err(FeatureNotEnabledError {
-                disabled_action: "Outputting Puffin reports to disk",
-                feature: "export puffin reports",
-                issue_link: "https://github.com/meilisearch/product/discussions/693",
-            }
-            .into())
-        }
-    }
-}
-
-impl FeatureData {
-    pub fn new(env: &Env, instance_features: InstanceTogglableFeatures) -> Result<Self> {
-        let mut wtxn = env.write_txn()?;
-        let runtime_features_db = env.create_database(&mut wtxn, Some(EXPERIMENTAL_FEATURES))?;
-        wtxn.commit()?;
-
-        let txn = env.read_txn()?;
-        let persisted_features: RuntimeTogglableFeatures =
-            runtime_features_db.get(&txn, EXPERIMENTAL_FEATURES)?.unwrap_or_default();
-        let runtime = Arc::new(RwLock::new(RuntimeTogglableFeatures {
-            metrics: instance_features.metrics || persisted_features.metrics,
-            ..persisted_features
-        }));
-
-        Ok(Self { persisted: runtime_features_db, runtime })
-    }
-
-    pub fn put_runtime_features(
-        &self,
-        mut wtxn: RwTxn,
-        features: RuntimeTogglableFeatures,
-    ) -> Result<()> {
-        self.persisted.put(&mut wtxn, EXPERIMENTAL_FEATURES, &features)?;
-        wtxn.commit()?;
-
-        // safe to unwrap, the lock will only fail if:
-        // 1. requested by the same thread concurrently -> it is called and released in methods that don't call each other
-        // 2. there's a panic while the thread is held -> it is only used for an assignment here.
-        let mut toggled_features = self.runtime.write().unwrap();
-        *toggled_features = features;
-        Ok(())
-    }
-
-    fn runtime_features(&self) -> RuntimeTogglableFeatures {
-        // sound to unwrap, the lock will only fail if:
-        // 1. requested by the same thread concurrently -> it is called and released in methods that don't call each other
-        // 2. there's a panic while the thread is held -> it is only used for copying the data here
-        *self.runtime.read().unwrap()
-    }
-
-    pub fn features(&self) -> RoFeatures {
-        RoFeatures::new(self)
-    }
-}
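The deleted `features.rs` above follows one pattern throughout: each `check_*` method returns `Ok(())` when the runtime flag is enabled and a descriptive error pointing at the relevant product discussion otherwise. A self-contained sketch of that pattern, with simplified stand-in types rather than the real meilisearch-types definitions:

```rust
use std::fmt;

// Stand-in for the error struct removed from error.rs in this same compare.
#[derive(Debug)]
struct FeatureNotEnabledError {
    disabled_action: &'static str,
    feature: &'static str,
    issue_link: &'static str,
}

impl fmt::Display for FeatureNotEnabledError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "{} requires enabling the `{}` experimental feature. See {}",
            self.disabled_action, self.feature, self.issue_link
        )
    }
}

#[derive(Default, Clone, Copy)]
struct RuntimeTogglableFeatures {
    metrics: bool,
}

struct RoFeatures {
    runtime: RuntimeTogglableFeatures,
}

impl RoFeatures {
    // The gate: Ok(()) when the flag is on, a descriptive error otherwise.
    fn check_metrics(&self) -> Result<(), FeatureNotEnabledError> {
        if self.runtime.metrics {
            Ok(())
        } else {
            Err(FeatureNotEnabledError {
                disabled_action: "Getting metrics",
                feature: "metrics",
                issue_link: "https://github.com/meilisearch/product/discussions/625",
            })
        }
    }
}

fn main() {
    let features = RoFeatures { runtime: RuntimeTogglableFeatures::default() };
    assert!(features.check_metrics().is_err());
}
```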
@@ -1,8 +1,12 @@
+/// the map size to use when we don't succeed in reading it in indexes.
+const DEFAULT_MAP_SIZE: usize = 10 * 1024 * 1024 * 1024; // 10 GiB
+
 use std::collections::BTreeMap;
 use std::path::Path;
 use std::time::Duration;

-use meilisearch_types::heed::{EnvClosingEvent, EnvFlags, EnvOpenOptions};
+use meilisearch_types::heed::flags::Flags;
+use meilisearch_types::heed::{EnvClosingEvent, EnvOpenOptions};
 use meilisearch_types::milli::Index;
 use time::OffsetDateTime;
 use uuid::Uuid;
@@ -219,9 +223,7 @@ impl IndexMap {
         enable_mdb_writemap: bool,
         map_size_growth: usize,
     ) {
-        let Some(index) = self.available.remove(uuid) else {
-            return;
-        };
+        let Some(index) = self.available.remove(uuid) else { return; };
         self.close(*uuid, index, enable_mdb_writemap, map_size_growth);
     }

@@ -232,7 +234,7 @@ impl IndexMap {
         enable_mdb_writemap: bool,
         map_size_growth: usize,
     ) {
-        let map_size = index.map_size() + map_size_growth;
+        let map_size = index.map_size().unwrap_or(DEFAULT_MAP_SIZE) + map_size_growth;
         let closing_event = index.prepare_for_closing();
         let generation = self.next_generation();
         self.unavailable.insert(
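The `unwrap_or(DEFAULT_MAP_SIZE)` change above makes closing an index tolerate a map size that could not be read, falling back to the 10 GiB constant instead of assuming `map_size()` always succeeds. A minimal sketch of that fallback (the helper name is illustrative):

```rust
const DEFAULT_MAP_SIZE: usize = 10 * 1024 * 1024 * 1024; // 10 GiB

// If the current map size is unknown, grow from the default instead.
fn next_map_size(current: Option<usize>, growth: usize) -> usize {
    current.unwrap_or(DEFAULT_MAP_SIZE) + growth
}

fn main() {
    assert_eq!(next_map_size(Some(1024), 512), 1536);
    assert_eq!(next_map_size(None, 512), DEFAULT_MAP_SIZE + 512);
}
```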
@@ -305,7 +307,7 @@ fn create_or_open_index(
     options.map_size(clamp_to_page_size(map_size));
     options.max_readers(1024);
     if enable_mdb_writemap {
-        unsafe { options.flags(EnvFlags::WRITE_MAP) };
+        unsafe { options.flag(Flags::MdbWriteMap) };
     }

     if let Some((created, updated)) = date {
@@ -384,7 +386,7 @@ mod tests {

     fn assert_index_size(index: Index, expected: usize) {
         let expected = clamp_to_page_size(expected);
-        let index_map_size = index.map_size();
+        let index_map_size = index.map_size().unwrap();
         assert_eq!(index_map_size, expected);
     }
 }
@@ -1,7 +1,7 @@
 use std::collections::BTreeSet;
 use std::fmt::Write;

-use meilisearch_types::heed::types::{SerdeBincode, SerdeJson, Str};
+use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
 use meilisearch_types::heed::{Database, RoTxn};
 use meilisearch_types::milli::{CboRoaringBitmapCodec, RoaringBitmapCodec, BEU32};
 use meilisearch_types::tasks::{Details, Task};
@@ -28,10 +28,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
         started_at,
         finished_at,
         index_mapper,
-        features: _,
         max_number_of_tasks: _,
-        max_number_of_batched_tasks: _,
-        puffin_frame: _,
         wake_up: _,
         dumps_path: _,
         snapshots_path: _,
@@ -40,8 +37,6 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
         test_breakpoint_sdr: _,
         planned_failures: _,
         run_loop_iteration: _,
-        currently_updating_index: _,
-        embedders: _,
     } = scheduler;

     let rtxn = env.read_txn().unwrap();
@@ -117,7 +112,7 @@ pub fn snapshot_bitmap(r: &RoaringBitmap) -> String {
     snap
 }

-pub fn snapshot_all_tasks(rtxn: &RoTxn, db: Database<BEU32, SerdeJson<Task>>) -> String {
+pub fn snapshot_all_tasks(rtxn: &RoTxn, db: Database<OwnedType<BEU32>, SerdeJson<Task>>) -> String {
     let mut snap = String::new();
     let iter = db.iter(rtxn).unwrap();
     for next in iter {
@@ -127,7 +122,10 @@ pub fn snapshot_all_tasks(rtxn: &RoTxn, db: Database<BEU32, SerdeJson<Task>>) ->
     snap
 }

-pub fn snapshot_date_db(rtxn: &RoTxn, db: Database<BEI128, CboRoaringBitmapCodec>) -> String {
+pub fn snapshot_date_db(
+    rtxn: &RoTxn,
+    db: Database<OwnedType<BEI128>, CboRoaringBitmapCodec>,
+) -> String {
     let mut snap = String::new();
     let iter = db.iter(rtxn).unwrap();
     for next in iter {
@@ -247,7 +245,10 @@ pub fn snapshot_index_tasks(rtxn: &RoTxn, db: Database<Str, RoaringBitmapCodec>)
     }
     snap
 }
-pub fn snapshot_canceled_by(rtxn: &RoTxn, db: Database<BEU32, RoaringBitmapCodec>) -> String {
+pub fn snapshot_canceled_by(
+    rtxn: &RoTxn,
+    db: Database<OwnedType<BEU32>, RoaringBitmapCodec>,
+) -> String {
     let mut snap = String::new();
     let iter = db.iter(rtxn).unwrap();
     for next in iter {
(File diff suppressed because it is too large.)
@@ -1,35 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: enqueued, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
-----------------------------------------------------------------------
-### Status:
-enqueued [0,]
-----------------------------------------------------------------------
-### Kind:
-"dumpCreation" [0,]
-----------------------------------------------------------------------
-### Index Tasks:
-----------------------------------------------------------------------
-### Index Mapper:
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-----------------------------------------------------------------------
-### Started At:
-----------------------------------------------------------------------
-### Finished At:
-----------------------------------------------------------------------
-### File Store:
-
-----------------------------------------------------------------------
-
@@ -1,45 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: canceled, canceled_by: 1, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
-1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(0), original_filter: "cancel dump" }, kind: TaskCancelation { query: "cancel dump", tasks: RoaringBitmap<[0]> }}
-----------------------------------------------------------------------
-### Status:
-enqueued []
-succeeded [1,]
-canceled [0,]
-----------------------------------------------------------------------
-### Kind:
-"taskCancelation" [1,]
-"dumpCreation" [0,]
-----------------------------------------------------------------------
-### Index Tasks:
-----------------------------------------------------------------------
-### Index Mapper:
-
-----------------------------------------------------------------------
-### Canceled By:
-1 [0,]
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### Started At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### Finished At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### File Store:
-
-----------------------------------------------------------------------
-
@@ -1,38 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[0,]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: enqueued, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
-1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "cancel dump" }, kind: TaskCancelation { query: "cancel dump", tasks: RoaringBitmap<[0]> }}
-----------------------------------------------------------------------
-### Status:
-enqueued [0,1,]
-----------------------------------------------------------------------
-### Kind:
-"taskCancelation" [1,]
-"dumpCreation" [0,]
-----------------------------------------------------------------------
-### Index Tasks:
-----------------------------------------------------------------------
-### Index Mapper:
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### Started At:
-----------------------------------------------------------------------
-### Finished At:
-----------------------------------------------------------------------
-### File Store:
-
-----------------------------------------------------------------------
-
@@ -1,43 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
-1 {uid: 1, status: succeeded, details: { received_document_ids: 2, deleted_documents: Some(2) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
-----------------------------------------------------------------------
-### Status:
-enqueued []
-succeeded [0,1,]
-----------------------------------------------------------------------
-### Kind:
-"documentAdditionOrUpdate" [0,]
-"documentDeletion" [1,]
-----------------------------------------------------------------------
-### Index Tasks:
-doggos [0,1,]
-----------------------------------------------------------------------
-### Index Mapper:
-doggos: { number_of_documents: 1, field_distribution: {"doggo": 1, "id": 1} }
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### Started At:
-[timestamp] [0,1,]
-----------------------------------------------------------------------
-### Finished At:
-[timestamp] [0,1,]
-----------------------------------------------------------------------
-### File Store:
-
-----------------------------------------------------------------------
-
@@ -1,9 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-[
-  {
-    "id": 3,
-    "doggo": "bork"
-  }
-]
@@ -1,37 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
-----------------------------------------------------------------------
-### Status:
-enqueued [0,]
-----------------------------------------------------------------------
-### Kind:
-"documentAdditionOrUpdate" [0,]
-----------------------------------------------------------------------
-### Index Tasks:
-doggos [0,]
-----------------------------------------------------------------------
-### Index Mapper:
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-----------------------------------------------------------------------
-### Started At:
-----------------------------------------------------------------------
-### Finished At:
-----------------------------------------------------------------------
-### File Store:
-00000000-0000-0000-0000-000000000000
-
-----------------------------------------------------------------------
-
@@ -1,40 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
-1 {uid: 1, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
-----------------------------------------------------------------------
-### Status:
-enqueued [0,1,]
-----------------------------------------------------------------------
-### Kind:
-"documentAdditionOrUpdate" [0,]
-"documentDeletion" [1,]
-----------------------------------------------------------------------
-### Index Tasks:
-doggos [0,1,]
-----------------------------------------------------------------------
-### Index Mapper:
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### Started At:
-----------------------------------------------------------------------
-### Finished At:
-----------------------------------------------------------------------
-### File Store:
-00000000-0000-0000-0000-000000000000
-
-----------------------------------------------------------------------
-
@@ -1,43 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
-1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
-----------------------------------------------------------------------
-### Status:
-enqueued [1,]
-failed [0,]
-----------------------------------------------------------------------
-### Kind:
-"documentAdditionOrUpdate" [1,]
-"documentDeletion" [0,]
-----------------------------------------------------------------------
-### Index Tasks:
-doggos [0,1,]
-----------------------------------------------------------------------
-### Index Mapper:
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### Started At:
-[timestamp] [0,]
-----------------------------------------------------------------------
-### Finished At:
-[timestamp] [0,]
-----------------------------------------------------------------------
-### File Store:
-00000000-0000-0000-0000-000000000000
-
-----------------------------------------------------------------------
-
@@ -1,46 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
-1 {uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
-----------------------------------------------------------------------
-### Status:
-enqueued []
-succeeded [1,]
-failed [0,]
-----------------------------------------------------------------------
-### Kind:
-"documentAdditionOrUpdate" [1,]
-"documentDeletion" [0,]
-----------------------------------------------------------------------
-### Index Tasks:
-doggos [0,1,]
-----------------------------------------------------------------------
-### Index Mapper:
-doggos: { number_of_documents: 3, field_distribution: {"catto": 1, "doggo": 2, "id": 3} }
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### Started At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### Finished At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### File Store:
-
-----------------------------------------------------------------------
-
@@ -1,17 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-[
-  {
-    "id": 1,
-    "doggo": "jean bob"
-  },
-  {
-    "id": 2,
-    "catto": "jorts"
-  },
-  {
-    "id": 3,
-    "doggo": "bork"
-  }
-]
@@ -1,36 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
-----------------------------------------------------------------------
-### Status:
-enqueued [0,]
-----------------------------------------------------------------------
-### Kind:
-"documentDeletion" [0,]
-----------------------------------------------------------------------
-### Index Tasks:
-doggos [0,]
-----------------------------------------------------------------------
-### Index Mapper:
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-----------------------------------------------------------------------
-### Started At:
-----------------------------------------------------------------------
-### Finished At:
-----------------------------------------------------------------------
-### File Store:
-
-----------------------------------------------------------------------
-
@@ -1,40 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
-1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
-----------------------------------------------------------------------
-### Status:
-enqueued [0,1,]
-----------------------------------------------------------------------
-### Kind:
-"documentAdditionOrUpdate" [1,]
-"documentDeletion" [0,]
-----------------------------------------------------------------------
-### Index Tasks:
-doggos [0,1,]
-----------------------------------------------------------------------
-### Index Mapper:
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-[timestamp] [1,]
-----------------------------------------------------------------------
-### Started At:
-----------------------------------------------------------------------
-### Finished At:
-----------------------------------------------------------------------
-### File Store:
-00000000-0000-0000-0000-000000000000
-
-----------------------------------------------------------------------
-
@@ -1,36 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-### Autobatching Enabled = true
-### Processing Tasks:
-[]
-----------------------------------------------------------------------
-### All Tasks:
-0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
-----------------------------------------------------------------------
-### Status:
-enqueued [0,]
-----------------------------------------------------------------------
-### Kind:
-"indexCreation" [0,]
-----------------------------------------------------------------------
-### Index Tasks:
-index_a [0,]
-----------------------------------------------------------------------
-### Index Mapper:
-
-----------------------------------------------------------------------
-### Canceled By:
-
-----------------------------------------------------------------------
-### Enqueued At:
-[timestamp] [0,]
-----------------------------------------------------------------------
-### Started At:
-----------------------------------------------------------------------
-### Finished At:
-----------------------------------------------------------------------
-### File Store:
-
-----------------------------------------------------------------------
-
@@ -3,9 +3,9 @@
 use std::collections::{BTreeSet, HashSet};
 use std::ops::Bound;

-use meilisearch_types::heed::types::DecodeIgnore;
+use meilisearch_types::heed::types::{DecodeIgnore, OwnedType};
 use meilisearch_types::heed::{Database, RoTxn, RwTxn};
-use meilisearch_types::milli::CboRoaringBitmapCodec;
+use meilisearch_types::milli::{CboRoaringBitmapCodec, BEU32};
 use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status};
 use roaring::{MultiOps, RoaringBitmap};
 use time::OffsetDateTime;
@@ -18,7 +18,7 @@ impl IndexScheduler {
     }

     pub(crate) fn last_task_id(&self, rtxn: &RoTxn) -> Result<Option<TaskId>> {
-        Ok(self.all_tasks.remap_data_type::<DecodeIgnore>().last(rtxn)?.map(|(k, _)| k + 1))
+        Ok(self.all_tasks.remap_data_type::<DecodeIgnore>().last(rtxn)?.map(|(k, _)| k.get() + 1))
     }

     pub(crate) fn next_task_id(&self, rtxn: &RoTxn) -> Result<TaskId> {
@@ -26,7 +26,7 @@ impl IndexScheduler {
     }

     pub(crate) fn get_task(&self, rtxn: &RoTxn, task_id: TaskId) -> Result<Option<Task>> {
-        Ok(self.all_tasks.get(rtxn, &task_id)?)
+        Ok(self.all_tasks.get(rtxn, &BEU32::new(task_id))?)
     }

     /// Convert an iterator to a `Vec` of tasks. The tasks MUST exist or a
@@ -88,7 +88,7 @@ impl IndexScheduler {
            }
        }

-        self.all_tasks.put(wtxn, &task.uid, task)?;
+        self.all_tasks.put(wtxn, &BEU32::new(task.uid), task)?;
         Ok(())
     }

@@ -169,11 +169,11 @@ impl IndexScheduler {

 pub(crate) fn insert_task_datetime(
     wtxn: &mut RwTxn,
-    database: Database<BEI128, CboRoaringBitmapCodec>,
+    database: Database<OwnedType<BEI128>, CboRoaringBitmapCodec>,
     time: OffsetDateTime,
     task_id: TaskId,
 ) -> Result<()> {
-    let timestamp = time.unix_timestamp_nanos();
+    let timestamp = BEI128::new(time.unix_timestamp_nanos());
     let mut task_ids = database.get(wtxn, &timestamp)?.unwrap_or_default();
     task_ids.insert(task_id);
     database.put(wtxn, &timestamp, &RoaringBitmap::from_iter(task_ids))?;
@@ -182,11 +182,11 @@ pub(crate) fn insert_task_datetime(

 pub(crate) fn remove_task_datetime(
     wtxn: &mut RwTxn,
-    database: Database<BEI128, CboRoaringBitmapCodec>,
+    database: Database<OwnedType<BEI128>, CboRoaringBitmapCodec>,
     time: OffsetDateTime,
     task_id: TaskId,
 ) -> Result<()> {
-    let timestamp = time.unix_timestamp_nanos();
+    let timestamp = BEI128::new(time.unix_timestamp_nanos());
     if let Some(mut existing) = database.get(wtxn, &timestamp)? {
         existing.remove(task_id);
         if existing.is_empty() {
@@ -202,7 +202,7 @@ pub(crate) fn remove_task_datetime(
 pub(crate) fn keep_tasks_within_datetimes(
     rtxn: &RoTxn,
     tasks: &mut RoaringBitmap,
-    database: Database<BEI128, CboRoaringBitmapCodec>,
+    database: Database<OwnedType<BEI128>, CboRoaringBitmapCodec>,
     after: Option<OffsetDateTime>,
     before: Option<OffsetDateTime>,
 ) -> Result<()> {
@@ -213,8 +213,8 @@ pub(crate) fn keep_tasks_within_datetimes(
         (Some(after), Some(before)) => (Bound::Excluded(*after), Bound::Excluded(*before)),
     };
     let mut collected_task_ids = RoaringBitmap::new();
-    let start = map_bound(start, |b| b.unix_timestamp_nanos());
-    let end = map_bound(end, |b| b.unix_timestamp_nanos());
+    let start = map_bound(start, |b| BEI128::new(b.unix_timestamp_nanos()));
+    let end = map_bound(end, |b| BEI128::new(b.unix_timestamp_nanos()));
     let iter = database.range(rtxn, &(start, end))?;
     for r in iter {
         let (_timestamp, task_ids) = r?;
@@ -337,6 +337,8 @@ impl IndexScheduler {
         let rtxn = self.env.read_txn().unwrap();
         for task in self.all_tasks.iter(&rtxn).unwrap() {
             let (task_id, task) = task.unwrap();
+            let task_id = task_id.get();

             let task_index_uid = task.index_uid().map(ToOwned::to_owned);

             let Task {
@@ -359,13 +361,16 @@ impl IndexScheduler {
                 .unwrap()
                 .contains(task.uid));
             }
-            let db_enqueued_at =
-                self.enqueued_at.get(&rtxn, &enqueued_at.unix_timestamp_nanos()).unwrap().unwrap();
+            let db_enqueued_at = self
+                .enqueued_at
+                .get(&rtxn, &BEI128::new(enqueued_at.unix_timestamp_nanos()))
+                .unwrap()
+                .unwrap();
             assert!(db_enqueued_at.contains(task_id));
             if let Some(started_at) = started_at {
                 let db_started_at = self
                     .started_at
-                    .get(&rtxn, &started_at.unix_timestamp_nanos())
+                    .get(&rtxn, &BEI128::new(started_at.unix_timestamp_nanos()))
                     .unwrap()
                     .unwrap();
                 assert!(db_started_at.contains(task_id));
@@ -373,7 +378,7 @@ impl IndexScheduler {
             if let Some(finished_at) = finished_at {
                 let db_finished_at = self
                     .finished_at
-                    .get(&rtxn, &finished_at.unix_timestamp_nanos())
+                    .get(&rtxn, &BEI128::new(finished_at.unix_timestamp_nanos()))
                     .unwrap()
                     .unwrap();
                 assert!(db_finished_at.contains(task_id));
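The `BEI128` keys in the date databases above store unix timestamps in nanoseconds, big-endian, so that LMDB range scans over `enqueued_at`, `started_at`, and `finished_at` return entries in chronological order. A std-only sketch of that key encoding (valid for the non-negative values `unix_timestamp_nanos()` produces for current dates):

```rust
// Big-endian i128 nanoseconds: byte order matches time order for
// non-negative timestamps.
fn timestamp_key(nanos: i128) -> [u8; 16] {
    nanos.to_be_bytes()
}

fn main() {
    let earlier = timestamp_key(1_000);
    let later = timestamp_key(2_000);
    assert!(earlier < later); // lexicographic byte order == chronological order
}
```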
@@ -1,7 +1,7 @@
|
|||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
use std::convert::TryInto;
|
use std::convert::TryInto;
|
||||||
|
|
||||||
use meilisearch_types::heed::{BoxedError, BytesDecode, BytesEncode};
|
use meilisearch_types::heed::{BytesDecode, BytesEncode};
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
/// A heed codec for value of struct Uuid.
|
/// A heed codec for value of struct Uuid.
|
||||||
@@ -10,15 +10,15 @@ pub struct UuidCodec;
|
|||||||
impl<'a> BytesDecode<'a> for UuidCodec {
|
impl<'a> BytesDecode<'a> for UuidCodec {
|
||||||
type DItem = Uuid;
|
type DItem = Uuid;
|
||||||
|
|
||||||
fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, BoxedError> {
|
fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
|
||||||
bytes.try_into().map(Uuid::from_bytes).map_err(Into::into)
|
bytes.try_into().ok().map(Uuid::from_bytes)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BytesEncode<'_> for UuidCodec {
|
impl BytesEncode<'_> for UuidCodec {
|
||||||
type EItem = Uuid;
|
type EItem = Uuid;
|
||||||
|
|
||||||
fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, BoxedError> {
|
fn bytes_encode(item: &Self::EItem) -> Option<Cow<[u8]>> {
|
||||||
Ok(Cow::Borrowed(item.as_bytes()))
|
Some(Cow::Borrowed(item.as_bytes()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
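The `UuidCodec` hunk is the smallest view of the heed upgrade this branch rolls back: newer heed codecs are fallible (`Result<_, BoxedError>`) while the older API returned `Option`. A self-contained sketch of the newer-style decode step, assuming `BoxedError` is an alias for `Box<dyn std::error::Error + Send + Sync>` as in heed:

type BoxedError = Box<dyn std::error::Error + Send + Sync>;

// Turning an arbitrary byte slice into the fixed 16-byte array a UUID needs;
// `try_into` fails with `TryFromSliceError` when the length is not exactly 16,
// and boxing that error is all `.map_err(Into::into)` does above.
fn decode_uuid_bytes(bytes: &[u8]) -> Result<[u8; 16], BoxedError> {
    bytes.try_into().map_err(Into::into)
}

fn main() {
    assert!(decode_uuid_bytes(&[0u8; 16]).is_ok());
    assert!(decode_uuid_bytes(&[0u8; 4]).is_err());
}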
@@ -15,7 +15,7 @@ license.workspace = true
 serde_json = "1.0"

 [dev-dependencies]
-criterion = "0.5.1"
+criterion = "0.4.0"

 [[bench]]
 name = "depth"
@@ -167,9 +167,7 @@ macro_rules! snapshot {
        let (settings, snap_name, _) = $crate::default_snapshot_settings_for_test(test_name, Some(&snap_name));
        settings.bind(|| {
            let snap = format!("{}", $value);
-            insta::allow_duplicates! {
-                meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
-            }
+            meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
        });
    };
    ($value:expr, @$inline:literal) => {
@@ -178,9 +176,7 @@ macro_rules! snapshot {
        let (settings, _, _) = $crate::default_snapshot_settings_for_test("", Some("_dummy_argument"));
        settings.bind(|| {
            let snap = format!("{}", $value);
-            insta::allow_duplicates! {
-                meili_snap::insta::assert_snapshot!(snap, @$inline);
-            }
+            meili_snap::insta::assert_snapshot!(snap, @$inline);
        });
    };
    ($value:expr) => {
@@ -198,37 +194,11 @@ macro_rules! snapshot {
        let (settings, snap_name, _) = $crate::default_snapshot_settings_for_test(test_name, None);
        settings.bind(|| {
            let snap = format!("{}", $value);
-            insta::allow_duplicates! {
-                meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
-            }
+            meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
        });
    };
}

-/// Create a string from the value by serializing it as Json, optionally
-/// redacting some parts of it.
-///
-/// The second argument to the macro can be an object expression for redaction.
-/// It's in the form { selector => replacement }. For more information about redactions
-/// refer to the redactions feature in the `insta` guide.
-#[macro_export]
-macro_rules! json_string {
-    ($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
-        {
-            let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
-            snap
-        }
-    };
-    ($value:expr) => {{
-        let value = meili_snap::insta::_macro_support::serialize_value(
-            &$value,
-            meili_snap::insta::_macro_support::SerializationFormat::Json,
-            meili_snap::insta::_macro_support::SnapshotLocation::File
-        );
-        value
-    }};
-}
-
 #[cfg(test)]
 mod tests {
    use crate as meili_snap;
@@ -280,3 +250,27 @@ mod tests {
        }
    }
}
+
+/// Create a string from the value by serializing it as Json, optionally
+/// redacting some parts of it.
+///
+/// The second argument to the macro can be an object expression for redaction.
+/// It's in the form { selector => replacement }. For more information about redactions
+/// refer to the redactions feature in the `insta` guide.
+#[macro_export]
+macro_rules! json_string {
+    ($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
+        {
+            let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
+            snap
+        }
+    };
+    ($value:expr) => {{
+        let value = meili_snap::insta::_macro_support::serialize_value(
+            &$value,
+            meili_snap::insta::_macro_support::SerializationFormat::Json,
+            meili_snap::insta::_macro_support::SnapshotLocation::File
+        );
+        value
+    }};
+}
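The only behavioral change in the three `snapshot!` arms is the removal of the `insta::allow_duplicates!` wrapper. That wrapper exists so one assertion site can run against the same snapshot several times, e.g. from inside a loop. A hedged usage sketch, modeled on insta's documented pattern (requires the `insta` crate; the loop and values are illustrative):

fn main() {
    insta::allow_duplicates! {
        for x in (0..10).step_by(2) {
            // Each iteration re-asserts the same inline snapshot; without the
            // wrapper insta would flag the repeated assertion as a conflict.
            insta::assert_snapshot!(format!("{}", x % 2 == 0), @"true");
        }
    }
}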
@@ -4,20 +4,17 @@ use std::collections::HashSet;
 use std::convert::{TryFrom, TryInto};
 use std::fs::create_dir_all;
 use std::path::Path;
-use std::result::Result as StdResult;
 use std::str;
 use std::str::FromStr;
 use std::sync::Arc;

 use hmac::{Hmac, Mac};
-use meilisearch_types::heed::BoxedError;
 use meilisearch_types::index_uid_pattern::IndexUidPattern;
 use meilisearch_types::keys::KeyId;
 use meilisearch_types::milli;
-use meilisearch_types::milli::heed::types::{Bytes, DecodeIgnore, SerdeJson};
+use meilisearch_types::milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
 use meilisearch_types::milli::heed::{Database, Env, EnvOpenOptions, RwTxn};
 use sha2::Sha256;
-use thiserror::Error;
 use time::OffsetDateTime;
 use uuid::fmt::Hyphenated;
 use uuid::Uuid;
@@ -33,7 +30,7 @@ const KEY_ID_ACTION_INDEX_EXPIRATION_DB_NAME: &str = "keyid-action-index-expirat
 #[derive(Clone)]
 pub struct HeedAuthStore {
    env: Arc<Env>,
-    keys: Database<Bytes, SerdeJson<Key>>,
+    keys: Database<ByteSlice, SerdeJson<Key>>,
    action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<OffsetDateTime>>>,
    should_close_on_drop: bool,
 }
@@ -132,9 +129,6 @@ impl HeedAuthStore {
                Action::DumpsAll => {
                    actions.insert(Action::DumpsCreate);
                }
-                Action::SnapshotsAll => {
-                    actions.insert(Action::SnapshotsCreate);
-                }
                Action::TasksAll => {
                    actions.extend([Action::TasksGet, Action::TasksDelete, Action::TasksCancel]);
                }
@@ -279,7 +273,7 @@ impl HeedAuthStore {
    fn delete_key_from_inverted_db(&self, wtxn: &mut RwTxn, key: &KeyId) -> Result<()> {
        let mut iter = self
            .action_keyid_index_expiration
-            .remap_types::<Bytes, DecodeIgnore>()
+            .remap_types::<ByteSlice, DecodeIgnore>()
            .prefix_iter_mut(wtxn, key.as_bytes())?;
        while iter.next().transpose()?.is_some() {
            // safety: we don't keep references from inside the LMDB database.
@@ -297,24 +291,23 @@ pub struct KeyIdActionCodec;
 impl<'a> milli::heed::BytesDecode<'a> for KeyIdActionCodec {
    type DItem = (KeyId, Action, Option<&'a [u8]>);

-    fn bytes_decode(bytes: &'a [u8]) -> StdResult<Self::DItem, BoxedError> {
-        let (key_id_bytes, action_bytes) = try_split_array_at(bytes).ok_or(SliceTooShortError)?;
-        let (&action_byte, index) =
-            match try_split_array_at(action_bytes).ok_or(SliceTooShortError)? {
-                ([action], []) => (action, None),
-                ([action], index) => (action, Some(index)),
-            };
+    fn bytes_decode(bytes: &'a [u8]) -> Option<Self::DItem> {
+        let (key_id_bytes, action_bytes) = try_split_array_at(bytes)?;
+        let (action_bytes, index) = match try_split_array_at(action_bytes)? {
+            (action, []) => (action, None),
+            (action, index) => (action, Some(index)),
+        };
        let key_id = Uuid::from_bytes(*key_id_bytes);
-        let action = Action::from_repr(action_byte).ok_or(InvalidActionError { action_byte })?;
+        let action = Action::from_repr(u8::from_be_bytes(*action_bytes))?;

-        Ok((key_id, action, index))
+        Some((key_id, action, index))
    }
 }

 impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
    type EItem = (&'a KeyId, &'a Action, Option<&'a [u8]>);

-    fn bytes_encode((key_id, action, index): &Self::EItem) -> StdResult<Cow<[u8]>, BoxedError> {
+    fn bytes_encode((key_id, action, index): &Self::EItem) -> Option<Cow<[u8]>> {
        let mut bytes = Vec::new();

        bytes.extend_from_slice(key_id.as_bytes());
@@ -324,20 +317,10 @@ impl<'a> milli::heed::BytesEncode<'a> for KeyIdActionCodec {
            bytes.extend_from_slice(index);
        }

-        Ok(Cow::Owned(bytes))
+        Some(Cow::Owned(bytes))
    }
 }

-#[derive(Error, Debug)]
-#[error("the slice is too short")]
-pub struct SliceTooShortError;
-
-#[derive(Error, Debug)]
-#[error("cannot construct a valid Action from {action_byte}")]
-pub struct InvalidActionError {
-    pub action_byte: u8,
-}
-
 pub fn generate_key_as_hexa(uid: Uuid, master_key: &[u8]) -> String {
    // format uid as hyphenated allowing user to generate their own keys.
    let mut uid_buffer = [0; Hyphenated::LENGTH];
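Both versions of `KeyIdActionCodec` parse the same key layout: a 16-byte key UUID, one action byte, then optional index-pattern bytes. A std-only sketch of that split, where `try_split_array_at` is a hypothetical stand-in for the helper milli provides:

// Stand-in for milli's helper: split off a fixed-size prefix, or None if short.
fn try_split_array_at<const N: usize>(slice: &[u8]) -> Option<(&[u8; N], &[u8])> {
    if slice.len() < N {
        return None;
    }
    let (head, tail) = slice.split_at(N);
    Some((head.try_into().ok()?, tail))
}

fn main() {
    let mut key = Vec::new();
    key.extend_from_slice(&[0x11; 16]); // the API key id (a UUID)
    key.push(7);                        // the action discriminant
    key.extend_from_slice(b"movies");   // the optional index pattern

    let (uuid_bytes, rest) = try_split_array_at::<16>(&key).unwrap();
    let (action_byte, index) = try_split_array_at::<1>(rest).unwrap();
    assert_eq!(uuid_bytes.len(), 16);
    assert_eq!(action_byte[0], 7);
    assert_eq!(index, b"movies");
}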
@@ -15,13 +15,13 @@ actix-web = { version = "4.3.1", default-features = false }
 anyhow = "1.0.70"
 convert_case = "0.6.0"
 csv = "1.2.1"
-deserr = { version = "0.6.0", features = ["actix-web"] }
+deserr = "0.5.0"
 either = { version = "1.8.1", features = ["serde"] }
 enum-iterator = "1.4.0"
 file-store = { path = "../file-store" }
 flate2 = "1.0.25"
 fst = "0.4.7"
-memmap2 = "0.7.1"
+memmap2 = "0.5.10"
 milli = { path = "../milli" }
 roaring = { version = "0.10.1", features = ["serde"] }
 serde = { version = "1.0.160", features = ["derive"] }
@@ -50,7 +50,6 @@ hebrew = ["milli/hebrew"]
 japanese = ["milli/japanese"]
 # thai specialized tokenization
 thai = ["milli/thai"]

 # allow greek specialized tokenization
 greek = ["milli/greek"]
-# allow khmer specialized tokenization
-khmer = ["milli/khmer"]
@@ -151,10 +151,6 @@ make_missing_field_convenience_builder!(MissingApiKeyExpiresAt, missing_api_key_
 make_missing_field_convenience_builder!(MissingApiKeyIndexes, missing_api_key_indexes);
 make_missing_field_convenience_builder!(MissingSwapIndexes, missing_swap_indexes);
 make_missing_field_convenience_builder!(MissingDocumentFilter, missing_document_filter);
-make_missing_field_convenience_builder!(
-    MissingFacetSearchFacetName,
-    missing_facet_search_facet_name
-);

 // Integrate a sub-error into a [`DeserrError`] by taking its error message but using
 // the default error code (C) from `Self`
@@ -188,4 +184,3 @@ merge_with_error_impl_take_error_message!(ParseOffsetDateTimeError);
 merge_with_error_impl_take_error_message!(ParseTaskKindError);
 merge_with_error_impl_take_error_message!(ParseTaskStatusError);
 merge_with_error_impl_take_error_message!(IndexUidFormatError);
-merge_with_error_impl_take_error_message!(InvalidSearchSemanticRatio);
@@ -1,3 +1,4 @@
+use std::borrow::Borrow;
 use std::fmt::{self, Debug, Display};
 use std::fs::File;
 use std::io::{self, Seek, Write};
@@ -41,7 +42,7 @@ impl Display for DocumentFormatError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Io(e) => write!(f, "{e}"),
-            Self::MalformedPayload(me, b) => match me {
+            Self::MalformedPayload(me, b) => match me.borrow() {
                Error::Json(se) => {
                    let mut message = match se.classify() {
                        Category::Data => {
@@ -217,26 +217,19 @@ InvalidDocumentFields , InvalidRequest , BAD_REQUEST ;
 MissingDocumentFilter                , InvalidRequest       , BAD_REQUEST ;
 InvalidDocumentFilter                , InvalidRequest       , BAD_REQUEST ;
 InvalidDocumentGeoField              , InvalidRequest       , BAD_REQUEST ;
-InvalidVectorDimensions              , InvalidRequest       , BAD_REQUEST ;
-InvalidVectorsType                   , InvalidRequest       , BAD_REQUEST ;
 InvalidDocumentId                    , InvalidRequest       , BAD_REQUEST ;
 InvalidDocumentLimit                 , InvalidRequest       , BAD_REQUEST ;
 InvalidDocumentOffset                , InvalidRequest       , BAD_REQUEST ;
-InvalidEmbedder                      , InvalidRequest       , BAD_REQUEST ;
-InvalidHybridQuery                   , InvalidRequest       , BAD_REQUEST ;
 InvalidIndexLimit                    , InvalidRequest       , BAD_REQUEST ;
 InvalidIndexOffset                   , InvalidRequest       , BAD_REQUEST ;
 InvalidIndexPrimaryKey               , InvalidRequest       , BAD_REQUEST ;
 InvalidIndexUid                      , InvalidRequest       , BAD_REQUEST ;
-InvalidSearchAttributesToSearchOn    , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchAttributesToCrop        , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchAttributesToHighlight   , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchAttributesToRetrieve    , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchCropLength              , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchCropMarker              , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchFacets                  , InvalidRequest       , BAD_REQUEST ;
-InvalidSearchSemanticRatio           , InvalidRequest       , BAD_REQUEST ;
-InvalidFacetSearchFacetName          , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchFilter                  , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchHighlightPostTag        , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchHighlightPreTag         , InvalidRequest       , BAD_REQUEST ;
@@ -246,27 +239,17 @@ InvalidSearchMatchingStrategy , InvalidRequest , BAD_REQUEST ;
 InvalidSearchOffset                  , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchPage                    , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchQ                       , InvalidRequest       , BAD_REQUEST ;
-InvalidFacetSearchQuery              , InvalidRequest       , BAD_REQUEST ;
-InvalidFacetSearchName               , InvalidRequest       , BAD_REQUEST ;
-InvalidSearchVector                  , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchShowMatchesPosition     , InvalidRequest       , BAD_REQUEST ;
-InvalidSearchShowRankingScore        , InvalidRequest       , BAD_REQUEST ;
-InvalidSearchShowRankingScoreDetails , InvalidRequest       , BAD_REQUEST ;
 InvalidSearchSort                    , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsDisplayedAttributes   , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsDistinctAttribute     , InvalidRequest       , BAD_REQUEST ;
-InvalidSettingsProximityPrecision    , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsFaceting              , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsFilterableAttributes  , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsPagination            , InvalidRequest       , BAD_REQUEST ;
-InvalidSettingsEmbedders             , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsRankingRules          , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsSearchableAttributes  , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsSortableAttributes    , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsStopWords             , InvalidRequest       , BAD_REQUEST ;
-InvalidSettingsNonSeparatorTokens    , InvalidRequest       , BAD_REQUEST ;
-InvalidSettingsSeparatorTokens       , InvalidRequest       , BAD_REQUEST ;
-InvalidSettingsDictionary            , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsSynonyms              , InvalidRequest       , BAD_REQUEST ;
 InvalidSettingsTypoTolerance         , InvalidRequest       , BAD_REQUEST ;
 InvalidState                         , Internal             , INTERNAL_SERVER_ERROR ;
@@ -286,7 +269,6 @@ InvalidTaskStatuses , InvalidRequest , BAD_REQUEST ;
 InvalidTaskTypes                     , InvalidRequest       , BAD_REQUEST ;
 InvalidTaskUids                      , InvalidRequest       , BAD_REQUEST ;
 IoError                              , System               , UNPROCESSABLE_ENTITY;
-FeatureNotEnabled                    , InvalidRequest       , BAD_REQUEST ;
 MalformedPayload                     , InvalidRequest       , BAD_REQUEST ;
 MaxFieldsLimitExceeded               , InvalidRequest       , BAD_REQUEST ;
 MissingApiKeyActions                 , InvalidRequest       , BAD_REQUEST ;
@@ -295,22 +277,18 @@ MissingApiKeyIndexes , InvalidRequest , BAD_REQUEST ;
 MissingAuthorizationHeader           , Auth                 , UNAUTHORIZED ;
 MissingContentType                   , InvalidRequest       , UNSUPPORTED_MEDIA_TYPE ;
 MissingDocumentId                    , InvalidRequest       , BAD_REQUEST ;
-MissingFacetSearchFacetName          , InvalidRequest       , BAD_REQUEST ;
 MissingIndexUid                      , InvalidRequest       , BAD_REQUEST ;
 MissingMasterKey                     , Auth                 , UNAUTHORIZED ;
 MissingPayload                       , InvalidRequest       , BAD_REQUEST ;
-MissingSearchHybrid                  , InvalidRequest       , BAD_REQUEST ;
 MissingSwapIndexes                   , InvalidRequest       , BAD_REQUEST ;
 MissingTaskFilters                   , InvalidRequest       , BAD_REQUEST ;
 NoSpaceLeftOnDevice                  , System               , UNPROCESSABLE_ENTITY;
 PayloadTooLarge                      , InvalidRequest       , PAYLOAD_TOO_LARGE ;
 TaskNotFound                         , InvalidRequest       , NOT_FOUND ;
 TooManyOpenFiles                     , System               , UNPROCESSABLE_ENTITY ;
-TooManyVectors                       , InvalidRequest       , BAD_REQUEST ;
 UnretrievableDocument                , Internal             , BAD_REQUEST ;
 UnretrievableErrorCode               , InvalidRequest       , BAD_REQUEST ;
-UnsupportedMediaType                 , InvalidRequest       , UNSUPPORTED_MEDIA_TYPE ;
-VectorEmbeddingError                 , InvalidRequest       , BAD_REQUEST
+UnsupportedMediaType                 , InvalidRequest       , UNSUPPORTED_MEDIA_TYPE
 }

 impl ErrorCode for JoinError {
@@ -332,6 +310,7 @@ impl ErrorCode for milli::Error {
                UserError::SerdeJson(_)
                | UserError::InvalidLmdbOpenOptions
                | UserError::DocumentLimitReached
+                | UserError::AccessingSoftDeletedDocument { .. }
                | UserError::UnknownInternalDocumentId { .. } => Code::Internal,
                UserError::InvalidStoreFile => Code::InvalidStoreFile,
                UserError::NoSpaceLeftOnDevice => Code::NoSpaceLeftOnDevice,
@@ -343,10 +322,6 @@ impl ErrorCode for milli::Error {
                UserError::InvalidDocumentId { .. } | UserError::TooManyDocumentIds { .. } => {
                    Code::InvalidDocumentId
                }
-                UserError::MissingDocumentField(_) => Code::InvalidDocumentFields,
-                UserError::InvalidPrompt(_) => Code::InvalidSettingsEmbedders,
-                UserError::TooManyEmbedders(_) => Code::InvalidSettingsEmbedders,
-                UserError::InvalidPromptForEmbeddings(..) => Code::InvalidSettingsEmbedders,
                UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,
                UserError::MultiplePrimaryKeyCandidatesFound { .. } => {
                    Code::IndexPrimaryKeyMultipleCandidatesFound
@@ -355,24 +330,12 @@ impl ErrorCode for milli::Error {
                UserError::SortRankingRuleMissing => Code::InvalidSearchSort,
                UserError::InvalidFacetsDistribution { .. } => Code::InvalidSearchFacets,
                UserError::InvalidSortableAttribute { .. } => Code::InvalidSearchSort,
-                UserError::InvalidSearchableAttribute { .. } => {
-                    Code::InvalidSearchAttributesToSearchOn
-                }
-                UserError::InvalidFacetSearchFacetName { .. } => {
-                    Code::InvalidFacetSearchFacetName
-                }
                UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
                UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
-                UserError::InvalidVectorDimensions { .. } => Code::InvalidVectorDimensions,
-                UserError::InvalidVectorsMapType { .. } => Code::InvalidVectorsType,
-                UserError::InvalidVectorsType { .. } => Code::InvalidVectorsType,
-                UserError::TooManyVectors(_, _) => Code::TooManyVectors,
                UserError::SortError(_) => Code::InvalidSearchSort,
                UserError::InvalidMinTypoWordLenSetting(_, _) => {
                    Code::InvalidSettingsTypoTolerance
                }
-                UserError::InvalidEmbedder(_) => Code::InvalidEmbedder,
-                UserError::VectorEmbeddingError(_) => Code::VectorEmbeddingError,
            }
        }
    }
@@ -402,11 +365,11 @@ impl ErrorCode for HeedError {
            HeedError::Mdb(MdbError::Invalid) => Code::InvalidStoreFile,
            HeedError::Io(e) => e.error_code(),
            HeedError::Mdb(_)
-            | HeedError::Encoding(_)
-            | HeedError::Decoding(_)
+            | HeedError::Encoding
+            | HeedError::Decoding
            | HeedError::InvalidDatabaseTyping
            | HeedError::DatabaseClosing
-            | HeedError::BadOpenOptions { .. } => Code::Internal,
+            | HeedError::BadOpenOptions => Code::Internal,
        }
    }
 }
@@ -460,15 +423,6 @@ impl fmt::Display for DeserrParseIntError {
    }
 }

-impl fmt::Display for deserr_codes::InvalidSearchSemanticRatio {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
-            f,
-            "the value of `semanticRatio` is invalid, expected a float between `0.0` and `1.0`."
-        )
-    }
-}
-
 #[macro_export]
 macro_rules! internal_error {
    ($target:ty : $($other:path), *) => {
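The long comma-separated table above is macro input: each row couples an error code name with a coarse error type and an HTTP status, so dropping a row (as this branch does for the vector, hybrid-search, and facet-search codes) removes the variant everywhere at once. A hedged sketch of the shape such a table expands to, not the crate's actual macro:

#[derive(Debug, Clone, Copy)]
enum ErrorType { InvalidRequest, Auth, System }

#[derive(Debug, Clone, Copy)]
enum Code { InvalidDocumentFilter, MissingAuthorizationHeader, NoSpaceLeftOnDevice }

impl Code {
    // Column two of the table: the coarse error type.
    fn error_type(&self) -> ErrorType {
        match self {
            Code::InvalidDocumentFilter => ErrorType::InvalidRequest,
            Code::MissingAuthorizationHeader => ErrorType::Auth,
            Code::NoSpaceLeftOnDevice => ErrorType::System,
        }
    }

    // Column three: the HTTP status returned to the client.
    fn http_status(&self) -> u16 {
        match self {
            Code::InvalidDocumentFilter => 400,      // BAD_REQUEST
            Code::MissingAuthorizationHeader => 401, // UNAUTHORIZED
            Code::NoSpaceLeftOnDevice => 422,        // UNPROCESSABLE_ENTITY
        }
    }
}

fn main() {
    assert_eq!(Code::MissingAuthorizationHeader.http_status(), 401);
    println!("{:?}", Code::NoSpaceLeftOnDevice.error_type());
}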
@@ -1,33 +0,0 @@
-use deserr::Deserr;
-use milli::OrderBy;
-use serde::{Deserialize, Serialize};
-
-#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, Deserr)]
-#[serde(rename_all = "camelCase")]
-#[deserr(rename_all = camelCase)]
-pub enum FacetValuesSort {
-    /// Facet values are sorted in alphabetical order, ascending from A to Z.
-    #[default]
-    Alpha,
-    /// Facet values are sorted by decreasing count.
-    /// The count is the number of records containing this facet value in the results of the query.
-    Count,
-}
-
-impl From<FacetValuesSort> for OrderBy {
-    fn from(val: FacetValuesSort) -> Self {
-        match val {
-            FacetValuesSort::Alpha => OrderBy::Lexicographic,
-            FacetValuesSort::Count => OrderBy::Count,
-        }
-    }
-}
-
-impl From<OrderBy> for FacetValuesSort {
-    fn from(val: OrderBy) -> Self {
-        match val {
-            OrderBy::Lexicographic => FacetValuesSort::Alpha,
-            OrderBy::Count => FacetValuesSort::Count,
-        }
-    }
-}
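For context on what the deleted module wired up: the `sortFacetValuesBy` setting lets a payload choose alphabetical or count ordering per facet, and the enum above only translates that choice into milli's `OrderBy`. A self-contained sketch of the mapping, with the two types inlined rather than imported:

#[derive(Debug, PartialEq)]
enum OrderBy { Lexicographic, Count }

#[derive(Debug, PartialEq)]
enum FacetValuesSort { Alpha, Count }

impl From<FacetValuesSort> for OrderBy {
    fn from(val: FacetValuesSort) -> Self {
        match val {
            FacetValuesSort::Alpha => OrderBy::Lexicographic,
            FacetValuesSort::Count => OrderBy::Count,
        }
    }
}

fn main() {
    // A settings payload like `"sortFacetValuesBy": { "*": "count" }`
    // ultimately lands here as the engine-side ordering.
    assert_eq!(OrderBy::from(FacetValuesSort::Count), OrderBy::Count);
}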
@@ -1,15 +0,0 @@
-use serde::{Deserialize, Serialize};
-
-#[derive(Serialize, Deserialize, Debug, Clone, Copy, Default, PartialEq, Eq)]
-#[serde(rename_all = "camelCase", default)]
-pub struct RuntimeTogglableFeatures {
-    pub score_details: bool,
-    pub vector_store: bool,
-    pub metrics: bool,
-    pub export_puffin_reports: bool,
-}
-
-#[derive(Default, Debug, Clone, Copy)]
-pub struct InstanceTogglableFeatures {
-    pub metrics: bool,
-}
@@ -147,7 +147,9 @@ impl Key {
 fn parse_expiration_date(
    string: Option<String>,
 ) -> std::result::Result<Option<OffsetDateTime>, ParseOffsetDateTimeError> {
-    let Some(string) = string else { return Ok(None) };
+    let Some(string) = string else {
+        return Ok(None)
+    };
    let datetime = if let Ok(datetime) = OffsetDateTime::parse(&string, &Rfc3339) {
        datetime
    } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
@@ -257,12 +259,6 @@ pub enum Action {
    #[serde(rename = "dumps.create")]
    #[deserr(rename = "dumps.create")]
    DumpsCreate,
-    #[serde(rename = "snapshots.*")]
-    #[deserr(rename = "snapshots.*")]
-    SnapshotsAll,
-    #[serde(rename = "snapshots.create")]
-    #[deserr(rename = "snapshots.create")]
-    SnapshotsCreate,
    #[serde(rename = "version")]
    #[deserr(rename = "version")]
    Version,
@@ -278,12 +274,6 @@ pub enum Action {
    #[serde(rename = "keys.delete")]
    #[deserr(rename = "keys.delete")]
    KeysDelete,
-    #[serde(rename = "experimental.get")]
-    #[deserr(rename = "experimental.get")]
-    ExperimentalFeaturesGet,
-    #[serde(rename = "experimental.update")]
-    #[deserr(rename = "experimental.update")]
-    ExperimentalFeaturesUpdate,
 }

 impl Action {
@@ -315,14 +305,11 @@ impl Action {
            METRICS_GET => Some(Self::MetricsGet),
            DUMPS_ALL => Some(Self::DumpsAll),
            DUMPS_CREATE => Some(Self::DumpsCreate),
-            SNAPSHOTS_CREATE => Some(Self::SnapshotsCreate),
            VERSION => Some(Self::Version),
            KEYS_CREATE => Some(Self::KeysAdd),
            KEYS_GET => Some(Self::KeysGet),
            KEYS_UPDATE => Some(Self::KeysUpdate),
            KEYS_DELETE => Some(Self::KeysDelete),
-            EXPERIMENTAL_FEATURES_GET => Some(Self::ExperimentalFeaturesGet),
-            EXPERIMENTAL_FEATURES_UPDATE => Some(Self::ExperimentalFeaturesUpdate),
            _otherwise => None,
        }
    }
@@ -360,12 +347,9 @@ pub mod actions {
    pub const METRICS_GET: u8 = MetricsGet.repr();
    pub const DUMPS_ALL: u8 = DumpsAll.repr();
    pub const DUMPS_CREATE: u8 = DumpsCreate.repr();
-    pub const SNAPSHOTS_CREATE: u8 = SnapshotsCreate.repr();
    pub const VERSION: u8 = Version.repr();
    pub const KEYS_CREATE: u8 = KeysAdd.repr();
    pub const KEYS_GET: u8 = KeysGet.repr();
    pub const KEYS_UPDATE: u8 = KeysUpdate.repr();
    pub const KEYS_DELETE: u8 = KeysDelete.repr();
-    pub const EXPERIMENTAL_FEATURES_GET: u8 = ExperimentalFeaturesGet.repr();
-    pub const EXPERIMENTAL_FEATURES_UPDATE: u8 = ExperimentalFeaturesUpdate.repr();
 }
@@ -2,8 +2,6 @@ pub mod compression;
 pub mod deserr;
 pub mod document_formats;
 pub mod error;
-pub mod facet_values_sort;
-pub mod features;
 pub mod index_uid;
 pub mod index_uid_pattern;
 pub mod keys;
@@ -8,16 +8,14 @@ use std::str::FromStr;

 use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
 use fst::IntoStreamer;
-use milli::proximity::ProximityPrecision;
 use milli::update::Setting;
 use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
 use serde::{Deserialize, Serialize, Serializer};

 use crate::deserr::DeserrJsonError;
 use crate::error::deserr_codes::*;
-use crate::facet_values_sort::FacetValuesSort;

-/// The maximum number of results that the engine
+/// The maximimum number of results that the engine
 /// will be able to return in one search call.
 pub const DEFAULT_PAGINATION_MAX_TOTAL_HITS: usize = 1000;

@@ -104,9 +102,6 @@ pub struct FacetingSettings {
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    pub max_values_per_facet: Setting<usize>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(default)]
-    pub sort_facet_values_by: Setting<BTreeMap<String, FacetValuesSort>>,
 }

 #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
@@ -172,24 +167,12 @@ pub struct Settings<T> {
    #[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
    pub stop_words: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(default, error = DeserrJsonError<InvalidSettingsNonSeparatorTokens>)]
-    pub non_separator_tokens: Setting<BTreeSet<String>>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(default, error = DeserrJsonError<InvalidSettingsSeparatorTokens>)]
-    pub separator_tokens: Setting<BTreeSet<String>>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(default, error = DeserrJsonError<InvalidSettingsDictionary>)]
-    pub dictionary: Setting<BTreeSet<String>>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
    pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsDistinctAttribute>)]
    pub distinct_attribute: Setting<String>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(default, error = DeserrJsonError<InvalidSettingsProximityPrecision>)]
-    pub proximity_precision: Setting<ProximityPrecisionView>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
    pub typo_tolerance: Setting<TypoSettings>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
@@ -199,10 +182,6 @@ pub struct Settings<T> {
    #[deserr(default, error = DeserrJsonError<InvalidSettingsPagination>)]
    pub pagination: Setting<PaginationSettings>,

-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    #[deserr(default, error = DeserrJsonError<InvalidSettingsEmbedders>)]
-    pub embedders: Setting<BTreeMap<String, Setting<milli::vector::settings::EmbeddingSettings>>>,
-
    #[serde(skip)]
    #[deserr(skip)]
    pub _kind: PhantomData<T>,
@@ -218,15 +197,10 @@ impl Settings<Checked> {
            ranking_rules: Setting::Reset,
            stop_words: Setting::Reset,
            synonyms: Setting::Reset,
-            non_separator_tokens: Setting::Reset,
-            separator_tokens: Setting::Reset,
-            dictionary: Setting::Reset,
            distinct_attribute: Setting::Reset,
-            proximity_precision: Setting::Reset,
            typo_tolerance: Setting::Reset,
            faceting: Setting::Reset,
            pagination: Setting::Reset,
-            embedders: Setting::Reset,
            _kind: PhantomData,
        }
    }
@@ -239,16 +213,11 @@ impl Settings<Checked> {
            sortable_attributes,
            ranking_rules,
            stop_words,
-            non_separator_tokens,
-            separator_tokens,
-            dictionary,
            synonyms,
            distinct_attribute,
-            proximity_precision,
            typo_tolerance,
            faceting,
            pagination,
-            embedders,
            ..
        } = self;

@@ -259,16 +228,11 @@ impl Settings<Checked> {
            sortable_attributes,
            ranking_rules,
            stop_words,
-            non_separator_tokens,
-            separator_tokens,
-            dictionary,
            synonyms,
            distinct_attribute,
-            proximity_precision,
            typo_tolerance,
            faceting,
            pagination,
-            embedders,
            _kind: PhantomData,
        }
    }
@@ -306,15 +270,10 @@ impl Settings<Unchecked> {
            ranking_rules: self.ranking_rules,
            stop_words: self.stop_words,
            synonyms: self.synonyms,
-            non_separator_tokens: self.non_separator_tokens,
-            separator_tokens: self.separator_tokens,
-            dictionary: self.dictionary,
            distinct_attribute: self.distinct_attribute,
-            proximity_precision: self.proximity_precision,
            typo_tolerance: self.typo_tolerance,
            faceting: self.faceting,
            pagination: self.pagination,
-            embedders: self.embedders,
            _kind: PhantomData,
        }
    }
@@ -372,28 +331,6 @@ pub fn apply_settings_to_builder(
        Setting::NotSet => (),
    }

-    match settings.non_separator_tokens {
-        Setting::Set(ref non_separator_tokens) => {
-            builder.set_non_separator_tokens(non_separator_tokens.clone())
-        }
-        Setting::Reset => builder.reset_non_separator_tokens(),
-        Setting::NotSet => (),
-    }
-
-    match settings.separator_tokens {
-        Setting::Set(ref separator_tokens) => {
-            builder.set_separator_tokens(separator_tokens.clone())
-        }
-        Setting::Reset => builder.reset_separator_tokens(),
-        Setting::NotSet => (),
-    }
-
-    match settings.dictionary {
-        Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
-        Setting::Reset => builder.reset_dictionary(),
-        Setting::NotSet => (),
-    }
-
    match settings.synonyms {
        Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
        Setting::Reset => builder.reset_synonyms(),
@@ -406,12 +343,6 @@ pub fn apply_settings_to_builder(
        Setting::NotSet => (),
    }

-    match settings.proximity_precision {
-        Setting::Set(ref precision) => builder.set_proximity_precision((*precision).into()),
-        Setting::Reset => builder.reset_proximity_precision(),
-        Setting::NotSet => (),
-    }
-
    match settings.typo_tolerance {
        Setting::Set(ref value) => {
            match value.enabled {
@@ -467,25 +398,13 @@ pub fn apply_settings_to_builder(
        Setting::NotSet => (),
    }

-    match &settings.faceting {
-        Setting::Set(FacetingSettings { max_values_per_facet, sort_facet_values_by }) => {
-            match max_values_per_facet {
-                Setting::Set(val) => builder.set_max_values_per_facet(*val),
-                Setting::Reset => builder.reset_max_values_per_facet(),
-                Setting::NotSet => (),
-            }
-            match sort_facet_values_by {
-                Setting::Set(val) => builder.set_sort_facet_values_by(
-                    val.iter().map(|(name, order)| (name.clone(), (*order).into())).collect(),
-                ),
-                Setting::Reset => builder.reset_sort_facet_values_by(),
-                Setting::NotSet => (),
-            }
-        }
-        Setting::Reset => {
-            builder.reset_max_values_per_facet();
-            builder.reset_sort_facet_values_by();
-        }
+    match settings.faceting {
+        Setting::Set(ref value) => match value.max_values_per_facet {
+            Setting::Set(val) => builder.set_max_values_per_facet(val),
+            Setting::Reset => builder.reset_max_values_per_facet(),
+            Setting::NotSet => (),
+        },
+        Setting::Reset => builder.reset_max_values_per_facet(),
        Setting::NotSet => (),
    }

@@ -498,12 +417,6 @@ pub fn apply_settings_to_builder(
        Setting::Reset => builder.reset_pagination_max_total_hits(),
        Setting::NotSet => (),
    }
-
-    match settings.embedders.clone() {
-        Setting::Set(value) => builder.set_embedder_settings(value),
-        Setting::Reset => builder.reset_embedder_settings(),
-        Setting::NotSet => (),
-    }
 }

 pub fn settings(
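Every block in `apply_settings_to_builder` repeats one idiom: `Setting<T>` is a tri-state where `Set` applies a value, `Reset` restores the default, and `NotSet` (a field absent from the payload) leaves the index untouched. A std-only sketch of the pattern (the `Builder` type here is hypothetical):

#[derive(Debug, Default)]
enum Setting<T> {
    Set(T),
    Reset,
    #[default]
    NotSet,
}

#[derive(Debug, Default)]
struct Builder {
    max_values_per_facet: Option<usize>, // None means "engine default"
}

impl Builder {
    fn apply(&mut self, setting: Setting<usize>) {
        match setting {
            Setting::Set(val) => self.max_values_per_facet = Some(val),
            Setting::Reset => self.max_values_per_facet = None,
            Setting::NotSet => (), // absent field: a no-op, not a reset
        }
    }
}

fn main() {
    let mut builder = Builder::default();
    builder.apply(Setting::Set(50));
    assert_eq!(builder.max_values_per_facet, Some(50));
    builder.apply(Setting::NotSet); // leaves the previous value untouched
    assert_eq!(builder.max_values_per_facet, Some(50));
    builder.apply(Setting::Reset);
    assert_eq!(builder.max_values_per_facet, None);
}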
@@ -530,16 +443,15 @@ pub fn settings(
        })
        .transpose()?
        .unwrap_or_default();

-    let non_separator_tokens = index.non_separator_tokens(rtxn)?.unwrap_or_default();
-    let separator_tokens = index.separator_tokens(rtxn)?.unwrap_or_default();
-    let dictionary = index.dictionary(rtxn)?.unwrap_or_default();
-
    let distinct_field = index.distinct_field(rtxn)?.map(String::from);

-    let proximity_precision = index.proximity_precision(rtxn)?.map(ProximityPrecisionView::from);
-    let synonyms = index.user_defined_synonyms(rtxn)?;
+    // in milli each word in the synonyms map were split on their separator. Since we lost
+    // this information we are going to put space between words.
+    let synonyms = index
+        .synonyms(rtxn)?
+        .iter()
+        .map(|(key, values)| (key.join(" "), values.iter().map(|value| value.join(" ")).collect()))
+        .collect();

    let min_typo_word_len = MinWordSizeTyposSetting {
        one_typo: Setting::Set(index.min_word_len_one_typo(rtxn)?),
@@ -562,35 +474,16 @@ pub fn settings(

    let faceting = FacetingSettings {
        max_values_per_facet: Setting::Set(
-            index
-                .max_values_per_facet(rtxn)?
-                .map(|x| x as usize)
-                .unwrap_or(DEFAULT_VALUES_PER_FACET),
-        ),
-        sort_facet_values_by: Setting::Set(
-            index
-                .sort_facet_values_by(rtxn)?
-                .into_iter()
-                .map(|(name, sort)| (name, sort.into()))
-                .collect(),
+            index.max_values_per_facet(rtxn)?.unwrap_or(DEFAULT_VALUES_PER_FACET),
        ),
    };

    let pagination = PaginationSettings {
        max_total_hits: Setting::Set(
-            index
-                .pagination_max_total_hits(rtxn)?
-                .map(|x| x as usize)
-                .unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS),
+            index.pagination_max_total_hits(rtxn)?.unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS),
        ),
    };

-    let embedders = index
-        .embedding_configs(rtxn)?
-        .into_iter()
-        .map(|(name, config)| (name, Setting::Set(config.into())))
-        .collect();
-
    Ok(Settings {
        displayed_attributes: match displayed_attributes {
            Some(attrs) => Setting::Set(attrs),
@@ -604,22 +497,14 @@ pub fn settings(
        sortable_attributes: Setting::Set(sortable_attributes),
        ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
        stop_words: Setting::Set(stop_words),
-        non_separator_tokens: Setting::Set(non_separator_tokens),
-        separator_tokens: Setting::Set(separator_tokens),
-        dictionary: Setting::Set(dictionary),
        distinct_attribute: match distinct_field {
            Some(field) => Setting::Set(field),
            None => Setting::Reset,
        },
-        proximity_precision: match proximity_precision {
-            Some(precision) => Setting::Set(precision),
-            None => Setting::Reset,
-        },
        synonyms: Setting::Set(synonyms),
        typo_tolerance: Setting::Set(typo_tolerance),
        faceting: Setting::Set(faceting),
        pagination: Setting::Set(pagination),
-        embedders: Setting::Set(embedders),
        _kind: PhantomData,
    })
 }
@@ -720,31 +605,6 @@ impl From<RankingRuleView> for Criterion {
    }
 }

-#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserr, Serialize, Deserialize)]
-#[serde(deny_unknown_fields, rename_all = "camelCase")]
-#[deserr(error = DeserrJsonError<InvalidSettingsProximityPrecision>, rename_all = camelCase, deny_unknown_fields)]
-pub enum ProximityPrecisionView {
-    ByWord,
-    ByAttribute,
-}
-
-impl From<ProximityPrecision> for ProximityPrecisionView {
-    fn from(value: ProximityPrecision) -> Self {
-        match value {
-            ProximityPrecision::ByWord => ProximityPrecisionView::ByWord,
-            ProximityPrecision::ByAttribute => ProximityPrecisionView::ByAttribute,
-        }
-    }
-}
-impl From<ProximityPrecisionView> for ProximityPrecision {
-    fn from(value: ProximityPrecisionView) -> Self {
-        match value {
-            ProximityPrecisionView::ByWord => ProximityPrecision::ByWord,
-            ProximityPrecisionView::ByAttribute => ProximityPrecision::ByAttribute,
-        }
-    }
-}
-
 #[cfg(test)]
 pub(crate) mod test {
    use super::*;
@@ -759,16 +619,11 @@ pub(crate) mod test {
            sortable_attributes: Setting::NotSet,
            ranking_rules: Setting::NotSet,
            stop_words: Setting::NotSet,
-            non_separator_tokens: Setting::NotSet,
-            separator_tokens: Setting::NotSet,
-            dictionary: Setting::NotSet,
            synonyms: Setting::NotSet,
            distinct_attribute: Setting::NotSet,
-            proximity_precision: Setting::NotSet,
            typo_tolerance: Setting::NotSet,
            faceting: Setting::NotSet,
            pagination: Setting::NotSet,
-            embedders: Setting::NotSet,
            _kind: PhantomData::<Unchecked>,
        };

@@ -785,16 +640,11 @@ pub(crate) mod test {
            sortable_attributes: Setting::NotSet,
            ranking_rules: Setting::NotSet,
            stop_words: Setting::NotSet,
-            non_separator_tokens: Setting::NotSet,
-            separator_tokens: Setting::NotSet,
-            dictionary: Setting::NotSet,
            synonyms: Setting::NotSet,
            distinct_attribute: Setting::NotSet,
-            proximity_precision: Setting::NotSet,
            typo_tolerance: Setting::NotSet,
            faceting: Setting::NotSet,
            pagination: Setting::NotSet,
-            embedders: Setting::NotSet,
            _kind: PhantomData::<Unchecked>,
        };

|
|||||||
@@ -14,32 +14,18 @@ default-run = "meilisearch"
 
 [dependencies]
 actix-cors = "0.6.4"
-actix-http = { version = "3.3.1", default-features = false, features = [
-    "compress-brotli",
-    "compress-gzip",
-    "rustls",
-] }
-actix-utils = "3.0.1"
-actix-web = { version = "4.3.1", default-features = false, features = [
-    "macros",
-    "compress-brotli",
-    "compress-gzip",
-    "cookies",
-    "rustls",
-] }
+actix-http = { version = "3.3.1", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] }
+actix-web = { version = "4.3.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
 actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
 anyhow = { version = "1.0.70", features = ["backtrace"] }
 async-stream = "0.3.5"
 async-trait = "0.1.68"
 bstr = "1.4.0"
-byte-unit = { version = "4.0.19", default-features = false, features = [
-    "std",
-    "serde",
-] }
+byte-unit = { version = "4.0.19", default-features = false, features = ["std", "serde"] }
 bytes = "1.4.0"
 clap = { version = "4.2.1", features = ["derive", "env"] }
 crossbeam-channel = "0.5.8"
-deserr = { version = "0.6.0", features = ["actix-web"] }
+deserr = "0.5.0"
 dump = { path = "../dump" }
 either = "1.8.1"
 env_logger = "0.10.0"
@@ -50,33 +36,27 @@ futures = "0.3.28"
 futures-util = "0.3.28"
 http = "0.2.9"
 index-scheduler = { path = "../index-scheduler" }
-indexmap = { version = "2.0.0", features = ["serde"] }
-is-terminal = "0.4.8"
-itertools = "0.11.0"
+indexmap = { version = "1.9.3", features = ["serde-1"] }
+itertools = "0.10.5"
 jsonwebtoken = "8.3.0"
 lazy_static = "1.4.0"
 log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-mimalloc = { version = "0.1.37", default-features = false }
+mimalloc = { version = "0.1.36", default-features = false }
 mime = "0.3.17"
 num_cpus = "1.15.0"
 obkv = "0.2.0"
 once_cell = "1.17.1"
-ordered-float = "3.7.0"
 parking_lot = "0.12.1"
 permissive-json-pointer = { path = "../permissive-json-pointer" }
 pin-project-lite = "0.2.9"
 platform-dirs = "0.3.0"
 prometheus = { version = "0.13.3", features = ["process"] }
-puffin = { version = "0.16.0", features = ["serialization"] }
 rand = "0.8.5"
 rayon = "1.7.0"
 regex = "1.7.3"
-reqwest = { version = "0.11.16", features = [
-    "rustls-tls",
-    "json",
-], default-features = false }
+reqwest = { version = "0.11.16", features = ["rustls-tls", "json"], default-features = false }
 rustls = "0.20.8"
 rustls-pemfile = "1.0.2"
 segment = { version = "0.2.2", optional = true }
@@ -86,16 +66,11 @@ sha2 = "0.10.6"
 siphasher = "0.3.10"
 slice-group-by = "0.3.0"
 static-files = { version = "0.2.3", optional = true }
-sysinfo = "0.29.7"
+sysinfo = "0.28.4"
 tar = "0.4.38"
 tempfile = "3.5.0"
 thiserror = "1.0.40"
-time = { version = "0.3.20", features = [
-    "serde-well-known",
-    "formatting",
-    "parsing",
-    "macros",
-] }
+time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tokio = { version = "1.27.0", features = ["full"] }
 tokio-stream = "0.1.12"
 toml = "0.7.3"
@@ -103,6 +78,8 @@ uuid = { version = "1.3.1", features = ["serde", "v4"] }
 walkdir = "2.3.3"
 yaup = "0.2.1"
 serde_urlencoded = "0.7.1"
+actix-utils = "3.0.1"
+atty = "0.2.14"
 termcolor = "1.2.0"
 
 [dev-dependencies]
@@ -112,7 +89,7 @@ brotli = "3.3.4"
 insta = "1.29.0"
 manifest-dir-macros = "0.1.16"
 maplit = "1.0.2"
-meili-snap = { path = "../meili-snap" }
+meili-snap = {path = "../meili-snap"}
 temp-env = "0.3.3"
 urlencoding = "2.1.2"
 yaup = "0.2.1"
@@ -121,10 +98,7 @@ yaup = "0.2.1"
 anyhow = { version = "1.0.70", optional = true }
 cargo_toml = { version = "0.15.2", optional = true }
 hex = { version = "0.4.3", optional = true }
-reqwest = { version = "0.11.16", features = [
-    "blocking",
-    "rustls-tls",
-], default-features = false, optional = true }
+reqwest = { version = "0.11.16", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
 sha-1 = { version = "0.10.1", optional = true }
 static-files = { version = "0.2.3", optional = true }
 tempfile = { version = "3.5.0", optional = true }
@@ -134,24 +108,13 @@ zip = { version = "0.6.4", optional = true }
 [features]
 default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]
 analytics = ["segment"]
-mini-dashboard = [
-    "actix-web-static-files",
-    "static-files",
-    "anyhow",
-    "cargo_toml",
-    "hex",
-    "reqwest",
-    "sha-1",
-    "tempfile",
-    "zip",
-]
+mini-dashboard = ["actix-web-static-files", "static-files", "anyhow", "cargo_toml", "hex", "reqwest", "sha-1", "tempfile", "zip"]
 chinese = ["meilisearch-types/chinese"]
 hebrew = ["meilisearch-types/hebrew"]
 japanese = ["meilisearch-types/japanese"]
 thai = ["meilisearch-types/thai"]
 greek = ["meilisearch-types/greek"]
-khmer = ["meilisearch-types/khmer"]
 
 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.11/build.zip"
-sha1 = "83cd44ed1e5f97ecb581dc9f958a63f4ccc982d9"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.7/build.zip"
+sha1 = "28b45bf772c84f9a6e16bc1689b393bfce8da7d6"
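The `[features]` table above maps each cargo feature (`analytics`, `mini-dashboard`, ...) to a set of optional dependencies; on the Rust side the corresponding code is then compiled in or out with `cfg` attributes. A sketch of that gating, under the assumption of a crate with a `mini-dashboard` feature declared; both function bodies are invented for illustration:

```rust
// Illustrative only: how an optional dependency declared in Cargo.toml
// (e.g. `static-files = { version = "0.2.3", optional = true }`, pulled in
// by the `mini-dashboard` feature) is typically gated in code.
#[cfg(feature = "mini-dashboard")]
fn dashboard_status() -> &'static str {
    // In the real crate this branch would serve the bundled dashboard assets.
    "serving the embedded mini-dashboard"
}

#[cfg(not(feature = "mini-dashboard"))]
fn dashboard_status() -> &'static str {
    "mini-dashboard disabled at compile time"
}

fn main() {
    // `cargo run` vs `cargo run --no-default-features` selects one branch.
    println!("{}", dashboard_status());
}
```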
@@ -20,7 +20,7 @@ pub struct SearchAggregator;
 #[allow(dead_code)]
 impl SearchAggregator {
     pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
-        Self
+        Self::default()
     }
 
     pub fn succeed(&mut self, _: &dyn Any) {}
@@ -32,24 +32,12 @@ pub struct MultiSearchAggregator;
 #[allow(dead_code)]
 impl MultiSearchAggregator {
     pub fn from_queries(_: &dyn Any, _: &dyn Any) -> Self {
-        Self
+        Self::default()
     }
 
     pub fn succeed(&mut self) {}
 }
 
-#[derive(Default)]
-pub struct FacetSearchAggregator;
-
-#[allow(dead_code)]
-impl FacetSearchAggregator {
-    pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
-        Self
-    }
-
-    pub fn succeed(&mut self, _: &dyn Any) {}
-}
-
 impl MockAnalytics {
     #[allow(clippy::new_ret_no_self)]
     pub fn new(opt: &Opt) -> Arc<dyn Analytics> {
@@ -68,7 +56,6 @@ impl Analytics for MockAnalytics {
     fn get_search(&self, _aggregate: super::SearchAggregator) {}
     fn post_search(&self, _aggregate: super::SearchAggregator) {}
     fn post_multi_search(&self, _aggregate: super::MultiSearchAggregator) {}
-    fn post_facet_search(&self, _aggregate: super::FacetSearchAggregator) {}
     fn add_documents(
         &self,
         _documents_query: &UpdateDocumentsQuery,
@@ -1,5 +1,6 @@
 mod mock_analytics;
-#[cfg(feature = "analytics")]
+// if we are in release mode and the feature analytics was enabled
+#[cfg(all(not(debug_assertions), feature = "analytics"))]
 mod segment_analytics;
 
 use std::fs;
@@ -16,26 +17,23 @@ use serde_json::Value;
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
 use crate::routes::tasks::TasksFilterQuery;
 
-// if the analytics feature is disabled
+// if we are in debug mode OR the analytics feature is disabled
 // the `SegmentAnalytics` point to the mock instead of the real analytics
-#[cfg(not(feature = "analytics"))]
+#[cfg(any(debug_assertions, not(feature = "analytics")))]
 pub type SegmentAnalytics = mock_analytics::MockAnalytics;
-#[cfg(not(feature = "analytics"))]
+#[cfg(any(debug_assertions, not(feature = "analytics")))]
 pub type SearchAggregator = mock_analytics::SearchAggregator;
-#[cfg(not(feature = "analytics"))]
+#[cfg(any(debug_assertions, not(feature = "analytics")))]
 pub type MultiSearchAggregator = mock_analytics::MultiSearchAggregator;
-#[cfg(not(feature = "analytics"))]
-pub type FacetSearchAggregator = mock_analytics::FacetSearchAggregator;
 
-// if the feature analytics is enabled we use the real analytics
-#[cfg(feature = "analytics")]
+// if we are in release mode and the feature analytics was enabled
+// we use the real analytics
+#[cfg(all(not(debug_assertions), feature = "analytics"))]
 pub type SegmentAnalytics = segment_analytics::SegmentAnalytics;
-#[cfg(feature = "analytics")]
+#[cfg(all(not(debug_assertions), feature = "analytics"))]
 pub type SearchAggregator = segment_analytics::SearchAggregator;
-#[cfg(feature = "analytics")]
+#[cfg(all(not(debug_assertions), feature = "analytics"))]
 pub type MultiSearchAggregator = segment_analytics::MultiSearchAggregator;
-#[cfg(feature = "analytics")]
-pub type FacetSearchAggregator = segment_analytics::FacetSearchAggregator;
 
 /// The Meilisearch config dir:
 /// `~/.config/Meilisearch` on *NIX or *BSD.
@@ -90,9 +88,6 @@ pub trait Analytics: Sync + Send {
     /// This method should be called to aggregate a post array of searches
     fn post_multi_search(&self, aggregate: MultiSearchAggregator);
 
-    /// This method should be called to aggregate post facet values searches
-    fn post_facet_search(&self, aggregate: FacetSearchAggregator);
-
     // this method should be called to aggregate a add documents request
     fn add_documents(
         &self,
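The hunk above selects between the real Segment-backed analytics and a no-op mock entirely at compile time, by re-pointing public type aliases under mutually exclusive `cfg` conditions. A self-contained sketch of the same technique; the module and type names are invented, and the `telemetry` feature would need to be declared in the sketch's own Cargo.toml:

```rust
// Compile-time selection between a real and a mock implementation by
// re-pointing a type alias. Names are illustrative, not Meilisearch's.
mod real {
    pub struct Telemetry;
    impl Telemetry {
        pub fn new() -> Self { Telemetry }
        pub fn publish(&self, event: &str) { println!("sending {event} to the collector"); }
    }
}

mod mock {
    pub struct Telemetry;
    impl Telemetry {
        pub fn new() -> Self { Telemetry }
        pub fn publish(&self, event: &str) { println!("(mock) dropping {event}"); }
    }
}

// Debug builds, or builds without the feature, get the mock...
#[cfg(any(debug_assertions, not(feature = "telemetry")))]
pub type Telemetry = mock::Telemetry;
// ...while release builds with the feature enabled get the real thing.
#[cfg(all(not(debug_assertions), feature = "telemetry"))]
pub type Telemetry = real::Telemetry;

fn main() {
    // Callers are oblivious: the alias resolves to exactly one type.
    Telemetry::new().publish("document.added");
}
```

Because the two conditions are exhaustive and disjoint, exactly one alias is compiled; there is no runtime dispatch and no dead mock code in release binaries.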
File diff suppressed because it is too large
@@ -51,8 +51,6 @@ pub enum MeilisearchHttpError {
     DocumentFormat(#[from] DocumentFormatError),
     #[error(transparent)]
     Join(#[from] JoinError),
-    #[error("Invalid request: missing `hybrid` parameter when both `q` and `vector` are present.")]
-    MissingSearchHybrid,
 }
 
 impl ErrorCode for MeilisearchHttpError {
@@ -76,7 +74,6 @@ impl ErrorCode for MeilisearchHttpError {
             MeilisearchHttpError::FileStore(_) => Code::Internal,
             MeilisearchHttpError::DocumentFormat(e) => e.error_code(),
             MeilisearchHttpError::Join(_) => Code::Internal,
-            MeilisearchHttpError::MissingSearchHybrid => Code::MissingSearchHybrid,
         }
     }
 }
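The pattern visible above, a `thiserror` enum whose variants are mapped to stable public error codes in a separate match, keeps wire-format codes decoupled from internal error types. A minimal self-contained sketch; the `Code` variants and the error messages are invented:

```rust
use thiserror::Error; // thiserror = "1"

// Stable, user-facing error codes; illustrative values only.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Code {
    Internal,
    InvalidRequest,
}

#[derive(Debug, Error)]
pub enum HttpError {
    // `transparent` forwards Display and source() to the wrapped error,
    // and `#[from]` derives a free `From<std::io::Error>` conversion.
    #[error(transparent)]
    Io(#[from] std::io::Error),
    #[error("invalid request: {0}")]
    BadInput(String),
}

impl HttpError {
    pub fn error_code(&self) -> Code {
        match self {
            HttpError::Io(_) => Code::Internal,
            HttpError::BadInput(_) => Code::InvalidRequest,
        }
    }
}

fn main() {
    let err: HttpError =
        std::io::Error::new(std::io::ErrorKind::Other, "disk on fire").into();
    println!("{err} -> {:?}", err.error_code());
}
```

The exhaustive match (no `_` arm) is deliberate: adding a variant forces the author to pick a public code for it.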
@@ -71,40 +71,3 @@ impl Stream for Payload {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use actix_http::encoding::Decoder as Decompress;
-    use actix_http::BoxedPayloadStream;
-    use bytes::Bytes;
-    use futures_util::StreamExt;
-    use meili_snap::snapshot;
-
-    use super::*;
-
-    #[actix_rt::test]
-    async fn payload_to_large() {
-        let stream = futures::stream::iter(vec![
-            Ok(Bytes::from("1")),
-            Ok(Bytes::from("2")),
-            Ok(Bytes::from("3")),
-            Ok(Bytes::from("4")),
-        ]);
-        let boxed_stream: BoxedPayloadStream = Box::pin(stream);
-        let actix_payload = dev::Payload::from(boxed_stream);
-
-        let payload = Payload {
-            limit: 3,
-            remaining: 3,
-            payload: Decompress::new(actix_payload, actix_http::ContentEncoding::Identity),
-        };
-
-        let mut enumerated_payload_stream = payload.enumerate();
-
-        while let Some((idx, chunk)) = enumerated_payload_stream.next().await {
-            if idx == 3 {
-                snapshot!(chunk.unwrap_err(), @"The provided payload reached the size limit. The maximum accepted payload size is 3 B.");
-            }
-        }
-    }
-}
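The removed test exercises a `Payload` wrapper that tracks a byte budget (`limit`/`remaining`) and fails the stream once the budget is exhausted. A minimal synchronous sketch of that bookkeeping, with a plain iterator standing in for the async body stream and the actix machinery left out:

```rust
// Iterator-based sketch of the payload size-limit bookkeeping; the real
// type wraps an async body stream, but the budget logic is the same.
struct LimitedPayload<I> {
    inner: I,
    limit: usize,     // the configured maximum, used only in the message
    remaining: usize, // bytes still allowed through
}

impl<I: Iterator<Item = Vec<u8>>> Iterator for LimitedPayload<I> {
    type Item = Result<Vec<u8>, String>;

    fn next(&mut self) -> Option<Self::Item> {
        let chunk = self.inner.next()?;
        if chunk.len() > self.remaining {
            return Some(Err(format!(
                "The provided payload reached the size limit. \
                 The maximum accepted payload size is {} B.",
                self.limit
            )));
        }
        self.remaining -= chunk.len();
        Some(Ok(chunk))
    }
}

fn main() {
    let chunks = vec![b"1".to_vec(), b"2".to_vec(), b"3".to_vec(), b"4".to_vec()];
    let mut payload =
        LimitedPayload { inner: chunks.into_iter(), limit: 3, remaining: 3 };
    assert!(payload.next().unwrap().is_ok()); // 1 byte used
    assert!(payload.next().unwrap().is_ok()); // 2 bytes used
    assert!(payload.next().unwrap().is_ok()); // 3 bytes used, budget exhausted
    assert!(payload.next().unwrap().is_err()); // fourth chunk exceeds the limit
}
```

This reproduces the removed test's scenario: four one-byte chunks against a 3 B limit, erroring on the fourth.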
@@ -111,10 +111,13 @@ pub fn create_app(
                 analytics.clone(),
             )
         })
-        .configure(routes::configure)
+        .configure(|cfg| routes::configure(cfg, opt.experimental_enable_metrics))
         .configure(|s| dashboard(s, enable_dashboard));
 
-    let app = app.wrap(middleware::RouteMetrics);
+    let app = app.wrap(actix_web::middleware::Condition::new(
+        opt.experimental_enable_metrics,
+        middleware::RouteMetrics,
+    ));
     app.wrap(
         Cors::default()
             .send_wildcard()
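`actix_web::middleware::Condition`, used on the `+` side above, wraps a middleware so it is only attached when a boolean known at app-construction time is true. A minimal runnable sketch with actix-web's stock `Logger` standing in for `RouteMetrics`; the env-var flag is an invented stand-in for `Opt::experimental_enable_metrics`:

```rust
use actix_web::middleware::{Condition, Logger};
use actix_web::{web, App, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Stand-in for the `experimental_enable_metrics` option.
    let enable_logging = std::env::var("ENABLE_LOGGING").is_ok();

    HttpServer::new(move || {
        App::new()
            // Logger plays the role of RouteMetrics: it only enters the
            // request pipeline when the flag is true.
            .wrap(Condition::new(enable_logging, Logger::default()))
            .route("/health", web::get().to(|| async { "ok" }))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```

The benefit over an `if` inside the middleware is that a disabled middleware costs nothing per request, it simply is not in the chain.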
@@ -218,7 +221,6 @@ fn open_or_create_database_unchecked(
     // we don't want to create anything in the data.ms yet, thus we
     // wrap our two builders in a closure that'll be executed later.
     let auth_controller = AuthController::new(&opt.db_path, &opt.master_key);
-    let instance_features = opt.to_instance_features();
     let index_scheduler_builder = || -> anyhow::Result<_> {
         Ok(IndexScheduler::new(IndexSchedulerOptions {
             version_file_path: opt.db_path.join(VERSION_FILE_NAME),
@@ -234,10 +236,8 @@ fn open_or_create_database_unchecked(
             indexer_config: (&opt.indexer_options).try_into()?,
             autobatching_enabled: true,
             max_number_of_tasks: 1_000_000,
-            max_number_of_batched_tasks: opt.experimental_max_number_of_batched_tasks,
             index_growth_amount: byte_unit::Byte::from_str("10GiB").unwrap().get_bytes() as usize,
             index_count: DEFAULT_INDEX_COUNT,
-            instance_features,
         })?)
     };
 
@@ -307,16 +307,12 @@ fn import_dump(
         keys.push(key);
     }
 
-    // 3. Import the runtime features.
-    let features = dump_reader.features()?.unwrap_or_default();
-    index_scheduler.put_runtime_features(features)?;
-
     let indexer_config = index_scheduler.indexer_config();
 
     // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
     // try to process tasks while we're trying to import the indexes.
 
-    // 4. Import the indexes.
+    // 3. Import the indexes.
     for index_reader in dump_reader.indexes()? {
         let mut index_reader = index_reader?;
         let metadata = index_reader.metadata();
@@ -328,19 +324,19 @@ fn import_dump(
         let mut wtxn = index.write_txn()?;
 
         let mut builder = milli::update::Settings::new(&mut wtxn, &index, indexer_config);
-        // 4.1 Import the primary key if there is one.
+        // 3.1 Import the primary key if there is one.
         if let Some(ref primary_key) = metadata.primary_key {
             builder.set_primary_key(primary_key.to_string());
         }
 
-        // 4.2 Import the settings.
+        // 3.2 Import the settings.
         log::info!("Importing the settings.");
         let settings = index_reader.settings()?;
         apply_settings_to_builder(&settings, &mut builder);
         builder.execute(|indexing_step| log::debug!("update: {:?}", indexing_step), || false)?;
 
-        // 4.3 Import the documents.
-        // 4.3.1 We need to recreate the grenad+obkv format accepted by the index.
+        // 3.3 Import the documents.
+        // 3.3.1 We need to recreate the grenad+obkv format accepted by the index.
         log::info!("Importing the documents.");
         let file = tempfile::tempfile()?;
         let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
@@ -351,7 +347,7 @@ fn import_dump(
         // This flush the content of the batch builder.
         let file = builder.into_inner()?.into_inner()?;
 
-        // 4.3.2 We feed it to the milli index.
+        // 3.3.2 We feed it to the milli index.
         let reader = BufReader::new(file);
         let reader = DocumentsBatchReader::from_reader(reader)?;
 
@@ -363,7 +359,7 @@ fn import_dump(
                 update_method: IndexDocumentsMethod::ReplaceDocuments,
                 ..Default::default()
             },
-            |indexing_step| log::trace!("update: {:?}", indexing_step),
+            |indexing_step| log::debug!("update: {:?}", indexing_step),
             || false,
         )?;
 
@@ -376,7 +372,7 @@ fn import_dump(
 
     let mut index_scheduler_dump = index_scheduler.register_dumped_task()?;
 
-    // 5. Import the tasks.
+    // 4. Import the tasks.
     for ret in dump_reader.tasks()? {
         let (task, file) = ret?;
         index_scheduler_dump.register_dumped_task(task, file)?;
@@ -398,7 +394,6 @@ pub fn configure_data(
         .app_data(web::Data::from(analytics))
         .app_data(
             web::JsonConfig::default()
-                .limit(http_payload_size_limit)
                 .content_type(|mime| mime == mime::APPLICATION_JSON)
                 .error_handler(|err, req: &HttpRequest| match err {
                     JsonPayloadError::ContentType => match req.headers().get(CONTENT_TYPE) {
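Step 3.3 above stages documents through an anonymous temp file: write through a `BufWriter`, flush by unwrapping the layers with `into_inner()`, then read the same handle back through a `BufReader`. A self-contained sketch of that round-trip, with plain bytes replacing the grenad+obkv batch format and an explicit rewind added (the real code hands the file to milli, which manages its own reads):

```rust
use std::io::{BufReader, BufWriter, Read, Seek, SeekFrom, Write};

fn main() -> std::io::Result<()> {
    // Anonymous temp file, deleted automatically on drop (tempfile = "3").
    let file = tempfile::tempfile()?;

    // Write through a buffer, as DocumentsBatchBuilder does in the diff.
    let mut writer = BufWriter::new(file);
    writer.write_all(b"{\"id\": 1}")?;

    // `into_inner()` flushes the BufWriter and hands the file back; the
    // diff unwraps two layers this way (the builder, then the writer).
    let mut file = writer.into_inner()?;

    // Rewind before feeding the same handle to a reader.
    file.seek(SeekFrom::Start(0))?;
    let mut reader = BufReader::new(file);
    let mut contents = String::new();
    reader.read_to_string(&mut contents)?;
    assert_eq!(contents, "{\"id\": 1}");
    Ok(())
}
```

Using an unnamed `tempfile()` rather than a named path means the staging file can never leak: the OS reclaims it even if the process crashes mid-import.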
@@ -1,5 +1,5 @@
 use std::env;
-use std::io::{stderr, Write};
+use std::io::Write;
 use std::path::PathBuf;
 use std::sync::Arc;
 
@@ -7,7 +7,6 @@ use actix_web::http::KeepAlive;
 use actix_web::web::Data;
 use actix_web::HttpServer;
 use index_scheduler::IndexScheduler;
-use is_terminal::IsTerminal;
 use meilisearch::analytics::Analytics;
 use meilisearch::{analytics, create_app, prototype_name, setup_meilisearch, Opt};
 use meilisearch_auth::{generate_master_key, AuthController, MASTER_KEY_MIN_SIZE};
@@ -19,11 +18,7 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
 /// does all the setup before meilisearch is launched
 fn setup(opt: &Opt) -> anyhow::Result<()> {
     let mut log_builder = env_logger::Builder::new();
-    let log_filters = format!(
-        "{},h2=warn,hyper=warn,tokio_util=warn,tracing=warn,rustls=warn,mio=warn,reqwest=warn",
-        opt.log_level
-    );
-    log_builder.parse_filters(&log_filters);
+    log_builder.parse_filters(&opt.log_level.to_string());
 
     log_builder.init();
 
@@ -191,10 +186,9 @@ Anonymous telemetry:\t\"Enabled\""
     }
 
     eprintln!();
-    eprintln!("Check out Meilisearch Cloud!\thttps://www.meilisearch.com/cloud?utm_campaign=oss&utm_source=engine&utm_medium=cli");
-    eprintln!("Documentation:\t\t\thttps://www.meilisearch.com/docs");
-    eprintln!("Source code:\t\t\thttps://github.com/meilisearch/meilisearch");
-    eprintln!("Discord:\t\t\thttps://discord.meilisearch.com");
+    eprintln!("Documentation:\t\thttps://www.meilisearch.com/docs");
+    eprintln!("Source code:\t\thttps://github.com/meilisearch/meilisearch");
+    eprintln!("Discord:\t\thttps://discord.meilisearch.com");
     eprintln!();
 }
 
@@ -202,7 +196,8 @@ const WARNING_BG_COLOR: Option<Color> = Some(Color::Ansi256(178));
 const WARNING_FG_COLOR: Option<Color> = Some(Color::Ansi256(0));
 
 fn print_master_key_too_short_warning() {
-    let choice = if stderr().is_terminal() { ColorChoice::Auto } else { ColorChoice::Never };
+    let choice =
+        if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
     let mut stderr = StandardStream::stderr(choice);
     stderr
         .set_color(
@@ -227,7 +222,8 @@ fn print_master_key_too_short_warning() {
 }
 
 fn print_missing_master_key_warning() {
-    let choice = if stderr().is_terminal() { ColorChoice::Auto } else { ColorChoice::Never };
+    let choice =
+        if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
     let mut stderr = StandardStream::stderr(choice);
     stderr
         .set_color(
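The removed `format!` call above builds an `env_logger` filter string: the user-chosen level applies globally, while noisy dependency crates are pinned to `warn`. A runnable sketch of the same technique, with the level hard-coded to `debug` instead of coming from a CLI option:

```rust
// env_logger = "0.10", log = "0.4"
fn main() {
    // Global level first; per-crate overrides follow, exactly as in the
    // removed `format!` call (the "debug" level here is an assumption).
    let log_filters = "debug,h2=warn,hyper=warn,tokio_util=warn,rustls=warn";

    let mut builder = env_logger::Builder::new();
    builder.parse_filters(log_filters);
    builder.init();

    log::debug!("visible: matches the global `debug` level");
    // Anything logged under the `hyper` target is capped at `warn`:
    log::info!(target: "hyper", "suppressed: hyper is filtered to warn");
}
```

Without the overrides, setting `--log-level debug` would also flood the output with HTTP/2 and TLS internals from `h2`, `hyper`, and `rustls`.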
@@ -16,7 +16,7 @@ fn create_buckets() -> [f64; 29] {
 }
 
 lazy_static! {
-    pub static ref MEILISEARCH_HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: [f64; 29] = create_buckets();
+    pub static ref HTTP_RESPONSE_TIME_CUSTOM_BUCKETS: [f64; 29] = create_buckets();
     pub static ref MEILISEARCH_HTTP_REQUESTS_TOTAL: IntCounterVec = register_int_counter_vec!(
         opts!("meilisearch_http_requests_total", "Meilisearch HTTP requests total"),
         &["method", "path"]
@@ -39,10 +39,10 @@ lazy_static! {
     )
     .expect("Can't create a metric");
     pub static ref MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS: HistogramVec = register_histogram_vec!(
-        "meilisearch_http_response_time_seconds",
-        "Meilisearch HTTP response times",
+        "http_response_time_seconds",
+        "HTTP response times",
         &["method", "path"],
-        MEILISEARCH_HTTP_RESPONSE_TIME_CUSTOM_BUCKETS.to_vec()
+        HTTP_RESPONSE_TIME_CUSTOM_BUCKETS.to_vec()
     )
     .expect("Can't create a metric");
     pub static ref MEILISEARCH_NB_TASKS: IntGaugeVec = register_int_gauge_vec!(
@@ -50,10 +50,4 @@ lazy_static! {
         &["kind", "value"]
    )
     .expect("Can't create a metric");
-    pub static ref MEILISEARCH_LAST_UPDATE: IntGauge =
-        register_int_gauge!(opts!("meilisearch_last_update", "Meilisearch Last Update"))
-            .expect("Can't create a metric");
-    pub static ref MEILISEARCH_IS_INDEXING: IntGauge =
-        register_int_gauge!(opts!("meilisearch_is_indexing", "Meilisearch Is Indexing"))
-            .expect("Can't create a metric");
 }
@@ -3,10 +3,8 @@
 use std::future::{ready, Ready};
 
 use actix_web::dev::{self, Service, ServiceRequest, ServiceResponse, Transform};
-use actix_web::web::Data;
 use actix_web::Error;
 use futures_util::future::LocalBoxFuture;
-use index_scheduler::IndexScheduler;
 use prometheus::HistogramTimer;
 
 pub struct RouteMetrics;
@@ -49,27 +47,19 @@ where
 
     fn call(&self, req: ServiceRequest) -> Self::Future {
         let mut histogram_timer: Option<HistogramTimer> = None;
-        // calling unwrap here is safe because index scheduler is added to app data while creating actix app.
-        // also, the tests will fail if this is not present.
-        let index_scheduler = req.app_data::<Data<IndexScheduler>>().unwrap();
-        let features = index_scheduler.features();
-
-        if features.check_metrics().is_ok() {
-            let request_path = req.path();
-            let is_registered_resource = req.resource_map().has_resource(request_path);
-            if is_registered_resource {
-                let request_method = req.method().to_string();
-                histogram_timer = Some(
-                    crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
-                        .with_label_values(&[&request_method, request_path])
-                        .start_timer(),
-                );
-                crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
-                    .with_label_values(&[&request_method, request_path])
-                    .inc();
-            }
-        };
+        let request_path = req.path();
+        let is_registered_resource = req.resource_map().has_resource(request_path);
+        if is_registered_resource {
+            let request_method = req.method().to_string();
+            histogram_timer = Some(
+                crate::metrics::MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS
+                    .with_label_values(&[&request_method, request_path])
+                    .start_timer(),
+            );
+            crate::metrics::MEILISEARCH_HTTP_REQUESTS_TOTAL
+                .with_label_values(&[&request_method, request_path])
+                .inc();
+        }
 
         let fut = self.service.call(req);
 
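The middleware above relies on prometheus's `HistogramTimer`: started before the handler runs, it records the elapsed seconds into the histogram when it is explicitly stopped or simply dropped. A minimal sketch outside of actix (prometheus = "0.13"; the metric name here is invented so it does not collide with the real ones):

```rust
use prometheus::{register_histogram_vec, HistogramVec};

fn main() {
    // Same shape as MEILISEARCH_HTTP_RESPONSE_TIME_SECONDS in the diff,
    // registered on the default registry (default buckets for brevity).
    let response_time: HistogramVec = register_histogram_vec!(
        "demo_http_response_time_seconds",
        "Demo HTTP response times",
        &["method", "path"]
    )
    .expect("Can't create a metric");

    // Start a timer for one labelled request...
    let timer = response_time.with_label_values(&["GET", "/health"]).start_timer();
    std::thread::sleep(std::time::Duration::from_millis(10));
    // ...and record the observation. Dropping the timer records it too,
    // which is why the middleware can just let it fall out of scope after
    // the inner service future completes.
    timer.observe_duration();

    let families = prometheus::gather();
    println!("{} metric families gathered", families.len());
}
```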
@@ -12,7 +12,6 @@ use std::{env, fmt, fs};
 
 use byte_unit::{Byte, ByteError};
 use clap::Parser;
-use meilisearch_types::features::InstanceTogglableFeatures;
 use meilisearch_types::milli::update::IndexerConfig;
 use rustls::server::{
     AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient, ServerSessionMemoryCache,
@@ -28,7 +27,7 @@ const MEILI_DB_PATH: &str = "MEILI_DB_PATH";
 const MEILI_HTTP_ADDR: &str = "MEILI_HTTP_ADDR";
 const MEILI_MASTER_KEY: &str = "MEILI_MASTER_KEY";
 const MEILI_ENV: &str = "MEILI_ENV";
-#[cfg(feature = "analytics")]
+#[cfg(all(not(debug_assertions), feature = "analytics"))]
 const MEILI_NO_ANALYTICS: &str = "MEILI_NO_ANALYTICS";
 const MEILI_HTTP_PAYLOAD_SIZE_LIMIT: &str = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT";
 const MEILI_SSL_CERT_PATH: &str = "MEILI_SSL_CERT_PATH";
@@ -51,8 +50,6 @@ const MEILI_LOG_LEVEL: &str = "MEILI_LOG_LEVEL";
 const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS";
 const MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE: &str =
     "MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE";
-const MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS: &str =
-    "MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS";
 
 const DEFAULT_CONFIG_FILE_PATH: &str = "./config.toml";
 const DEFAULT_DB_PATH: &str = "./data.ms";
@@ -161,7 +158,7 @@ pub struct Opt {
     /// Meilisearch automatically collects data from all instances that do not opt out using this flag.
     /// All gathered data is used solely for the purpose of improving Meilisearch, and can be deleted
     /// at any time.
-    #[cfg(feature = "analytics")]
+    #[cfg(all(not(debug_assertions), feature = "analytics"))]
     #[serde(default)] // we can't send true
     #[clap(long, env = MEILI_NO_ANALYTICS)]
     pub no_analytics: bool,
@@ -303,11 +300,6 @@ pub struct Opt {
     #[serde(default)]
     pub experimental_reduce_indexing_memory_usage: bool,
 
-    /// Experimentally reduces the maximum number of tasks that will be processed at once, see: <https://github.com/orgs/meilisearch/discussions/713>
-    #[clap(long, env = MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS, default_value_t = default_limit_batched_tasks())]
-    #[serde(default = "default_limit_batched_tasks")]
-    pub experimental_max_number_of_batched_tasks: usize,
-
     #[serde(flatten)]
     #[clap(flatten)]
     pub indexer_options: IndexerOpts,
@@ -378,7 +370,6 @@ impl Opt {
             max_index_size: _,
             max_task_db_size: _,
             http_payload_size_limit,
-            experimental_max_number_of_batched_tasks,
             ssl_cert_path,
             ssl_key_path,
             ssl_auth_path,
@@ -398,10 +389,10 @@ impl Opt {
             ignore_missing_dump: _,
             ignore_dump_if_db_exists: _,
            config_file_path: _,
-            #[cfg(feature = "analytics")]
+            #[cfg(all(not(debug_assertions), feature = "analytics"))]
            no_analytics,
-            experimental_enable_metrics,
-            experimental_reduce_indexing_memory_usage,
+            experimental_enable_metrics: enable_metrics_route,
+            experimental_reduce_indexing_memory_usage: reduce_indexing_memory_usage,
         } = self;
         export_to_env_if_not_present(MEILI_DB_PATH, db_path);
         export_to_env_if_not_present(MEILI_HTTP_ADDR, http_addr);
@@ -409,7 +400,7 @@ impl Opt {
             export_to_env_if_not_present(MEILI_MASTER_KEY, master_key);
         }
         export_to_env_if_not_present(MEILI_ENV, env);
-        #[cfg(feature = "analytics")]
+        #[cfg(all(not(debug_assertions), feature = "analytics"))]
         {
             export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
         }
@@ -417,10 +408,6 @@ impl Opt {
             MEILI_HTTP_PAYLOAD_SIZE_LIMIT,
            http_payload_size_limit.to_string(),
         );
-        export_to_env_if_not_present(
-            MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS,
-            experimental_max_number_of_batched_tasks.to_string(),
-        );
         if let Some(ssl_cert_path) = ssl_cert_path {
             export_to_env_if_not_present(MEILI_SSL_CERT_PATH, ssl_cert_path);
         }
@@ -445,11 +432,11 @@ impl Opt {
         export_to_env_if_not_present(MEILI_LOG_LEVEL, log_level.to_string());
         export_to_env_if_not_present(
             MEILI_EXPERIMENTAL_ENABLE_METRICS,
-            experimental_enable_metrics.to_string(),
+            enable_metrics_route.to_string(),
         );
         export_to_env_if_not_present(
             MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE,
-            experimental_reduce_indexing_memory_usage.to_string(),
+            reduce_indexing_memory_usage.to_string(),
         );
         indexer_options.export_to_env();
     }
@@ -499,10 +486,6 @@ impl Opt {
             Ok(None)
         }
     }
-
-    pub(crate) fn to_instance_features(&self) -> InstanceTogglableFeatures {
-        InstanceTogglableFeatures { metrics: self.experimental_enable_metrics }
-    }
 }
 
 #[derive(Debug, Default, Clone, Parser, Deserialize)]
@@ -739,10 +722,6 @@ fn default_http_payload_size_limit() -> Byte {
     Byte::from_str(DEFAULT_HTTP_PAYLOAD_SIZE_LIMIT).unwrap()
 }
 
-fn default_limit_batched_tasks() -> usize {
-    usize::MAX
-}
-
 fn default_snapshot_dir() -> PathBuf {
     PathBuf::from(DEFAULT_SNAPSHOT_DIR)
 }
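The removed `experimental_max_number_of_batched_tasks` option shows the house pattern for a new flag: a clap derive field with a long switch, an env-var fallback, and a shared default function (which the real code also feeds to serde for the config file; the serde half is omitted here for brevity). A self-contained sketch of the clap wiring, with clap = "4" and the `derive` and `env` features, reusing the real names for illustration:

```rust
use clap::Parser;

fn default_limit_batched_tasks() -> usize {
    usize::MAX
}

#[derive(Debug, Parser)]
struct Opt {
    /// Maximum number of tasks processed in a single batch.
    /// Resolution order: CLI flag, then the env var, then the default.
    #[clap(
        long,
        env = "MEILI_EXPERIMENTAL_MAX_NUMBER_OF_BATCHED_TASKS",
        default_value_t = default_limit_batched_tasks()
    )]
    experimental_max_number_of_batched_tasks: usize,
}

fn main() {
    let opt = Opt::parse();
    println!("batch limit: {}", opt.experimental_max_number_of_batched_tasks);
}
```

Defaulting to `usize::MAX` makes "no limit" the out-of-the-box behavior, so the flag only constrains batching when the operator opts in.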
@@ -1,97 +0,0 @@
-use actix_web::web::{self, Data};
-use actix_web::{HttpRequest, HttpResponse};
-use deserr::actix_web::AwebJson;
-use deserr::Deserr;
-use index_scheduler::IndexScheduler;
-use log::debug;
-use meilisearch_types::deserr::DeserrJsonError;
-use meilisearch_types::error::ResponseError;
-use meilisearch_types::keys::actions;
-use serde_json::json;
-
-use crate::analytics::Analytics;
-use crate::extractors::authentication::policies::ActionPolicy;
-use crate::extractors::authentication::GuardedData;
-use crate::extractors::sequential_extractor::SeqHandler;
-
-pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(
-        web::resource("")
-            .route(web::get().to(SeqHandler(get_features)))
-            .route(web::patch().to(SeqHandler(patch_features))),
-    );
-}
-
-async fn get_features(
-    index_scheduler: GuardedData<
-        ActionPolicy<{ actions::EXPERIMENTAL_FEATURES_GET }>,
-        Data<IndexScheduler>,
-    >,
-    req: HttpRequest,
-    analytics: Data<dyn Analytics>,
-) -> HttpResponse {
-    let features = index_scheduler.features();
-
-    analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
-    debug!("returns: {:?}", features.runtime_features());
-    HttpResponse::Ok().json(features.runtime_features())
-}
-
-#[derive(Debug, Deserr)]
-#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
-pub struct RuntimeTogglableFeatures {
-    #[deserr(default)]
-    pub score_details: Option<bool>,
-    #[deserr(default)]
-    pub vector_store: Option<bool>,
-    #[deserr(default)]
-    pub metrics: Option<bool>,
-    #[deserr(default)]
-    pub export_puffin_reports: Option<bool>,
-}
-
-async fn patch_features(
-    index_scheduler: GuardedData<
-        ActionPolicy<{ actions::EXPERIMENTAL_FEATURES_UPDATE }>,
-        Data<IndexScheduler>,
-    >,
-    new_features: AwebJson<RuntimeTogglableFeatures, DeserrJsonError>,
-    req: HttpRequest,
-    analytics: Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    let features = index_scheduler.features();
-
-    let old_features = features.runtime_features();
-    let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
-        score_details: new_features.0.score_details.unwrap_or(old_features.score_details),
-        vector_store: new_features.0.vector_store.unwrap_or(old_features.vector_store),
-        metrics: new_features.0.metrics.unwrap_or(old_features.metrics),
-        export_puffin_reports: new_features
-            .0
-            .export_puffin_reports
-            .unwrap_or(old_features.export_puffin_reports),
-    };
-
-    // explicitly destructure for analytics rather than using the `Serialize` implementation, because
-    // the it renames to camelCase, which we don't want for analytics.
-    // **Do not** ignore fields with `..` or `_` here, because we want to add them in the future.
-    let meilisearch_types::features::RuntimeTogglableFeatures {
-        score_details,
-        vector_store,
-        metrics,
-        export_puffin_reports,
-    } = new_features;
-
-    analytics.publish(
-        "Experimental features Updated".to_string(),
-        json!({
-            "score_details": score_details,
-            "vector_store": vector_store,
-            "metrics": metrics,
-            "export_puffin_reports": export_puffin_reports,
-        }),
-        Some(&req),
-    );
-    index_scheduler.put_runtime_features(new_features)?;
-    Ok(HttpResponse::Ok().json(new_features))
-}
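The removed `patch_features` handler implements common PATCH semantics: every incoming field is optional, and absent fields fall back to the stored value via `unwrap_or`. A self-contained sketch of that merge step, with field names mirroring the diff and everything around them simplified:

```rust
// Sketch of the PATCH merge used by the removed handler: optional incoming
// fields, `unwrap_or` against the stored state.
#[derive(Debug, Clone, Copy, Default)]
struct RuntimeTogglableFeatures {
    score_details: bool,
    vector_store: bool,
    metrics: bool,
}

#[derive(Debug, Default)]
struct Patch {
    score_details: Option<bool>,
    vector_store: Option<bool>,
    metrics: Option<bool>,
}

fn apply(old: RuntimeTogglableFeatures, patch: Patch) -> RuntimeTogglableFeatures {
    RuntimeTogglableFeatures {
        score_details: patch.score_details.unwrap_or(old.score_details),
        vector_store: patch.vector_store.unwrap_or(old.vector_store),
        metrics: patch.metrics.unwrap_or(old.metrics),
    }
}

fn main() {
    let old = RuntimeTogglableFeatures { score_details: true, ..Default::default() };
    // Only `metrics` is present in the request body; the rest is untouched.
    let patched = apply(old, Patch { metrics: Some(true), ..Default::default() });
    assert!(patched.score_details && patched.metrics && !patched.vector_store);
}
```

This is why the handler distinguishes `Option<bool>` in the request type from plain `bool` in the stored type: `None` means "leave as is", not "false".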
@@ -3,7 +3,7 @@ use std::io::ErrorKind;
 use actix_web::http::header::CONTENT_TYPE;
 use actix_web::web::Data;
 use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
-use bstr::ByteSlice as _;
+use bstr::ByteSlice;
 use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use deserr::Deserr;
 use futures::StreamExt;
@@ -612,8 +612,8 @@ fn retrieve_document<S: AsRef<str>>(
     let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
 
     let internal_id = index
-        .external_documents_ids()
-        .get(&txn, doc_id)?
+        .external_documents_ids(&txn)?
+        .get(doc_id.as_bytes())
         .ok_or_else(|| MeilisearchHttpError::DocumentNotFound(doc_id.to_string()))?;
 
     let document = index
@@ -1,128 +0,0 @@
-use actix_web::web::Data;
-use actix_web::{web, HttpRequest, HttpResponse};
-use deserr::actix_web::AwebJson;
-use index_scheduler::IndexScheduler;
-use log::debug;
-use meilisearch_types::deserr::DeserrJsonError;
-use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::error::ResponseError;
-use meilisearch_types::index_uid::IndexUid;
-use serde_json::Value;
-
-use crate::analytics::{Analytics, FacetSearchAggregator};
-use crate::extractors::authentication::policies::*;
-use crate::extractors::authentication::GuardedData;
-use crate::search::{
-    add_search_rules, perform_facet_search, HybridQuery, MatchingStrategy, SearchQuery,
-    DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG,
-    DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
-};
-
-pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::post().to(search)));
-}
-
-/// # Important
-///
-/// Intentionally don't use `deny_unknown_fields` to ignore search parameters sent by user
-#[derive(Debug, Clone, Default, PartialEq, deserr::Deserr)]
-#[deserr(error = DeserrJsonError, rename_all = camelCase)]
-pub struct FacetSearchQuery {
-    #[deserr(default, error = DeserrJsonError<InvalidFacetSearchQuery>)]
-    pub facet_query: Option<String>,
-    #[deserr(error = DeserrJsonError<InvalidFacetSearchFacetName>, missing_field_error = DeserrJsonError::missing_facet_search_facet_name)]
-    pub facet_name: String,
-    #[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
-    pub q: Option<String>,
-    #[deserr(default, error = DeserrJsonError<InvalidSearchVector>)]
-    pub vector: Option<Vec<f32>>,
-    #[deserr(default, error = DeserrJsonError<InvalidHybridQuery>)]
-    pub hybrid: Option<HybridQuery>,
-    #[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
-    pub filter: Option<Value>,
-    #[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
-    pub matching_strategy: MatchingStrategy,
-    #[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToSearchOn>, default)]
-    pub attributes_to_search_on: Option<Vec<String>>,
-}
-
-pub async fn search(
-    index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
-    index_uid: web::Path<String>,
-    params: AwebJson<FacetSearchQuery, DeserrJsonError>,
-    req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
-
-    let query = params.into_inner();
-    debug!("facet search called with params: {:?}", query);
-
-    let mut aggregate = FacetSearchAggregator::from_query(&query, &req);
-
-    let facet_query = query.facet_query.clone();
-    let facet_name = query.facet_name.clone();
-    let mut search_query = SearchQuery::from(query);
-
-    // Tenant token search_rules.
-    if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
-        add_search_rules(&mut search_query, search_rules);
-    }
-
-    let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features();
-    let search_result = tokio::task::spawn_blocking(move || {
-        perform_facet_search(&index, search_query, facet_query, facet_name, features)
-    })
-    .await?;
-
-    if let Ok(ref search_result) = search_result {
-        aggregate.succeed(search_result);
-    }
-    analytics.post_facet_search(aggregate);
-
-    let search_result = search_result?;
-
-    debug!("returns: {:?}", search_result);
-    Ok(HttpResponse::Ok().json(search_result))
-}
-
-impl From<FacetSearchQuery> for SearchQuery {
-    fn from(value: FacetSearchQuery) -> Self {
-        let FacetSearchQuery {
-            facet_query: _,
-            facet_name: _,
-            q,
-            vector,
-            filter,
-            matching_strategy,
-            attributes_to_search_on,
-            hybrid,
-        } = value;
-
-        SearchQuery {
-            q,
-            offset: DEFAULT_SEARCH_OFFSET(),
-            limit: DEFAULT_SEARCH_LIMIT(),
-            page: None,
-            hits_per_page: None,
-            attributes_to_retrieve: None,
-            attributes_to_crop: None,
-            crop_length: DEFAULT_CROP_LENGTH(),
-            attributes_to_highlight: None,
-            show_matches_position: false,
-            show_ranking_score: false,
-            show_ranking_score_details: false,
-            filter,
-            sort: None,
-            facets: None,
-            highlight_pre_tag: DEFAULT_HIGHLIGHT_PRE_TAG(),
-            highlight_post_tag: DEFAULT_HIGHLIGHT_POST_TAG(),
-            crop_marker: DEFAULT_CROP_MARKER(),
-            matching_strategy,
-            vector,
-            attributes_to_search_on,
-            hybrid,
-        }
-    }
-}
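The removed handler offloads the CPU-bound `perform_facet_search` onto tokio's blocking pool with `spawn_blocking`, keeping the async executor responsive, and handles two error layers: `await?` surfaces a `JoinError` (panic or cancellation), while a second `?` surfaces the search's own failure. A minimal runnable sketch of that shape; the search function and its error type are invented stand-ins:

```rust
// tokio = { version = "1", features = ["full"] }
// Stand-in for perform_facet_search: pretend this burns CPU.
fn perform_expensive_search(query: &str) -> Result<Vec<String>, String> {
    Ok(vec![format!("hit for {query}")])
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let query = "shoes".to_string();
    let search_result = tokio::task::spawn_blocking(move || {
        perform_expensive_search(&query)
    })
    .await?; // JoinError (panic/cancellation) surfaces here...

    let hits = search_result?; // ...the search's own error surfaces here.
    println!("{hits:?}");
    Ok(())
}
```

Note how the real handler exploits the gap between the two layers: it records analytics on the inner `Result` before propagating it, so failed searches are still counted.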
@@ -24,7 +24,6 @@ use crate::extractors::authentication::{AuthenticationError, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
 
 pub mod documents;
-pub mod facet_search;
 pub mod search;
 pub mod settings;
 
@@ -45,7 +44,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
         .service(web::scope("/documents").configure(documents::configure))
         .service(web::scope("/search").configure(search::configure))
-        .service(web::scope("/facet-search").configure(facet_search::configure))
         .service(web::scope("/settings").configure(settings::configure)),
     );
 }
@@ -2,14 +2,12 @@ use actix_web::web::Data;
|
|||||||
use actix_web::{web, HttpRequest, HttpResponse};
|
use actix_web::{web, HttpRequest, HttpResponse};
|
||||||
use deserr::actix_web::{AwebJson, AwebQueryParameter};
|
use deserr::actix_web::{AwebJson, AwebQueryParameter};
|
||||||
use index_scheduler::IndexScheduler;
|
use index_scheduler::IndexScheduler;
|
||||||
use log::{debug, warn};
|
use log::debug;
|
||||||
use meilisearch_types::deserr::query_params::Param;
|
use meilisearch_types::deserr::query_params::Param;
|
||||||
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
|
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
|
||||||
use meilisearch_types::error::deserr_codes::*;
|
use meilisearch_types::error::deserr_codes::*;
|
||||||
use meilisearch_types::error::ResponseError;
|
use meilisearch_types::error::ResponseError;
|
||||||
use meilisearch_types::index_uid::IndexUid;
|
use meilisearch_types::index_uid::IndexUid;
|
||||||
use meilisearch_types::milli;
|
|
||||||
use meilisearch_types::milli::vector::DistributionShift;
|
|
||||||
use meilisearch_types::serde_cs::vec::CS;
|
use meilisearch_types::serde_cs::vec::CS;
|
||||||
use serde_json::Value;
|
use serde_json::Value;
|
||||||
|
|
||||||
@@ -18,9 +16,9 @@ use crate::extractors::authentication::policies::*;
|
|||||||
use crate::extractors::authentication::GuardedData;
|
use crate::extractors::authentication::GuardedData;
|
||||||
use crate::extractors::sequential_extractor::SeqHandler;
|
use crate::extractors::sequential_extractor::SeqHandler;
|
||||||
use crate::search::{
|
use crate::search::{
|
||||||
add_search_rules, perform_search, HybridQuery, MatchingStrategy, SearchQuery, SemanticRatio,
|
add_search_rules, perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH,
|
||||||
DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG,
|
DEFAULT_CROP_MARKER, DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG,
|
||||||
DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET, DEFAULT_SEMANTIC_RATIO,
|
DEFAULT_SEARCH_LIMIT, DEFAULT_SEARCH_OFFSET,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn configure(cfg: &mut web::ServiceConfig) {
|
pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||||
@@ -36,8 +34,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
|
|||||||
pub struct SearchQueryGet {
|
pub struct SearchQueryGet {
|
||||||
#[deserr(default, error = DeserrQueryParamError<InvalidSearchQ>)]
|
#[deserr(default, error = DeserrQueryParamError<InvalidSearchQ>)]
|
||||||
q: Option<String>,
|
q: Option<String>,
|
||||||
#[deserr(default, error = DeserrQueryParamError<InvalidSearchVector>)]
|
|
||||||
vector: Option<CS<f32>>,
|
|
||||||
#[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError<InvalidSearchOffset>)]
|
#[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError<InvalidSearchOffset>)]
|
||||||
offset: Param<usize>,
|
offset: Param<usize>,
|
||||||
#[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError<InvalidSearchLimit>)]
|
#[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError<InvalidSearchLimit>)]
|
||||||
@@ -60,10 +56,6 @@ pub struct SearchQueryGet {
     sort: Option<String>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchShowMatchesPosition>)]
     show_matches_position: Param<bool>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidSearchShowRankingScore>)]
-    show_ranking_score: Param<bool>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidSearchShowRankingScoreDetails>)]
-    show_ranking_score_details: Param<bool>,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchFacets>)]
     facets: Option<CS<String>>,
     #[deserr( default = DEFAULT_HIGHLIGHT_PRE_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPreTag>)]
@@ -74,33 +66,6 @@ pub struct SearchQueryGet {
     crop_marker: String,
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchMatchingStrategy>)]
     matching_strategy: MatchingStrategy,
-    #[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToSearchOn>)]
-    pub attributes_to_search_on: Option<CS<String>>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidEmbedder>)]
-    pub hybrid_embedder: Option<String>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidSearchSemanticRatio>)]
-    pub hybrid_semantic_ratio: Option<SemanticRatioGet>,
-}
-
-#[derive(Debug, Clone, Copy, Default, PartialEq, deserr::Deserr)]
-#[deserr(try_from(String) = TryFrom::try_from -> InvalidSearchSemanticRatio)]
-pub struct SemanticRatioGet(SemanticRatio);
-
-impl std::convert::TryFrom<String> for SemanticRatioGet {
-    type Error = InvalidSearchSemanticRatio;
-
-    fn try_from(s: String) -> Result<Self, Self::Error> {
-        let f: f32 = s.parse().map_err(|_| InvalidSearchSemanticRatio)?;
-        Ok(SemanticRatioGet(SemanticRatio::try_from(f)?))
-    }
-}
-
-impl std::ops::Deref for SemanticRatioGet {
-    type Target = SemanticRatio;
-
-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
-}
 }
 
 impl From<SearchQueryGet> for SearchQuery {
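For context, the `SemanticRatioGet` newtype removed above is a small pattern worth noting: query-string values arrive as `String`, so the wrapper implements `TryFrom<String>` for parsing and validation, plus `Deref` so call sites read the inner value transparently. A minimal standalone sketch of the same shape (every name here is a simplified stand-in, and the range check stands in for `SemanticRatio::try_from`):

    use std::convert::TryFrom;
    use std::ops::Deref;

    #[derive(Debug, Clone, Copy, PartialEq)]
    struct Ratio(f32); // stand-in for SemanticRatio: must stay within [0.0, 1.0]

    struct RatioGet(Ratio); // query-param wrapper, parsed from the raw string

    impl TryFrom<String> for RatioGet {
        type Error = String;

        fn try_from(s: String) -> Result<Self, Self::Error> {
            // Same two-step validation as the deleted code: parse, then range-check.
            let f: f32 = s.parse().map_err(|_| format!("not a float: {s}"))?;
            if !(0.0..=1.0).contains(&f) {
                return Err(format!("out of range: {f}"));
            }
            Ok(RatioGet(Ratio(f)))
        }
    }

    impl Deref for RatioGet {
        type Target = Ratio;
        fn deref(&self) -> &Self::Target {
            &self.0
        }
    }

    fn main() {
        let r = RatioGet::try_from("0.5".to_string()).unwrap();
        assert_eq!(*r, Ratio(0.5));
    }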
@@ -113,23 +78,8 @@ impl From<SearchQueryGet> for SearchQuery {
             None => None,
         };
 
-        let hybrid = match (other.hybrid_embedder, other.hybrid_semantic_ratio) {
-            (None, None) => None,
-            (None, Some(semantic_ratio)) => {
-                Some(HybridQuery { semantic_ratio: *semantic_ratio, embedder: None })
-            }
-            (Some(embedder), None) => Some(HybridQuery {
-                semantic_ratio: DEFAULT_SEMANTIC_RATIO(),
-                embedder: Some(embedder),
-            }),
-            (Some(embedder), Some(semantic_ratio)) => {
-                Some(HybridQuery { semantic_ratio: *semantic_ratio, embedder: Some(embedder) })
-            }
-        };
-
         Self {
             q: other.q,
-            vector: other.vector.map(CS::into_inner),
             offset: other.offset.0,
             limit: other.limit.0,
             page: other.page.as_deref().copied(),
@@ -141,15 +91,11 @@ impl From<SearchQueryGet> for SearchQuery {
             filter,
             sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
             show_matches_position: other.show_matches_position.0,
-            show_ranking_score: other.show_ranking_score.0,
-            show_ranking_score_details: other.show_ranking_score_details.0,
             facets: other.facets.map(|o| o.into_iter().collect()),
             highlight_pre_tag: other.highlight_pre_tag,
             highlight_post_tag: other.highlight_post_tag,
             crop_marker: other.crop_marker,
             matching_strategy: other.matching_strategy,
-            attributes_to_search_on: other.attributes_to_search_on.map(|o| o.into_iter().collect()),
-            hybrid,
         }
     }
 }
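The deleted `let hybrid = match …` block above folds two independently optional GET parameters into one optional struct, defaulting the ratio when only the embedder is given. A standalone sketch of that folding (struct, names, and the default value are simplified stand-ins, not the crate's API):

    #[derive(Debug, PartialEq)]
    struct Hybrid {
        semantic_ratio: f32,
        embedder: Option<String>,
    }

    const DEFAULT_RATIO: f32 = 0.5; // stand-in for DEFAULT_SEMANTIC_RATIO()

    // Yields None only when neither parameter was sent, mirroring the removed match.
    fn fold(embedder: Option<String>, ratio: Option<f32>) -> Option<Hybrid> {
        match (embedder, ratio) {
            (None, None) => None,
            (embedder, ratio) => Some(Hybrid {
                semantic_ratio: ratio.unwrap_or(DEFAULT_RATIO),
                embedder,
            }),
        }
    }

    fn main() {
        assert_eq!(fold(None, None), None);
        assert_eq!(
            fold(Some("default".into()), None),
            Some(Hybrid { semantic_ratio: DEFAULT_RATIO, embedder: Some("default".into()) })
        );
    }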
@@ -199,13 +145,7 @@ pub async fn search_with_url_query(
     let mut aggregate = SearchAggregator::from_query(&query, &req);
 
     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features();
-
-    let distribution = embed(&mut query, index_scheduler.get_ref(), &index).await?;
-
-    let search_result =
-        tokio::task::spawn_blocking(move || perform_search(&index, query, features, distribution))
-            .await?;
+    let search_result = tokio::task::spawn_blocking(move || perform_search(&index, query)).await?;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
     }
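On both sides of this hunk, the CPU-bound `perform_search` runs under `tokio::task::spawn_blocking` so it cannot stall the async executor threads; only the closure's inputs differ. A minimal sketch of the pattern, with a placeholder standing in for the real search:

    fn perform_search(query: &str) -> Result<String, String> {
        // placeholder for a CPU-bound search over an index
        Ok(format!("hits for {query}"))
    }

    #[tokio::main]
    async fn main() -> Result<(), String> {
        let query = "hello".to_string();
        // Move the blocking work off the async worker threads; the first `?`
        // surfaces a JoinError, the second the search's own error.
        let result = tokio::task::spawn_blocking(move || perform_search(&query))
            .await
            .map_err(|e| e.to_string())??;
        println!("{result}");
        Ok(())
    }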
@@ -237,14 +177,7 @@ pub async fn search_with_post(
     let mut aggregate = SearchAggregator::from_query(&query, &req);
 
     let index = index_scheduler.index(&index_uid)?;
-    let features = index_scheduler.features();
-
-    let distribution = embed(&mut query, index_scheduler.get_ref(), &index).await?;
-
-    let search_result =
-        tokio::task::spawn_blocking(move || perform_search(&index, query, features, distribution))
-            .await?;
-
+    let search_result = tokio::task::spawn_blocking(move || perform_search(&index, query)).await?;
     if let Ok(ref search_result) = search_result {
         aggregate.succeed(search_result);
     }
@@ -256,80 +189,6 @@ pub async fn search_with_post(
     Ok(HttpResponse::Ok().json(search_result))
 }
-
-pub async fn embed(
-    query: &mut SearchQuery,
-    index_scheduler: &IndexScheduler,
-    index: &milli::Index,
-) -> Result<Option<DistributionShift>, ResponseError> {
-    match (&query.hybrid, &query.vector, &query.q) {
-        (Some(HybridQuery { semantic_ratio: _, embedder }), None, Some(q))
-            if !q.trim().is_empty() =>
-        {
-            let embedder_configs = index.embedding_configs(&index.read_txn()?)?;
-            let embedders = index_scheduler.embedders(embedder_configs)?;
-
-            let embedder = if let Some(embedder_name) = embedder {
-                embedders.get(embedder_name)
-            } else {
-                embedders.get_default()
-            };
-
-            let embedder = embedder
-                .ok_or(milli::UserError::InvalidEmbedder("default".to_owned()))
-                .map_err(milli::Error::from)?
-                .0;
-
-            let distribution = embedder.distribution();
-
-            let embeddings = embedder
-                .embed(vec![q.to_owned()])
-                .await
-                .map_err(milli::vector::Error::from)
-                .map_err(milli::Error::from)?
-                .pop()
-                .expect("No vector returned from embedding");
-
-            if embeddings.iter().nth(1).is_some() {
-                warn!("Ignoring embeddings past the first one in long search query");
-                query.vector = Some(embeddings.iter().next().unwrap().to_vec());
-            } else {
-                query.vector = Some(embeddings.into_inner());
-            }
-            Ok(distribution)
-        }
-        (Some(hybrid), vector, _) => {
-            let embedder_configs = index.embedding_configs(&index.read_txn()?)?;
-            let embedders = index_scheduler.embedders(embedder_configs)?;
-
-            let embedder = if let Some(embedder_name) = &hybrid.embedder {
-                embedders.get(embedder_name)
-            } else {
-                embedders.get_default()
-            };
-
-            let embedder = embedder
-                .ok_or(milli::UserError::InvalidEmbedder("default".to_owned()))
-                .map_err(milli::Error::from)?
-                .0;
-
-            if let Some(vector) = vector {
-                if vector.len() != embedder.dimensions() {
-                    return Err(meilisearch_types::milli::Error::UserError(
-                        meilisearch_types::milli::UserError::InvalidVectorDimensions {
-                            expected: embedder.dimensions(),
-                            found: vector.len(),
-                        },
-                    )
-                    .into());
-                }
-            }
-
-            Ok(embedder.distribution())
-        }
-        _ => Ok(None),
-    }
-}
 
 #[cfg(test)]
 mod test {
     use super::*;
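The removed `embed` helper resolves an embedder by name, falls back to the default entry when the query names none, and errors when neither exists. A standalone sketch of that lookup-with-fallback shape over a plain map (function and map contents are hypothetical, not the crate's API):

    use std::collections::HashMap;

    fn resolve<'a>(
        embedders: &'a HashMap<String, Vec<f32>>, // stand-in for the configured embedders
        requested: Option<&str>,
    ) -> Result<&'a Vec<f32>, String> {
        // Fall back to the default entry, then fail like UserError::InvalidEmbedder.
        let name = requested.unwrap_or("default");
        embedders
            .get(name)
            .ok_or_else(|| format!("invalid embedder: {name}"))
    }

    fn main() {
        let mut embedders = HashMap::new();
        embedders.insert("default".to_string(), vec![0.0; 3]);
        assert!(resolve(&embedders, None).is_ok());
        assert!(resolve(&embedders, Some("missing")).is_err());
    }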
(file header not shown)

@@ -5,9 +5,7 @@ use index_scheduler::IndexScheduler;
 use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
-use meilisearch_types::facet_values_sort::FacetValuesSort;
 use meilisearch_types::index_uid::IndexUid;
-use meilisearch_types::milli::update::Setting;
 use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
@@ -79,7 +77,6 @@ macro_rules! make_setting_route {
 
                 let body = body.into_inner();
 
-                #[allow(clippy::redundant_closure_call)]
                 $analytics(&body, &req);
 
                 let new_settings = Settings {
@@ -312,81 +309,6 @@ make_setting_route!(
     }
 );
 
-make_setting_route!(
-    "/non-separator-tokens",
-    put,
-    std::collections::BTreeSet<String>,
-    meilisearch_types::deserr::DeserrJsonError<
-        meilisearch_types::error::deserr_codes::InvalidSettingsNonSeparatorTokens,
-    >,
-    non_separator_tokens,
-    "nonSeparatorTokens",
-    analytics,
-    |non_separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "nonSeparatorTokens Updated".to_string(),
-            json!({
-                "non_separator_tokens": {
-                    "total": non_separator_tokens.as_ref().map(|non_separator_tokens| non_separator_tokens.len()),
-                },
-            }),
-            Some(req),
-        );
-    }
-);
-
-make_setting_route!(
-    "/separator-tokens",
-    put,
-    std::collections::BTreeSet<String>,
-    meilisearch_types::deserr::DeserrJsonError<
-        meilisearch_types::error::deserr_codes::InvalidSettingsSeparatorTokens,
-    >,
-    separator_tokens,
-    "separatorTokens",
-    analytics,
-    |separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "separatorTokens Updated".to_string(),
-            json!({
-                "separator_tokens": {
-                    "total": separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()),
-                },
-            }),
-            Some(req),
-        );
-    }
-);
-
-make_setting_route!(
-    "/dictionary",
-    put,
-    std::collections::BTreeSet<String>,
-    meilisearch_types::deserr::DeserrJsonError<
-        meilisearch_types::error::deserr_codes::InvalidSettingsDictionary,
-    >,
-    dictionary,
-    "dictionary",
-    analytics,
-    |dictionary: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
-        use serde_json::json;
-
-        analytics.publish(
-            "dictionary Updated".to_string(),
-            json!({
-                "dictionary": {
-                    "total": dictionary.as_ref().map(|dictionary| dictionary.len()),
-                },
-            }),
-            Some(req),
-        );
-    }
-);
-
 make_setting_route!(
     "/synonyms",
     put,
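Each deleted settings route passes the same kind of analytics closure: it never serializes the user's values, only an aggregate such as a `total` count pulled out of the `Option` with `as_ref().map(..)`. A standalone sketch of building such a payload (the key names follow the deleted `nonSeparatorTokens` closure; only `serde_json` is needed):

    use std::collections::BTreeSet;

    use serde_json::json;

    fn payload(tokens: &Option<BTreeSet<String>>) -> serde_json::Value {
        // Report only the count, never the tokens themselves.
        json!({
            "non_separator_tokens": {
                "total": tokens.as_ref().map(|t| t.len()),
            },
        })
    }

    fn main() {
        let tokens = Some(BTreeSet::from(["#".to_string(), "@".to_string()]));
        assert_eq!(payload(&tokens)["non_separator_tokens"]["total"], 2);
        assert!(payload(&None)["non_separator_tokens"]["total"].is_null());
    }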
@@ -436,31 +358,6 @@ make_setting_route!(
     }
 );
 
-make_setting_route!(
-    "/proximity-precision",
-    put,
-    meilisearch_types::settings::ProximityPrecisionView,
-    meilisearch_types::deserr::DeserrJsonError<
-        meilisearch_types::error::deserr_codes::InvalidSettingsProximityPrecision,
-    >,
-    proximity_precision,
-    "proximityPrecision",
-    analytics,
-    |precision: &Option<meilisearch_types::settings::ProximityPrecisionView>, req: &HttpRequest| {
-        use serde_json::json;
-        analytics.publish(
-            "ProximityPrecision Updated".to_string(),
-            json!({
-                "proximity_precision": {
-                    "set": precision.is_some(),
-                    "value": precision,
-                }
-            }),
-            Some(req),
-        );
-    }
-);
-
 make_setting_route!(
     "/ranking-rules",
     put,
@@ -504,17 +401,12 @@ make_setting_route!(
     analytics,
     |setting: &Option<meilisearch_types::settings::FacetingSettings>, req: &HttpRequest| {
         use serde_json::json;
-        use meilisearch_types::facet_values_sort::FacetValuesSort;
 
         analytics.publish(
             "Faceting Updated".to_string(),
             json!({
                 "faceting": {
                     "max_values_per_facet": setting.as_ref().and_then(|s| s.max_values_per_facet.set()),
-                    "sort_facet_values_by_star_count": setting.as_ref().and_then(|s| {
-                        s.sort_facet_values_by.as_ref().set().map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
-                    }),
-                    "sort_facet_values_by_total": setting.as_ref().and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
                 },
             }),
             Some(req),
@@ -547,67 +439,6 @@ make_setting_route!(
     }
 );
 
-make_setting_route!(
-    "/embedders",
-    patch,
-    std::collections::BTreeMap<String, Setting<meilisearch_types::milli::vector::settings::EmbeddingSettings>>,
-    meilisearch_types::deserr::DeserrJsonError<
-        meilisearch_types::error::deserr_codes::InvalidSettingsEmbedders,
-    >,
-    embedders,
-    "embedders",
-    analytics,
-    |setting: &Option<std::collections::BTreeMap<String, Setting<meilisearch_types::milli::vector::settings::EmbeddingSettings>>>, req: &HttpRequest| {
-
-
-        analytics.publish(
-            "Embedders Updated".to_string(),
-            serde_json::json!({"embedders": crate::routes::indexes::settings::embedder_analytics(setting.as_ref())}),
-            Some(req),
-        );
-    }
-);
-
-fn embedder_analytics(
-    setting: Option<
-        &std::collections::BTreeMap<
-            String,
-            Setting<meilisearch_types::milli::vector::settings::EmbeddingSettings>,
-        >,
-    >,
-) -> serde_json::Value {
-    let mut sources = std::collections::HashSet::new();
-
-    if let Some(s) = &setting {
-        for source in s
-            .values()
-            .filter_map(|config| config.clone().set())
-            .filter_map(|config| config.embedder_options.set())
-        {
-            use meilisearch_types::milli::vector::settings::EmbedderSettings;
-            match source {
-                EmbedderSettings::OpenAi(_) => sources.insert("openAi"),
-                EmbedderSettings::HuggingFace(_) => sources.insert("huggingFace"),
-                EmbedderSettings::UserProvided(_) => sources.insert("userProvided"),
-            };
-        }
-    };
-
-    let document_template_used = setting.as_ref().map(|map| {
-        map.values()
-            .filter_map(|config| config.clone().set())
-            .any(|config| config.document_template.set().is_some())
-    });
-
-    json!(
-        {
-            "total": setting.as_ref().map(|s| s.len()),
-            "sources": sources,
-            "document_template_used": document_template_used,
-        }
-    )
-}
 
 macro_rules! generate_configure {
     ($($mod:ident),*) => {
         pub fn configure(cfg: &mut web::ServiceConfig) {
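The removed `embedder_analytics` walks the embedder map and collapses each config to its source tag via a `HashSet`, so the telemetry records which kinds of embedders are used rather than how many of each. A simplified standalone sketch, with a local enum standing in for `EmbedderSettings`:

    use std::collections::{BTreeMap, HashSet};

    enum Source {
        OpenAi,
        HuggingFace,
        UserProvided,
    }

    fn sources(settings: &BTreeMap<String, Source>) -> HashSet<&'static str> {
        let mut sources = HashSet::new();
        for source in settings.values() {
            // Deduplicate: three OpenAI embedders still yield a single "openAi" tag.
            sources.insert(match source {
                Source::OpenAi => "openAi",
                Source::HuggingFace => "huggingFace",
                Source::UserProvided => "userProvided",
            });
        }
        sources
    }

    fn main() {
        let mut settings = BTreeMap::new();
        settings.insert("a".to_string(), Source::OpenAi);
        settings.insert("b".to_string(), Source::OpenAi);
        assert_eq!(sources(&settings), HashSet::from(["openAi"]));
    }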
@@ -628,17 +459,12 @@ generate_configure!(
     displayed_attributes,
     searchable_attributes,
     distinct_attribute,
-    proximity_precision,
     stop_words,
-    separator_tokens,
-    non_separator_tokens,
-    dictionary,
     synonyms,
     ranking_rules,
     typo_tolerance,
     pagination,
-    faceting,
-    embedders
+    faceting
 );
 
 pub async fn update_all(
@@ -683,9 +509,6 @@ pub async fn update_all(
             "distinct_attribute": {
                 "set": new_settings.distinct_attribute.as_ref().set().is_some()
             },
-            "proximity_precision": {
-                "set": new_settings.proximity_precision.as_ref().set().is_some()
-            },
             "typo_tolerance": {
                 "enabled": new_settings.typo_tolerance
                     .as_ref()
@@ -722,16 +545,6 @@ pub async fn update_all(
                     .as_ref()
                     .set()
                     .and_then(|s| s.max_values_per_facet.as_ref().set()),
-                "sort_facet_values_by_star_count": new_settings.faceting
-                    .as_ref()
-                    .set()
-                    .and_then(|s| {
-                        s.sort_facet_values_by.as_ref().set().map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
-                    }),
-                "sort_facet_values_by_total": new_settings.faceting
-                    .as_ref()
-                    .set()
-                    .and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
             },
             "pagination": {
                 "max_total_hits": new_settings.pagination
@@ -745,7 +558,6 @@ pub async fn update_all(
             "synonyms": {
                 "total": new_settings.synonyms.as_ref().set().map(|synonyms| synonyms.len()),
             },
-            "embedders": crate::routes::indexes::settings::embedder_analytics(new_settings.embedders.as_ref().set())
         }),
         Some(&req),
     );
(file header not shown)

@@ -19,7 +19,6 @@ pub async fn get_metrics(
     index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
     auth_controller: Data<AuthController>,
 ) -> Result<HttpResponse, ResponseError> {
-    index_scheduler.features().check_metrics()?;
     let auth_filters = index_scheduler.filters();
     if !auth_filters.all_indexes_authorized() {
         let mut error = ResponseError::from(AuthenticationError::InvalidToken);
@@ -49,11 +48,6 @@ pub async fn get_metrics(
         }
     }
 
-    if let Some(last_update) = response.last_update {
-        crate::metrics::MEILISEARCH_LAST_UPDATE.set(last_update.unix_timestamp());
-    }
-    crate::metrics::MEILISEARCH_IS_INDEXING.set(index_scheduler.is_task_processing()? as i64);
-
     let encoder = TextEncoder::new();
     let mut buffer = vec![];
     encoder.encode(&prometheus::gather(), &mut buffer).expect("Failed to encode metrics");
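The context lines above show the tail of the handler: gauges are set, then the whole default registry is serialized with `TextEncoder` into the Prometheus text exposition format. A minimal sketch against the `prometheus` crate (the gauge name here is hypothetical; `MEILISEARCH_IS_INDEXING` is the real counterpart):

    use prometheus::{Encoder, IntGauge, TextEncoder};

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        // A gauge registered on the default registry, like MEILISEARCH_IS_INDEXING.
        let is_indexing = IntGauge::new("example_is_indexing", "Whether a task is processing")?;
        prometheus::default_registry().register(Box::new(is_indexing.clone()))?;
        is_indexing.set(1);

        // Same gather-and-encode sequence as the handler above.
        let encoder = TextEncoder::new();
        let mut buffer = vec![];
        encoder.encode(&prometheus::gather(), &mut buffer)?;
        println!("{}", String::from_utf8(buffer)?);
        Ok(())
    }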
(file header not shown)

@@ -20,27 +20,26 @@ const PAGINATION_DEFAULT_LIMIT: usize = 20;
 
 mod api_key;
 mod dump;
-pub mod features;
 pub mod indexes;
 mod metrics;
 mod multi_search;
-mod snapshot;
 mod swap_indexes;
 pub mod tasks;
 
-pub fn configure(cfg: &mut web::ServiceConfig) {
+pub fn configure(cfg: &mut web::ServiceConfig, enable_metrics: bool) {
     cfg.service(web::scope("/tasks").configure(tasks::configure))
         .service(web::resource("/health").route(web::get().to(get_health)))
         .service(web::scope("/keys").configure(api_key::configure))
         .service(web::scope("/dumps").configure(dump::configure))
-        .service(web::scope("/snapshots").configure(snapshot::configure))
         .service(web::resource("/stats").route(web::get().to(get_stats)))
         .service(web::resource("/version").route(web::get().to(get_version)))
        .service(web::scope("/indexes").configure(indexes::configure))
         .service(web::scope("/multi-search").configure(multi_search::configure))
-        .service(web::scope("/swap-indexes").configure(swap_indexes::configure))
-        .service(web::scope("/metrics").configure(metrics::configure))
-        .service(web::scope("/experimental-features").configure(features::configure));
+        .service(web::scope("/swap-indexes").configure(swap_indexes::configure));
+
+    if enable_metrics {
+        cfg.service(web::scope("/metrics").configure(metrics::configure));
+    }
 }
 
 #[derive(Debug, Serialize)]
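On the right-hand side, `configure` takes an `enable_metrics` flag and mounts `/metrics` only when it is set, instead of guarding inside the handler with `check_metrics()`. A minimal actix-web sketch of that flag-driven mounting (handler body and routes are placeholders, not Meilisearch's):

    use actix_web::{web, App, HttpResponse, HttpServer};

    async fn get_metrics() -> HttpResponse {
        HttpResponse::Ok().body("# metrics go here\n")
    }

    pub fn configure(cfg: &mut web::ServiceConfig, enable_metrics: bool) {
        cfg.service(web::resource("/health").route(web::get().to(|| async { HttpResponse::Ok().finish() })));
        // The route simply does not exist unless the instance opted in.
        if enable_metrics {
            cfg.service(web::scope("/metrics").route("", web::get().to(get_metrics)));
        }
    }

    #[actix_web::main]
    async fn main() -> std::io::Result<()> {
        HttpServer::new(|| App::new().configure(|cfg| configure(cfg, true)))
            .bind(("127.0.0.1", 8080))?
            .run()
            .await
    }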
@@ -286,6 +285,9 @@ pub fn create_all_stats(
     used_database_size += index_scheduler.used_size()?;
     database_size += auth_controller.size()?;
     used_database_size += auth_controller.used_size()?;
+    let update_file_size = index_scheduler.compute_update_file_size()?;
+    database_size += update_file_size;
+    used_database_size += update_file_size;
 
     let stats = Stats { database_size, used_database_size, last_update: last_task, indexes };
     Ok(stats)
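The added lines fold the on-disk size of pending update files into both totals, since those bytes are both allocated and currently in use. A tiny sketch of the accounting (the sizes are made up; `compute_update_file_size` is the real method the diff calls):

    struct Stats {
        database_size: u64,
        used_database_size: u64,
    }

    fn main() {
        let (mut database_size, mut used_database_size) = (4096u64, 1024u64);

        // As in the diff: one computed size, added to both counters.
        let update_file_size = 512u64; // stand-in for compute_update_file_size()
        database_size += update_file_size;
        used_database_size += update_file_size;

        let stats = Stats { database_size, used_database_size };
        assert_eq!(stats.database_size, 4608);
        assert_eq!(stats.used_database_size, 1536);
    }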
(file header not shown)

@@ -13,7 +13,6 @@ use crate::analytics::{Analytics, MultiSearchAggregator};
 use crate::extractors::authentication::policies::ActionPolicy;
 use crate::extractors::authentication::{AuthenticationError, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::indexes::search::embed;
 use crate::search::{
     add_search_rules, perform_search, SearchQueryWithIndex, SearchResultWithIndex,
 };
@@ -42,56 +41,52 @@ pub async fn multi_search_with_post(
     let queries = params.into_inner().queries;
 
     let mut multi_aggregate = MultiSearchAggregator::from_queries(&queries, &req);
-    let features = index_scheduler.features();
 
     // Explicitly expect a `(ResponseError, usize)` for the error type rather than `ResponseError` only,
     // so that `?` doesn't work if it doesn't use `with_index`, ensuring that it is not forgotten in case of code
     // changes.
-    let search_results: Result<_, (ResponseError, usize)> = async {
-        let mut search_results = Vec::with_capacity(queries.len());
-        for (query_index, (index_uid, mut query)) in
-            queries.into_iter().map(SearchQueryWithIndex::into_index_query).enumerate()
-        {
-            debug!("multi-search #{query_index}: called with params: {:?}", query);
-
-            // Check index from API key
-            if !index_scheduler.filters().is_index_authorized(&index_uid) {
-                return Err(AuthenticationError::InvalidToken).with_index(query_index);
-            }
-            // Apply search rules from tenant token
-            if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid)
-            {
-                add_search_rules(&mut query, search_rules);
-            }
-
-            let index = index_scheduler
-                .index(&index_uid)
-                .map_err(|err| {
-                    let mut err = ResponseError::from(err);
-                    // Patch the HTTP status code to 400 as it defaults to 404 for `index_not_found`, but
-                    // here the resource not found is not part of the URL.
-                    err.code = StatusCode::BAD_REQUEST;
-                    err
-                })
-                .with_index(query_index)?;
-
-            let distribution = embed(&mut query, index_scheduler.get_ref(), &index)
-                .await
-                .with_index(query_index)?;
-
-            let search_result = tokio::task::spawn_blocking(move || {
-                perform_search(&index, query, features, distribution)
-            })
-            .await
-            .with_index(query_index)?;
-
-            search_results.push(SearchResultWithIndex {
-                index_uid: index_uid.into_inner(),
-                result: search_result.with_index(query_index)?,
-            });
-        }
-        Ok(search_results)
-    }
+    let search_results: Result<_, (ResponseError, usize)> = (|| {
+        async {
+            let mut search_results = Vec::with_capacity(queries.len());
+            for (query_index, (index_uid, mut query)) in
+                queries.into_iter().map(SearchQueryWithIndex::into_index_query).enumerate()
+            {
+                debug!("multi-search #{query_index}: called with params: {:?}", query);
+
+                // Check index from API key
+                if !index_scheduler.filters().is_index_authorized(&index_uid) {
+                    return Err(AuthenticationError::InvalidToken).with_index(query_index);
+                }
+                // Apply search rules from tenant token
+                if let Some(search_rules) =
+                    index_scheduler.filters().get_index_search_rules(&index_uid)
+                {
+                    add_search_rules(&mut query, search_rules);
+                }
+
+                let index = index_scheduler
+                    .index(&index_uid)
+                    .map_err(|err| {
+                        let mut err = ResponseError::from(err);
+                        // Patch the HTTP status code to 400 as it defaults to 404 for `index_not_found`, but
+                        // here the resource not found is not part of the URL.
+                        err.code = StatusCode::BAD_REQUEST;
+                        err
+                    })
+                    .with_index(query_index)?;
+                let search_result =
+                    tokio::task::spawn_blocking(move || perform_search(&index, query))
+                        .await
+                        .with_index(query_index)?;
+
+                search_results.push(SearchResultWithIndex {
+                    index_uid: index_uid.into_inner(),
+                    result: search_result.with_index(query_index)?,
+                });
+            }
+            Ok(search_results)
+        }
+    })()
     .await;
 
     if search_results.is_ok() {
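The comment kept in this hunk explains the trick on both sides: because the error type is `(ResponseError, usize)`, a bare `?` will not compile, so every fallible step must tag the failing query's index via `with_index`. A standalone sketch of such an extension trait (the error type is simplified; the method name follows the code above):

    // `?` only works once the error carries the query index, so tagging cannot be forgotten.
    trait WithIndex<T> {
        fn with_index(self, index: usize) -> Result<T, (String, usize)>;
    }

    impl<T, E: ToString> WithIndex<T> for Result<T, E> {
        fn with_index(self, index: usize) -> Result<T, (String, usize)> {
            self.map_err(|e| (e.to_string(), index))
        }
    }

    fn search(q: &str) -> Result<String, String> {
        if q.is_empty() { Err("empty query".into()) } else { Ok(format!("hits for {q}")) }
    }

    fn run(queries: &[&str]) -> Result<Vec<String>, (String, usize)> {
        let mut out = Vec::with_capacity(queries.len());
        for (query_index, q) in queries.iter().enumerate() {
            out.push(search(q).with_index(query_index)?);
        }
        Ok(out)
    }

    fn main() {
        assert_eq!(run(&["a", ""]).unwrap_err().1, 1); // the second query failed
    }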
(file header not shown)

@@ -1,32 +0,0 @@
-use actix_web::web::Data;
-use actix_web::{web, HttpRequest, HttpResponse};
-use index_scheduler::IndexScheduler;
-use log::debug;
-use meilisearch_types::error::ResponseError;
-use meilisearch_types::tasks::KindWithContent;
-use serde_json::json;
-
-use crate::analytics::Analytics;
-use crate::extractors::authentication::policies::*;
-use crate::extractors::authentication::GuardedData;
-use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::SummarizedTaskView;
-
-pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::post().to(SeqHandler(create_snapshot))));
-}
-
-pub async fn create_snapshot(
-    index_scheduler: GuardedData<ActionPolicy<{ actions::SNAPSHOTS_CREATE }>, Data<IndexScheduler>>,
-    req: HttpRequest,
-    analytics: web::Data<dyn Analytics>,
-) -> Result<HttpResponse, ResponseError> {
-    analytics.publish("Snapshot Created".to_string(), json!({}), Some(&req));
-
-    let task = KindWithContent::SnapshotCreation;
-    let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
-
-    debug!("returns: {:?}", task);
-    Ok(HttpResponse::Accepted().json(task))
-}
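The deleted file is a compact example of the asynchronous task API: the handler only registers a `SnapshotCreation` task and immediately answers `202 Accepted` with a task summary, while the scheduler does the actual work later. A minimal actix-web sketch of that accept-then-process shape (the struct and its fields are simplified stand-ins for `SummarizedTaskView`):

    use actix_web::{web, HttpResponse};
    use serde::Serialize;

    #[derive(Serialize)]
    struct SummarizedTaskView {
        task_uid: u32,
        status: &'static str, // "enqueued": the snapshot is not built yet
    }

    async fn create_snapshot() -> HttpResponse {
        // Registering the task is cheap; the heavy snapshot work happens in the scheduler.
        let task = SummarizedTaskView { task_uid: 0, status: "enqueued" };
        HttpResponse::Accepted().json(task)
    }

    pub fn configure(cfg: &mut web::ServiceConfig) {
        // Mounted under /snapshots by the parent scope, as in the deleted module.
        cfg.service(web::resource("").route(web::post().to(create_snapshot)));
    }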
(file header not shown)

@@ -60,7 +60,8 @@ pub async fn swap_indexes(
     }
 
     let task = KindWithContent::IndexSwap { swaps };
-    let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
+    let task = index_scheduler.register(task)?;
+    let task: SummarizedTaskView = task.into();
     Ok(HttpResponse::Accepted().json(task))
 }
Some files were not shown because too many files have changed in this diff.