Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-12 15:45:48 +00:00)

Compare commits: v1.7.1...adapt-pref (2 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | b06a7a4861 |  |
|  | 8cc2bc4e17 |  |

.cargo/config.toml
@@ -1,2 +0,0 @@
-[alias]
-xtask = "run --release --package xtask --"
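
The removed alias is what makes the `cargo xtask ...` invocations in the deleted workflows below resolve: cargo rewrites `cargo xtask bench` into `cargo run --release --package xtask -- bench`. A minimal sketch of the cargo-xtask dispatch pattern (a hypothetical `main`, not the repository's actual `xtask` crate):

```rust
// Hypothetical xtask entry point; with the alias above, `cargo xtask bench --api-key X`
// runs this binary as `cargo run --release --package xtask -- bench --api-key X`.
fn main() {
    let mut args = std::env::args().skip(1); // skip the binary path itself
    match args.next().as_deref() {
        Some("bench") => run_bench(args.collect()),
        Some(other) => eprintln!("unknown xtask subcommand: {other}"),
        None => eprintln!("usage: cargo xtask <subcommand> [args...]"),
    }
}

fn run_bench(args: Vec<String>) {
    // A real implementation would parse flags and drive the benchmark runner.
    println!("would run benchmarks with args: {args:?}");
}
```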

17  .github/ISSUE_TEMPLATE/sprint_issue.md
@@ -27,23 +27,6 @@ Related spec: WIP
 - [ ] If prototype validated, merge changes into `main`
 - [ ] Update the spec
 
-### Reminders when modifying the Setting API
-
-<!--- Special steps to remind when adding a new index setting -->
-
-- [ ] Ensure the new setting route is at least tested by the [`test_setting_routes` macro](https://github.com/meilisearch/meilisearch/blob/5204c0b60b384cbc79621b6b2176fca086069e8e/meilisearch/tests/settings/get_settings.rs#L276)
-- [ ] Ensure Analytics are fully implemented
-- [ ] `/settings/my-new-setting` configurated in the [`make_setting_routes` macro](https://github.com/meilisearch/meilisearch/blob/5204c0b60b384cbc79621b6b2176fca086069e8e/meilisearch/src/routes/indexes/settings.rs#L141-L165)
-- [ ] global `/settings` route configurated in the [`update_all` function](https://github.com/meilisearch/meilisearch/blob/5204c0b60b384cbc79621b6b2176fca086069e8e/meilisearch/src/routes/indexes/settings.rs#L655-L751)
-- [ ] Ensure the dump serializing is consistent with the `/settings` route serializing, e.g., enums case can be different (`camelCase` in route and `PascalCase` in the dump)
-
-#### Special cases when adding a setting for an experimental feature
-
-- [ ] ⚠️ API stability: The setting does not appear on the main settings route when the feature has never been enabled (e.g. mark it `Unset` when returned from the index in this situation. See [an example](https://github.com/meilisearch/meilisearch/blob/7a89abd2a025606a42f8b219e539117eb2eb029f/meilisearch-types/src/settings.rs#L608))
-- [ ] The setting cannot be set when the feature is disabled, either by the main settings route or the subroute (see [`validate_settings` function](https://github.com/meilisearch/meilisearch/blob/7a89abd2a025606a42f8b219e539117eb2eb029f/meilisearch/src/routes/indexes/settings.rs#L811))
-- [ ] If possible, the setting is reset when the feature is disabled (hard if it requires reindexing)
-
 ## Impacted teams
 
 <!---Ping the related teams. Ask for the engine manager if any hesitation-->
-<!---@meilisearch/docs-team when there is any API change, e.g. settings addition-->

30  .github/workflows/bench-manual.yml
@@ -1,30 +0,0 @@
-name: Bench (manual)
-
-on:
-  workflow_dispatch:
-    inputs:
-      workload:
-        description: 'The path to the workloads to execute (workloads/...)'
-        required: true
-        default: 'workloads/movies.json'
-
-env:
-  WORKLOAD_NAME: ${{ github.event.inputs.workload }}
-
-jobs:
-  benchmarks:
-    name: Run and upload benchmarks
-    runs-on: benchmarks
-    timeout-minutes: 180 # 3h
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Run benchmarks - workload ${WORKLOAD_NAME} - branch ${{ github.ref }} - commit ${{ github.sha }}
-        run: |
-          cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Manual [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- ${WORKLOAD_NAME}
-

46  .github/workflows/bench-pr.yml
@@ -1,46 +0,0 @@
-name: Bench (PR)
-on:
-  issue_comment:
-    types: [created]
-
-permissions:
-  issues: write
-
-env:
-  GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
-
-jobs:
-  run-benchmarks-on-comment:
-    if: startsWith(github.event.comment.body, '/bench')
-    name: Run and upload benchmarks
-    runs-on: benchmarks
-    timeout-minutes: 180 # 3h
-    steps:
-      - name: Check for Command
-        id: command
-        uses: xt0rted/slash-command-action@v2
-        with:
-          command: bench
-          reaction-type: "rocket"
-          repo-token: ${{ env.GH_TOKEN }}
-
-      - uses: xt0rted/pull-request-comment-branch@v2
-        id: comment-branch
-        with:
-          repo_token: ${{ env.GH_TOKEN }}
-
-      - uses: actions/checkout@v3
-        if: success()
-        with:
-          fetch-depth: 0 # fetch full history to be able to get main commit sha
-          ref: ${{ steps.comment-branch.outputs.head_ref }}
-
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      - name: Run benchmarks on PR ${{ github.event.issue.id }}
-        run: |
-          cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "[Comment](${{ github.event.comment.url }}) on [#${{github.event.issue.id}}](${{ github.event.issue.url }})" -- ${{ steps.command.outputs.command-arguments }}

25  .github/workflows/bench-push-indexing.yml
@@ -1,25 +0,0 @@
-name: Indexing bench (push)
-
-on:
-  push:
-    branches:
-      - main
-
-jobs:
-  benchmarks:
-    name: Run and upload benchmarks
-    runs-on: benchmarks
-    timeout-minutes: 180 # 3h
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-
-      # Run benchmarks
-      - name: Run benchmarks - Dataset ${BENCH_NAME} - Branch main - Commit ${{ github.sha }}
-        run: |
-          cargo xtask bench --api-key "${{ secrets.BENCHMARK_API_KEY }}" --dashboard-url "${{ vars.BENCHMARK_DASHBOARD_URL }}" --reason "Push on `main` [Run #${{ github.run_id }}](https://github.com/meilisearch/meilisearch/actions/runs/${{ github.run_id }})" -- workloads/*.json
-

2  .github/workflows/publish-docker-images.yml
@@ -97,7 +97,7 @@ jobs:
       - name: Send CI information to Cloud team
         # Do not send if nightly build (i.e. 'schedule' or 'workflow_dispatch' event)
         if: github.event_name == 'push'
-        uses: peter-evans/repository-dispatch@v3
+        uses: peter-evans/repository-dispatch@v2
         with:
           token: ${{ secrets.MEILI_BOT_GH_PAT }}
           repository: meilisearch/meilisearch-cloud

38  .github/workflows/sdks-tests.yml
@@ -22,7 +22,7 @@ jobs:
     outputs:
       docker-image: ${{ steps.define-image.outputs.docker-image }}
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
       - name: Define the Docker image we need to use
         id: define-image
         run: |
@@ -46,11 +46,11 @@ jobs:
       MEILISEARCH_VERSION: ${{ needs.define-docker-image.outputs.docker-image }}
 
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-dotnet
       - name: Setup .NET Core
-        uses: actions/setup-dotnet@v4
+        uses: actions/setup-dotnet@v3
        with:
          dotnet-version: "6.0.x"
       - name: Install dependencies
@@ -75,12 +75,12 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-dart
       - uses: dart-lang/setup-dart@v1
        with:
-          sdk: 'latest'
+          sdk: 3.1.1
       - name: Install dependencies
        run: dart pub get
       - name: Run integration tests
@@ -100,10 +100,10 @@ jobs:
       - '7700:7700'
     steps:
       - name: Set up Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@v4
        with:
          go-version: stable
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-go
       - name: Get dependencies
@@ -129,11 +129,11 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-java
       - name: Set up Java
-        uses: actions/setup-java@v4
+        uses: actions/setup-java@v3
        with:
          java-version: 8
          distribution: 'zulu'
@@ -156,7 +156,7 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-js
       - name: Setup node
@@ -191,7 +191,7 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-php
       - name: Install PHP
@@ -220,11 +220,11 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-python
       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v4
       - name: Install pipenv
        uses: dschep/install-pipenv-action@v1
       - name: Install dependencies
@@ -245,7 +245,7 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-ruby
       - name: Set up Ruby 3
@@ -270,7 +270,7 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-rust
       - name: Build
@@ -291,7 +291,7 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-swift
       - name: Run tests
@@ -314,7 +314,7 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-js-plugins
       - name: Setup node
@@ -345,7 +345,7 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-rails
       - name: Set up Ruby 3
@@ -369,7 +369,7 @@ jobs:
     ports:
       - '7700:7700'
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v3
        with:
          repository: meilisearch/meilisearch-symfony
       - name: Install PHP

31  .github/workflows/test-suite.yml
@@ -31,10 +31,17 @@ jobs:
         apt-get update && apt-get install -y curl
         apt-get install build-essential -y
       - name: Setup test with Rust stable
+        if: github.event_name != 'schedule'
         uses: actions-rs/toolchain@v1
         with:
           toolchain: stable
           override: true
+      - name: Setup test with Rust nightly
+        if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
+        uses: actions-rs/toolchain@v1
+        with:
+          toolchain: nightly
+          override: true
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.1
       - name: Run cargo check without any default features
@@ -59,10 +66,6 @@ jobs:
       - uses: actions/checkout@v3
       - name: Cache dependencies
         uses: Swatinem/rust-cache@v2.7.1
-      - uses: actions-rs/toolchain@v1
-        with:
-          toolchain: stable
-          override: true
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -75,7 +78,7 @@ jobs:
           args: --locked --release --all
 
   test-all-features:
-    name: Tests almost all features
+    name: Tests all features
     runs-on: ubuntu-latest
     container:
       # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
@@ -91,12 +94,16 @@ jobs:
         with:
           toolchain: stable
           override: true
-      - name: Run cargo build with almost all features
-        run: |
-          cargo build --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda)"
-      - name: Run cargo test with almost all features
-        run: |
-          cargo test --workspace --locked --release --features "$(cargo xtask list-features --exclude-feature cuda)"
+      - name: Run cargo build with all features
+        uses: actions-rs/cargo@v1
+        with:
+          command: build
+          args: --workspace --locked --release --all-features
+      - name: Run cargo test with all features
+        uses: actions-rs/cargo@v1
+        with:
+          command: test
+          args: --workspace --locked --release --all-features
 
   test-disabled-tokenization:
     name: Test disabled tokenization
@@ -157,7 +164,7 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.75.0
+          toolchain: 1.71.1
           override: true
           components: clippy
       - name: Cache dependencies

2  .gitignore
@@ -9,8 +9,6 @@
 /data.ms
 /snapshots
 /dumps
-/bench
-/_xtask_benchmark.ms
 
 # Snapshots
 ## ... large

CONTRIBUTING.md
@@ -75,12 +75,6 @@ If you get a "Too many open files" error you might want to increase the open fil
 ulimit -Sn 3000
 ```
-
-#### Build tools
-
-Meilisearch follows the [cargo xtask](https://github.com/matklad/cargo-xtask) workflow to provide some build tools.
-
-Run `cargo xtask --help` from the root of the repository to find out what is available.
 
 ## Git Guidelines
 
 ### Git Branches

2173  Cargo.lock (generated)
File diff suppressed because it is too large

Cargo.toml
@@ -16,16 +16,11 @@ members = [
     "json-depth-checker",
     "benchmarks",
     "fuzzers",
-    "tracing-trace",
-    "xtask", "build-info",
 ]
 
 [workspace.package]
-version = "1.7.1"
-authors = [
-    "Quentin de Quelen <quentin@dequelen.me>",
-    "Clément Renault <clement@meilisearch.com>",
-]
+version = "1.6.0"
+authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"
 readme = "README.md"

Dockerfile
@@ -1,5 +1,5 @@
 # Compile
-FROM rust:1.75.0-alpine3.18 AS compiler
+FROM rust:1.71.1-alpine3.18 AS compiler
 
 RUN apk add -q --update-cache --no-cache build-base openssl-dev
 
@@ -8,7 +8,7 @@ WORKDIR /
 ARG COMMIT_SHA
 ARG COMMIT_DATE
 ARG GIT_TAG
-ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_DESCRIBE=${GIT_TAG}
+ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
 ENV RUSTFLAGS="-C target-feature=-crt-static"
 
 COPY . .
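
The renamed variable matters because the Docker build runs outside the git directory, so git metadata reaches the binary only through these `ENV` values, read at compile time. A minimal sketch of that consumption, assuming the `build-info` setup shown further down in this compare:

```rust
// option_env! is evaluated at compile time and yields None when the variable
// was not set during the build, so a local (non-Docker) build still compiles.
pub fn build_metadata() -> (Option<&'static str>, Option<&'static str>) {
    let sha = option_env!("VERGEN_GIT_SHA");
    // v1.7.1 side of the diff; the older branch's vergen 7 setup exposes
    // VERGEN_GIT_SEMVER_LIGHTWEIGHT instead of VERGEN_GIT_DESCRIBE.
    let describe = option_env!("VERGEN_GIT_DESCRIBE");
    (sha, describe)
}
```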

2  LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2019-2024 Meili SAS
+Copyright (c) 2019-2022 Meili SAS
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

10  README.md
@@ -41,10 +41,10 @@ Meilisearch helps you shape a delightful search experience in a snap, offering f
 ## ✨ Features
 
 - **Search-as-you-type:** find search results in less than 50 milliseconds
-- **[Typo tolerance](https://www.meilisearch.com/docs/learn/configuration/typo_tolerance?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** get relevant matches even when queries contain typos and misspellings
-- **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** enhance your users' search experience with custom filters and build a faceted search interface in a few lines of code
+- **[Typo tolerance](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features#typo-tolerance):** get relevant matches even when queries contain typos and misspellings
+- **[Filtering](https://www.meilisearch.com/docs/learn/fine_tuning_results/filtering?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features) and [faceted search](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** enhance your user's search experience with custom filters and build a faceted search interface in a few lines of code
 - **[Sorting](https://www.meilisearch.com/docs/learn/fine_tuning_results/sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** sort results based on price, date, or pretty much anything else your users need
-- **[Synonym support](https://www.meilisearch.com/docs/learn/configuration/synonyms?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** configure synonyms to include more relevant content in your search results
+- **[Synonym support](https://www.meilisearch.com/docs/learn/getting_started/customizing_relevancy?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features#synonyms):** configure synonyms to include more relevant content in your search results
 - **[Geosearch](https://www.meilisearch.com/docs/learn/fine_tuning_results/geosearch?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** filter and sort documents based on geographic data
 - **[Extensive language support](https://www.meilisearch.com/docs/learn/what_is_meilisearch/language?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** search datasets in any language, with optimized support for Chinese, Japanese, Hebrew, and languages using the Latin alphabet
 - **[Security management](https://www.meilisearch.com/docs/learn/security/master_api_keys?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=features):** control which users can access what data with API keys that allow fine-grained permissions handling
@@ -61,6 +61,8 @@ You can consult Meilisearch's documentation at [https://www.meilisearch.com/docs
 
 For basic instructions on how to set up Meilisearch, add documents to an index, and search for documents, take a look at our [Quick Start](https://www.meilisearch.com/docs/learn/getting_started/quick_start?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) guide.
 
+You may also want to check out [Meilisearch 101](https://www.meilisearch.com/docs/learn/getting_started/filtering_and_sorting?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=get-started) for an introduction to some of Meilisearch's most popular features.
+
 ## ⚡ Supercharge your Meilisearch experience
 
 Say goodbye to server deployment and manual updates with [Meilisearch Cloud](https://www.meilisearch.com/cloud?utm_campaign=oss&utm_source=github&utm_medium=meilisearch). No credit card required.
@@ -99,7 +101,7 @@ Meilisearch is a search engine created by [Meili](https://www.welcometothejungle
 
 - For feature requests, please visit our [product repository](https://github.com/meilisearch/product/discussions)
 - Found a bug? Open an [issue](https://github.com/meilisearch/meilisearch/issues)!
-- Want to be part of our Discord community? [Join us!](https://discord.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=contact)
+- Want to be part of our Discord community? [Join us!](https://discord.gg/meilisearch)
 
 Thank you for your support!
 

assets/grafana-dashboard.json
@@ -106,7 +106,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "meilisearch_index_count{job=\"$job\", instance=\"$instance\"}",
+      "expr": "meilisearch_index_count{job=\"meilisearch\", instance=\"$instance\"}",
       "interval": "",
       "legendFormat": "",
       "range": true,
@@ -165,7 +165,7 @@
         "type": "prometheus"
       },
       "editorMode": "builder",
-      "expr": "meilisearch_index_docs_count{job=\"$job\", index=\"$Index\", instance=\"$instance\"}",
+      "expr": "meilisearch_index_docs_count{job=\"meilisearch\", index=\"$Index\", instance=\"$instance\"}",
       "hide": false,
       "range": true,
       "refId": "A"
@@ -228,7 +228,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "round(increase(meilisearch_http_requests_total{method=\"POST\", path=\"/indexes/$Index/search\", job=\"$job\"}[1h]))",
+      "expr": "round(increase(meilisearch_http_requests_total{method=\"POST\", path=\"/indexes/$Index/search\", job=\"meilisearch\"}[1h]))",
       "interval": "",
       "legendFormat": "",
       "range": true,
@@ -288,7 +288,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "round(increase(meilisearch_http_requests_total{method=\"POST\", path=\"/indexes/$Index/search\", job=\"$job\"}[24h]))",
+      "expr": "round(increase(meilisearch_http_requests_total{method=\"POST\", path=\"/indexes/$Index/search\", job=\"meilisearch\"}[24h]))",
       "interval": "",
       "legendFormat": "",
       "range": true,
@@ -348,7 +348,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "round(increase(meilisearch_http_requests_total{method=\"POST\", path=\"/indexes/$Index/search\", job=\"$job\"}[30d]))",
+      "expr": "round(increase(meilisearch_http_requests_total{method=\"POST\", path=\"/indexes/$Index/search\", job=\"meilisearch\"}[30d]))",
       "interval": "",
       "legendFormat": "",
       "range": true,
@@ -447,7 +447,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "meilisearch_db_size_bytes{job=\"$job\", instance=\"$instance\"}",
+      "expr": "meilisearch_db_size_bytes{job=\"meilisearch\", instance=\"$instance\"}",
       "interval": "",
       "legendFormat": "Database size on disk",
       "range": true,
@@ -458,7 +458,7 @@
         "type": "prometheus"
       },
       "editorMode": "builder",
-      "expr": "meilisearch_used_db_size_bytes{job=\"$job\", instance=\"$instance\"}",
+      "expr": "meilisearch_used_db_size_bytes{job=\"meilisearch\", instance=\"$instance\"}",
       "hide": false,
       "legendFormat": "Used bytes",
       "range": true,
@@ -553,7 +553,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "rate(meilisearch_http_response_time_seconds_sum{instance=\"$instance\", job=\"$job\"}[5m]) / rate(meilisearch_http_response_time_seconds_count[5m])",
+      "expr": "rate(meilisearch_http_response_time_seconds_sum{instance=\"$instance\", job=\"meilisearch\"}[5m]) / rate(meilisearch_http_response_time_seconds_count[5m])",
       "interval": "",
       "legendFormat": "{{method}} {{path}}",
       "range": true,
@@ -646,7 +646,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "rate(meilisearch_http_requests_total{instance=\"$instance\", job=\"$job\"}[5m])",
+      "expr": "rate(meilisearch_http_requests_total{instance=\"$instance\", job=\"meilisearch\"}[5m])",
       "interval": "",
       "legendFormat": "{{method}} {{path}}",
       "range": true,
@@ -744,7 +744,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "sum by(le) (increase(meilisearch_http_response_time_seconds_bucket{path=\"/indexes/$Index/search\", instance=\"$instance\", job=\"$job\"}[30s]))",
+      "expr": "sum by(le) (increase(meilisearch_http_response_time_seconds_bucket{path=\"/indexes/$Index/search\", instance=\"$instance\", job=\"meilisearch\"}[30s]))",
       "format": "heatmap",
       "interval": "",
       "legendFormat": "{{le}}",
@@ -854,7 +854,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "meilisearch_nb_tasks{instance=\"$instance\", job=\"$job\", kind=\"statuses\"}",
+      "expr": "meilisearch_nb_tasks{instance=\"$instance\", job=\"meilisearch\", kind=\"statuses\"}",
       "interval": "",
       "legendFormat": "{{value}} ",
       "range": true,
@@ -947,7 +947,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "meilisearch_nb_tasks{instance=\"$instance\", job=\"$job\", kind=\"types\"}",
+      "expr": "meilisearch_nb_tasks{instance=\"$instance\", job=\"meilisearch\", kind=\"types\"}",
       "interval": "",
       "legendFormat": "{{value}} ",
       "range": true,
@@ -1040,7 +1040,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "meilisearch_nb_tasks{instance=\"$instance\", job=\"$job\", kind=\"indexes\"}",
+      "expr": "meilisearch_nb_tasks{instance=\"$instance\", job=\"meilisearch\", kind=\"indexes\"}",
       "interval": "",
       "legendFormat": "{{value}} ",
       "range": true,
@@ -1161,7 +1161,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "rate(process_cpu_seconds_total{job=\"$job\", instance=\"$instance\"}[1m])",
+      "expr": "rate(process_cpu_seconds_total{job=\"meilisearch\", instance=\"$instance\"}[1m])",
       "interval": "",
       "legendFormat": "process",
       "range": true,
@@ -1264,7 +1264,7 @@
       },
       "editorMode": "builder",
       "exemplar": true,
-      "expr": "process_resident_memory_bytes{job=\"$job\", instance=\"$instance\"} / 1024 / 1024",
+      "expr": "process_resident_memory_bytes{job=\"meilisearch\", instance=\"$instance\"} / 1024 / 1024",
       "interval": "",
       "legendFormat": "process",
       "range": true,
@@ -1342,33 +1342,6 @@
       "skipUrlSync": false,
       "sort": 0,
       "type": "query"
-    },
-    {
-      "current": {
-        "selected": true,
-        "text": "meilisearch",
-        "value": "meilisearch"
-      },
-      "datasource": {
-        "type": "prometheus"
-      },
-      "definition": "label_values(job)",
-      "description": "Prometheus job_name from scrape config (default is meilisearch)",
-      "hide": 0,
-      "includeAll": false,
-      "label": "Job",
-      "multi": false,
-      "name": "job",
-      "options": [],
-      "query": {
-        "query": "label_values(job)",
-        "refId": "StandardVariableQuery"
-      },
-      "refresh": 1,
-      "regex": "",
-      "skipUrlSync": false,
-      "sort": 0,
-      "type": "query"
     }
   ]
 },

benchmarks/Cargo.toml
@@ -11,24 +11,24 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.79"
-csv = "1.3.0"
+anyhow = "1.0.70"
+csv = "1.2.1"
 milli = { path = "../milli" }
-mimalloc = { version = "0.1.39", default-features = false }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
+mimalloc = { version = "0.1.37", default-features = false }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
 
 [dev-dependencies]
 criterion = { version = "0.5.1", features = ["html_reports"] }
 rand = "0.8.5"
 rand_chacha = "0.3.1"
-roaring = "0.10.2"
+roaring = "0.10.1"
 
 [build-dependencies]
-anyhow = "1.0.79"
-bytes = "1.5.0"
+anyhow = "1.0.70"
+bytes = "1.4.0"
 convert_case = "0.6.0"
-flate2 = "1.0.28"
-reqwest = { version = "0.11.23", features = ["blocking", "rustls-tls"], default-features = false }
+flate2 = "1.0.25"
+reqwest = { version = "0.11.16", features = ["blocking", "rustls-tls"], default-features = false }
 
 [features]
 default = ["milli/all-tokenizations"]

build-info/Cargo.toml
@@ -1,18 +0,0 @@
-[package]
-name = "build-info"
-version.workspace = true
-authors.workspace = true
-description.workspace = true
-homepage.workspace = true
-readme.workspace = true
-edition.workspace = true
-license.workspace = true
-
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
-[dependencies]
-time = { version = "0.3.34", features = ["parsing"] }
-
-[build-dependencies]
-anyhow = "1.0.80"
-vergen-git2 = "1.0.0-beta.2"

build-info/build.rs
@@ -1,22 +0,0 @@
-fn main() {
-    if let Err(err) = emit_git_variables() {
-        println!("cargo:warning=vergen: {}", err);
-    }
-}
-
-fn emit_git_variables() -> anyhow::Result<()> {
-    // Note: any code that needs VERGEN_ environment variables should take care to define them manually in the Dockerfile and pass them
-    // in the corresponding GitHub workflow (publish_docker.yml).
-    // This is due to the Dockerfile building the binary outside of the git directory.
-    let mut builder = vergen_git2::Git2Builder::default();
-
-    builder.branch(true);
-    builder.commit_timestamp(true);
-    builder.commit_message(true);
-    builder.describe(true, true, None);
-    builder.sha(false);
-
-    let git2 = builder.build()?;
-
-    vergen_git2::Emitter::default().fail_on_error().add_instructions(&git2)?.emit()
-}

build-info/src/lib.rs
@@ -1,203 +0,0 @@
-use time::format_description::well_known::Iso8601;
-
-#[derive(Debug, Clone)]
-pub struct BuildInfo {
-    pub branch: Option<&'static str>,
-    pub describe: Option<DescribeResult>,
-    pub commit_sha1: Option<&'static str>,
-    pub commit_msg: Option<&'static str>,
-    pub commit_timestamp: Option<time::OffsetDateTime>,
-}
-
-impl BuildInfo {
-    pub fn from_build() -> Self {
-        let branch: Option<&'static str> = option_env!("VERGEN_GIT_BRANCH");
-        let describe = DescribeResult::from_build();
-        let commit_sha1 = option_env!("VERGEN_GIT_SHA");
-        let commit_msg = option_env!("VERGEN_GIT_COMMIT_MESSAGE");
-        let commit_timestamp = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP");
-
-        let commit_timestamp = commit_timestamp.and_then(|commit_timestamp| {
-            time::OffsetDateTime::parse(commit_timestamp, &Iso8601::DEFAULT).ok()
-        });
-
-        Self { branch, describe, commit_sha1, commit_msg, commit_timestamp }
-    }
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum DescribeResult {
-    Prototype { name: &'static str },
-    Release { version: &'static str, major: u64, minor: u64, patch: u64 },
-    Prerelease { version: &'static str, major: u64, minor: u64, patch: u64, rc: u64 },
-    NotATag { describe: &'static str },
-}
-
-impl DescribeResult {
-    pub fn new(describe: &'static str) -> Self {
-        if let Some(name) = prototype_name(describe) {
-            Self::Prototype { name }
-        } else if let Some(release) = release_version(describe) {
-            release
-        } else if let Some(prerelease) = prerelease_version(describe) {
-            prerelease
-        } else {
-            Self::NotATag { describe }
-        }
-    }
-
-    pub fn from_build() -> Option<Self> {
-        let describe: &'static str = option_env!("VERGEN_GIT_DESCRIBE")?;
-        Some(Self::new(describe))
-    }
-
-    pub fn as_tag(&self) -> Option<&'static str> {
-        match self {
-            DescribeResult::Prototype { name } => Some(name),
-            DescribeResult::Release { version, .. } => Some(version),
-            DescribeResult::Prerelease { version, .. } => Some(version),
-            DescribeResult::NotATag { describe: _ } => None,
-        }
-    }
-
-    pub fn as_prototype(&self) -> Option<&'static str> {
-        match self {
-            DescribeResult::Prototype { name } => Some(name),
-            DescribeResult::Release { .. }
-            | DescribeResult::Prerelease { .. }
-            | DescribeResult::NotATag { .. } => None,
-        }
-    }
-}
-
-/// Parses the input as a prototype name.
-///
-/// Returns `Some(prototype_name)` if the following conditions are met on this value:
-///
-/// 1. starts with `prototype-`,
-/// 2. ends with `-<some_number>`,
-/// 3. does not end with `<some_number>-<some_number>`.
-///
-/// Otherwise, returns `None`.
-fn prototype_name(describe: &'static str) -> Option<&'static str> {
-    if !describe.starts_with("prototype-") {
-        return None;
-    }
-
-    let mut rsplit_prototype = describe.rsplit('-');
-    // last component MUST be a number
-    rsplit_prototype.next()?.parse::<u64>().ok()?;
-    // before than last component SHALL NOT be a number
-    rsplit_prototype.next()?.parse::<u64>().err()?;
-
-    Some(describe)
-}
-
-fn release_version(describe: &'static str) -> Option<DescribeResult> {
-    if !describe.starts_with('v') {
-        return None;
-    }
-
-    // full release version don't contain a `-`
-    if describe.contains('-') {
-        return None;
-    }
-
-    // full release version parse as vX.Y.Z, with X, Y, Z numbers.
-    let mut dots = describe[1..].split('.');
-    let major: u64 = dots.next()?.parse().ok()?;
-    let minor: u64 = dots.next()?.parse().ok()?;
-    let patch: u64 = dots.next()?.parse().ok()?;
-
-    if dots.next().is_some() {
-        return None;
-    }
-
-    Some(DescribeResult::Release { version: describe, major, minor, patch })
-}
-
-fn prerelease_version(describe: &'static str) -> Option<DescribeResult> {
-    // prerelease version is in the shape vM.N.P-rc.C
-    let mut hyphen = describe.rsplit('-');
-    let prerelease = hyphen.next()?;
-    if !prerelease.starts_with("rc.") {
-        return None;
-    }
-
-    let rc: u64 = prerelease[3..].parse().ok()?;
-
-    let release = hyphen.next()?;
-
-    let DescribeResult::Release { version: _, major, minor, patch } = release_version(release)?
-    else {
-        return None;
-    };
-
-    Some(DescribeResult::Prerelease { version: describe, major, minor, patch, rc })
-}
-
-#[cfg(test)]
-mod test {
-    use super::DescribeResult;
-
-    fn assert_not_a_tag(describe: &'static str) {
-        assert_eq!(DescribeResult::NotATag { describe }, DescribeResult::new(describe))
-    }
-
-    fn assert_proto(describe: &'static str) {
-        assert_eq!(DescribeResult::Prototype { name: describe }, DescribeResult::new(describe))
-    }
-
-    fn assert_release(describe: &'static str, major: u64, minor: u64, patch: u64) {
-        assert_eq!(
-            DescribeResult::Release { version: describe, major, minor, patch },
-            DescribeResult::new(describe)
-        )
-    }
-
-    fn assert_prerelease(describe: &'static str, major: u64, minor: u64, patch: u64, rc: u64) {
-        assert_eq!(
-            DescribeResult::Prerelease { version: describe, major, minor, patch, rc },
-            DescribeResult::new(describe)
-        )
-    }
-
-    #[test]
-    fn not_a_tag() {
-        assert_not_a_tag("whatever-fuzzy");
-        assert_not_a_tag("whatever-fuzzy-5-ggg-dirty");
-        assert_not_a_tag("whatever-fuzzy-120-ggg-dirty");
-
-        // technically a tag, but not a proto nor a version, so not parsed as a tag
-        assert_not_a_tag("whatever");
-
-        // dirty version
-        assert_not_a_tag("v1.7.0-1-ggga-dirty");
-        assert_not_a_tag("v1.7.0-rc.1-1-ggga-dirty");
-
-        // after version
-        assert_not_a_tag("v1.7.0-1-ggga");
-        assert_not_a_tag("v1.7.0-rc.1-1-ggga");
-
-        // after proto
-        assert_not_a_tag("protoype-tag-0-1-ggga");
-        assert_not_a_tag("protoype-tag-0-1-ggga-dirty");
-    }
-
-    #[test]
-    fn prototype() {
-        assert_proto("prototype-tag-0");
-        assert_proto("prototype-tag-10");
-        assert_proto("prototype-long-name-tag-10");
-    }
-
-    #[test]
-    fn release() {
-        assert_release("v1.7.2", 1, 7, 2);
-    }
-
-    #[test]
-    fn prerelease() {
-        assert_prerelease("v1.7.2-rc.3", 1, 7, 2, 3);
-    }
-}
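
For reference, a minimal usage sketch of the deleted parser, mirroring its own unit tests above (it assumes the crate is still in the workspace and importable as `build_info`):

```rust
use build_info::DescribeResult; // assumes the deleted crate is still available

fn main() {
    // A plain `vX.Y.Z` tag parses as a release...
    assert_eq!(
        DescribeResult::new("v1.7.2"),
        DescribeResult::Release { version: "v1.7.2", major: 1, minor: 7, patch: 2 }
    );
    // ...a `-rc.N` suffix turns it into a prerelease...
    assert_eq!(
        DescribeResult::new("v1.7.2-rc.3"),
        DescribeResult::Prerelease { version: "v1.7.2-rc.3", major: 1, minor: 7, patch: 2, rc: 3 }
    );
    // ...and a dirty `git describe` output falls through to NotATag.
    assert!(matches!(DescribeResult::new("v1.7.0-1-ggga-dirty"), DescribeResult::NotATag { .. }));
}
```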

dump/Cargo.toml
@@ -11,22 +11,22 @@ readme.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.79"
-flate2 = "1.0.28"
-http = "0.2.11"
+anyhow = "1.0.70"
+flate2 = "1.0.25"
+http = "0.2.9"
+log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-once_cell = "1.19.0"
-regex = "1.10.2"
-roaring = { version = "0.10.2", features = ["serde"] }
-serde = { version = "1.0.195", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
-tar = "0.4.40"
-tempfile = "3.9.0"
-thiserror = "1.0.56"
-time = { version = "0.3.31", features = ["serde-well-known", "formatting", "parsing", "macros"] }
-tracing = "0.1.40"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+once_cell = "1.17.1"
+regex = "1.7.3"
+roaring = { version = "0.10.1", features = ["serde"] }
+serde = { version = "1.0.160", features = ["derive"] }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
+tar = "0.4.38"
+tempfile = "3.5.0"
+thiserror = "1.0.40"
+time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+uuid = { version = "1.3.1", features = ["serde", "v4"] }
 
 [dev-dependencies]
 big_s = "1.0.2"

dump/src/reader/compat/v1_to_v2.rs
@@ -120,7 +120,7 @@ impl From<v1::settings::Settings> for v2::Settings<v2::Unchecked> {
                     criterion.as_ref().map(ToString::to_string)
                 }
                 Err(()) => {
-                    tracing::warn!(
+                    log::warn!(
                         "Could not import the following ranking rule: `{}`.",
                         ranking_rule
                     );
@@ -152,11 +152,11 @@ impl From<v1::update::UpdateStatus> for Option<v2::updates::UpdateStatus> {
         use v2::updates::UpdateStatus as UpdateStatusV2;
         Some(match source {
             UpdateStatusV1::Enqueued { content } => {
-                tracing::warn!(
+                log::warn!(
                     "Cannot import task {} (importing enqueued tasks from v1 dumps is unsupported)",
                     content.update_id
                 );
-                tracing::warn!("Task will be skipped in the queue of imported tasks.");
+                log::warn!("Task will be skipped in the queue of imported tasks.");
 
                 return None;
             }
@@ -229,7 +229,7 @@ impl From<v1::update::UpdateType> for Option<v2::updates::UpdateMeta> {
         Some(match source {
             v1::update::UpdateType::ClearAll => v2::updates::UpdateMeta::ClearDocuments,
             v1::update::UpdateType::Customs => {
-                tracing::warn!("Ignoring task with type 'Customs' that is no longer supported");
+                log::warn!("Ignoring task with type 'Customs' that is no longer supported");
                 return None;
             }
             v1::update::UpdateType::DocumentsAddition { .. } => {
@@ -296,7 +296,7 @@ impl From<v1::settings::RankingRule> for Option<v2::settings::Criterion> {
             v1::settings::RankingRule::Proximity => Some(v2::settings::Criterion::Proximity),
             v1::settings::RankingRule::Attribute => Some(v2::settings::Criterion::Attribute),
             v1::settings::RankingRule::WordsPosition => {
-                tracing::warn!("Removing the 'WordsPosition' ranking rule that is no longer supported, please check the resulting ranking rules of your indexes");
+                log::warn!("Removing the 'WordsPosition' ranking rule that is no longer supported, please check the resulting ranking rules of your indexes");
                 None
             }
             v1::settings::RankingRule::Exactness => Some(v2::settings::Criterion::Exactness),
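
This file and the remaining `dump` hunks are one mechanical migration seen in reverse: the v1.7.1 side logs through `tracing`, the older branch through `log`. Both facades accept the same format-string call shape, which is why the diff is a pure macro rename; a minimal sketch assuming both crates as dependencies:

```rust
fn warn_both_ways(ranking_rule: &str) {
    // `log` facade: plain format-string logging.
    log::warn!("Could not import the following ranking rule: `{}`.", ranking_rule);

    // `tracing` facade: the same call shape compiles unchanged...
    tracing::warn!("Could not import the following ranking rule: `{}`.", ranking_rule);

    // ...and it can also attach structured fields to the event.
    tracing::warn!(ranking_rule, "could not import ranking rule");
}
```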

dump/src/reader/compat/v2_to_v3.rs
@@ -1,3 +1,4 @@
+use std::convert::TryInto;
 use std::str::FromStr;
 
 use time::OffsetDateTime;
@@ -145,8 +146,8 @@ impl From<v2::updates::UpdateStatus> for v3::updates::UpdateStatus {
                     started_processing_at: processing.started_processing_at,
                 }),
                 Err(e) => {
-                    tracing::warn!("Error with task {}: {}", processing.from.update_id, e);
-                    tracing::warn!("Task will be marked as `Failed`.");
+                    log::warn!("Error with task {}: {}", processing.from.update_id, e);
+                    log::warn!("Task will be marked as `Failed`.");
                     v3::updates::UpdateStatus::Failed(v3::updates::Failed {
                         from: v3::updates::Processing {
                             from: v3::updates::Enqueued {
@@ -171,8 +172,8 @@ impl From<v2::updates::UpdateStatus> for v3::updates::UpdateStatus {
                     enqueued_at: enqueued.enqueued_at,
                 }),
                 Err(e) => {
-                    tracing::warn!("Error with task {}: {}", enqueued.update_id, e);
-                    tracing::warn!("Task will be marked as `Failed`.");
+                    log::warn!("Error with task {}: {}", enqueued.update_id, e);
+                    log::warn!("Task will be marked as `Failed`.");
                     v3::updates::UpdateStatus::Failed(v3::updates::Failed {
                         from: v3::updates::Processing {
                             from: v3::updates::Enqueued {
@@ -352,7 +353,7 @@ impl From<String> for v3::Code {
             "malformed_payload" => v3::Code::MalformedPayload,
             "missing_payload" => v3::Code::MissingPayload,
             other => {
-                tracing::warn!("Unknown error code {}", other);
+                log::warn!("Unknown error code {}", other);
                 v3::Code::UnretrievableErrorCode
             }
         }

dump/src/reader/compat/v3_to_v4.rs
@@ -76,20 +76,20 @@ impl CompatV3ToV4 {
             let index_uid = match index_uid {
                 Some(uid) => uid,
                 None => {
-                    tracing::warn!(
+                    log::warn!(
                         "Error while importing the update {}.",
                         task.update.id()
                     );
-                    tracing::warn!(
+                    log::warn!(
                         "The index associated to the uuid `{}` could not be retrieved.",
                         task.uuid.to_string()
                     );
                     if task.update.is_finished() {
                         // we're fucking with his history but not his data, that's ok-ish.
-                        tracing::warn!("The index-uuid will be set as `unknown`.");
+                        log::warn!("The index-uuid will be set as `unknown`.");
                         String::from("unknown")
                     } else {
-                        tracing::warn!("The task will be ignored.");
+                        log::warn!("The task will be ignored.");
                         return None;
                     }
                 }

dump/src/reader/compat/v4_to_v5.rs
@@ -305,7 +305,7 @@ impl From<v4::ResponseError> for v5::ResponseError {
             "invalid_api_key_expires_at" => v5::Code::InvalidApiKeyExpiresAt,
             "invalid_api_key_description" => v5::Code::InvalidApiKeyDescription,
             other => {
-                tracing::warn!("Unknown error code {}", other);
+                log::warn!("Unknown error code {}", other);
                 v5::Code::UnretrievableErrorCode
             }
         };
@@ -304,7 +304,7 @@ impl From<v5::ResponseError> for v6::ResponseError {
|
|||||||
"immutable_field" => v6::Code::BadRequest,
|
"immutable_field" => v6::Code::BadRequest,
|
||||||
"api_key_already_exists" => v6::Code::ApiKeyAlreadyExists,
|
"api_key_already_exists" => v6::Code::ApiKeyAlreadyExists,
|
||||||
other => {
|
other => {
|
||||||
tracing::warn!("Unknown error code {}", other);
|
log::warn!("Unknown error code {}", other);
|
||||||
v6::Code::UnretrievableErrorCode
|
v6::Code::UnretrievableErrorCode
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -329,7 +329,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
|
|||||||
new_ranking_rules.push(new_rule);
|
new_ranking_rules.push(new_rule);
|
||||||
}
|
}
|
||||||
Err(_) => {
|
Err(_) => {
|
||||||
tracing::warn!("Error while importing settings. The ranking rule `{rule}` does not exist anymore.")
|
log::warn!("Error while importing settings. The ranking rule `{rule}` does not exist anymore.")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[allow(clippy::enum_variant_names)]
|
|
||||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
|
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
|
||||||
pub enum Code {
|
pub enum Code {
|
||||||
// index related error
|
// index related error
|
||||||
|
|||||||
@@ -95,7 +95,6 @@ impl fmt::Display for ErrorType {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(clippy::enum_variant_names)]
|
|
||||||
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
|
#[derive(Serialize, Deserialize, Debug, Clone, Copy)]
|
||||||
pub enum Code {
|
pub enum Code {
|
||||||
// index related error
|
// index related error
|
||||||
|
|||||||
@@ -31,7 +31,6 @@ impl ResponseError {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[allow(clippy::enum_variant_names)]
|
|
||||||
#[derive(Deserialize, Debug, Clone, Copy)]
|
#[derive(Deserialize, Debug, Clone, Copy)]
|
||||||
#[cfg_attr(test, derive(serde::Serialize))]
|
#[cfg_attr(test, derive(serde::Serialize))]
|
||||||
pub enum Code {
|
pub enum Code {
|
||||||
|
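Note: the hunks above replace structured `tracing::warn!` calls with format-string `log::warn!` calls. The two macro families are interchangeable for plain format strings; `tracing` additionally accepts key-value fields. A minimal sketch of both styles, assuming the `log` and `tracing` crates as dependencies (function names are illustrative, not code from this diff):

    fn log_style(update_id: u64, err: &str) {
        // `log` side of the diff: printf-style format string only
        log::warn!("Error with task {}: {}", update_id, err);
    }

    fn tracing_style(update_id: u64, err: &str) {
        // `tracing` side of the diff: structured fields plus a message;
        // `%err` records the value using its Display implementation
        tracing::warn!(update_id, error = %err, "Error with task");
    }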
@@ -2,10 +2,10 @@ use std::fs::{self, File};
 use std::io::{BufRead, BufReader, ErrorKind};
 use std::path::Path;
 
+use log::debug;
 pub use meilisearch_types::milli;
 use tempfile::TempDir;
 use time::OffsetDateTime;
-use tracing::debug;
 use uuid::Uuid;
 
 use super::Document;

@@ -11,10 +11,9 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-tempfile = "3.9.0"
-thiserror = "1.0.56"
-tracing = "0.1.40"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+tempfile = "3.5.0"
+thiserror = "1.0.40"
+uuid = { version = "1.3.1", features = ["serde", "v4"] }
 
 [dev-dependencies]
-faux = "0.1.10"
+faux = "0.1.9"
@@ -1,5 +1,5 @@
 use std::fs::File as StdFile;
-use std::io::Write;
+use std::ops::{Deref, DerefMut};
 use std::path::{Path, PathBuf};
 use std::str::FromStr;
 

@@ -22,6 +22,20 @@ pub enum Error {
 
 pub type Result<T> = std::result::Result<T, Error>;
 
+impl Deref for File {
+    type Target = NamedTempFile;
+
+    fn deref(&self) -> &Self::Target {
+        &self.file
+    }
+}
+
+impl DerefMut for File {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.file
+    }
+}
+
 #[derive(Clone, Debug)]
 pub struct FileStore {
     path: PathBuf,

@@ -42,7 +56,7 @@ impl FileStore {
         let file = NamedTempFile::new_in(&self.path)?;
         let uuid = Uuid::new_v4();
         let path = self.path.join(uuid.to_string());
-        let update_file = File { file: Some(file), path };
+        let update_file = File { file, path };
 
         Ok((uuid, update_file))
     }

@@ -53,7 +67,7 @@ impl FileStore {
         let file = NamedTempFile::new_in(&self.path)?;
         let uuid = Uuid::from_u128(uuid);
         let path = self.path.join(uuid.to_string());
-        let update_file = File { file: Some(file), path };
+        let update_file = File { file, path };
 
         Ok((uuid, update_file))
     }

@@ -61,13 +75,7 @@ impl FileStore {
     /// Returns the file corresponding to the requested uuid.
     pub fn get_update(&self, uuid: Uuid) -> Result<StdFile> {
         let path = self.get_update_path(uuid);
-        let file = match StdFile::open(path) {
-            Ok(file) => file,
-            Err(e) => {
-                tracing::error!("Can't access update file {uuid}: {e}");
-                return Err(e.into());
-            }
-        };
+        let file = StdFile::open(path)?;
         Ok(file)
     }
 

@@ -102,13 +110,9 @@ impl FileStore {
 
     pub fn delete(&self, uuid: Uuid) -> Result<()> {
         let path = self.path.join(uuid.to_string());
-        if let Err(e) = std::fs::remove_file(path) {
-            tracing::error!("Can't delete file {uuid}: {e}");
-            Err(e.into())
-        } else {
-            Ok(())
-        }
+        std::fs::remove_file(path)?;
+        Ok(())
     }
 
     /// List the Uuids of the files in the FileStore
     pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>>> {

@@ -132,40 +136,16 @@ impl FileStore {
 
 pub struct File {
     path: PathBuf,
-    file: Option<NamedTempFile>,
+    file: NamedTempFile,
 }
 
 impl File {
-    pub fn dry_file() -> Result<Self> {
-        Ok(Self { path: PathBuf::new(), file: None })
-    }
-
     pub fn persist(self) -> Result<()> {
-        if let Some(file) = self.file {
-            file.persist(&self.path)?;
-        }
+        self.file.persist(&self.path)?;
         Ok(())
     }
 }
 
-impl Write for File {
-    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
-        if let Some(file) = self.file.as_mut() {
-            file.write(buf)
-        } else {
-            Ok(buf.len())
-        }
-    }
-
-    fn flush(&mut self) -> std::io::Result<()> {
-        if let Some(file) = self.file.as_mut() {
-            file.flush()
-        } else {
-            Ok(())
-        }
-    }
-}
-
 #[cfg(test)]
 mod test {
     use std::io::Write;
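Note: in the file-store hunks above, one side wraps the temp file in an `Option` so that a "dry" file can accept writes without touching the filesystem. A self-contained sketch of that pattern, assuming the `tempfile` crate (names beyond `NamedTempFile` are illustrative):

    use std::io::Write;
    use std::path::PathBuf;

    use tempfile::NamedTempFile;

    pub struct File {
        path: PathBuf,
        file: Option<NamedTempFile>,
    }

    impl File {
        // A dry file: no backing temp file; writes are accepted and discarded.
        pub fn dry_file() -> Self {
            Self { path: PathBuf::new(), file: None }
        }

        pub fn persist(self) -> std::io::Result<()> {
            if let Some(file) = self.file {
                // rename the temp file into its final location
                file.persist(&self.path).map_err(|e| e.error)?;
            }
            Ok(())
        }
    }

    impl Write for File {
        fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
            match self.file.as_mut() {
                Some(file) => file.write(buf),
                // pretend the whole buffer was written
                None => Ok(buf.len()),
            }
        }

        fn flush(&mut self) -> std::io::Result<()> {
            match self.file.as_mut() {
                Some(file) => file.flush(),
                None => Ok(()),
            }
        }
    }

With this shape a dry file behaves like /dev/null: `write` reports success without storing anything and `persist` is a no-op.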
@@ -13,8 +13,8 @@ license.workspace = true
 
 [dependencies]
 nom = "7.1.3"
-nom_locate = "4.2.0"
-unescaper = "0.1.3"
+nom_locate = "4.1.0"
+unescaper = "0.1.2"
 
 [dev-dependencies]
-insta = "1.34.0"
+insta = "1.29.0"

@@ -11,10 +11,10 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-arbitrary = { version = "1.3.2", features = ["derive"] }
-clap = { version = "4.4.17", features = ["derive"] }
-fastrand = "2.0.1"
+arbitrary = { version = "1.3.0", features = ["derive"] }
+clap = { version = "4.3.0", features = ["derive"] }
+fastrand = "2.0.0"
 milli = { path = "../milli" }
-serde = { version = "1.0.195", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
-tempfile = "3.9.0"
+serde = { version = "1.0.160", features = ["derive"] }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
+tempfile = "3.5.0"
@@ -11,37 +11,30 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.79"
+anyhow = "1.0.70"
 bincode = "1.3.3"
-csv = "1.3.0"
+csv = "1.2.1"
 derive_builder = "0.12.0"
 dump = { path = "../dump" }
-enum-iterator = "1.5.0"
+enum-iterator = "1.4.0"
 file-store = { path = "../file-store" }
-flate2 = "1.0.28"
+log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 page_size = "0.5.0"
 puffin = { version = "0.16.0", features = ["serialization"] }
-rayon = "1.8.1"
-roaring = { version = "0.10.2", features = ["serde"] }
-serde = { version = "1.0.195", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
+roaring = { version = "0.10.1", features = ["serde"] }
+serde = { version = "1.0.160", features = ["derive"] }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
 synchronoise = "1.0.1"
-tempfile = "3.9.0"
-thiserror = "1.0.56"
-time = { version = "0.3.31", features = [
-    "serde-well-known",
-    "formatting",
-    "parsing",
-    "macros",
-] }
-tracing = "0.1.40"
-ureq = "2.9.1"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+tempfile = "3.5.0"
+thiserror = "1.0.40"
+time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+uuid = { version = "1.3.1", features = ["serde", "v4"] }
 
 [dev-dependencies]
 big_s = "1.0.2"
-crossbeam = "0.8.4"
-insta = { version = "1.34.0", features = ["json", "redactions"] }
+crossbeam = "0.8.2"
+insta = { version = "1.29.0", features = ["json", "redactions"] }
 meili-snap = { path = "../meili-snap" }
+nelson = { git = "https://github.com/meilisearch/nelson.git", rev = "675f13885548fb415ead8fbb447e9e6d9314000a"}
@@ -24,6 +24,7 @@ use std::fs::{self, File};
 use std::io::BufWriter;
 
 use dump::IndexMetadata;
+use log::{debug, error, info, trace};
 use meilisearch_types::error::Code;
 use meilisearch_types::heed::{RoTxn, RwTxn};
 use meilisearch_types::milli::documents::{obkv_to_object, DocumentsBatchReader};

@@ -59,7 +60,7 @@ pub(crate) enum Batch {
         /// The list of tasks that were processing when this task cancelation appeared.
         previous_processing_tasks: RoaringBitmap,
     },
-    TaskDeletions(Vec<Task>),
+    TaskDeletion(Task),
    SnapshotCreation(Vec<Task>),
    Dump(Task),
    IndexOperation {

@@ -142,28 +143,23 @@ pub(crate) enum IndexOperation {
 
 impl Batch {
     /// Return the task ids associated with this batch.
-    pub fn ids(&self) -> RoaringBitmap {
+    pub fn ids(&self) -> Vec<TaskId> {
         match self {
             Batch::TaskCancelation { task, .. }
+            | Batch::TaskDeletion(task)
             | Batch::Dump(task)
             | Batch::IndexCreation { task, .. }
-            | Batch::IndexUpdate { task, .. } => {
-                RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
-            }
-            Batch::SnapshotCreation(tasks)
-            | Batch::TaskDeletions(tasks)
-            | Batch::IndexDeletion { tasks, .. } => {
-                RoaringBitmap::from_iter(tasks.iter().map(|task| task.uid))
+            | Batch::IndexUpdate { task, .. } => vec![task.uid],
+            Batch::SnapshotCreation(tasks) | Batch::IndexDeletion { tasks, .. } => {
+                tasks.iter().map(|task| task.uid).collect()
             }
             Batch::IndexOperation { op, .. } => match op {
                 IndexOperation::DocumentOperation { tasks, .. }
                 | IndexOperation::Settings { tasks, .. }
                 | IndexOperation::DocumentClear { tasks, .. } => {
-                    RoaringBitmap::from_iter(tasks.iter().map(|task| task.uid))
-                }
-                IndexOperation::IndexDocumentDeletionByFilter { task, .. } => {
-                    RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
+                    tasks.iter().map(|task| task.uid).collect()
                 }
+                IndexOperation::IndexDocumentDeletionByFilter { task, .. } => vec![task.uid],
                 IndexOperation::SettingsAndDocumentOperation {
                     document_import_tasks: tasks,
                     settings_tasks: other,

@@ -173,11 +169,9 @@ impl Batch {
                     cleared_tasks: tasks,
                     settings_tasks: other,
                     ..
-                } => RoaringBitmap::from_iter(tasks.iter().chain(other).map(|task| task.uid)),
+                } => tasks.iter().chain(other).map(|task| task.uid).collect(),
             },
-            Batch::IndexSwap { task } => {
-                RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
-            }
+            Batch::IndexSwap { task } => vec![task.uid],
         }
     }
 
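Note: the `ids()` hunks above switch the return type between `Vec<TaskId>` and `roaring::RoaringBitmap`. A bitmap keeps task ids sorted and deduplicated, and set operations on it are cheap. A small sketch assuming the `roaring` crate (0.10) and `u32` task ids:

    use roaring::RoaringBitmap;

    fn ids_as_vec(task_uids: &[u32]) -> Vec<u32> {
        task_uids.to_vec()
    }

    fn ids_as_bitmap(task_uids: &[u32]) -> RoaringBitmap {
        // deduplicates and keeps ids sorted; union/intersection come for free
        task_uids.iter().copied().collect()
    }

    fn single_id_bitmap(uid: u32) -> RoaringBitmap {
        // from_sorted_iter is fallible; a one-element iterator is always sorted
        RoaringBitmap::from_sorted_iter(std::iter::once(uid)).unwrap()
    }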
@@ -186,7 +180,7 @@ impl Batch {
         use Batch::*;
         match self {
             TaskCancelation { .. }
-            | TaskDeletions(_)
+            | TaskDeletion(_)
             | SnapshotCreation(_)
             | Dump(_)
             | IndexSwap { .. } => None,

@@ -205,7 +199,7 @@ impl fmt::Display for Batch {
         let tasks = self.ids();
         match self {
             Batch::TaskCancelation { .. } => f.write_str("TaskCancelation")?,
-            Batch::TaskDeletions(_) => f.write_str("TaskDeletion")?,
+            Batch::TaskDeletion(_) => f.write_str("TaskDeletion")?,
             Batch::SnapshotCreation(_) => f.write_str("SnapshotCreation")?,
             Batch::Dump(_) => f.write_str("Dump")?,
             Batch::IndexOperation { op, .. } => write!(f, "{op}")?,

@@ -521,7 +515,6 @@ impl IndexScheduler {
     /// 3. We get the *next* snapshot to process.
     /// 4. We get the *next* dump to process.
     /// 5. We get the *next* tasks to process for a specific index.
-    #[tracing::instrument(level = "trace", skip(self, rtxn), target = "indexing::scheduler")]
     pub(crate) fn create_next_batch(&self, rtxn: &RoTxn) -> Result<Option<Batch>> {
         #[cfg(test)]
         self.maybe_fail(crate::tests::FailureLocation::InsideCreateBatch)?;

@@ -546,9 +539,9 @@ impl IndexScheduler {
 
         // 2. we get the next task to delete
         let to_delete = self.get_kind(rtxn, Kind::TaskDeletion)? & enqueued;
-        if !to_delete.is_empty() {
-            let tasks = self.get_existing_tasks(rtxn, to_delete)?;
-            return Ok(Some(Batch::TaskDeletions(tasks)));
+        if let Some(task_id) = to_delete.min() {
+            let task = self.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
+            return Ok(Some(Batch::TaskDeletion(task)));
         }
 
         // 3. we batch the snapshot.
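Note: in the `create_next_batch` hunk above, one side drains a single `TaskDeletion` task per scheduler tick (`to_delete.min()`), while the other batches every enqueued deletion at once. A sketch of the two selection strategies over a bitmap of enqueued deletion ids (assumed shape, not the scheduler's real API):

    use roaring::RoaringBitmap;

    // one task per tick
    fn next_single(enqueued_deletions: &RoaringBitmap) -> Option<u32> {
        enqueued_deletions.min()
    }

    // all of them in a single batch
    fn next_batched(enqueued_deletions: &RoaringBitmap) -> Option<RoaringBitmap> {
        if enqueued_deletions.is_empty() {
            None
        } else {
            Some(enqueued_deletions.clone())
        }
    }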
@@ -627,7 +620,6 @@ impl IndexScheduler {
     /// The list of tasks that were processed. The metadata of each task in the returned
     /// list is updated accordingly, with the exception of the its date fields
     /// [`finished_at`](meilisearch_types::tasks::Task::finished_at) and [`started_at`](meilisearch_types::tasks::Task::started_at).
-    #[tracing::instrument(level = "trace", skip(self, batch), target = "indexing::scheduler", fields(batch=batch.to_string()))]
     pub(crate) fn process_batch(&self, batch: Batch) -> Result<Vec<Task>> {
         #[cfg(test)]
         {

@@ -677,10 +669,9 @@ impl IndexScheduler {
             Ok(()) => {
                 for content_uuid in canceled_tasks_content_uuids {
                     if let Err(error) = self.delete_update_file(content_uuid) {
-                        tracing::error!(
-                            file_content_uuid = %content_uuid,
-                            %error,
-                            "Failed deleting content file"
+                        error!(
+                            "We failed deleting the content file indentified as {}: {}",
+                            content_uuid, error
                         )
                     }
                 }

@@ -690,31 +681,19 @@ impl IndexScheduler {
 
                 Ok(vec![task])
             }
-            Batch::TaskDeletions(mut tasks) => {
+            Batch::TaskDeletion(mut task) => {
                 // 1. Retrieve the tasks that matched the query at enqueue-time.
-                let mut matched_tasks = RoaringBitmap::new();
-
-                for task in tasks.iter() {
+                let matched_tasks =
                     if let KindWithContent::TaskDeletion { tasks, query: _ } = &task.kind {
-                        matched_tasks |= tasks;
+                        tasks
                     } else {
                         unreachable!()
-                    }
-                }
-
-                let mut wtxn = self.env.write_txn()?;
-                let mut deleted_tasks = self.delete_matched_tasks(&mut wtxn, &matched_tasks)?;
-                wtxn.commit()?;
-
-                for task in tasks.iter_mut() {
-                    task.status = Status::Succeeded;
-                    let KindWithContent::TaskDeletion { tasks, query: _ } = &task.kind else {
-                        unreachable!()
                     };
 
-                    let deleted_tasks_count = deleted_tasks.intersection_len(tasks);
-                    deleted_tasks -= tasks;
+                let mut wtxn = self.env.write_txn()?;
+                let deleted_tasks_count = self.delete_matched_tasks(&mut wtxn, matched_tasks)?;
 
+                task.status = Status::Succeeded;
                 match &mut task.details {
                     Some(Details::TaskDeletion {
                         matched_tasks: _,

@@ -725,8 +704,8 @@ impl IndexScheduler {
                     }
                     _ => unreachable!(),
                 }
-                }
-                Ok(tasks)
+                wtxn.commit()?;
+                Ok(vec![task])
             }
             Batch::SnapshotCreation(mut tasks) => {
                 fs::create_dir_all(&self.snapshots_path)?;

@@ -957,8 +936,8 @@ impl IndexScheduler {
                 };
 
                 // the index operation can take a long time, so save this handle to make it available to the search for the duration of the tick
-                self.index_mapper
-                    .set_currently_updating_index(Some((index_uid.clone(), index.clone())));
+                *self.currently_updating_index.write().unwrap() =
+                    Some((index_uid.clone(), index.clone()));
 
                 let mut index_wtxn = index.write_txn()?;
                 let tasks = self.apply_index_operation(&mut index_wtxn, &index, op)?;

@@ -979,10 +958,7 @@ impl IndexScheduler {
 
                 match res {
                     Ok(_) => (),
-                    Err(e) => tracing::error!(
-                        error = &e as &dyn std::error::Error,
-                        "Could not write the stats of the index"
-                    ),
+                    Err(e) => error!("Could not write the stats of the index {}", e),
                 }
 
                 Ok(tasks)

@@ -1010,7 +986,7 @@ impl IndexScheduler {
                 builder.set_primary_key(primary_key);
                 let must_stop_processing = self.must_stop_processing.clone();
                 builder.execute(
-                    |indexing_step| tracing::debug!(update = ?indexing_step),
+                    |indexing_step| debug!("update: {:?}", indexing_step),
                     || must_stop_processing.get(),
                 )?;
                 index_wtxn.commit()?;

@@ -1037,10 +1013,7 @@ impl IndexScheduler {
 
                 match res {
                     Ok(_) => (),
-                    Err(e) => tracing::error!(
-                        error = &e as &dyn std::error::Error,
-                        "Could not write the stats of the index"
-                    ),
+                    Err(e) => error!("Could not write the stats of the index {}", e),
                 }
 
                 Ok(vec![task])
@@ -1159,11 +1132,6 @@ impl IndexScheduler {
     ///
     /// ## Return
     /// The list of processed tasks.
-    #[tracing::instrument(
-        level = "trace",
-        skip(self, index_wtxn, index),
-        target = "indexing::scheduler"
-    )]
     fn apply_index_operation<'i>(
         &self,
         index_wtxn: &mut RwTxn<'i>,
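Note: the removed `#[tracing::instrument(...)]` attribute above wraps each call to the function in a tracing span; `skip(...)` keeps the listed arguments out of the recorded span fields, and `target` routes the span to a specific log target. A minimal sketch on a free function (illustrative names, assuming the `tracing` crate):

    #[tracing::instrument(level = "trace", skip(payload), target = "indexing::scheduler")]
    fn process(task_id: u32, payload: &[u8]) -> usize {
        // emitted inside the span created by the attribute
        tracing::trace!(len = payload.len(), "processing payload");
        payload.len()
    }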
@@ -1224,7 +1192,7 @@ impl IndexScheduler {
                     milli::update::Settings::new(index_wtxn, index, indexer_config);
                 builder.set_primary_key(primary_key);
                 builder.execute(
-                    |indexing_step| tracing::debug!(update = ?indexing_step),
+                    |indexing_step| debug!("update: {:?}", indexing_step),
                     || must_stop_processing.clone().get(),
                 )?;
                 primary_key_has_been_set = true;

@@ -1243,7 +1211,7 @@ impl IndexScheduler {
                     index,
                     indexer_config,
                     config,
-                    |indexing_step| tracing::trace!(?indexing_step, "Update"),
+                    |indexing_step| trace!("update: {:?}", indexing_step),
                     || must_stop_processing.get(),
                 )?;
 

@@ -1315,7 +1283,7 @@ impl IndexScheduler {
 
                 if !tasks.iter().all(|res| res.error.is_some()) {
                     let addition = builder.execute()?;
-                    tracing::info!(indexing_result = ?addition, "document indexing done");
+                    info!("document addition done: {:?}", addition);
                 } else if primary_key_has_been_set {
                     // Everything failed but we've set a primary key.
                     // We need to remove it.

@@ -1323,7 +1291,7 @@ impl IndexScheduler {
                         milli::update::Settings::new(index_wtxn, index, indexer_config);
                     builder.reset_primary_key();
                     builder.execute(
-                        |indexing_step| tracing::trace!(update = ?indexing_step),
+                        |indexing_step| trace!("update: {:?}", indexing_step),
                         || must_stop_processing.clone().get(),
                     )?;
                 }

@@ -1383,6 +1351,9 @@ impl IndexScheduler {
 
                 for (task, (_, settings)) in tasks.iter_mut().zip(settings) {
                     let checked_settings = settings.clone().check();
+                    if matches!(checked_settings.embedders, milli::update::Setting::Set(_)) {
+                        self.features().check_vector("Passing `embedders` in settings")?
+                    }
                     task.details = Some(Details::SettingsUpdate { settings: Box::new(settings) });
                     apply_settings_to_builder(&checked_settings, &mut builder);
 

@@ -1393,7 +1364,7 @@ impl IndexScheduler {
 
                 let must_stop_processing = self.must_stop_processing.clone();
                 builder.execute(
-                    |indexing_step| tracing::debug!(update = ?indexing_step),
+                    |indexing_step| debug!("update: {:?}", indexing_step),
                     || must_stop_processing.get(),
                 )?;
 

@@ -1467,11 +1438,7 @@ impl IndexScheduler {
     /// Delete each given task from all the databases (if it is deleteable).
     ///
     /// Return the number of tasks that were actually deleted.
-    fn delete_matched_tasks(
-        &self,
-        wtxn: &mut RwTxn,
-        matched_tasks: &RoaringBitmap,
-    ) -> Result<RoaringBitmap> {
+    fn delete_matched_tasks(&self, wtxn: &mut RwTxn, matched_tasks: &RoaringBitmap) -> Result<u64> {
         // 1. Remove from this list the tasks that we are not allowed to delete
         let enqueued_tasks = self.get_status(wtxn, Status::Enqueued)?;
         let processing_tasks = &self.processing_tasks.read().unwrap().processing.clone();

@@ -1536,7 +1503,7 @@ impl IndexScheduler {
             }
         }
 
-        Ok(to_delete_tasks)
+        Ok(to_delete_tasks.len())
     }
 
     /// Cancel each given task from all the databases (if it is cancelable).

@@ -1605,7 +1572,7 @@ fn delete_document_by_filter<'a>(
         index,
         indexer_config,
         config,
-        |indexing_step| tracing::debug!(update = ?indexing_step),
+        |indexing_step| debug!("update: {:?}", indexing_step),
         || must_stop_processing.get(),
     )?;
 
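Note: `delete_matched_tasks` changes between returning a bare count (`Result<u64>`) and returning the deleted set as a `RoaringBitmap`. Returning the set lets the caller attribute deletions to each deletion task with `intersection_len` and subtract them out so nothing is double-counted. A sketch of that accounting, assuming the `roaring` crate:

    use roaring::RoaringBitmap;

    fn report_deletions(mut deleted: RoaringBitmap, per_task_matches: &[RoaringBitmap]) {
        for matched in per_task_matches {
            // how many of this task's matches were actually deleted
            let count = deleted.intersection_len(matched);
            // consume them so later tasks do not double-count
            deleted -= matched;
            println!("deleted {count} tasks");
        }
    }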
@@ -48,8 +48,6 @@ impl From<DateField> for Code {
 pub enum Error {
     #[error("{1}")]
     WithCustomErrorCode(Code, Box<Self>),
-    #[error("Received bad task id: {received} should be >= to {expected}.")]
-    BadTaskId { received: TaskId, expected: TaskId },
     #[error("Index `{0}` not found.")]
     IndexNotFound(String),
     #[error("Index `{0}` already exists.")]

@@ -163,7 +161,6 @@ impl Error {
         match self {
             Error::IndexNotFound(_)
             | Error::WithCustomErrorCode(_, _)
-            | Error::BadTaskId { .. }
             | Error::IndexAlreadyExists(_)
             | Error::SwapDuplicateIndexFound(_)
             | Error::SwapDuplicateIndexesFound(_)

@@ -208,7 +205,6 @@ impl ErrorCode for Error {
     fn error_code(&self) -> Code {
         match self {
             Error::WithCustomErrorCode(code, _) => *code,
-            Error::BadTaskId { .. } => Code::BadRequest,
             Error::IndexNotFound(_) => Code::IndexNotFound,
             Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
             Error::SwapDuplicateIndexesFound(_) => Code::InvalidSwapDuplicateIndexFound,
@@ -30,6 +30,19 @@ impl RoFeatures {
         self.runtime
     }
 
+    pub fn check_score_details(&self) -> Result<()> {
+        if self.runtime.score_details {
+            Ok(())
+        } else {
+            Err(FeatureNotEnabledError {
+                disabled_action: "Computing score details",
+                feature: "score details",
+                issue_link: "https://github.com/meilisearch/product/discussions/674",
+            }
+            .into())
+        }
+    }
+
     pub fn check_metrics(&self) -> Result<()> {
         if self.runtime.metrics {
             Ok(())

@@ -43,19 +56,6 @@ impl RoFeatures {
         }
     }
 
-    pub fn check_logs_route(&self) -> Result<()> {
-        if self.runtime.logs_route {
-            Ok(())
-        } else {
-            Err(FeatureNotEnabledError {
-                disabled_action: "Modifying logs through the `/logs/*` routes",
-                feature: "logs route",
-                issue_link: "https://github.com/orgs/meilisearch/discussions/721",
-            }
-            .into())
-        }
-    }
-
     pub fn check_vector(&self, disabled_action: &'static str) -> Result<()> {
         if self.runtime.vector_store {
             Ok(())

@@ -94,7 +94,6 @@ impl FeatureData {
             runtime_features_db.get(&txn, EXPERIMENTAL_FEATURES)?.unwrap_or_default();
         let runtime = Arc::new(RwLock::new(RuntimeTogglableFeatures {
             metrics: instance_features.metrics || persisted_features.metrics,
-            logs_route: instance_features.logs_route || persisted_features.logs_route,
             ..persisted_features
         }));
 
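Note: `check_score_details` and `check_logs_route` above share one shape: test a runtime flag and, on failure, return a descriptive "feature not enabled" error. A simplified, self-contained sketch (the error type here stands in for the real `FeatureNotEnabledError`):

    #[derive(Debug)]
    struct FeatureNotEnabled {
        disabled_action: &'static str,
        feature: &'static str,
        issue_link: &'static str,
    }

    struct RoFeatures {
        score_details: bool,
    }

    impl RoFeatures {
        fn check_score_details(&self) -> Result<(), FeatureNotEnabled> {
            if self.score_details {
                Ok(())
            } else {
                Err(FeatureNotEnabled {
                    disabled_action: "Computing score details",
                    feature: "score details",
                    issue_link: "https://github.com/meilisearch/product/discussions/674",
                })
            }
        }
    }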
@@ -3,13 +3,13 @@ use std::sync::{Arc, RwLock};
 use std::time::Duration;
 use std::{fs, thread};
 
+use log::error;
 use meilisearch_types::heed::types::{SerdeJson, Str};
 use meilisearch_types::heed::{Database, Env, RoTxn, RwTxn};
 use meilisearch_types::milli::update::IndexerConfig;
 use meilisearch_types::milli::{FieldDistribution, Index};
 use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
-use tracing::error;
 use uuid::Uuid;
 
 use self::index_map::IndexMap;

@@ -69,10 +69,6 @@ pub struct IndexMapper {
     /// Whether we open a meilisearch index with the MDB_WRITEMAP option or not.
     enable_mdb_writemap: bool,
     pub indexer_config: Arc<IndexerConfig>,
-
-    /// A few types of long running batches of tasks that act on a single index set this field
-    /// so that a handle to the index is available from other threads (search) in an optimized manner.
-    currently_updating_index: Arc<RwLock<Option<(String, Index)>>>,
 }
 
 /// Whether the index is available for use or is forbidden to be inserted back in the index map

@@ -155,7 +151,6 @@ impl IndexMapper {
             index_growth_amount,
             enable_mdb_writemap,
             indexer_config: Arc::new(indexer_config),
-            currently_updating_index: Default::default(),
         })
     }
 

@@ -308,14 +303,6 @@ impl IndexMapper {
 
     /// Return an index, may open it if it wasn't already opened.
     pub fn index(&self, rtxn: &RoTxn, name: &str) -> Result<Index> {
-        if let Some((current_name, current_index)) =
-            self.currently_updating_index.read().unwrap().as_ref()
-        {
-            if current_name == name {
-                return Ok(current_index.clone());
-            }
-        }
-
         let uuid = self
             .index_mapping
             .get(rtxn, name)?

@@ -487,8 +474,4 @@ impl IndexMapper {
     pub fn indexer_config(&self) -> &IndexerConfig {
         &self.indexer_config
     }
-
-    pub fn set_currently_updating_index(&self, index: Option<(String, Index)>) {
-        *self.currently_updating_index.write().unwrap() = index;
-    }
 }

@@ -15,7 +15,6 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
 
     let IndexScheduler {
         autobatching_enabled,
-        cleanup_enabled: _,
         must_stop_processing: _,
         processing_tasks,
         file_store,

@@ -38,11 +37,10 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
         snapshots_path: _,
         auth_path: _,
         version_file_path: _,
-        webhook_url: _,
-        webhook_authorization_header: _,
         test_breakpoint_sdr: _,
         planned_failures: _,
         run_loop_iteration: _,
+        currently_updating_index: _,
         embedders: _,
     } = scheduler;
 
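Note: the `IndexMapper` hunks above add, on one side, a `currently_updating_index` slot: a shared `RwLock` holding a handle to the index a long-running batch is writing, so `index()` can serve search threads without re-opening it. A sketch of the idea with a stand-in `Index` type (not the real handle):

    use std::sync::{Arc, RwLock};

    #[derive(Clone)]
    struct Index;

    #[derive(Clone, Default)]
    struct CurrentlyUpdating(Arc<RwLock<Option<(String, Index)>>>);

    impl CurrentlyUpdating {
        fn set(&self, entry: Option<(String, Index)>) {
            *self.0.write().unwrap() = entry;
        }

        // fast path consulted before the on-disk lookup
        fn get(&self, name: &str) -> Option<Index> {
            match self.0.read().unwrap().as_ref() {
                Some((current, index)) if current == name => Some(index.clone()),
                _ => None,
            }
        }
    }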
File diff suppressed because it is too large
@@ -34,10 +34,12 @@ catto: { number_of_documents: 1, field_distribution: {"id": 1} }
 [timestamp] [3,]
 ----------------------------------------------------------------------
 ### Started At:
-[timestamp] [2,3,]
+[timestamp] [2,]
+[timestamp] [3,]
 ----------------------------------------------------------------------
 ### Finished At:
-[timestamp] [2,3,]
+[timestamp] [2,]
+[timestamp] [3,]
 ----------------------------------------------------------------------
 ### File Store:
 00000000-0000-0000-0000-000000000001
@@ -1,90 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-[
-  {
-    "uid": 0,
-    "enqueuedAt": "[date]",
-    "startedAt": "[date]",
-    "finishedAt": "[date]",
-    "error": null,
-    "canceledBy": null,
-    "details": {
-      "IndexInfo": {
-        "primary_key": null
-      }
-    },
-    "status": "succeeded",
-    "kind": {
-      "indexCreation": {
-        "index_uid": "doggo",
-        "primary_key": null
-      }
-    }
-  },
-  {
-    "uid": 1,
-    "enqueuedAt": "[date]",
-    "startedAt": "[date]",
-    "finishedAt": "[date]",
-    "error": {
-      "message": "Index `doggo` already exists.",
-      "code": "index_already_exists",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#index_already_exists"
-    },
-    "canceledBy": null,
-    "details": {
-      "IndexInfo": {
-        "primary_key": null
-      }
-    },
-    "status": "failed",
-    "kind": {
-      "indexCreation": {
-        "index_uid": "doggo",
-        "primary_key": null
-      }
-    }
-  },
-  {
-    "uid": 2,
-    "enqueuedAt": "[date]",
-    "startedAt": "[date]",
-    "finishedAt": "[date]",
-    "error": null,
-    "canceledBy": null,
-    "details": {
-      "IndexInfo": {
-        "primary_key": null
-      }
-    },
-    "status": "enqueued",
-    "kind": {
-      "indexCreation": {
-        "index_uid": "doggo",
-        "primary_key": null
-      }
-    }
-  },
-  {
-    "uid": 3,
-    "enqueuedAt": "[date]",
-    "startedAt": "[date]",
-    "finishedAt": "[date]",
-    "error": null,
-    "canceledBy": null,
-    "details": {
-      "IndexInfo": {
-        "primary_key": null
-      }
-    },
-    "status": "enqueued",
-    "kind": {
-      "indexCreation": {
-        "index_uid": "doggo",
-        "primary_key": null
-      }
-    }
-  }
-]

@@ -1,90 +0,0 @@
----
-source: index-scheduler/src/lib.rs
----
-[
-  {
-    "uid": 0,
-    "enqueuedAt": "[date]",
-    "startedAt": "[date]",
-    "finishedAt": "[date]",
-    "error": null,
-    "canceledBy": null,
-    "details": {
-      "IndexInfo": {
-        "primary_key": null
-      }
-    },
-    "status": "succeeded",
-    "kind": {
-      "indexCreation": {
-        "index_uid": "doggo",
-        "primary_key": null
-      }
-    }
-  },
-  {
-    "uid": 1,
-    "enqueuedAt": "[date]",
-    "startedAt": "[date]",
-    "finishedAt": "[date]",
-    "error": {
-      "message": "Index `doggo` already exists.",
-      "code": "index_already_exists",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#index_already_exists"
-    },
-    "canceledBy": null,
-    "details": {
-      "IndexInfo": {
-        "primary_key": null
-      }
-    },
-    "status": "failed",
-    "kind": {
-      "indexCreation": {
-        "index_uid": "doggo",
-        "primary_key": null
-      }
-    }
-  },
-  {
-    "uid": 2,
-    "enqueuedAt": "[date]",
-    "startedAt": "[date]",
-    "finishedAt": "[date]",
-    "error": null,
-    "canceledBy": null,
-    "details": {
-      "IndexInfo": {
-        "primary_key": null
-      }
-    },
-    "status": "enqueued",
-    "kind": {
-      "indexCreation": {
-        "index_uid": "doggo",
-        "primary_key": null
-      }
-    }
-  },
-  {
-    "uid": 3,
-    "enqueuedAt": "[date]",
-    "startedAt": "[date]",
-    "finishedAt": "[date]",
-    "error": null,
-    "canceledBy": null,
-    "details": {
-      "IndexInfo": {
-        "primary_key": null
-      }
-    },
-    "status": "enqueued",
-    "kind": {
-      "indexCreation": {
-        "index_uid": "doggo",
-        "primary_key": null
-      }
-    }
-  }
-]
@@ -1,4 +1,5 @@
 use std::borrow::Cow;
+use std::convert::TryInto;
 
 use meilisearch_types::heed::{BoxedError, BytesDecode, BytesEncode};
 use uuid::Uuid;

@@ -11,6 +11,6 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-insta = { version = "^1.34.0", features = ["json", "redactions"] }
+insta = { version = "^1.29.0", features = ["json", "redactions"] }
 md5 = "0.7.0"
-once_cell = "1.19"
+once_cell = "1.17"

@@ -11,16 +11,16 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-base64 = "0.21.7"
-enum-iterator = "1.5.0"
+base64 = "0.21.0"
+enum-iterator = "1.4.0"
 hmac = "0.12.1"
 maplit = "1.0.2"
 meilisearch-types = { path = "../meilisearch-types" }
 rand = "0.8.5"
-roaring = { version = "0.10.2", features = ["serde"] }
-serde = { version = "1.0.195", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
-sha2 = "0.10.8"
-thiserror = "1.0.56"
-time = { version = "0.3.31", features = ["serde-well-known", "formatting", "parsing", "macros"] }
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+roaring = { version = "0.10.1", features = ["serde"] }
+serde = { version = "1.0.160", features = ["derive"] }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
+sha2 = "0.10.6"
+thiserror = "1.0.40"
+time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+uuid = { version = "1.3.1", features = ["serde", "v4"] }
@@ -1,6 +1,7 @@
 use std::borrow::Cow;
 use std::cmp::Reverse;
 use std::collections::HashSet;
+use std::convert::{TryFrom, TryInto};
 use std::fs::create_dir_all;
 use std::path::Path;
 use std::result::Result as StdResult;

@@ -11,31 +11,31 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-actix-web = { version = "4.4.1", default-features = false }
-anyhow = "1.0.79"
+actix-web = { version = "4.3.1", default-features = false }
+anyhow = "1.0.70"
 convert_case = "0.6.0"
-csv = "1.3.0"
-deserr = { version = "0.6.1", features = ["actix-web"] }
-either = { version = "1.9.0", features = ["serde"] }
-enum-iterator = "1.5.0"
+csv = "1.2.1"
+deserr = { version = "0.6.0", features = ["actix-web"] }
+either = { version = "1.8.1", features = ["serde"] }
+enum-iterator = "1.4.0"
 file-store = { path = "../file-store" }
-flate2 = "1.0.28"
+flate2 = "1.0.25"
 fst = "0.4.7"
 memmap2 = "0.7.1"
 milli = { path = "../milli" }
-roaring = { version = "0.10.2", features = ["serde"] }
-serde = { version = "1.0.195", features = ["derive"] }
+roaring = { version = "0.10.1", features = ["serde"] }
+serde = { version = "1.0.160", features = ["derive"] }
 serde-cs = "0.2.4"
-serde_json = "1.0.111"
-tar = "0.4.40"
-tempfile = "3.9.0"
-thiserror = "1.0.56"
-time = { version = "0.3.31", features = ["serde-well-known", "formatting", "parsing", "macros"] }
-tokio = "1.35"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+serde_json = "1.0.95"
+tar = "0.4.38"
+tempfile = "3.5.0"
+thiserror = "1.0.40"
+time = { version = "0.3.20", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+tokio = "1.27"
+uuid = { version = "1.3.1", features = ["serde", "v4"] }
 
 [dev-dependencies]
-insta = "1.34.0"
+insta = "1.29.0"
 meili-snap = { path = "../meili-snap" }
 
 [features]

@@ -54,5 +54,3 @@ thai = ["milli/thai"]
 greek = ["milli/greek"]
 # allow khmer specialized tokenization
 khmer = ["milli/khmer"]
-# allow vietnamese specialized tokenization
-vietnamese = ["milli/vietnamese"]
@@ -1,6 +1,6 @@
 use std::fmt::{self, Debug, Display};
 use std::fs::File;
-use std::io::{self, BufWriter, Write};
+use std::io::{self, Seek, Write};
 use std::marker::PhantomData;
 
 use memmap2::MmapOptions;

@@ -104,8 +104,8 @@ impl ErrorCode for DocumentFormatError {
 }
 
 /// Reads CSV from input and write an obkv batch to writer.
-pub fn read_csv(file: &File, writer: impl Write, delimiter: u8) -> Result<u64> {
-    let mut builder = DocumentsBatchBuilder::new(BufWriter::new(writer));
+pub fn read_csv(file: &File, writer: impl Write + Seek, delimiter: u8) -> Result<u64> {
+    let mut builder = DocumentsBatchBuilder::new(writer);
     let mmap = unsafe { MmapOptions::new().map(file)? };
     let csv = csv::ReaderBuilder::new().delimiter(delimiter).from_reader(mmap.as_ref());
     builder.append_csv(csv).map_err(|e| (PayloadType::Csv { delimiter }, e))?;

@@ -117,8 +117,8 @@ pub fn read_csv(file: &File, writer: impl Write, delimiter: u8) -> Result<u64> {
 }
 
 /// Reads JSON from temporary file and write an obkv batch to writer.
-pub fn read_json(file: &File, writer: impl Write) -> Result<u64> {
-    let mut builder = DocumentsBatchBuilder::new(BufWriter::new(writer));
+pub fn read_json(file: &File, writer: impl Write + Seek) -> Result<u64> {
+    let mut builder = DocumentsBatchBuilder::new(writer);
     let mmap = unsafe { MmapOptions::new().map(file)? };
     let mut deserializer = serde_json::Deserializer::from_slice(&mmap);
 

@@ -151,8 +151,8 @@ pub fn read_json(file: &File, writer: impl Write) -> Result<u64> {
 }
 
 /// Reads JSON from temporary file and write an obkv batch to writer.
-pub fn read_ndjson(file: &File, writer: impl Write) -> Result<u64> {
-    let mut builder = DocumentsBatchBuilder::new(BufWriter::new(writer));
+pub fn read_ndjson(file: &File, writer: impl Write + Seek) -> Result<u64> {
+    let mut builder = DocumentsBatchBuilder::new(writer);
     let mmap = unsafe { MmapOptions::new().map(file)? };
 
     for result in serde_json::Deserializer::from_slice(&mmap).into_iter() {
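Note: the `read_csv` / `read_json` / `read_ndjson` hunks above wrap the destination in `std::io::BufWriter`, which also lets the signatures drop the `Seek` bound. Buffering matters because the batch builder emits many small writes. A minimal sketch of the pattern (illustrative function, not the project's API):

    use std::io::{BufWriter, Write};

    fn write_records(records: &[Vec<u8>], writer: impl Write) -> std::io::Result<()> {
        // group many small writes into fewer large ones
        let mut out = BufWriter::new(writer);
        for record in records {
            out.write_all(record)?;
        }
        // flush explicitly so write errors are not swallowed on drop
        out.flush()
    }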
@@ -310,8 +310,6 @@ TooManyVectors , InvalidRequest , BAD_REQUEST ;
 UnretrievableDocument , Internal , BAD_REQUEST ;
 UnretrievableErrorCode , InvalidRequest , BAD_REQUEST ;
 UnsupportedMediaType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ;
-
-// Experimental features
 VectorEmbeddingError , InvalidRequest , BAD_REQUEST
 }
 

@@ -346,13 +344,7 @@ impl ErrorCode for milli::Error {
                     Code::InvalidDocumentId
                 }
                 UserError::MissingDocumentField(_) => Code::InvalidDocumentFields,
-                UserError::InvalidFieldForSource { .. }
-                | UserError::MissingFieldForSource { .. }
-                | UserError::InvalidOpenAiModel { .. }
-                | UserError::InvalidOpenAiModelDimensions { .. }
-                | UserError::InvalidOpenAiModelDimensionsMax { .. }
-                | UserError::InvalidSettingsDimensions { .. }
-                | UserError::InvalidPrompt(_) => Code::InvalidSettingsEmbedders,
+                UserError::InvalidPrompt(_) => Code::InvalidSettingsEmbedders,
                 UserError::TooManyEmbedders(_) => Code::InvalidSettingsEmbedders,
                 UserError::InvalidPromptForEmbeddings(..) => Code::InvalidSettingsEmbedders,
                 UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,

@@ -3,14 +3,13 @@ use serde::{Deserialize, Serialize};
 #[derive(Serialize, Deserialize, Debug, Clone, Copy, Default, PartialEq, Eq)]
 #[serde(rename_all = "camelCase", default)]
 pub struct RuntimeTogglableFeatures {
+    pub score_details: bool,
     pub vector_store: bool,
     pub metrics: bool,
-    pub logs_route: bool,
     pub export_puffin_reports: bool,
 }
 
 #[derive(Default, Debug, Clone, Copy)]
 pub struct InstanceTogglableFeatures {
     pub metrics: bool,
-    pub logs_route: bool,
 }
@@ -9,7 +9,6 @@ pub mod index_uid_pattern;
|
|||||||
pub mod keys;
|
pub mod keys;
|
||||||
pub mod settings;
|
pub mod settings;
|
||||||
pub mod star_or;
|
pub mod star_or;
|
||||||
pub mod task_view;
|
|
||||||
pub mod tasks;
|
pub mod tasks;
|
||||||
pub mod versioning;
|
pub mod versioning;
|
||||||
pub use milli::{heed, Index};
|
pub use milli::{heed, Index};
|
||||||
|
|||||||
@@ -318,21 +318,6 @@ impl Settings<Unchecked> {
|
|||||||
_kind: PhantomData,
|
_kind: PhantomData,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn validate(self) -> Result<Self, milli::Error> {
|
|
||||||
self.validate_embedding_settings()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn validate_embedding_settings(mut self) -> Result<Self, milli::Error> {
|
|
||||||
let Setting::Set(mut configs) = self.embedders else { return Ok(self) };
|
|
||||||
for (name, config) in configs.iter_mut() {
|
|
||||||
let config_to_check = std::mem::take(config);
|
|
||||||
let checked_config = milli::update::validate_embedding_settings(config_to_check, name)?;
|
|
||||||
*config = checked_config
|
|
||||||
}
|
|
||||||
self.embedders = Setting::Set(configs);
|
|
||||||
Ok(self)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
@@ -600,12 +585,11 @@ pub fn settings(
|
|||||||
),
|
),
|
||||||
};
|
};
|
||||||
|
|
||||||
let embedders: BTreeMap<_, _> = index
|
let embedders = index
|
||||||
.embedding_configs(rtxn)?
|
.embedding_configs(rtxn)?
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(name, config)| (name, Setting::Set(config.into())))
|
.map(|(name, config)| (name, Setting::Set(config.into())))
|
||||||
.collect();
|
.collect();
|
||||||
let embedders = if embedders.is_empty() { Setting::NotSet } else { Setting::Set(embedders) };
|
|
||||||
|
|
||||||
Ok(Settings {
|
Ok(Settings {
|
||||||
displayed_attributes: match displayed_attributes {
|
displayed_attributes: match displayed_attributes {
|
||||||
@@ -627,12 +611,15 @@ pub fn settings(
|
|||||||
Some(field) => Setting::Set(field),
|
Some(field) => Setting::Set(field),
|
||||||
None => Setting::Reset,
|
None => Setting::Reset,
|
||||||
},
|
},
|
||||||
proximity_precision: Setting::Set(proximity_precision.unwrap_or_default()),
|
proximity_precision: match proximity_precision {
|
||||||
|
Some(precision) => Setting::Set(precision),
|
||||||
|
None => Setting::Reset,
|
||||||
|
},
|
||||||
synonyms: Setting::Set(synonyms),
|
synonyms: Setting::Set(synonyms),
|
||||||
typo_tolerance: Setting::Set(typo_tolerance),
|
typo_tolerance: Setting::Set(typo_tolerance),
|
||||||
faceting: Setting::Set(faceting),
|
faceting: Setting::Set(faceting),
|
||||||
pagination: Setting::Set(pagination),
|
pagination: Setting::Set(pagination),
|
||||||
embedders,
|
embedders: Setting::Set(embedders),
|
||||||
_kind: PhantomData,
|
_kind: PhantomData,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
@@ -733,11 +720,10 @@ impl From<RankingRuleView> for Criterion {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq, Deserr, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserr, Serialize, Deserialize)]
|
||||||
#[serde(deny_unknown_fields, rename_all = "camelCase")]
|
#[serde(deny_unknown_fields, rename_all = "camelCase")]
|
||||||
#[deserr(error = DeserrJsonError<InvalidSettingsProximityPrecision>, rename_all = camelCase, deny_unknown_fields)]
|
#[deserr(error = DeserrJsonError<InvalidSettingsProximityPrecision>, rename_all = camelCase, deny_unknown_fields)]
|
||||||
pub enum ProximityPrecisionView {
|
pub enum ProximityPrecisionView {
|
||||||
#[default]
|
|
||||||
ByWord,
|
ByWord,
|
||||||
ByAttribute,
|
ByAttribute,
|
||||||
}
|
}
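
The two sides of the `proximity_precision` hunk report a missing value differently: v1.7.1 folds `None` into the default (`ByWord`, via the `#[default]` attribute this branch also drops from `ProximityPrecisionView`), while adapt-pref surfaces it as `Reset`. An illustrative sketch with simplified stand-ins for milli's `Setting` type:

```rust
// Simplified stand-ins, for illustration only.
#[derive(Debug, PartialEq)]
enum Setting<T> {
    Set(T),
    Reset,
}

#[derive(Debug, Default, PartialEq, Clone, Copy)]
#[allow(dead_code)]
enum ProximityPrecisionView {
    #[default]
    ByWord,
    ByAttribute,
}

fn main() {
    let stored: Option<ProximityPrecisionView> = None;

    // v1.7.1 side: an unset value is reported as the default.
    let v1_7_1 = Setting::Set(stored.unwrap_or_default());
    assert_eq!(v1_7_1, Setting::Set(ProximityPrecisionView::ByWord));

    // adapt-pref side: an unset value is reported as `Reset`.
    let adapt_pref = match stored {
        Some(precision) => Setting::Set(precision),
        None => Setting::Reset,
    };
    assert_eq!(adapt_pref, Setting::<ProximityPrecisionView>::Reset);
}
```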
@@ -1,139 +0,0 @@
-use serde::Serialize;
-use time::{Duration, OffsetDateTime};
-
-use crate::error::ResponseError;
-use crate::settings::{Settings, Unchecked};
-use crate::tasks::{serialize_duration, Details, IndexSwap, Kind, Status, Task, TaskId};
-
-#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct TaskView {
-    pub uid: TaskId,
-    #[serde(default)]
-    pub index_uid: Option<String>,
-    pub status: Status,
-    #[serde(rename = "type")]
-    pub kind: Kind,
-    pub canceled_by: Option<TaskId>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub details: Option<DetailsView>,
-    pub error: Option<ResponseError>,
-    #[serde(serialize_with = "serialize_duration", default)]
-    pub duration: Option<Duration>,
-    #[serde(with = "time::serde::rfc3339")]
-    pub enqueued_at: OffsetDateTime,
-    #[serde(with = "time::serde::rfc3339::option", default)]
-    pub started_at: Option<OffsetDateTime>,
-    #[serde(with = "time::serde::rfc3339::option", default)]
-    pub finished_at: Option<OffsetDateTime>,
-}
-
-impl TaskView {
-    pub fn from_task(task: &Task) -> TaskView {
-        TaskView {
-            uid: task.uid,
-            index_uid: task.index_uid().map(ToOwned::to_owned),
-            status: task.status,
-            kind: task.kind.as_kind(),
-            canceled_by: task.canceled_by,
-            details: task.details.clone().map(DetailsView::from),
-            error: task.error.clone(),
-            duration: task.started_at.zip(task.finished_at).map(|(start, end)| end - start),
-            enqueued_at: task.enqueued_at,
-            started_at: task.started_at,
-            finished_at: task.finished_at,
-        }
-    }
-}
-
-#[derive(Default, Debug, PartialEq, Eq, Clone, Serialize)]
-#[serde(rename_all = "camelCase")]
-pub struct DetailsView {
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub received_documents: Option<u64>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub indexed_documents: Option<Option<u64>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub primary_key: Option<Option<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub provided_ids: Option<usize>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub deleted_documents: Option<Option<u64>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub matched_tasks: Option<u64>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub canceled_tasks: Option<Option<u64>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub deleted_tasks: Option<Option<u64>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub original_filter: Option<Option<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub dump_uid: Option<Option<String>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    #[serde(flatten)]
-    pub settings: Option<Box<Settings<Unchecked>>>,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub swaps: Option<Vec<IndexSwap>>,
-}
-
-impl From<Details> for DetailsView {
-    fn from(details: Details) -> Self {
-        match details {
-            Details::DocumentAdditionOrUpdate { received_documents, indexed_documents } => {
-                DetailsView {
-                    received_documents: Some(received_documents),
-                    indexed_documents: Some(indexed_documents),
-                    ..DetailsView::default()
-                }
-            }
-            Details::SettingsUpdate { settings } => {
-                DetailsView { settings: Some(settings), ..DetailsView::default() }
-            }
-            Details::IndexInfo { primary_key } => {
-                DetailsView { primary_key: Some(primary_key), ..DetailsView::default() }
-            }
-            Details::DocumentDeletion {
-                provided_ids: received_document_ids,
-                deleted_documents,
-            } => DetailsView {
-                provided_ids: Some(received_document_ids),
-                deleted_documents: Some(deleted_documents),
-                original_filter: Some(None),
-                ..DetailsView::default()
-            },
-            Details::DocumentDeletionByFilter { original_filter, deleted_documents } => {
-                DetailsView {
-                    provided_ids: Some(0),
-                    original_filter: Some(Some(original_filter)),
-                    deleted_documents: Some(deleted_documents),
-                    ..DetailsView::default()
-                }
-            }
-            Details::ClearAll { deleted_documents } => {
-                DetailsView { deleted_documents: Some(deleted_documents), ..DetailsView::default() }
-            }
-            Details::TaskCancelation { matched_tasks, canceled_tasks, original_filter } => {
-                DetailsView {
-                    matched_tasks: Some(matched_tasks),
-                    canceled_tasks: Some(canceled_tasks),
-                    original_filter: Some(Some(original_filter)),
-                    ..DetailsView::default()
-                }
-            }
-            Details::TaskDeletion { matched_tasks, deleted_tasks, original_filter } => {
-                DetailsView {
-                    matched_tasks: Some(matched_tasks),
-                    deleted_tasks: Some(deleted_tasks),
-                    original_filter: Some(Some(original_filter)),
-                    ..DetailsView::default()
-                }
-            }
-            Details::Dump { dump_uid } => {
-                DetailsView { dump_uid: Some(dump_uid), ..DetailsView::default() }
-            }
-            Details::IndexSwap { swaps } => {
-                DetailsView { swaps: Some(swaps), ..Default::default() }
-            }
-        }
-    }
-}
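
The deleted `DetailsView` leans on one serde pattern throughout: `skip_serializing_if = "Option::is_none"` plus doubly nested options, so absent details vanish from the task payload while `Some(None)` still serializes as an explicit `null`. A minimal sketch (hypothetical struct name, assuming `serde` and `serde_json`):

```rust
use serde::Serialize;

#[derive(Default, Serialize)]
#[serde(rename_all = "camelCase")]
struct DetailsSketch {
    #[serde(skip_serializing_if = "Option::is_none")]
    received_documents: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    indexed_documents: Option<Option<u64>>,
}

fn main() {
    let details =
        DetailsSketch { received_documents: Some(10), indexed_documents: Some(None) };
    // Prints {"receivedDocuments":10,"indexedDocuments":null}
    println!("{}", serde_json::to_string(&details).unwrap());

    // Prints {} since every `None` field is skipped.
    println!("{}", serde_json::to_string(&DetailsSketch::default()).unwrap());
}
```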
@@ -13,14 +13,14 @@ license.workspace = true
 default-run = "meilisearch"

 [dependencies]
-actix-cors = "0.7.0"
+actix-cors = "0.6.4"
-actix-http = { version = "3.5.1", default-features = false, features = [
+actix-http = { version = "3.3.1", default-features = false, features = [
   "compress-brotli",
   "compress-gzip",
   "rustls",
 ] }
 actix-utils = "3.0.1"
-actix-web = { version = "4.4.1", default-features = false, features = [
+actix-web = { version = "4.3.1", default-features = false, features = [
   "macros",
   "compress-brotli",
   "compress-gzip",

@@ -28,111 +28,108 @@ actix-web = { version = "4.4.1", default-features = false, features = [
   "rustls",
 ] }
 actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
-anyhow = { version = "1.0.79", features = ["backtrace"] }
+anyhow = { version = "1.0.70", features = ["backtrace"] }
 async-stream = "0.3.5"
-async-trait = "0.1.77"
+async-trait = "0.1.68"
-bstr = "1.9.0"
+bstr = "1.4.0"
 byte-unit = { version = "4.0.19", default-features = false, features = [
   "std",
   "serde",
 ] }
-bytes = "1.5.0"
+bytes = "1.4.0"
-clap = { version = "4.4.17", features = ["derive", "env"] }
+clap = { version = "4.2.1", features = ["derive", "env"] }
-crossbeam-channel = "0.5.11"
+crossbeam-channel = "0.5.8"
-deserr = { version = "0.6.1", features = ["actix-web"] }
+deserr = { version = "0.6.0", features = ["actix-web"] }
 dump = { path = "../dump" }
-either = "1.9.0"
+either = "1.8.1"
+env_logger = "0.10.0"
 file-store = { path = "../file-store" }
-flate2 = "1.0.28"
+flate2 = "1.0.25"
 fst = "0.4.7"
-futures = "0.3.30"
+futures = "0.3.28"
-futures-util = "0.3.30"
+futures-util = "0.3.28"
-http = "0.2.11"
+http = "0.2.9"
 index-scheduler = { path = "../index-scheduler" }
-indexmap = { version = "2.1.0", features = ["serde"] }
+indexmap = { version = "2.0.0", features = ["serde"] }
-is-terminal = "0.4.10"
+is-terminal = "0.4.8"
 itertools = "0.11.0"
 jsonwebtoken = "8.3.0"
 lazy_static = "1.4.0"
+log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-mimalloc = { version = "0.1.39", default-features = false }
+mimalloc = { version = "0.1.37", default-features = false }
 mime = "0.3.17"
-num_cpus = "1.16.0"
+num_cpus = "1.15.0"
-obkv = "0.2.1"
+obkv = "0.2.0"
-once_cell = "1.19.0"
+once_cell = "1.17.1"
-ordered-float = "4.2.0"
+ordered-float = "3.7.0"
 parking_lot = "0.12.1"
 permissive-json-pointer = { path = "../permissive-json-pointer" }
-pin-project-lite = "0.2.13"
+pin-project-lite = "0.2.9"
 platform-dirs = "0.3.0"
 prometheus = { version = "0.13.3", features = ["process"] }
 puffin = { version = "0.16.0", features = ["serialization"] }
 rand = "0.8.5"
-rayon = "1.8.0"
+rayon = "1.7.0"
-regex = "1.10.2"
+regex = "1.7.3"
-reqwest = { version = "0.11.23", features = [
+reqwest = { version = "0.11.16", features = [
   "rustls-tls",
   "json",
 ], default-features = false }
 rustls = "0.20.8"
 rustls-pemfile = "1.0.2"
-segment = { version = "0.2.3", optional = true }
+segment = { version = "0.2.2", optional = true }
-serde = { version = "1.0.195", features = ["derive"] }
+serde = { version = "1.0.160", features = ["derive"] }
-serde_json = { version = "1.0.111", features = ["preserve_order"] }
+serde_json = { version = "1.0.95", features = ["preserve_order"] }
-sha2 = "0.10.8"
+sha2 = "0.10.6"
-siphasher = "1.0.0"
+siphasher = "0.3.10"
-slice-group-by = "0.3.1"
+slice-group-by = "0.3.0"
 static-files = { version = "0.2.3", optional = true }
-sysinfo = "0.30.5"
+sysinfo = "0.29.7"
-tar = "0.4.40"
+tar = "0.4.38"
-tempfile = "3.9.0"
+tempfile = "3.5.0"
-thiserror = "1.0.56"
+thiserror = "1.0.40"
-time = { version = "0.3.31", features = [
+time = { version = "0.3.20", features = [
   "serde-well-known",
   "formatting",
   "parsing",
   "macros",
 ] }
-tokio = { version = "1.35.1", features = ["full"] }
+tokio = { version = "1.27.0", features = ["full"] }
-tokio-stream = "0.1.14"
+tokio-stream = "0.1.12"
-toml = "0.8.8"
+toml = "0.7.3"
-uuid = { version = "1.6.1", features = ["serde", "v4"] }
+uuid = { version = "1.3.1", features = ["serde", "v4"] }
-walkdir = "2.4.0"
+walkdir = "2.3.3"
 yaup = "0.2.1"
 serde_urlencoded = "0.7.1"
-termcolor = "1.4.1"
+termcolor = "1.2.0"
-url = { version = "2.5.0", features = ["serde"] }
-tracing = "0.1.40"
-tracing-subscriber = { version = "0.3.18", features = ["json"] }
-tracing-trace = { version = "0.1.0", path = "../tracing-trace" }
-tracing-actix-web = "0.7.9"
-build-info = { version = "1.7.0", path = "../build-info" }

 [dev-dependencies]
-actix-rt = "2.9.0"
+actix-rt = "2.8.0"
 assert-json-diff = "2.0.2"
-brotli = "3.4.0"
+brotli = "3.3.4"
-insta = "1.34.0"
+insta = "1.29.0"
-manifest-dir-macros = "0.1.18"
+manifest-dir-macros = "0.1.16"
 maplit = "1.0.2"
 meili-snap = { path = "../meili-snap" }
-temp-env = "0.3.6"
+temp-env = "0.3.3"
-urlencoding = "2.1.3"
+urlencoding = "2.1.2"
 yaup = "0.2.1"

 [build-dependencies]
-anyhow = { version = "1.0.79", optional = true }
+anyhow = { version = "1.0.70", optional = true }
-cargo_toml = { version = "0.18.0", optional = true }
+cargo_toml = { version = "0.15.2", optional = true }
 hex = { version = "0.4.3", optional = true }
-reqwest = { version = "0.11.23", features = [
+reqwest = { version = "0.11.16", features = [
   "blocking",
   "rustls-tls",
 ], default-features = false, optional = true }
 sha-1 = { version = "0.10.1", optional = true }
 static-files = { version = "0.2.3", optional = true }
-tempfile = { version = "3.9.0", optional = true }
+tempfile = { version = "3.5.0", optional = true }
-zip = { version = "0.6.6", optional = true }
+vergen = { version = "7.5.1", default-features = false, features = ["git"] }
+zip = { version = "0.6.4", optional = true }

 [features]
 default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]

@@ -154,8 +151,7 @@ japanese = ["meilisearch-types/japanese"]
 thai = ["meilisearch-types/thai"]
 greek = ["meilisearch-types/greek"]
 khmer = ["meilisearch-types/khmer"]
-vietnamese = ["meilisearch-types/vietnamese"]

 [package.metadata.mini-dashboard]
-assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.13/build.zip"
+assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.11/build.zip"
-sha1 = "e20cc9b390003c6c844f4b8bcc5c5013191a77ff"
+sha1 = "83cd44ed1e5f97ecb581dc9f958a63f4ccc982d9"

@@ -1,4 +1,17 @@
+use vergen::{vergen, Config, SemverKind};
+
 fn main() {
+    // Note: any code that needs VERGEN_ environment variables should take care to define them manually in the Dockerfile and pass them
+    // in the corresponding GitHub workflow (publish_docker.yml).
+    // This is due to the Dockerfile building the binary outside of the git directory.
+    let mut config = Config::default();
+    // allow using non-annotated tags
+    *config.git_mut().semver_kind_mut() = SemverKind::Lightweight;
+
+    if let Err(e) = vergen(config) {
+        println!("cargo:warning=vergen: {}", e);
+    }
+
     #[cfg(feature = "mini-dashboard")]
     mini_dashboard::setup_mini_dashboard().expect("Could not load the mini-dashboard assets");
 }
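
The build script above only exports `VERGEN_*` variables; the consuming side reads them back at compile time with `option_env!`, the same calls that appear later in this diff in `prototype_name()` and `print_launch_resume`. A sketch of that consuming pattern:

```rust
// `option_env!` resolves at compile time: these are `None` unless the build
// script (vergen here) exported the variables when the crate was compiled.
fn main() {
    let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
    let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
    println!("Commit SHA: {commit_sha}");
    println!("Commit date: {commit_date}");
}
```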
@@ -18,7 +18,7 @@ use segment::message::{Identify, Track, User};
 use segment::{AutoBatcher, Batcher, HttpClient};
 use serde::Serialize;
 use serde_json::{json, Value};
-use sysinfo::{Disks, System};
+use sysinfo::{DiskExt, System, SystemExt};
 use time::OffsetDateTime;
 use tokio::select;
 use tokio::sync::mpsc::{self, Receiver, Sender};

@@ -28,9 +28,7 @@ use super::{
    config_user_id_path, DocumentDeletionKind, DocumentFetchKind, MEILISEARCH_CONFIG_PATH,
 };
 use crate::analytics::Analytics;
-use crate::option::{
-    default_http_addr, IndexerOpts, LogMode, MaxMemory, MaxThreads, ScheduleSnapshot,
-};
+use crate::option::{default_http_addr, IndexerOpts, MaxMemory, MaxThreads, ScheduleSnapshot};
 use crate::routes::indexes::documents::UpdateDocumentsQuery;
 use crate::routes::indexes::facet_search::FacetSearchQuery;
 use crate::routes::tasks::TasksFilterQuery;

@@ -252,12 +250,8 @@ impl super::Analytics for SegmentAnalytics {
 struct Infos {
    env: String,
    experimental_enable_metrics: bool,
-    experimental_logs_mode: LogMode,
-    experimental_replication_parameters: bool,
-    experimental_enable_logs_route: bool,
    experimental_reduce_indexing_memory_usage: bool,
    experimental_max_number_of_batched_tasks: usize,
-    gpu_enabled: bool,
    db_path: bool,
    import_dump: bool,
    dump_dir: bool,

@@ -270,8 +264,6 @@ struct Infos {
    ignore_snapshot_if_db_exists: bool,
    http_addr: bool,
    http_payload_size_limit: Byte,
-    task_queue_webhook: bool,
-    task_webhook_authorization_header: bool,
    log_level: String,
    max_indexing_memory: MaxMemory,
    max_indexing_threads: MaxThreads,

@@ -293,16 +285,11 @@ impl From<Opt> for Infos {
        let Opt {
            db_path,
            experimental_enable_metrics,
-            experimental_logs_mode,
-            experimental_replication_parameters,
-            experimental_enable_logs_route,
            experimental_reduce_indexing_memory_usage,
            experimental_max_number_of_batched_tasks,
            http_addr,
            master_key: _,
            env,
-            task_webhook_url,
-            task_webhook_authorization_header,
            max_index_size: _,
            max_task_db_size: _,
            http_payload_size_limit,

@@ -342,11 +329,7 @@ impl From<Opt> for Infos {
        Self {
            env,
            experimental_enable_metrics,
-            experimental_logs_mode,
-            experimental_replication_parameters,
-            experimental_enable_logs_route,
            experimental_reduce_indexing_memory_usage,
-            gpu_enabled: meilisearch_types::milli::vector::is_cuda_enabled(),
            db_path: db_path != PathBuf::from("./data.ms"),
            import_dump: import_dump.is_some(),
            dump_dir: dump_dir != PathBuf::from("dumps/"),

@@ -360,8 +343,6 @@ impl From<Opt> for Infos {
            http_addr: http_addr != default_http_addr(),
            http_payload_size_limit,
            experimental_max_number_of_batched_tasks,
-            task_queue_webhook: task_webhook_url.is_some(),
-            task_webhook_authorization_header: task_webhook_authorization_header.is_some(),
            log_level: log_level.to_string(),
            max_indexing_memory,
            max_indexing_threads,

@@ -399,17 +380,16 @@ impl Segment {
    fn compute_traits(opt: &Opt, stats: Stats) -> Value {
        static FIRST_START_TIMESTAMP: Lazy<Instant> = Lazy::new(Instant::now);
        static SYSTEM: Lazy<Value> = Lazy::new(|| {
-            let disks = Disks::new_with_refreshed_list();
            let mut sys = System::new_all();
            sys.refresh_all();
-            let kernel_version = System::kernel_version()
-                .and_then(|k| k.split_once('-').map(|(k, _)| k.to_string()));
+            let kernel_version =
+                sys.kernel_version().and_then(|k| k.split_once('-').map(|(k, _)| k.to_string()));
            json!({
-                "distribution": System::name(),
+                "distribution": sys.name(),
                "kernel_version": kernel_version,
                "cores": sys.cpus().len(),
                "ram_size": sys.total_memory(),
-                "disk_size": disks.iter().map(|disk| disk.total_space()).max(),
+                "disk_size": sys.disks().iter().map(|disk| disk.total_space()).max(),
                "server_provider": std::env::var("MEILI_SERVER_PROVIDER").ok(),
            })
        });
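
Both sides of the hunk above trim the reported kernel version at its first `-`; only the sysinfo API differs (associated functions on `System` in 0.30 versus `SystemExt` methods in 0.29). The trimming closure itself, applied to plain strings so the sketch runs without sysinfo (the example versions are ours):

```rust
// Everything after the first `-` is dropped; a version with no `-` at all is
// discarded entirely because `split_once` returns `None`.
fn trim_kernel_version(raw: Option<&str>) -> Option<String> {
    raw.and_then(|k| k.split_once('-').map(|(k, _)| k.to_string()))
}

fn main() {
    assert_eq!(trim_kernel_version(Some("5.15.0-91-generic")), Some("5.15.0".to_string()));
    assert_eq!(trim_kernel_version(Some("6.7.4")), None);
    assert_eq!(trim_kernel_version(None), None);
}
```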
@@ -473,9 +453,7 @@ impl Segment {
            create_all_stats(index_scheduler.into(), auth_controller.into(), &AuthFilter::default())
        {
            // Replace the version number with the prototype name if any.
-            let version = if let Some(prototype) = build_info::DescribeResult::from_build()
-                .and_then(|describe| describe.as_prototype())
-            {
+            let version = if let Some(prototype) = crate::prototype_name() {
                prototype
            } else {
                env!("CARGO_PKG_VERSION")

@@ -12,8 +12,6 @@ pub enum MeilisearchHttpError {
    #[error("A Content-Type header is missing. Accepted values for the Content-Type header are: {}",
            .0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", "))]
    MissingContentType(Vec<String>),
-    #[error("The `/logs/stream` route is currently in use by someone else.")]
-    AlreadyUsedLogRoute,
    #[error("The Content-Type `{0}` does not support the use of a csv delimiter. The csv delimiter can only be used with the Content-Type `text/csv`.")]
    CsvDelimiterWithWrongContentType(String),
    #[error(

@@ -61,7 +59,6 @@ impl ErrorCode for MeilisearchHttpError {
    fn error_code(&self) -> Code {
        match self {
            MeilisearchHttpError::MissingContentType(_) => Code::MissingContentType,
-            MeilisearchHttpError::AlreadyUsedLogRoute => Code::BadRequest,
            MeilisearchHttpError::CsvDelimiterWithWrongContentType(_) => Code::InvalidContentType,
            MeilisearchHttpError::MissingPayload(_) => Code::MissingPayload,
            MeilisearchHttpError::InvalidContentType(_, _) => Code::InvalidContentType,

@@ -131,7 +131,6 @@ gen_seq! { SeqFromRequestFut3; A B C }
 gen_seq! { SeqFromRequestFut4; A B C D }
 gen_seq! { SeqFromRequestFut5; A B C D E }
 gen_seq! { SeqFromRequestFut6; A B C D E F }
-gen_seq! { SeqFromRequestFut7; A B C D E F G }

 pin_project! {
    #[project = ExtractProj]

@@ -29,6 +29,7 @@ use error::PayloadError;
 use extractors::payload::PayloadConfig;
 use http::header::CONTENT_TYPE;
 use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
+use log::error;
 use meilisearch_auth::AuthController;
 use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
 use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};

@@ -38,8 +39,6 @@ use meilisearch_types::versioning::{check_version_file, create_version_file};
 use meilisearch_types::{compression, milli, VERSION_FILE_NAME};
 pub use option::Opt;
 use option::ScheduleSnapshot;
-use tracing::{error, info_span};
-use tracing_subscriber::filter::Targets;

 use crate::error::MeilisearchHttpError;

@@ -87,35 +86,10 @@ fn is_empty_db(db_path: impl AsRef<Path>) -> bool {
    }
 }

-/// The handle used to update the logs at runtime. Must be accessible from the `main.rs` and the `route/logs.rs`.
-pub type LogRouteHandle =
-    tracing_subscriber::reload::Handle<LogRouteType, tracing_subscriber::Registry>;
-
-pub type LogRouteType = tracing_subscriber::filter::Filtered<
-    Option<Box<dyn tracing_subscriber::Layer<tracing_subscriber::Registry> + Send + Sync>>,
-    Targets,
-    tracing_subscriber::Registry,
->;
-
-pub type SubscriberForSecondLayer = tracing_subscriber::layer::Layered<
-    tracing_subscriber::reload::Layer<LogRouteType, tracing_subscriber::Registry>,
-    tracing_subscriber::Registry,
->;
-
-pub type LogStderrHandle =
-    tracing_subscriber::reload::Handle<LogStderrType, SubscriberForSecondLayer>;
-
-pub type LogStderrType = tracing_subscriber::filter::Filtered<
-    Box<dyn tracing_subscriber::Layer<SubscriberForSecondLayer> + Send + Sync>,
-    Targets,
-    SubscriberForSecondLayer,
->;
-
 pub fn create_app(
    index_scheduler: Data<IndexScheduler>,
    auth_controller: Data<AuthController>,
    opt: Opt,
-    logs: (LogRouteHandle, LogStderrHandle),
    analytics: Arc<dyn Analytics>,
    enable_dashboard: bool,
 ) -> actix_web::App<

@@ -134,7 +108,6 @@ pub fn create_app(
                index_scheduler.clone(),
                auth_controller.clone(),
                &opt,
-                logs,
                analytics.clone(),
            )
        })

@@ -150,49 +123,11 @@ pub fn create_app(
                .allow_any_method()
                .max_age(86_400), // 24h
        )
-        .wrap(tracing_actix_web::TracingLogger::<AwebTracingLogger>::new())
+        .wrap(actix_web::middleware::Logger::default())
        .wrap(actix_web::middleware::Compress::default())
        .wrap(actix_web::middleware::NormalizePath::new(actix_web::middleware::TrailingSlash::Trim))
 }

-struct AwebTracingLogger;
-
-impl tracing_actix_web::RootSpanBuilder for AwebTracingLogger {
-    fn on_request_start(request: &actix_web::dev::ServiceRequest) -> tracing::Span {
-        use tracing::field::Empty;
-
-        let conn_info = request.connection_info();
-        let headers = request.headers();
-        let user_agent = headers
-            .get(http::header::USER_AGENT)
-            .map(|value| String::from_utf8_lossy(value.as_bytes()).into_owned())
-            .unwrap_or_default();
-        info_span!("HTTP request", method = %request.method(), host = conn_info.host(), route = %request.path(), query_parameters = %request.query_string(), %user_agent, status_code = Empty, error = Empty)
-    }
-
-    fn on_request_end<B: MessageBody>(
-        span: tracing::Span,
-        outcome: &Result<ServiceResponse<B>, actix_web::Error>,
-    ) {
-        match &outcome {
-            Ok(response) => {
-                let code: i32 = response.response().status().as_u16().into();
-                span.record("status_code", code);
-
-                if let Some(error) = response.response().error() {
-                    // use the status code already constructed for the outgoing HTTP response
-                    span.record("error", &tracing::field::display(error.as_response_error()));
-                }
-            }
-            Err(error) => {
-                let code: i32 = error.error_response().status().as_u16().into();
-                span.record("status_code", code);
-                span.record("error", &tracing::field::display(error.as_response_error()));
-            }
-        };
-    }
-}
-
 enum OnFailure {
    RemoveDb,
    KeepDb,

@@ -265,9 +200,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc<IndexScheduler>, Arc<
            .name(String::from("register-snapshot-tasks"))
            .spawn(move || loop {
                thread::sleep(snapshot_delay);
-                if let Err(e) =
-                    index_scheduler.register(KindWithContent::SnapshotCreation, None, false)
-                {
+                if let Err(e) = index_scheduler.register(KindWithContent::SnapshotCreation) {
                    error!("Error while registering snapshot: {}", e);
                }
            })

@@ -295,14 +228,11 @@ fn open_or_create_database_unchecked(
        indexes_path: opt.db_path.join("indexes"),
        snapshots_path: opt.snapshot_dir.clone(),
        dumps_path: opt.dump_dir.clone(),
-        webhook_url: opt.task_webhook_url.as_ref().map(|url| url.to_string()),
-        webhook_authorization_header: opt.task_webhook_authorization_header.clone(),
        task_db_size: opt.max_task_db_size.get_bytes() as usize,
        index_base_map_size: opt.max_index_size.get_bytes() as usize,
        enable_mdb_writemap: opt.experimental_reduce_indexing_memory_usage,
        indexer_config: (&opt.indexer_options).try_into()?,
        autobatching_enabled: true,
-        cleanup_enabled: !opt.experimental_replication_parameters,
        max_number_of_tasks: 1_000_000,
        max_number_of_batched_tasks: opt.experimental_max_number_of_batched_tasks,
        index_growth_amount: byte_unit::Byte::from_str("10GiB").unwrap().get_bytes() as usize,

@@ -348,15 +278,15 @@ fn import_dump(
    let mut dump_reader = dump::DumpReader::open(reader)?;

    if let Some(date) = dump_reader.date() {
-        tracing::info!(
-            version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
-            %date,
-            "Importing a dump of meilisearch"
-        );
+        log::info!(
+            "Importing a dump of meilisearch `{:?}` from the {}",
+            dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
+            date
+        );
    } else {
-        tracing::info!(
-            version = ?dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
-            "Importing a dump of meilisearch",
-        );
+        log::info!(
+            "Importing a dump of meilisearch `{:?}`",
+            dump_reader.version(), // TODO: get the meilisearch version instead of the dump version
+        );
    }

@@ -390,7 +320,7 @@ fn import_dump(
    for index_reader in dump_reader.indexes()? {
        let mut index_reader = index_reader?;
        let metadata = index_reader.metadata();
-        tracing::info!("Importing index `{}`.", metadata.uid);
+        log::info!("Importing index `{}`.", metadata.uid);

        let date = Some((metadata.created_at, metadata.updated_at));
        let index = index_scheduler.create_raw_index(&metadata.uid, date)?;
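
The `import_dump` hunks here and below swap structured `tracing` events for plain `log` lines. The two call shapes side by side, as a sketch that assumes both crates as dependencies (without an installed subscriber or logger these events are simply dropped, but the program compiles and runs):

```rust
fn main() {
    let uid = "movies"; // example value, not taken from the diff

    // v1.7.1 shape: a structured event carrying a named field.
    tracing::info!(index_uid = %uid, "Importing index");

    // adapt-pref shape: the value is formatted into the message text.
    log::info!("Importing index `{}`.", uid);
}
```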
@@ -404,15 +334,14 @@ fn import_dump(
        }

        // 4.2 Import the settings.
-        tracing::info!("Importing the settings.");
+        log::info!("Importing the settings.");
        let settings = index_reader.settings()?;
        apply_settings_to_builder(&settings, &mut builder);
-        builder
-            .execute(|indexing_step| tracing::debug!("update: {:?}", indexing_step), || false)?;
+        builder.execute(|indexing_step| log::debug!("update: {:?}", indexing_step), || false)?;

        // 4.3 Import the documents.
        // 4.3.1 We need to recreate the grenad+obkv format accepted by the index.
-        tracing::info!("Importing the documents.");
+        log::info!("Importing the documents.");
        let file = tempfile::tempfile()?;
        let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
        for document in index_reader.documents()? {

@@ -426,9 +355,6 @@ fn import_dump(
        let reader = BufReader::new(file);
        let reader = DocumentsBatchReader::from_reader(reader)?;

-        let embedder_configs = index.embedding_configs(&wtxn)?;
-        let embedders = index_scheduler.embedders(embedder_configs)?;
-
        let builder = milli::update::IndexDocuments::new(
            &mut wtxn,
            &index,

@@ -437,18 +363,15 @@ fn import_dump(
                update_method: IndexDocumentsMethod::ReplaceDocuments,
                ..Default::default()
            },
-            |indexing_step| tracing::trace!("update: {:?}", indexing_step),
+            |indexing_step| log::trace!("update: {:?}", indexing_step),
            || false,
        )?;

-        let builder = builder.with_embedders(embedders);
-
        let (builder, user_result) = builder.add_documents(reader)?;
-        let user_result = user_result?;
-        tracing::info!(documents_found = user_result, "{} documents found.", user_result);
+        log::info!("{} documents found.", user_result?);
        builder.execute()?;
        wtxn.commit()?;
-        tracing::info!("All documents successfully imported.");
+        log::info!("All documents successfully imported.");
    }

    let mut index_scheduler_dump = index_scheduler.register_dumped_task()?;

@@ -466,7 +389,6 @@ pub fn configure_data(
    index_scheduler: Data<IndexScheduler>,
    auth: Data<AuthController>,
    opt: &Opt,
-    (logs_route, logs_stderr): (LogRouteHandle, LogStderrHandle),
    analytics: Arc<dyn Analytics>,
 ) {
    let http_payload_size_limit = opt.http_payload_size_limit.get_bytes() as usize;

@@ -474,9 +396,6 @@ pub fn configure_data(
        .app_data(index_scheduler)
        .app_data(auth)
        .app_data(web::Data::from(analytics))
-        .app_data(web::Data::new(logs_route))
-        .app_data(web::Data::new(logs_stderr))
-        .app_data(web::Data::new(opt.clone()))
        .app_data(
            web::JsonConfig::default()
                .limit(http_payload_size_limit)

@@ -536,3 +455,30 @@ pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
 pub fn dashboard(config: &mut web::ServiceConfig, _enable_frontend: bool) {
    config.service(web::resource("/").route(web::get().to(routes::running)));
 }
+
+/// Parses the output of
+/// [`VERGEN_GIT_SEMVER_LIGHTWEIGHT`](https://docs.rs/vergen/latest/vergen/struct.Git.html#instructions)
+/// as a prototype name.
+///
+/// Returns `Some(prototype_name)` if the following conditions are met on this value:
+///
+/// 1. starts with `prototype-`,
+/// 2. ends with `-<some_number>`,
+/// 3. does not end with `<some_number>-<some_number>`.
+///
+/// Otherwise, returns `None`.
+pub fn prototype_name() -> Option<&'static str> {
+    let prototype: &'static str = option_env!("VERGEN_GIT_SEMVER_LIGHTWEIGHT")?;
+
+    if !prototype.starts_with("prototype-") {
+        return None;
+    }
+
+    let mut rsplit_prototype = prototype.rsplit('-');
+    // last component MUST be a number
+    rsplit_prototype.next()?.parse::<u64>().ok()?;
+    // before than last component SHALL NOT be a number
+    rsplit_prototype.next()?.parse::<u64>().err()?;
+
+    Some(prototype)
+}
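
The doc comment spells out the three acceptance rules; the same checks, applied to example strings instead of the compile-time `VERGEN_GIT_SEMVER_LIGHTWEIGHT` value (the example names are ours):

```rust
fn parse_prototype(prototype: &str) -> Option<&str> {
    if !prototype.starts_with("prototype-") {
        return None;
    }
    let mut rsplit = prototype.rsplit('-');
    // the last `-`-separated component must be a number...
    rsplit.next()?.parse::<u64>().ok()?;
    // ...and the one before it must not be.
    rsplit.next()?.parse::<u64>().err()?;
    Some(prototype)
}

fn main() {
    assert_eq!(parse_prototype("prototype-embedders-2"), Some("prototype-embedders-2"));
    assert_eq!(parse_prototype("embedders-2"), None); // rule 1: missing prefix
    assert_eq!(parse_prototype("prototype-embedders"), None); // rule 2: no trailing number
    assert_eq!(parse_prototype("prototype-embedders-2-2"), None); // rule 3: double number
}
```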
@@ -1,7 +1,6 @@
 use std::env;
-use std::io::{stderr, LineWriter, Write};
+use std::io::{stderr, Write};
 use std::path::PathBuf;
-use std::str::FromStr;
 use std::sync::Arc;

 use actix_web::http::KeepAlive;

@@ -10,78 +9,37 @@ use actix_web::HttpServer;
 use index_scheduler::IndexScheduler;
 use is_terminal::IsTerminal;
 use meilisearch::analytics::Analytics;
-use meilisearch::option::LogMode;
-use meilisearch::{
-    analytics, create_app, setup_meilisearch, LogRouteHandle, LogRouteType, LogStderrHandle,
-    LogStderrType, Opt, SubscriberForSecondLayer,
-};
+use meilisearch::{analytics, create_app, prototype_name, setup_meilisearch, Opt};
 use meilisearch_auth::{generate_master_key, AuthController, MASTER_KEY_MIN_SIZE};
-use mimalloc::MiMalloc;
 use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};
-use tracing::level_filters::LevelFilter;
-use tracing_subscriber::layer::SubscriberExt as _;
-use tracing_subscriber::Layer;

 #[global_allocator]
-static ALLOC: MiMalloc = MiMalloc;
+static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;

-fn default_log_route_layer() -> LogRouteType {
-    None.with_filter(tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF))
-}
-
-fn default_log_stderr_layer(opt: &Opt) -> LogStderrType {
-    let layer = tracing_subscriber::fmt::layer()
-        .with_writer(|| LineWriter::new(std::io::stderr()))
-        .with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE);
-
-    let layer = match opt.experimental_logs_mode {
-        LogMode::Human => Box::new(layer)
-            as Box<dyn tracing_subscriber::Layer<SubscriberForSecondLayer> + Send + Sync>,
-        LogMode::Json => Box::new(layer.json())
-            as Box<dyn tracing_subscriber::Layer<SubscriberForSecondLayer> + Send + Sync>,
-    };
-
-    layer.with_filter(
-        tracing_subscriber::filter::Targets::new()
-            .with_target("", LevelFilter::from_str(&opt.log_level.to_string()).unwrap()),
-    )
-}
-
 /// does all the setup before meilisearch is launched
-fn setup(opt: &Opt) -> anyhow::Result<(LogRouteHandle, LogStderrHandle)> {
-    let (route_layer, route_layer_handle) =
-        tracing_subscriber::reload::Layer::new(default_log_route_layer());
-    let route_layer: tracing_subscriber::reload::Layer<_, _> = route_layer;
+fn setup(opt: &Opt) -> anyhow::Result<()> {
+    let mut log_builder = env_logger::Builder::new();
+    let log_filters = format!(
+        "{},h2=warn,hyper=warn,tokio_util=warn,tracing=warn,rustls=warn,mio=warn,reqwest=warn",
+        opt.log_level
+    );
+    log_builder.parse_filters(&log_filters);

-    let (stderr_layer, stderr_layer_handle) =
-        tracing_subscriber::reload::Layer::new(default_log_stderr_layer(opt));
-    let route_layer: tracing_subscriber::reload::Layer<_, _> = route_layer;
+    log_builder.init();

-    let subscriber = tracing_subscriber::registry().with(route_layer).with(stderr_layer);
-
-    // set the subscriber as the default for the application
-    tracing::subscriber::set_global_default(subscriber).unwrap();
-
-    Ok((route_layer_handle, stderr_layer_handle))
-}
-
-fn on_panic(info: &std::panic::PanicInfo) {
-    let info = info.to_string().replace('\n', " ");
-    tracing::error!(%info);
+    Ok(())
 }

 #[actix_web::main]
 async fn main() -> anyhow::Result<()> {
    let (opt, config_read_from) = Opt::try_build()?;

-    std::panic::set_hook(Box::new(on_panic));
-
    anyhow::ensure!(
        !(cfg!(windows) && opt.experimental_reduce_indexing_memory_usage),
        "The `experimental-reduce-indexing-memory-usage` flag is not supported on Windows"
    );

-    let log_handle = setup(&opt)?;
+    setup(&opt)?;

    match (opt.env.as_ref(), &opt.master_key) {
        ("production", Some(master_key)) if master_key.len() < MASTER_KEY_MIN_SIZE => {
|
||||||
@@ -119,7 +77,7 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
|
|
||||||
print_launch_resume(&opt, analytics.clone(), config_read_from);
|
print_launch_resume(&opt, analytics.clone(), config_read_from);
|
||||||
|
|
||||||
run_http(index_scheduler, auth_controller, opt, log_handle, analytics).await?;
|
run_http(index_scheduler, auth_controller, opt, analytics).await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
@@ -128,7 +86,6 @@ async fn run_http(
|
|||||||
index_scheduler: Arc<IndexScheduler>,
|
index_scheduler: Arc<IndexScheduler>,
|
||||||
auth_controller: Arc<AuthController>,
|
auth_controller: Arc<AuthController>,
|
||||||
opt: Opt,
|
opt: Opt,
|
||||||
logs: (LogRouteHandle, LogStderrHandle),
|
|
||||||
analytics: Arc<dyn Analytics>,
|
analytics: Arc<dyn Analytics>,
|
||||||
) -> anyhow::Result<()> {
|
) -> anyhow::Result<()> {
|
||||||
let enable_dashboard = &opt.env == "development";
|
let enable_dashboard = &opt.env == "development";
|
||||||
@@ -141,7 +98,6 @@ async fn run_http(
|
|||||||
index_scheduler.clone(),
|
index_scheduler.clone(),
|
||||||
auth_controller.clone(),
|
auth_controller.clone(),
|
||||||
opt.clone(),
|
opt.clone(),
|
||||||
logs.clone(),
|
|
||||||
analytics.clone(),
|
analytics.clone(),
|
||||||
enable_dashboard,
|
enable_dashboard,
|
||||||
)
|
)
|
||||||
@@ -163,8 +119,8 @@ pub fn print_launch_resume(
|
|||||||
analytics: Arc<dyn Analytics>,
|
analytics: Arc<dyn Analytics>,
|
||||||
config_read_from: Option<PathBuf>,
|
config_read_from: Option<PathBuf>,
|
||||||
) {
|
) {
|
||||||
let build_info = build_info::BuildInfo::from_build();
|
let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
|
||||||
|
let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
|
||||||
let protocol =
|
let protocol =
|
||||||
if opt.ssl_cert_path.is_some() && opt.ssl_key_path.is_some() { "https" } else { "http" };
|
if opt.ssl_cert_path.is_some() && opt.ssl_key_path.is_some() { "https" } else { "http" };
|
||||||
let ascii_name = r#"
|
let ascii_name = r#"
|
||||||
@@ -189,18 +145,10 @@ pub fn print_launch_resume(
|
|||||||
eprintln!("Database path:\t\t{:?}", opt.db_path);
|
eprintln!("Database path:\t\t{:?}", opt.db_path);
|
||||||
eprintln!("Server listening on:\t\"{}://{}\"", protocol, opt.http_addr);
|
eprintln!("Server listening on:\t\"{}://{}\"", protocol, opt.http_addr);
|
||||||
eprintln!("Environment:\t\t{:?}", opt.env);
|
eprintln!("Environment:\t\t{:?}", opt.env);
|
||||||
eprintln!("Commit SHA:\t\t{:?}", build_info.commit_sha1.unwrap_or("unknown"));
|
eprintln!("Commit SHA:\t\t{:?}", commit_sha.to_string());
|
||||||
eprintln!(
|
eprintln!("Commit date:\t\t{:?}", commit_date.to_string());
|
||||||
"Commit date:\t\t{:?}",
|
|
||||||
build_info
|
|
||||||
.commit_timestamp
|
|
||||||
.and_then(|commit_timestamp| commit_timestamp
|
|
||||||
.format(&time::format_description::well_known::Rfc3339)
|
|
||||||
.ok())
|
|
||||||
.unwrap_or("unknown".into())
|
|
||||||
);
|
|
||||||
eprintln!("Package version:\t{:?}", env!("CARGO_PKG_VERSION").to_string());
|
eprintln!("Package version:\t{:?}", env!("CARGO_PKG_VERSION").to_string());
|
||||||
if let Some(prototype) = build_info.describe.and_then(|describe| describe.as_prototype()) {
|
if let Some(prototype) = prototype_name() {
|
||||||
eprintln!("Prototype:\t\t{:?}", prototype);
|
eprintln!("Prototype:\t\t{:?}", prototype);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
meilisearch/src/option.rs
@@ -1,3 +1,4 @@
+use std::convert::TryFrom;
 use std::env::VarError;
 use std::ffi::OsStr;
 use std::fmt::Display;
@@ -19,8 +20,7 @@ use rustls::server::{
 use rustls::RootCertStore;
 use rustls_pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
 use serde::{Deserialize, Serialize};
-use sysinfo::{MemoryRefreshKind, RefreshKind, System};
-use url::Url;
+use sysinfo::{RefreshKind, System, SystemExt};
 
 const POSSIBLE_ENV: [&str; 2] = ["development", "production"];
 
@@ -28,8 +28,6 @@ const MEILI_DB_PATH: &str = "MEILI_DB_PATH";
 const MEILI_HTTP_ADDR: &str = "MEILI_HTTP_ADDR";
 const MEILI_MASTER_KEY: &str = "MEILI_MASTER_KEY";
 const MEILI_ENV: &str = "MEILI_ENV";
-const MEILI_TASK_WEBHOOK_URL: &str = "MEILI_TASK_WEBHOOK_URL";
-const MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER: &str = "MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER";
 #[cfg(feature = "analytics")]
 const MEILI_NO_ANALYTICS: &str = "MEILI_NO_ANALYTICS";
 const MEILI_HTTP_PAYLOAD_SIZE_LIMIT: &str = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT";
@@ -50,9 +48,6 @@ const MEILI_IGNORE_MISSING_DUMP: &str = "MEILI_IGNORE_MISSING_DUMP";
 const MEILI_IGNORE_DUMP_IF_DB_EXISTS: &str = "MEILI_IGNORE_DUMP_IF_DB_EXISTS";
 const MEILI_DUMP_DIR: &str = "MEILI_DUMP_DIR";
 const MEILI_LOG_LEVEL: &str = "MEILI_LOG_LEVEL";
-const MEILI_EXPERIMENTAL_LOGS_MODE: &str = "MEILI_EXPERIMENTAL_LOGS_MODE";
-const MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS: &str = "MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS";
-const MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE: &str = "MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE";
 const MEILI_EXPERIMENTAL_ENABLE_METRICS: &str = "MEILI_EXPERIMENTAL_ENABLE_METRICS";
 const MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE: &str =
     "MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE";
@@ -80,39 +75,6 @@ const DEFAULT_LOG_EVERY_N: usize = 100_000;
 pub const INDEX_SIZE: u64 = 2 * 1024 * 1024 * 1024 * 1024; // 2 TiB
 pub const TASK_DB_SIZE: u64 = 20 * 1024 * 1024 * 1024; // 20 GiB
 
-#[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)]
-#[serde(rename_all = "UPPERCASE")]
-pub enum LogMode {
-    #[default]
-    Human,
-    Json,
-}
-
-impl Display for LogMode {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            LogMode::Human => Display::fmt("HUMAN", f),
-            LogMode::Json => Display::fmt("JSON", f),
-        }
-    }
-}
-
-impl FromStr for LogMode {
-    type Err = LogModeError;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        match s.trim().to_lowercase().as_str() {
-            "human" => Ok(LogMode::Human),
-            "json" => Ok(LogMode::Json),
-            _ => Err(LogModeError(s.to_owned())),
-        }
-    }
-}
-
-#[derive(Debug, thiserror::Error)]
-#[error("Unsupported log mode level `{0}`. Supported values are `HUMAN` and `JSON`.")]
-pub struct LogModeError(String);
-
 #[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)]
 #[serde(rename_all = "UPPERCASE")]
 pub enum LogLevel {
@@ -194,14 +156,6 @@ pub struct Opt {
     #[serde(default = "default_env")]
     pub env: String,
 
-    /// Called whenever a task finishes so a third party can be notified.
-    #[clap(long, env = MEILI_TASK_WEBHOOK_URL)]
-    pub task_webhook_url: Option<Url>,
-
-    /// The Authorization header to send on the webhook URL whenever a task finishes so a third party can be notified.
-    #[clap(long, env = MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER)]
-    pub task_webhook_authorization_header: Option<String>,
-
     /// Deactivates Meilisearch's built-in telemetry when provided.
     ///
     /// Meilisearch automatically collects data from all instances that do not opt out using this flag.
@@ -344,30 +298,6 @@ pub struct Opt {
     #[serde(default)]
     pub experimental_enable_metrics: bool,
 
-    /// Experimental logs mode feature. For more information, see: <https://github.com/orgs/meilisearch/discussions/723>
-    ///
-    /// Change the mode of the logs on the console.
-    #[clap(long, env = MEILI_EXPERIMENTAL_LOGS_MODE, default_value_t)]
-    #[serde(default)]
-    pub experimental_logs_mode: LogMode,
-
-    /// Experimental logs route feature. For more information, see: <https://github.com/orgs/meilisearch/discussions/721>
-    ///
-    /// Enables the log routes on the `POST /logs/stream`, `POST /logs/stderr` endpoints, and the `DELETE /logs/stream` to stop receiving logs.
-    #[clap(long, env = MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE)]
-    #[serde(default)]
-    pub experimental_enable_logs_route: bool,
-
-    /// Enable multiple features that helps you to run meilisearch in a replicated context.
-    /// For more information, see: <https://github.com/orgs/meilisearch/discussions/725>
-    ///
-    /// - /!\ Disable the automatic clean up of old processed tasks, you're in charge of that now
-    /// - Lets you specify a custom task ID upon registering a task
-    /// - Lets you execute dry-register a task (get an answer from the route but nothing is actually registered in meilisearch and it won't be processed)
-    #[clap(long, env = MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS)]
-    #[serde(default)]
-    pub experimental_replication_parameters: bool,
-
     /// Experimental RAM reduction during indexing, do not use in production, see: <https://github.com/meilisearch/product/discussions/652>
     #[clap(long, env = MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE)]
     #[serde(default)]
@@ -445,8 +375,6 @@ impl Opt {
             http_addr,
             master_key,
             env,
-            task_webhook_url,
-            task_webhook_authorization_header,
             max_index_size: _,
             max_task_db_size: _,
             http_payload_size_limit,
@@ -473,9 +401,6 @@ impl Opt {
             #[cfg(feature = "analytics")]
            no_analytics,
            experimental_enable_metrics,
-            experimental_logs_mode,
-            experimental_enable_logs_route,
-            experimental_replication_parameters,
            experimental_reduce_indexing_memory_usage,
        } = self;
        export_to_env_if_not_present(MEILI_DB_PATH, db_path);
@@ -484,16 +409,6 @@ impl Opt {
             export_to_env_if_not_present(MEILI_MASTER_KEY, master_key);
         }
         export_to_env_if_not_present(MEILI_ENV, env);
-        if let Some(task_webhook_url) = task_webhook_url {
-            export_to_env_if_not_present(MEILI_TASK_WEBHOOK_URL, task_webhook_url.to_string());
-        }
-        if let Some(task_webhook_authorization_header) = task_webhook_authorization_header {
-            export_to_env_if_not_present(
-                MEILI_TASK_WEBHOOK_AUTHORIZATION_HEADER,
-                task_webhook_authorization_header,
-            );
-        }
-
         #[cfg(feature = "analytics")]
         {
             export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
@@ -532,18 +447,6 @@ impl Opt {
             MEILI_EXPERIMENTAL_ENABLE_METRICS,
             experimental_enable_metrics.to_string(),
         );
-        export_to_env_if_not_present(
-            MEILI_EXPERIMENTAL_LOGS_MODE,
-            experimental_logs_mode.to_string(),
-        );
-        export_to_env_if_not_present(
-            MEILI_EXPERIMENTAL_REPLICATION_PARAMETERS,
-            experimental_replication_parameters.to_string(),
-        );
-        export_to_env_if_not_present(
-            MEILI_EXPERIMENTAL_ENABLE_LOGS_ROUTE,
-            experimental_enable_logs_route.to_string(),
-        );
         export_to_env_if_not_present(
             MEILI_EXPERIMENTAL_REDUCE_INDEXING_MEMORY_USAGE,
             experimental_reduce_indexing_memory_usage.to_string(),
@@ -598,10 +501,7 @@ impl Opt {
     }
 
     pub(crate) fn to_instance_features(&self) -> InstanceTogglableFeatures {
-        InstanceTogglableFeatures {
-            metrics: self.experimental_enable_metrics,
-            logs_route: self.experimental_enable_logs_route,
-        }
+        InstanceTogglableFeatures { metrics: self.experimental_enable_metrics }
     }
 }
 
@@ -710,8 +610,8 @@ impl MaxMemory {
 
 /// Returns the total amount of bytes available or `None` if this system isn't supported.
 fn total_memory_bytes() -> Option<u64> {
-    if sysinfo::IS_SUPPORTED_SYSTEM {
-        let memory_kind = RefreshKind::new().with_memory(MemoryRefreshKind::new().with_ram());
+    if System::IS_SUPPORTED {
+        let memory_kind = RefreshKind::new().with_memory();
         let mut system = System::new_with_specifics(memory_kind);
         system.refresh_memory();
         Some(system.total_memory())
meilisearch/src/routes/api_key.rs
@@ -10,7 +10,7 @@ use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::{Code, ResponseError};
-use meilisearch_types::keys::{CreateApiKey, Key, PatchApiKey};
+use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
 use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
 use uuid::Uuid;
meilisearch/src/routes/dump.rs
@@ -1,18 +1,17 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_auth::AuthController;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::{get_task_id, is_dry_run, SummarizedTaskView};
-use crate::Opt;
+use crate::routes::SummarizedTaskView;
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));
@@ -22,7 +21,6 @@ pub async fn create_dump(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, Data<IndexScheduler>>,
     auth_controller: GuardedData<ActionPolicy<{ actions::DUMPS_CREATE }>, Data<AuthController>>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     analytics.publish("Dump Created".to_string(), json!({}), Some(&req));
@@ -31,13 +29,9 @@ pub async fn create_dump(
         keys: auth_controller.list_keys()?,
         instance_uid: analytics.instance_uid().cloned(),
     };
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!(returns = ?task, "Create dump");
+    debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }
meilisearch/src/routes/features.rs
@@ -3,11 +3,11 @@ use actix_web::{HttpRequest, HttpResponse};
 use deserr::actix_web::AwebJson;
 use deserr::Deserr;
 use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde_json::json;
-use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -33,21 +33,20 @@ async fn get_features(
     let features = index_scheduler.features();
 
     analytics.publish("Experimental features Seen".to_string(), json!(null), Some(&req));
-    let features = features.runtime_features();
-    debug!(returns = ?features, "Get features");
-    HttpResponse::Ok().json(features)
+    debug!("returns: {:?}", features.runtime_features());
+    HttpResponse::Ok().json(features.runtime_features())
 }
 
 #[derive(Debug, Deserr)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct RuntimeTogglableFeatures {
+    #[deserr(default)]
+    pub score_details: Option<bool>,
     #[deserr(default)]
     pub vector_store: Option<bool>,
     #[deserr(default)]
     pub metrics: Option<bool>,
     #[deserr(default)]
-    pub logs_route: Option<bool>,
-    #[deserr(default)]
     pub export_puffin_reports: Option<bool>,
 }
 
@@ -61,13 +60,12 @@ async fn patch_features(
     analytics: Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let features = index_scheduler.features();
-    debug!(parameters = ?new_features, "Patch features");
 
     let old_features = features.runtime_features();
     let new_features = meilisearch_types::features::RuntimeTogglableFeatures {
+        score_details: new_features.0.score_details.unwrap_or(old_features.score_details),
         vector_store: new_features.0.vector_store.unwrap_or(old_features.vector_store),
         metrics: new_features.0.metrics.unwrap_or(old_features.metrics),
-        logs_route: new_features.0.logs_route.unwrap_or(old_features.logs_route),
         export_puffin_reports: new_features
             .0
             .export_puffin_reports
@@ -78,23 +76,22 @@ async fn patch_features(
     // the it renames to camelCase, which we don't want for analytics.
     // **Do not** ignore fields with `..` or `_` here, because we want to add them in the future.
     let meilisearch_types::features::RuntimeTogglableFeatures {
+        score_details,
         vector_store,
         metrics,
-        logs_route,
         export_puffin_reports,
     } = new_features;
 
     analytics.publish(
         "Experimental features Updated".to_string(),
         json!({
+            "score_details": score_details,
             "vector_store": vector_store,
             "metrics": metrics,
-            "logs_route": logs_route,
             "export_puffin_reports": export_puffin_reports,
         }),
         Some(&req),
     );
     index_scheduler.put_runtime_features(new_features)?;
-    debug!(returns = ?new_features, "Patch features");
     Ok(HttpResponse::Ok().json(new_features))
 }
meilisearch/src/routes/indexes/documents.rs
@@ -7,7 +7,8 @@ use bstr::ByteSlice as _;
 use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use deserr::Deserr;
 use futures::StreamExt;
-use index_scheduler::{IndexScheduler, TaskId};
+use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType};
@@ -27,7 +28,6 @@ use serde_json::Value;
 use tempfile::tempfile;
 use tokio::fs::File;
 use tokio::io::{AsyncSeekExt, AsyncWriteExt, BufWriter};
-use tracing::debug;
 
 use crate::analytics::{Analytics, DocumentDeletionKind, DocumentFetchKind};
 use crate::error::MeilisearchHttpError;
@@ -36,11 +36,8 @@ use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
 use crate::extractors::payload::Payload;
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::{
-    get_task_id, is_dry_run, PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT,
-};
+use crate::routes::{PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
 use crate::search::parse_filter;
-use crate::Opt;
 
 static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
     vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()]
@@ -104,7 +101,6 @@ pub async fn get_document(
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let DocumentParam { index_uid, document_id } = document_param.into_inner();
-    debug!(parameters = ?params, "Get document");
     let index_uid = IndexUid::try_from(index_uid)?;
 
     analytics.get_fetch_documents(&DocumentFetchKind::PerDocumentId, &req);
@@ -114,7 +110,7 @@ pub async fn get_document(
 
     let index = index_scheduler.index(&index_uid)?;
     let document = retrieve_document(&index, &document_id, attributes_to_retrieve)?;
-    debug!(returns = ?document, "Get document");
+    debug!("returns: {:?}", document);
     Ok(HttpResponse::Ok().json(document))
 }
 
@@ -122,7 +118,6 @@ pub async fn delete_document(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
     path: web::Path<DocumentParam>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let DocumentParam { index_uid, document_id } = path.into_inner();
@@ -134,12 +129,8 @@ pub async fn delete_document(
         index_uid: index_uid.to_string(),
         documents_ids: vec![document_id],
     };
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
     debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -177,8 +168,9 @@ pub async fn documents_by_query_post(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
+    debug!("called with body: {:?}", body);
+
     let body = body.into_inner();
-    debug!(parameters = ?body, "Get documents POST");
 
     analytics.post_fetch_documents(
         &DocumentFetchKind::Normal {
@@ -199,7 +191,7 @@ pub async fn get_documents(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!(parameters = ?params, "Get documents GET");
+    debug!("called with params: {:?}", params);
 
     let BrowseQueryGet { limit, offset, fields, filter } = params.into_inner();
 
@@ -243,7 +235,7 @@ fn documents_by_query(
 
     let ret = PaginationView::new(offset, limit, total as usize, documents);
 
-    debug!(returns = ?ret, "Get documents");
+    debug!("returns: {:?}", ret);
     Ok(HttpResponse::Ok().json(ret))
 }
 
@@ -275,19 +267,16 @@ pub async fn replace_documents(
     params: AwebQueryParameter<UpdateDocumentsQuery, DeserrQueryParamError>,
     body: Payload,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
-    debug!(parameters = ?params, "Replace documents");
+    debug!("called with params: {:?}", params);
     let params = params.into_inner();
 
     analytics.add_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
 
     let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid);
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task = document_addition(
         extract_mime_type(&req)?,
         index_scheduler,
@@ -296,12 +285,9 @@ pub async fn replace_documents(
         params.csv_delimiter,
         body,
         IndexDocumentsMethod::ReplaceDocuments,
-        uid,
-        dry_run,
         allow_index_creation,
     )
     .await?;
-    debug!(returns = ?task, "Replace documents");
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -312,19 +298,16 @@ pub async fn update_documents(
     params: AwebQueryParameter<UpdateDocumentsQuery, DeserrQueryParamError>,
     body: Payload,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
+    debug!("called with params: {:?}", params);
     let params = params.into_inner();
-    debug!(parameters = ?params, "Update documents");
 
     analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
 
     let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid);
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task = document_addition(
         extract_mime_type(&req)?,
         index_scheduler,
@@ -333,12 +316,9 @@ pub async fn update_documents(
         params.csv_delimiter,
         body,
         IndexDocumentsMethod::UpdateDocuments,
-        uid,
-        dry_run,
         allow_index_creation,
     )
     .await?;
-    debug!(returns = ?task, "Update documents");
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -352,8 +332,6 @@ async fn document_addition(
     csv_delimiter: Option<u8>,
     mut body: Payload,
     method: IndexDocumentsMethod,
-    task_id: Option<TaskId>,
-    dry_run: bool,
     allow_index_creation: bool,
 ) -> Result<SummarizedTaskView, MeilisearchHttpError> {
     let format = match (
@@ -386,7 +364,7 @@ async fn document_addition(
         }
     };
 
-    let (uuid, mut update_file) = index_scheduler.create_update_file(dry_run)?;
+    let (uuid, mut update_file) = index_scheduler.create_update_file()?;
 
     let temp_file = match tempfile() {
         Ok(file) => file,
@@ -425,9 +403,11 @@ async fn document_addition(
     let read_file = buffer.into_inner().into_std().await;
     let documents_count = tokio::task::spawn_blocking(move || {
         let documents_count = match format {
-            PayloadType::Json => read_json(&read_file, &mut update_file)?,
-            PayloadType::Csv { delimiter } => read_csv(&read_file, &mut update_file, delimiter)?,
-            PayloadType::Ndjson => read_ndjson(&read_file, &mut update_file)?,
+            PayloadType::Json => read_json(&read_file, update_file.as_file_mut())?,
+            PayloadType::Csv { delimiter } => {
+                read_csv(&read_file, update_file.as_file_mut(), delimiter)?
+            }
+            PayloadType::Ndjson => read_ndjson(&read_file, update_file.as_file_mut())?,
         };
         // we NEED to persist the file here because we moved the `udpate_file` in another task.
         update_file.persist()?;
@@ -447,10 +427,7 @@ async fn document_addition(
             Err(index_scheduler::Error::FileStore(file_store::Error::IoError(e)))
                 if e.kind() == ErrorKind::NotFound => {}
             Err(e) => {
-                tracing::warn!(
-                    index_uuid = %uuid,
-                    "Unknown error happened while deleting a malformed update file: {e}"
-                );
+                log::warn!("Unknown error happened while deleting a malformed update file with uuid {uuid}: {e}");
             }
         }
         // We still want to return the original error to the end user.
@@ -468,9 +445,7 @@ async fn document_addition(
     };
 
     let scheduler = index_scheduler.clone();
-    let task = match tokio::task::spawn_blocking(move || scheduler.register(task, task_id, dry_run))
-        .await?
-    {
+    let task = match tokio::task::spawn_blocking(move || scheduler.register(task)).await? {
         Ok(task) => task,
         Err(e) => {
             index_scheduler.delete_update_file(uuid)?;
@@ -478,6 +453,7 @@ async fn document_addition(
         }
     };
 
+    debug!("returns: {:?}", task);
     Ok(task.into())
 }
 
@@ -486,10 +462,9 @@ pub async fn delete_documents_batch(
     index_uid: web::Path<String>,
     body: web::Json<Vec<Value>>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!(parameters = ?body, "Delete documents by batch");
+    debug!("called with params: {:?}", body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
@@ -501,14 +476,10 @@ pub async fn delete_documents_batch(
 
     let task =
         KindWithContent::DocumentDeletion { index_uid: index_uid.to_string(), documents_ids: ids };
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!(returns = ?task, "Delete documents by batch");
+    debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }
 
@@ -524,10 +495,9 @@ pub async fn delete_documents_by_filter(
     index_uid: web::Path<String>,
     body: AwebJson<DocumentDeletionByFilter, DeserrJsonError>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!(parameters = ?body, "Delete documents by filter");
+    debug!("called with params: {:?}", body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let index_uid = index_uid.into_inner();
     let filter = body.into_inner().filter;
@@ -542,14 +512,10 @@ pub async fn delete_documents_by_filter(
         .map_err(|err| ResponseError::from_msg(err.message, Code::InvalidDocumentFilter))?;
     let task = KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr: filter };
 
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!(returns = ?task, "Delete documents by filter");
+    debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }
 
@@ -557,21 +523,16 @@ pub async fn clear_all_documents(
     index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     analytics.delete_documents(DocumentDeletionKind::ClearAll, &req);
 
     let task = KindWithContent::DocumentClear { index_uid: index_uid.to_string() };
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!(returns = ?task, "Delete all documents");
+    debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }
 
meilisearch/src/routes/indexes/facet_search.rs
@@ -2,12 +2,12 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::AwebJson;
 use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::index_uid::IndexUid;
 use serde_json::Value;
-use tracing::debug;
 
 use crate::analytics::{Analytics, FacetSearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -56,7 +56,7 @@ pub async fn search(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let query = params.into_inner();
-    debug!(parameters = ?query, "Facet search");
+    debug!("facet search called with params: {:?}", query);
 
     let mut aggregate = FacetSearchAggregator::from_query(&query, &req);
 
@@ -83,7 +83,7 @@ pub async fn search(
 
     let search_result = search_result?;
 
-    debug!(returns = ?search_result, "Facet search");
+    debug!("returns: {:?}", search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
 
meilisearch/src/routes/indexes/mod.rs
@@ -5,6 +5,7 @@ use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use deserr::{DeserializeError, Deserr, ValuePointerRef};
 use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{immutable_field_error, DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
@@ -15,15 +16,12 @@ use meilisearch_types::tasks::KindWithContent;
 use serde::Serialize;
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::debug;
 
-use super::{get_task_id, Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
+use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::{AuthenticationError, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::is_dry_run;
-use crate::Opt;
 
 pub mod documents;
 pub mod facet_search;
@@ -95,7 +93,6 @@ pub async fn list_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
     paginate: AwebQueryParameter<ListIndexes, DeserrQueryParamError>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!(parameters = ?paginate, "List indexes");
     let filters = index_scheduler.filters();
     let indexes: Vec<Option<IndexView>> =
         index_scheduler.try_for_each_index(|uid, index| -> Result<Option<IndexView>, _> {
@@ -108,7 +105,7 @@ pub async fn list_indexes(
     let indexes: Vec<IndexView> = indexes.into_iter().flatten().collect();
     let ret = paginate.as_pagination().auto_paginate_sized(indexes.into_iter());
 
-    debug!(returns = ?ret, "List indexes");
+    debug!("returns: {:?}", ret);
     Ok(HttpResponse::Ok().json(ret))
 }
 
@@ -125,10 +122,8 @@ pub async fn create_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
     body: AwebJson<IndexCreateRequest, DeserrJsonError>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!(parameters = ?body, "Create index");
     let IndexCreateRequest { primary_key, uid } = body.into_inner();
 
     let allow_index_creation = index_scheduler.filters().allow_index_creation(&uid);
@@ -140,13 +135,8 @@ pub async fn create_index(
         );
 
         let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
-        let uid = get_task_id(&req, &opt)?;
-        let dry_run = is_dry_run(&req, &opt)?;
         let task: SummarizedTaskView =
-            tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-                .await??
-                .into();
-        debug!(returns = ?task, "Create index");
+            tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
         Ok(HttpResponse::Accepted().json(task))
     } else {
@@ -187,7 +177,7 @@ pub async fn get_index(
     let index = index_scheduler.index(&index_uid)?;
     let index_view = IndexView::new(index_uid.into_inner(), &index)?;
 
-    debug!(returns = ?index_view, "Get index");
+    debug!("returns: {:?}", index_view);
 
     Ok(HttpResponse::Ok().json(index_view))
 }
@@ -197,10 +187,9 @@ pub async fn update_index(
     index_uid: web::Path<String>,
     body: AwebJson<UpdateIndexRequest, DeserrJsonError>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!(parameters = ?body, "Update index");
+    debug!("called with params: {:?}", body);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let body = body.into_inner();
     analytics.publish(
@@ -214,32 +203,21 @@ pub async fn update_index(
         primary_key: body.primary_key,
     };
 
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!(returns = ?task, "Update index");
+    debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }
 
 pub async fn delete_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    req: HttpRequest,
-    opt: web::Data<Opt>,
 ) -> Result<HttpResponse, ResponseError> {
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
     let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-            .await??
-            .into();
-    debug!(returns = ?task, "Delete index");
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -277,6 +255,6 @@ pub async fn get_index_stats(
 
     let stats = IndexStats::from(index_scheduler.index_stats(&index_uid)?);
 
-    debug!(returns = ?stats, "Get index stats");
+    debug!("returns: {:?}", stats);
     Ok(HttpResponse::Ok().json(stats))
 }
meilisearch/src/routes/indexes/search.rs
@@ -2,6 +2,7 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use index_scheduler::IndexScheduler;
+use log::{debug, warn};
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
 use meilisearch_types::error::deserr_codes::*;
@@ -11,7 +12,6 @@ use meilisearch_types::milli;
 use meilisearch_types::milli::vector::DistributionShift;
 use meilisearch_types::serde_cs::vec::CS;
 use serde_json::Value;
-use tracing::{debug, warn};
 
 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::policies::*;
@@ -186,7 +186,7 @@ pub async fn search_with_url_query(
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    debug!(parameters = ?params, "Search get");
+    debug!("called with params: {:?}", params);
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let mut query: SearchQuery = params.into_inner().into();
@@ -213,7 +213,7 @@ pub async fn search_with_url_query(
 
     let search_result = search_result?;
 
-    debug!(returns = ?search_result, "Search get");
+    debug!("returns: {:?}", search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
 
@@ -227,7 +227,7 @@ pub async fn search_with_post(
     let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
     let mut query = params.into_inner();
-    debug!(parameters = ?query, "Search post");
+    debug!("search called with params: {:?}", query);
 
     // Tenant token search_rules.
     if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
@@ -252,7 +252,7 @@ pub async fn search_with_post(
 
     let search_result = search_result?;
 
-    debug!(returns = ?search_result, "Search post");
+    debug!("returns: {:?}", search_result);
     Ok(HttpResponse::Ok().json(search_result))
 }
 
meilisearch/src/routes/indexes/settings.rs
@@ -2,6 +2,7 @@ use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use deserr::actix_web::AwebJson;
 use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::facet_values_sort::FacetValuesSort;
@@ -10,13 +11,11 @@ use meilisearch_types::milli::update::Setting;
 use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
-use crate::routes::{get_task_id, is_dry_run, SummarizedTaskView};
-use crate::Opt;
+use crate::routes::SummarizedTaskView;
 
 #[macro_export]
 macro_rules! make_setting_route {
@@ -25,18 +24,17 @@ macro_rules! make_setting_route {
         use actix_web::web::Data;
         use actix_web::{web, HttpRequest, HttpResponse, Resource};
         use index_scheduler::IndexScheduler;
+        use log::debug;
         use meilisearch_types::error::ResponseError;
         use meilisearch_types::index_uid::IndexUid;
         use meilisearch_types::milli::update::Setting;
        use meilisearch_types::settings::{settings, Settings};
        use meilisearch_types::tasks::KindWithContent;
-        use tracing::debug;
        use $crate::analytics::Analytics;
        use $crate::extractors::authentication::policies::*;
        use $crate::extractors::authentication::GuardedData;
        use $crate::extractors::sequential_extractor::SeqHandler;
-        use $crate::Opt;
-        use $crate::routes::{is_dry_run, get_task_id, SummarizedTaskView};
+        use $crate::routes::SummarizedTaskView;

        pub async fn delete(
            index_scheduler: GuardedData<
@@ -44,8 +42,6 @@ macro_rules! make_setting_route {
                 Data<IndexScheduler>,
             >,
             index_uid: web::Path<String>,
-            req: HttpRequest,
-            opt: web::Data<Opt>,
         ) -> Result<HttpResponse, ResponseError> {
             let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
@@ -60,14 +56,12 @@ macro_rules! make_setting_route {
                 is_deletion: true,
                 allow_index_creation,
             };
-            let uid = get_task_id(&req, &opt)?;
-            let dry_run = is_dry_run(&req, &opt)?;
             let task: SummarizedTaskView =
-                tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
+                tokio::task::spawn_blocking(move || index_scheduler.register(task))
                     .await??
                     .into();
 
-            debug!(returns = ?task, "Delete settings");
+            debug!("returns: {:?}", task);
             Ok(HttpResponse::Accepted().json(task))
         }
 
@@ -79,13 +73,11 @@ macro_rules! make_setting_route {
             index_uid: actix_web::web::Path<String>,
             body: deserr::actix_web::AwebJson<Option<$type>, $err_ty>,
             req: HttpRequest,
-            opt: web::Data<Opt>,
             $analytics_var: web::Data<dyn Analytics>,
         ) -> std::result::Result<HttpResponse, ResponseError> {
             let index_uid = IndexUid::try_from(index_uid.into_inner())?;
 
             let body = body.into_inner();
-            debug!(parameters = ?body, "Update settings");
 
             #[allow(clippy::redundant_closure_call)]
             $analytics(&body, &req);
@@ -98,11 +90,6 @@ macro_rules! make_setting_route {
                 ..Default::default()
             };
 
-            let new_settings = $crate::routes::indexes::settings::validate_settings(
-                new_settings,
-                &index_scheduler,
-            )?;
-
             let allow_index_creation =
                 index_scheduler.filters().allow_index_creation(&index_uid);
 
@@ -112,14 +99,12 @@ macro_rules! make_setting_route {
                 is_deletion: false,
                 allow_index_creation,
             };
-            let uid = get_task_id(&req, &opt)?;
-            let dry_run = is_dry_run(&req, &opt)?;
             let task: SummarizedTaskView =
-                tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
+                tokio::task::spawn_blocking(move || index_scheduler.register(task))
                     .await??
                     .into();
 
-            debug!(returns = ?task, "Update settings");
+            debug!("returns: {:?}", task);
             Ok(HttpResponse::Accepted().json(task))
         }
 
@@ -136,7 +121,7 @@ macro_rules! make_setting_route {
             let rtxn = index.read_txn()?;
             let settings = settings(&index, &rtxn)?;
|
let settings = settings(&index, &rtxn)?;
|
||||||
|
|
||||||
debug!(returns = ?settings, "Update settings");
|
debug!("returns: {:?}", settings);
|
||||||
let mut json = serde_json::json!(&settings);
|
let mut json = serde_json::json!(&settings);
|
||||||
let val = json[$camelcase_attr].take();
|
let val = json[$camelcase_attr].take();
|
||||||
|
|
||||||
@@ -468,7 +453,7 @@ make_setting_route!(
|
|||||||
json!({
|
json!({
|
||||||
"proximity_precision": {
|
"proximity_precision": {
|
||||||
"set": precision.is_some(),
|
"set": precision.is_some(),
|
||||||
"value": precision.unwrap_or_default(),
|
"value": precision,
|
||||||
}
|
}
|
||||||
}),
|
}),
|
||||||
Some(req),
|
Some(req),
|
||||||
@@ -597,13 +582,13 @@ fn embedder_analytics(
|
|||||||
for source in s
|
for source in s
|
||||||
.values()
|
.values()
|
||||||
.filter_map(|config| config.clone().set())
|
.filter_map(|config| config.clone().set())
|
||||||
.filter_map(|config| config.source.set())
|
.filter_map(|config| config.embedder_options.set())
|
||||||
{
|
{
|
||||||
use meilisearch_types::milli::vector::settings::EmbedderSource;
|
use meilisearch_types::milli::vector::settings::EmbedderSettings;
|
||||||
match source {
|
match source {
|
||||||
EmbedderSource::OpenAi => sources.insert("openAi"),
|
EmbedderSettings::OpenAi(_) => sources.insert("openAi"),
|
||||||
EmbedderSource::HuggingFace => sources.insert("huggingFace"),
|
EmbedderSettings::HuggingFace(_) => sources.insert("huggingFace"),
|
||||||
EmbedderSource::UserProvided => sources.insert("userProvided"),
|
EmbedderSettings::UserProvided(_) => sources.insert("userProvided"),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@@ -661,14 +646,11 @@ pub async fn update_all(
|
|||||||
index_uid: web::Path<String>,
|
index_uid: web::Path<String>,
|
||||||
body: AwebJson<Settings<Unchecked>, DeserrJsonError>,
|
body: AwebJson<Settings<Unchecked>, DeserrJsonError>,
|
||||||
req: HttpRequest,
|
req: HttpRequest,
|
||||||
opt: web::Data<Opt>,
|
|
||||||
analytics: web::Data<dyn Analytics>,
|
analytics: web::Data<dyn Analytics>,
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||||
|
|
||||||
let new_settings = body.into_inner();
|
let new_settings = body.into_inner();
|
||||||
debug!(parameters = ?new_settings, "Update all settings");
|
|
||||||
let new_settings = validate_settings(new_settings, &index_scheduler)?;
|
|
||||||
|
|
||||||
analytics.publish(
|
analytics.publish(
|
||||||
"Settings Updated".to_string(),
|
"Settings Updated".to_string(),
|
||||||
@@ -702,8 +684,7 @@ pub async fn update_all(
|
|||||||
"set": new_settings.distinct_attribute.as_ref().set().is_some()
|
"set": new_settings.distinct_attribute.as_ref().set().is_some()
|
||||||
},
|
},
|
||||||
"proximity_precision": {
|
"proximity_precision": {
|
||||||
"set": new_settings.proximity_precision.as_ref().set().is_some(),
|
"set": new_settings.proximity_precision.as_ref().set().is_some()
|
||||||
"value": new_settings.proximity_precision.as_ref().set().copied().unwrap_or_default()
|
|
||||||
},
|
},
|
||||||
"typo_tolerance": {
|
"typo_tolerance": {
|
||||||
"enabled": new_settings.typo_tolerance
|
"enabled": new_settings.typo_tolerance
|
||||||
@@ -777,14 +758,10 @@ pub async fn update_all(
|
|||||||
is_deletion: false,
|
is_deletion: false,
|
||||||
allow_index_creation,
|
allow_index_creation,
|
||||||
};
|
};
|
||||||
let uid = get_task_id(&req, &opt)?;
|
|
||||||
let dry_run = is_dry_run(&req, &opt)?;
|
|
||||||
let task: SummarizedTaskView =
|
let task: SummarizedTaskView =
|
||||||
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
|
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
|
||||||
.await??
|
|
||||||
.into();
|
|
||||||
|
|
||||||
debug!(returns = ?task, "Update all settings");
|
debug!("returns: {:?}", task);
|
||||||
Ok(HttpResponse::Accepted().json(task))
|
Ok(HttpResponse::Accepted().json(task))
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -797,15 +774,13 @@ pub async fn get_all(
|
|||||||
let index = index_scheduler.index(&index_uid)?;
|
let index = index_scheduler.index(&index_uid)?;
|
||||||
let rtxn = index.read_txn()?;
|
let rtxn = index.read_txn()?;
|
||||||
let new_settings = settings(&index, &rtxn)?;
|
let new_settings = settings(&index, &rtxn)?;
|
||||||
debug!(returns = ?new_settings, "Get all settings");
|
debug!("returns: {:?}", new_settings);
|
||||||
Ok(HttpResponse::Ok().json(new_settings))
|
Ok(HttpResponse::Ok().json(new_settings))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn delete_all(
|
pub async fn delete_all(
|
||||||
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
|
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
|
||||||
index_uid: web::Path<String>,
|
index_uid: web::Path<String>,
|
||||||
req: HttpRequest,
|
|
||||||
opt: web::Data<Opt>,
|
|
||||||
) -> Result<HttpResponse, ResponseError> {
|
) -> Result<HttpResponse, ResponseError> {
|
||||||
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
|
||||||
|
|
||||||
@@ -819,23 +794,9 @@ pub async fn delete_all(
|
|||||||
is_deletion: true,
|
is_deletion: true,
|
||||||
allow_index_creation,
|
allow_index_creation,
|
||||||
};
|
};
|
||||||
let uid = get_task_id(&req, &opt)?;
|
|
||||||
let dry_run = is_dry_run(&req, &opt)?;
|
|
||||||
let task: SummarizedTaskView =
|
let task: SummarizedTaskView =
|
||||||
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
|
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
|
||||||
.await??
|
|
||||||
.into();
|
|
||||||
|
|
||||||
debug!(returns = ?task, "Delete all settings");
|
debug!("returns: {:?}", task);
|
||||||
Ok(HttpResponse::Accepted().json(task))
|
Ok(HttpResponse::Accepted().json(task))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn validate_settings(
|
|
||||||
settings: Settings<Unchecked>,
|
|
||||||
index_scheduler: &IndexScheduler,
|
|
||||||
) -> Result<Settings<Unchecked>, ResponseError> {
|
|
||||||
if matches!(settings.embedders, Setting::Set(_)) {
|
|
||||||
index_scheduler.features().check_vector("Passing `embedders` in settings")?
|
|
||||||
}
|
|
||||||
Ok(settings.validate()?)
|
|
||||||
}
|
|
||||||
|
@@ -1,318 +0,0 @@
-use std::convert::Infallible;
-use std::io::Write;
-use std::ops::ControlFlow;
-use std::pin::Pin;
-use std::str::FromStr;
-use std::sync::Arc;
-
-use actix_web::web::{Bytes, Data};
-use actix_web::{web, HttpResponse};
-use deserr::actix_web::AwebJson;
-use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
-use futures_util::Stream;
-use index_scheduler::IndexScheduler;
-use meilisearch_types::deserr::DeserrJsonError;
-use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::error::{Code, ResponseError};
-use tokio::sync::mpsc;
-use tracing_subscriber::filter::Targets;
-use tracing_subscriber::Layer;
-
-use crate::error::MeilisearchHttpError;
-use crate::extractors::authentication::policies::*;
-use crate::extractors::authentication::GuardedData;
-use crate::extractors::sequential_extractor::SeqHandler;
-use crate::{LogRouteHandle, LogStderrHandle};
-
-pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(
-        web::resource("stream")
-            .route(web::post().to(SeqHandler(get_logs)))
-            .route(web::delete().to(SeqHandler(cancel_logs))),
-    )
-    .service(web::resource("stderr").route(web::post().to(SeqHandler(update_stderr_target))));
-}
-
-#[derive(Debug, Default, Clone, Copy, Deserr, PartialEq, Eq)]
-#[deserr(rename_all = camelCase)]
-pub enum LogMode {
-    #[default]
-    Human,
-    Json,
-    Profile,
-}
-
-/// Simple wrapper around the `Targets` from `tracing_subscriber` to implement `MergeWithError` on it.
-#[derive(Clone, Debug)]
-struct MyTargets(Targets);
-
-/// Simple wrapper around the `ParseError` from `tracing_subscriber` to implement `MergeWithError` on it.
-#[derive(Debug, thiserror::Error)]
-enum MyParseError {
-    #[error(transparent)]
-    ParseError(#[from] tracing_subscriber::filter::ParseError),
-    #[error(
-        "Empty string is not a valid target. If you want to get no logs use `OFF`. Usage: `info`, `meilisearch=info`, or you can write multiple filters in one target: `index_scheduler=info,milli=trace`"
-    )]
-    Example,
-}
-
-impl FromStr for MyTargets {
-    type Err = MyParseError;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        if s.is_empty() {
-            Err(MyParseError::Example)
-        } else {
-            Ok(MyTargets(Targets::from_str(s).map_err(MyParseError::ParseError)?))
-        }
-    }
-}
-
-impl MergeWithError<MyParseError> for DeserrJsonError<BadRequest> {
-    fn merge(
-        _self_: Option<Self>,
-        other: MyParseError,
-        merge_location: ValuePointerRef,
-    ) -> ControlFlow<Self, Self> {
-        Self::error::<Infallible>(
-            None,
-            ErrorKind::Unexpected { msg: other.to_string() },
-            merge_location,
-        )
-    }
-}
-
-#[derive(Debug, Deserr)]
-#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields, validate = validate_get_logs -> DeserrJsonError<InvalidSettingsTypoTolerance>)]
-pub struct GetLogs {
-    #[deserr(default = "info".parse().unwrap(), try_from(&String) = MyTargets::from_str -> DeserrJsonError<BadRequest>)]
-    target: MyTargets,
-
-    #[deserr(default, error = DeserrJsonError<BadRequest>)]
-    mode: LogMode,
-
-    #[deserr(default = false, error = DeserrJsonError<BadRequest>)]
-    profile_memory: bool,
-}
-
-fn validate_get_logs<E: DeserializeError>(
-    logs: GetLogs,
-    location: ValuePointerRef,
-) -> Result<GetLogs, E> {
-    if logs.profile_memory && logs.mode != LogMode::Profile {
-        Err(deserr::take_cf_content(E::error::<Infallible>(
-            None,
-            ErrorKind::Unexpected {
-                msg: format!("`profile_memory` can only be used while profiling code and is not compatible with the {:?} mode.", logs.mode),
-            },
-            location,
-        )))
-    } else {
-        Ok(logs)
-    }
-}
-
-struct LogWriter {
-    sender: mpsc::UnboundedSender<Vec<u8>>,
-}
-
-impl Write for LogWriter {
-    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
-        self.sender.send(buf.to_vec()).map_err(std::io::Error::other)?;
-        Ok(buf.len())
-    }
-
-    fn flush(&mut self) -> std::io::Result<()> {
-        Ok(())
-    }
-}
-
-struct HandleGuard {
-    /// We need to keep an handle on the logs to make it available again when the streamer is dropped
-    logs: Arc<LogRouteHandle>,
-}
-
-impl Drop for HandleGuard {
-    fn drop(&mut self) {
-        if let Err(e) = self.logs.modify(|layer| *layer.inner_mut() = None) {
-            tracing::error!("Could not free the logs route: {e}");
-        }
-    }
-}
-
-fn byte_stream(
-    receiver: mpsc::UnboundedReceiver<Vec<u8>>,
-    guard: HandleGuard,
-) -> impl futures_util::Stream<Item = Result<Bytes, ResponseError>> {
-    futures_util::stream::unfold((receiver, guard), move |(mut receiver, guard)| async move {
-        let vec = receiver.recv().await;
-
-        vec.map(From::from).map(Ok).map(|a| (a, (receiver, guard)))
-    })
-}
-
-type PinnedByteStream = Pin<Box<dyn Stream<Item = Result<Bytes, ResponseError>>>>;
-
-fn make_layer<
-    S: tracing::Subscriber + for<'span> tracing_subscriber::registry::LookupSpan<'span>,
->(
-    opt: &GetLogs,
-    logs: Data<LogRouteHandle>,
-) -> (Box<dyn Layer<S> + Send + Sync>, PinnedByteStream) {
-    let guard = HandleGuard { logs: logs.into_inner() };
-    match opt.mode {
-        LogMode::Human => {
-            let (sender, receiver) = tokio::sync::mpsc::unbounded_channel();
-
-            let fmt_layer = tracing_subscriber::fmt::layer()
-                .with_writer(move || LogWriter { sender: sender.clone() })
-                .with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE);
-
-            let stream = byte_stream(receiver, guard);
-            (Box::new(fmt_layer) as Box<dyn Layer<S> + Send + Sync>, Box::pin(stream))
-        }
-        LogMode::Json => {
-            let (sender, receiver) = tokio::sync::mpsc::unbounded_channel();
-
-            let fmt_layer = tracing_subscriber::fmt::layer()
-                .with_writer(move || LogWriter { sender: sender.clone() })
-                .json()
-                .with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE);
-
-            let stream = byte_stream(receiver, guard);
-            (Box::new(fmt_layer) as Box<dyn Layer<S> + Send + Sync>, Box::pin(stream))
-        }
-        LogMode::Profile => {
-            let (trace, layer) = tracing_trace::Trace::new(opt.profile_memory);
-
-            let stream = entry_stream(trace, guard);
-
-            (Box::new(layer) as Box<dyn Layer<S> + Send + Sync>, Box::pin(stream))
-        }
-    }
-}
-
-fn entry_stream(
-    trace: tracing_trace::Trace,
-    guard: HandleGuard,
-) -> impl Stream<Item = Result<Bytes, ResponseError>> {
-    let receiver = trace.into_receiver();
-    let entry_buf = Vec::new();
-
-    futures_util::stream::unfold(
-        (receiver, entry_buf, guard),
-        move |(mut receiver, mut entry_buf, guard)| async move {
-            let mut bytes = Vec::new();
-
-            while bytes.len() < 8192 {
-                entry_buf.clear();
-
-                let Ok(count) = tokio::time::timeout(
-                    std::time::Duration::from_secs(1),
-                    receiver.recv_many(&mut entry_buf, 100),
-                )
-                .await
-                else {
-                    break;
-                };
-
-                if count == 0 {
-                    if !bytes.is_empty() {
-                        break;
-                    }
-
-                    // channel closed, exit
-                    return None;
-                }
-
-                for entry in &entry_buf {
-                    if let Err(error) = serde_json::to_writer(&mut bytes, entry) {
-                        tracing::error!(
-                            error = &error as &dyn std::error::Error,
-                            "deserializing entry"
-                        );
-                        return Some((
-                            Err(ResponseError::from_msg(
-                                format!("error deserializing entry: {error}"),
-                                Code::Internal,
-                            )),
-                            (receiver, entry_buf, guard),
-                        ));
-                    }
-                }
-            }
-
-            Some((Ok(bytes.into()), (receiver, entry_buf, guard)))
-        },
-    )
-}
-
-pub async fn get_logs(
-    index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
-    logs: Data<LogRouteHandle>,
-    body: AwebJson<GetLogs, DeserrJsonError>,
-) -> Result<HttpResponse, ResponseError> {
-    index_scheduler.features().check_logs_route()?;
-
-    let opt = body.into_inner();
-    let mut stream = None;
-
-    logs.modify(|layer| match layer.inner_mut() {
-        None => {
-            // there is no one getting logs
-            *layer.filter_mut() = opt.target.0.clone();
-            let (new_layer, new_stream) = make_layer(&opt, logs.clone());
-
-            *layer.inner_mut() = Some(new_layer);
-            stream = Some(new_stream);
-        }
-        Some(_) => {
-            // there is already someone getting logs
-        }
-    })
-    .unwrap();
-
-    if let Some(stream) = stream {
-        Ok(HttpResponse::Ok().streaming(stream))
-    } else {
-        Err(MeilisearchHttpError::AlreadyUsedLogRoute.into())
-    }
-}
-
-pub async fn cancel_logs(
-    index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
-    logs: Data<LogRouteHandle>,
-) -> Result<HttpResponse, ResponseError> {
-    index_scheduler.features().check_logs_route()?;
-
-    if let Err(e) = logs.modify(|layer| *layer.inner_mut() = None) {
-        tracing::error!("Could not free the logs route: {e}");
-    }
-
-    Ok(HttpResponse::NoContent().finish())
-}
-
-#[derive(Debug, Deserr)]
-#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
-pub struct UpdateStderrLogs {
-    #[deserr(default = "info".parse().unwrap(), try_from(&String) = MyTargets::from_str -> DeserrJsonError<BadRequest>)]
-    target: MyTargets,
-}
-
-pub async fn update_stderr_target(
-    index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
-    logs: Data<LogStderrHandle>,
-    body: AwebJson<UpdateStderrLogs, DeserrJsonError>,
-) -> Result<HttpResponse, ResponseError> {
-    index_scheduler.features().check_logs_route()?;
-
-    let opt = body.into_inner();
-
-    logs.modify(|layer| {
-        *layer.filter_mut() = opt.target.0.clone();
-    })
-    .unwrap();
-
-    Ok(HttpResponse::NoContent().finish())
-}

@@ -3,19 +3,18 @@ use std::collections::BTreeMap;
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_auth::AuthController;
-use meilisearch_types::error::{Code, ResponseError};
+use meilisearch_types::error::ResponseError;
 use meilisearch_types::settings::{Settings, Unchecked};
 use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
 use time::OffsetDateTime;
-use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
-use crate::Opt;
 
 const PAGINATION_DEFAULT_LIMIT: usize = 20;
 
@@ -23,7 +22,6 @@ mod api_key;
 mod dump;
 pub mod features;
 pub mod indexes;
-mod logs;
 mod metrics;
 mod multi_search;
 mod snapshot;
@@ -33,7 +31,6 @@ pub mod tasks;
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::scope("/tasks").configure(tasks::configure))
         .service(web::resource("/health").route(web::get().to(get_health)))
-        .service(web::scope("/logs").configure(logs::configure))
         .service(web::scope("/keys").configure(api_key::configure))
         .service(web::scope("/dumps").configure(dump::configure))
         .service(web::scope("/snapshots").configure(snapshot::configure))
@@ -46,56 +43,6 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::scope("/experimental-features").configure(features::configure));
 }
 
-pub fn get_task_id(req: &HttpRequest, opt: &Opt) -> Result<Option<TaskId>, ResponseError> {
-    if !opt.experimental_replication_parameters {
-        return Ok(None);
-    }
-    let task_id = req
-        .headers()
-        .get("TaskId")
-        .map(|header| {
-            header.to_str().map_err(|e| {
-                ResponseError::from_msg(
-                    format!("TaskId is not a valid utf-8 string: {e}"),
-                    Code::BadRequest,
-                )
-            })
-        })
-        .transpose()?
-        .map(|s| {
-            s.parse::<TaskId>().map_err(|e| {
-                ResponseError::from_msg(
-                    format!(
-                        "Could not parse the TaskId as a {}: {e}",
-                        std::any::type_name::<TaskId>(),
-                    ),
-                    Code::BadRequest,
-                )
-            })
-        })
-        .transpose()?;
-    Ok(task_id)
-}
-
-pub fn is_dry_run(req: &HttpRequest, opt: &Opt) -> Result<bool, ResponseError> {
-    if !opt.experimental_replication_parameters {
-        return Ok(false);
-    }
-    Ok(req
-        .headers()
-        .get("DryRun")
-        .map(|header| {
-            header.to_str().map_err(|e| {
-                ResponseError::from_msg(
-                    format!("DryRun is not a valid utf-8 string: {e}"),
-                    Code::BadRequest,
-                )
-            })
-        })
-        .transpose()?
-        .map_or(false, |s| s.to_lowercase() == "true"))
-}
-
 #[derive(Debug, Serialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SummarizedTaskView {
@@ -303,7 +250,7 @@ async fn get_stats(
 
     let stats = create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), filters)?;
 
-    debug!(returns = ?stats, "Get stats");
+    debug!("returns: {:?}", stats);
     Ok(HttpResponse::Ok().json(stats))
 }
 
@@ -359,18 +306,12 @@ async fn get_version(
 ) -> HttpResponse {
     analytics.publish("Version Seen".to_string(), json!(null), Some(&req));
 
-    let build_info = build_info::BuildInfo::from_build();
+    let commit_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
+    let commit_date = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
 
     HttpResponse::Ok().json(VersionResponse {
-        commit_sha: build_info.commit_sha1.unwrap_or("unknown").to_string(),
-        commit_date: build_info
-            .commit_timestamp
-            .and_then(|commit_timestamp| {
-                commit_timestamp
-                    .format(&time::format_description::well_known::Iso8601::DEFAULT)
-                    .ok()
-            })
-            .unwrap_or("unknown".into()),
+        commit_sha: commit_sha.to_string(),
+        commit_date: commit_date.to_string(),
         pkg_version: env!("CARGO_PKG_VERSION").to_string(),
     })
 }

@@ -3,11 +3,11 @@ use actix_web::web::{self, Data};
 use actix_web::{HttpRequest, HttpResponse};
 use deserr::actix_web::AwebJson;
 use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
 use serde::Serialize;
-use tracing::debug;
 
 use crate::analytics::{Analytics, MultiSearchAggregator};
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -52,7 +52,7 @@ pub async fn multi_search_with_post(
     for (query_index, (index_uid, mut query)) in
         queries.into_iter().map(SearchQueryWithIndex::into_index_query).enumerate()
     {
-        debug!(on_index = query_index, parameters = ?query, "Multi-search");
+        debug!("multi-search #{query_index}: called with params: {:?}", query);
 
         // Check index from API key
         if !index_scheduler.filters().is_index_authorized(&index_uid) {
@@ -107,7 +107,7 @@ pub async fn multi_search_with_post(
         err
     })?;
 
-    debug!(returns = ?search_results, "Multi-search");
+    debug!("returns: {:?}", search_results);
 
     Ok(HttpResponse::Ok().json(SearchResults { results: search_results }))
 }

@@ -1,17 +1,16 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
 use index_scheduler::IndexScheduler;
+use log::debug;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::tasks::KindWithContent;
 use serde_json::json;
-use tracing::debug;
 
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::routes::{get_task_id, is_dry_run, SummarizedTaskView};
-use crate::Opt;
+use crate::routes::SummarizedTaskView;
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::post().to(SeqHandler(create_snapshot))));
@@ -20,19 +19,14 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
 pub async fn create_snapshot(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SNAPSHOTS_CREATE }>, Data<IndexScheduler>>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     analytics.publish("Snapshot Created".to_string(), json!({}), Some(&req));
 
     let task = KindWithContent::SnapshotCreation;
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
 
-    debug!(returns = ?task, "Create snapshot");
+    debug!("returns: {:?}", task);
     Ok(HttpResponse::Accepted().json(task))
 }

@@ -10,13 +10,12 @@ use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::tasks::{IndexSwap, KindWithContent};
 use serde_json::json;
 
-use super::{get_task_id, is_dry_run, SummarizedTaskView};
+use super::SummarizedTaskView;
 use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::{AuthenticationError, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::Opt;
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::post().to(SeqHandler(swap_indexes))));
@@ -33,7 +32,6 @@ pub async fn swap_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
     params: AwebJson<Vec<SwapIndexesPayload>, DeserrJsonError>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let params = params.into_inner();
@@ -62,11 +60,7 @@ pub async fn swap_indexes(
     }
 
     let task = KindWithContent::IndexSwap { swaps };
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
     let task: SummarizedTaskView =
-        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid, dry_run))
-            .await??
-            .into();
+        tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
     Ok(HttpResponse::Accepted().json(task))
 }

@@ -8,9 +8,11 @@ use meilisearch_types::deserr::DeserrQueryParamError;
 use meilisearch_types::error::deserr_codes::*;
 use meilisearch_types::error::{InvalidTaskDateError, ResponseError};
 use meilisearch_types::index_uid::IndexUid;
+use meilisearch_types::settings::{Settings, Unchecked};
 use meilisearch_types::star_or::{OptionStarOr, OptionStarOrList};
-use meilisearch_types::task_view::TaskView;
-use meilisearch_types::tasks::{Kind, KindWithContent, Status};
+use meilisearch_types::tasks::{
+    serialize_duration, Details, IndexSwap, Kind, KindWithContent, Status, Task,
+};
 use serde::Serialize;
 use serde_json::json;
 use time::format_description::well_known::Rfc3339;
@@ -18,12 +20,11 @@ use time::macros::format_description;
 use time::{Date, Duration, OffsetDateTime, Time};
 use tokio::task;
 
-use super::{get_task_id, is_dry_run, SummarizedTaskView};
+use super::SummarizedTaskView;
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
-use crate::Opt;
 
 const DEFAULT_LIMIT: u32 = 20;
 
@@ -36,6 +37,140 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::resource("/cancel").route(web::post().to(SeqHandler(cancel_tasks))))
         .service(web::resource("/{task_id}").route(web::get().to(SeqHandler(get_task))));
 }
 
+#[derive(Debug, Clone, PartialEq, Eq, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TaskView {
+    pub uid: TaskId,
+    #[serde(default)]
+    pub index_uid: Option<String>,
+    pub status: Status,
+    #[serde(rename = "type")]
+    pub kind: Kind,
+    pub canceled_by: Option<TaskId>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub details: Option<DetailsView>,
+    pub error: Option<ResponseError>,
+    #[serde(serialize_with = "serialize_duration", default)]
+    pub duration: Option<Duration>,
+    #[serde(with = "time::serde::rfc3339")]
+    pub enqueued_at: OffsetDateTime,
+    #[serde(with = "time::serde::rfc3339::option", default)]
+    pub started_at: Option<OffsetDateTime>,
+    #[serde(with = "time::serde::rfc3339::option", default)]
+    pub finished_at: Option<OffsetDateTime>,
+}
+
+impl TaskView {
+    pub fn from_task(task: &Task) -> TaskView {
+        TaskView {
+            uid: task.uid,
+            index_uid: task.index_uid().map(ToOwned::to_owned),
+            status: task.status,
+            kind: task.kind.as_kind(),
+            canceled_by: task.canceled_by,
+            details: task.details.clone().map(DetailsView::from),
+            error: task.error.clone(),
+            duration: task.started_at.zip(task.finished_at).map(|(start, end)| end - start),
+            enqueued_at: task.enqueued_at,
+            started_at: task.started_at,
+            finished_at: task.finished_at,
+        }
+    }
+}
+
+#[derive(Default, Debug, PartialEq, Eq, Clone, Serialize)]
+#[serde(rename_all = "camelCase")]
+pub struct DetailsView {
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub received_documents: Option<u64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub indexed_documents: Option<Option<u64>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub primary_key: Option<Option<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub provided_ids: Option<usize>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub deleted_documents: Option<Option<u64>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub matched_tasks: Option<u64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub canceled_tasks: Option<Option<u64>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub deleted_tasks: Option<Option<u64>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub original_filter: Option<Option<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub dump_uid: Option<Option<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    #[serde(flatten)]
+    pub settings: Option<Box<Settings<Unchecked>>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub swaps: Option<Vec<IndexSwap>>,
+}
+
+impl From<Details> for DetailsView {
+    fn from(details: Details) -> Self {
+        match details {
+            Details::DocumentAdditionOrUpdate { received_documents, indexed_documents } => {
+                DetailsView {
+                    received_documents: Some(received_documents),
+                    indexed_documents: Some(indexed_documents),
+                    ..DetailsView::default()
+                }
+            }
+            Details::SettingsUpdate { settings } => {
+                DetailsView { settings: Some(settings), ..DetailsView::default() }
+            }
+            Details::IndexInfo { primary_key } => {
+                DetailsView { primary_key: Some(primary_key), ..DetailsView::default() }
+            }
+            Details::DocumentDeletion {
+                provided_ids: received_document_ids,
+                deleted_documents,
+            } => DetailsView {
+                provided_ids: Some(received_document_ids),
+                deleted_documents: Some(deleted_documents),
+                original_filter: Some(None),
+                ..DetailsView::default()
+            },
+            Details::DocumentDeletionByFilter { original_filter, deleted_documents } => {
+                DetailsView {
+                    provided_ids: Some(0),
+                    original_filter: Some(Some(original_filter)),
+                    deleted_documents: Some(deleted_documents),
+                    ..DetailsView::default()
+                }
+            }
+            Details::ClearAll { deleted_documents } => {
+                DetailsView { deleted_documents: Some(deleted_documents), ..DetailsView::default() }
+            }
+            Details::TaskCancelation { matched_tasks, canceled_tasks, original_filter } => {
+                DetailsView {
+                    matched_tasks: Some(matched_tasks),
+                    canceled_tasks: Some(canceled_tasks),
+                    original_filter: Some(Some(original_filter)),
+                    ..DetailsView::default()
+                }
+            }
+            Details::TaskDeletion { matched_tasks, deleted_tasks, original_filter } => {
+                DetailsView {
+                    matched_tasks: Some(matched_tasks),
+                    deleted_tasks: Some(deleted_tasks),
+                    original_filter: Some(Some(original_filter)),
+                    ..DetailsView::default()
+                }
+            }
+            Details::Dump { dump_uid } => {
+                DetailsView { dump_uid: Some(dump_uid), ..DetailsView::default() }
+            }
+            Details::IndexSwap { swaps } => {
+                DetailsView { swaps: Some(swaps), ..Default::default() }
+            }
+        }
+    }
+}
+
 #[derive(Debug, Deserr)]
 #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct TasksFilterQuery {
@@ -162,7 +297,6 @@ async fn cancel_tasks(
     index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
     params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let params = params.into_inner();
@@ -199,11 +333,7 @@ async fn cancel_tasks(
     let task_cancelation =
         KindWithContent::TaskCancelation { query: format!("?{}", req.query_string()), tasks };
 
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
-    let task =
-        task::spawn_blocking(move || index_scheduler.register(task_cancelation, uid, dry_run))
-            .await??;
+    let task = task::spawn_blocking(move || index_scheduler.register(task_cancelation)).await??;
     let task: SummarizedTaskView = task.into();
 
     Ok(HttpResponse::Ok().json(task))
@@ -213,7 +343,6 @@ async fn delete_tasks(
     index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_DELETE }>, Data<IndexScheduler>>,
     params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
     req: HttpRequest,
-    opt: web::Data<Opt>,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let params = params.into_inner();
@@ -249,10 +378,7 @@ async fn delete_tasks(
     let task_deletion =
         KindWithContent::TaskDeletion { query: format!("?{}", req.query_string()), tasks };
 
-    let uid = get_task_id(&req, &opt)?;
-    let dry_run = is_dry_run(&req, &opt)?;
-    let task = task::spawn_blocking(move || index_scheduler.register(task_deletion, uid, dry_run))
-        .await??;
+    let task = task::spawn_blocking(move || index_scheduler.register(task_deletion)).await??;
     let task: SummarizedTaskView = task.into();
 
     Ok(HttpResponse::Ok().json(task))

@@ -441,6 +441,10 @@ fn prepare_search<'t>(
             ScoringStrategy::Skip
         });
 
+    if query.show_ranking_score_details {
+        features.check_score_details()?;
+    }
+
     if let Some(HybridQuery { embedder: Some(embedder), .. }) = &query.hybrid {
         search.embedder_name(embedder);
     }
@@ -731,9 +735,6 @@ pub fn perform_facet_search(
     if let Some(facet_query) = &facet_query {
         facet_search.query(facet_query);
     }
-    if let Some(max_facets) = index.max_values_per_facet(&rtxn)? {
-        facet_search.max_values(max_facets as usize);
-    }
 
     Ok(FacetSearchResult {
         facet_hits: facet_search.execute()?,
@@ -896,14 +897,6 @@ fn format_fields<'a>(
     let mut matches_position = compute_matches.then(BTreeMap::new);
     let mut document = document.clone();
 
-    // reduce the formatted option list to the attributes that should be formatted,
-    // instead of all the attributes to display.
-    let formatting_fields_options: Vec<_> = formatted_options
-        .iter()
-        .filter(|(_, option)| option.should_format())
-        .map(|(fid, option)| (field_ids_map.name(*fid).unwrap(), option))
-        .collect();
-
     // select the attributes to retrieve
     let displayable_names =
         displayable_ids.iter().map(|&fid| field_ids_map.name(fid).expect("Missing field name"));
@@ -912,15 +905,13 @@ fn format_fields<'a>(
     // to the value and merge them together. eg. If a user said he wanted to highlight `doggo`
     // and crop `doggo.name`. `doggo.name` needs to be highlighted + cropped while `doggo.age` is only
     // highlighted.
-    // Warn: The time to compute the format list scales with the number of fields to format;
-    // cumulated with map_leaf_values that iterates over all the nested fields, it gives a quadratic complexity:
-    // d*f where d is the total number of fields to display and f is the total number of fields to format.
-    let format = formatting_fields_options
+    let format = formatted_options
         .iter()
-        .filter(|(name, _option)| {
+        .filter(|(field, _option)| {
+            let name = field_ids_map.name(**field).unwrap();
             milli::is_faceted_by(name, key) || milli::is_faceted_by(key, name)
         })
-        .map(|(_, option)| **option)
+        .map(|(_, option)| *option)
         .reduce(|acc, option| acc.merge(option));
     let mut infos = Vec::new();
 
@@ -1017,7 +1008,7 @@ fn format_value<'a>(
                 let value = matcher.format(format_options);
                 Value::String(value.into_owned())
             }
-            None => Value::String(s),
+            None => Value::Number(number),
         }
     }
     value => value,

@@ -59,8 +59,6 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
         ("POST", "/snapshots") => hashset!{"snapshots.create", "snapshots.*", "*"},
         ("GET", "/version") => hashset!{"version", "*"},
         ("GET", "/metrics") => hashset!{"metrics.get", "metrics.*", "*"},
-        ("POST", "/logs/stream") => hashset!{"metrics.get", "metrics.*", "*"},
-        ("DELETE", "/logs/stream") => hashset!{"metrics.get", "metrics.*", "*"},
         ("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"},
         ("GET", "/keys/mykey/") => hashset!{"keys.get", "*"},
         ("DELETE", "/keys/mykey/") => hashset!{"keys.delete", "*"},

@@ -100,11 +100,16 @@ impl Index<'_> {
     pub async fn raw_add_documents(
         &self,
         payload: &str,
-        headers: Vec<(&str, &str)>,
+        content_type: Option<&str>,
         query_parameter: &str,
     ) -> (Value, StatusCode) {
         let url = format!("/indexes/{}/documents{}", urlencode(self.uid.as_ref()), query_parameter);
-        self.service.post_str(url, payload, headers).await
+
+        if let Some(content_type) = content_type {
+            self.service.post_str(url, payload, vec![("Content-Type", content_type)]).await
+        } else {
+            self.service.post_str(url, payload, Vec::new()).await
+        }
     }
 
     pub async fn update_documents(

@@ -64,7 +64,7 @@ impl Display for Value {
         write!(
             f,
             "{}",
-            json_string!(self, { ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" })
+            json_string!(self, { ".enqueuedAt" => "[date]", ".processedAt" => "[date]", ".finishedAt" => "[date]", ".duration" => "[duration]" })
         )
     }
 }

@@ -9,12 +9,10 @@ use actix_web::http::StatusCode;
 use byte_unit::{Byte, ByteUnit};
 use clap::Parser;
 use meilisearch::option::{IndexerOpts, MaxMemory, Opt};
-use meilisearch::{analytics, create_app, setup_meilisearch, SubscriberForSecondLayer};
+use meilisearch::{analytics, create_app, setup_meilisearch};
 use once_cell::sync::Lazy;
 use tempfile::TempDir;
 use tokio::time::sleep;
-use tracing::level_filters::LevelFilter;
-use tracing_subscriber::Layer;
 
 use super::index::Index;
 use super::service::Service;
@@ -83,24 +81,10 @@ impl Server {
         Response = ServiceResponse<impl MessageBody>,
         Error = actix_web::Error,
     > {
-        let (_route_layer, route_layer_handle) =
-            tracing_subscriber::reload::Layer::new(None.with_filter(
-                tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),
-            ));
-        let (_stderr_layer, stderr_layer_handle) = tracing_subscriber::reload::Layer::new(
-            (Box::new(
-                tracing_subscriber::fmt::layer()
-                    .with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE),
-            )
-                as Box<dyn tracing_subscriber::Layer<SubscriberForSecondLayer> + Send + Sync>)
-                .with_filter(tracing_subscriber::filter::Targets::new()),
-        );
-
         actix_web::test::init_service(create_app(
             self.service.index_scheduler.clone().into(),
             self.service.auth.clone().into(),
             self.service.options.clone(),
-            (route_layer_handle, stderr_layer_handle),
             analytics::MockAnalytics::new(&self.service.options),
             true,
         ))

|||||||
@@ -5,10 +5,8 @@ use actix_web::http::StatusCode;
|
|||||||
use actix_web::test;
|
use actix_web::test;
|
||||||
use actix_web::test::TestRequest;
|
use actix_web::test::TestRequest;
|
||||||
use index_scheduler::IndexScheduler;
|
use index_scheduler::IndexScheduler;
|
||||||
use meilisearch::{analytics, create_app, Opt, SubscriberForSecondLayer};
|
use meilisearch::{analytics, create_app, Opt};
|
||||||
use meilisearch_auth::AuthController;
|
use meilisearch_auth::AuthController;
|
||||||
use tracing::level_filters::LevelFilter;
|
|
||||||
use tracing_subscriber::Layer;
|
|
||||||
|
|
||||||
use crate::common::encoder::Encoder;
|
use crate::common::encoder::Encoder;
|
||||||
use crate::common::Value;
|
use crate::common::Value;
|
||||||
@@ -107,24 +105,10 @@ impl Service {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub async fn request(&self, mut req: test::TestRequest) -> (Value, StatusCode) {
|
pub async fn request(&self, mut req: test::TestRequest) -> (Value, StatusCode) {
|
||||||
let (_route_layer, route_layer_handle) =
|
|
||||||
tracing_subscriber::reload::Layer::new(None.with_filter(
|
|
||||||
tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),
|
|
||||||
));
|
|
||||||
let (_stderr_layer, stderr_layer_handle) = tracing_subscriber::reload::Layer::new(
|
|
||||||
(Box::new(
|
|
||||||
tracing_subscriber::fmt::layer()
|
|
||||||
.with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE),
|
|
||||||
)
|
|
||||||
as Box<dyn tracing_subscriber::Layer<SubscriberForSecondLayer> + Send + Sync>)
|
|
||||||
.with_filter(tracing_subscriber::filter::Targets::new()),
|
|
||||||
);
|
|
||||||
|
|
||||||
let app = test::init_service(create_app(
|
let app = test::init_service(create_app(
|
||||||
self.index_scheduler.clone().into(),
|
self.index_scheduler.clone().into(),
|
||||||
self.auth.clone().into(),
|
self.auth.clone().into(),
|
||||||
self.options.clone(),
|
self.options.clone(),
|
||||||
(route_layer_handle, stderr_layer_handle),
|
|
||||||
analytics::MockAnalytics::new(&self.options),
|
analytics::MockAnalytics::new(&self.options),
|
||||||
true,
|
true,
|
||||||
))
|
))
|
||||||
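Both test harnesses above drop the same plumbing: a pair of `tracing_subscriber::reload` layers whose handles `create_app` threaded through to the `/logs/*` routes so the log filter could be swapped at runtime. A minimal sketch of that reload pattern in isolation, assuming only the public `tracing-subscriber` API (names here are illustrative, not Meilisearch's):

```rust
use tracing_subscriber::layer::SubscriberExt;
use tracing_subscriber::util::SubscriberInitExt;
use tracing_subscriber::{filter, reload, Layer};

fn main() {
    // Wrap a filtered fmt layer in a reload::Layer; `handle` can later swap
    // the filter without rebuilding the subscriber — this is what the removed
    // `route_layer_handle` / `stderr_layer_handle` pair enabled.
    let (fmt_layer, handle) = reload::Layer::new(
        tracing_subscriber::fmt::layer()
            .with_filter(filter::Targets::new().with_default(filter::LevelFilter::OFF)),
    );
    tracing_subscriber::registry().with(fmt_layer).init();

    // e.g. turn logging on later, from a route handler holding `handle`:
    let _ = handle.modify(|layer| {
        *layer.filter_mut() = filter::Targets::new().with_default(filter::LevelFilter::INFO);
    });
}
```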
@@ -1,11 +1,10 @@
 use actix_web::test;
 use meili_snap::{json_string, snapshot};
-use meilisearch::Opt;
 use time::format_description::well_known::Rfc3339;
 use time::OffsetDateTime;

 use crate::common::encoder::Encoder;
-use crate::common::{default_settings, GetAllDocumentsOptions, Server, Value};
+use crate::common::{GetAllDocumentsOptions, Server, Value};
 use crate::json;

 /// This is the basic usage of our API and every other tests uses the content-type application/json
@@ -1761,181 +1760,6 @@ async fn add_documents_invalid_geo_field() {
       "finishedAt": "[date]"
     }
     "###);
-
-    // The three next tests are related to #4333
-
-    // _geo has a lat and lng but set to `null`
-    let documents = json!([
-        {
-            "id": "12",
-            "_geo": { "lng": null, "lat": 67}
-        }
-    ]);
-
-    let (response, code) = index.add_documents(documents, None).await;
-    snapshot!(code, @"202 Accepted");
-    let response = index.wait_task(response.uid()).await;
-    snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
-        @r###"
-    {
-      "uid": 14,
-      "indexUid": "test",
-      "status": "failed",
-      "type": "documentAdditionOrUpdate",
-      "canceledBy": null,
-      "details": {
-        "receivedDocuments": 1,
-        "indexedDocuments": 0
-      },
-      "error": {
-        "message": "Could not parse longitude in the document with the id: `12`. Was expecting a finite number but instead got `null`.",
-        "code": "invalid_document_geo_field",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field"
-      },
-      "duration": "[duration]",
-      "enqueuedAt": "[date]",
-      "startedAt": "[date]",
-      "finishedAt": "[date]"
-    }
-    "###);
-
-    // _geo has a lat and lng but set to `null`
-    let documents = json!([
-        {
-            "id": "12",
-            "_geo": { "lng": 35, "lat": null }
-        }
-    ]);
-
-    let (response, code) = index.add_documents(documents, None).await;
-    snapshot!(code, @"202 Accepted");
-    let response = index.wait_task(response.uid()).await;
-    snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
-        @r###"
-    {
-      "uid": 15,
-      "indexUid": "test",
-      "status": "failed",
-      "type": "documentAdditionOrUpdate",
-      "canceledBy": null,
-      "details": {
-        "receivedDocuments": 1,
-        "indexedDocuments": 0
-      },
-      "error": {
-        "message": "Could not parse latitude in the document with the id: `12`. Was expecting a finite number but instead got `null`.",
-        "code": "invalid_document_geo_field",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field"
-      },
-      "duration": "[duration]",
-      "enqueuedAt": "[date]",
-      "startedAt": "[date]",
-      "finishedAt": "[date]"
-    }
-    "###);
-
-    // _geo has a lat and lng but set to `null`
-    let documents = json!([
-        {
-            "id": "13",
-            "_geo": { "lng": null, "lat": null }
-        }
-    ]);
-
-    let (response, code) = index.add_documents(documents, None).await;
-    snapshot!(code, @"202 Accepted");
-    let response = index.wait_task(response.uid()).await;
-    snapshot!(json_string!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
-        @r###"
-    {
-      "uid": 16,
-      "indexUid": "test",
-      "status": "failed",
-      "type": "documentAdditionOrUpdate",
-      "canceledBy": null,
-      "details": {
-        "receivedDocuments": 1,
-        "indexedDocuments": 0
-      },
-      "error": {
-        "message": "Could not parse latitude nor longitude in the document with the id: `13`. Was expecting finite numbers but instead got `null` and `null`.",
-        "code": "invalid_document_geo_field",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field"
-      },
-      "duration": "[duration]",
-      "enqueuedAt": "[date]",
-      "startedAt": "[date]",
-      "finishedAt": "[date]"
-    }
-    "###);
-}
-
-// Related to #4333
-#[actix_rt::test]
-async fn add_invalid_geo_and_then_settings() {
-    let server = Server::new().await;
-    let index = server.index("test");
-    index.create(Some("id")).await;
-
-    // _geo is not an object
-    let documents = json!([
-        {
-            "id": "11",
-            "_geo": { "lat": null, "lng": null },
-        }
-    ]);
-    let (ret, code) = index.add_documents(documents, None).await;
-    snapshot!(code, @"202 Accepted");
-    let ret = index.wait_task(ret.uid()).await;
-    snapshot!(ret, @r###"
-    {
-      "uid": 1,
-      "indexUid": "test",
-      "status": "succeeded",
-      "type": "documentAdditionOrUpdate",
-      "canceledBy": null,
-      "details": {
-        "receivedDocuments": 1,
-        "indexedDocuments": 1
-      },
-      "error": null,
-      "duration": "[duration]",
-      "enqueuedAt": "[date]",
-      "startedAt": "[date]",
-      "finishedAt": "[date]"
-    }
-    "###);
-
-    let (ret, code) = index.update_settings(json!({"sortableAttributes": ["_geo"]})).await;
-    snapshot!(code, @"202 Accepted");
-    let ret = index.wait_task(ret.uid()).await;
-    snapshot!(ret, @r###"
-    {
-      "uid": 2,
-      "indexUid": "test",
-      "status": "failed",
-      "type": "settingsUpdate",
-      "canceledBy": null,
-      "details": {
-        "sortableAttributes": [
-          "_geo"
-        ]
-      },
-      "error": {
-        "message": "Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.",
-        "code": "invalid_document_geo_field",
-        "type": "invalid_request",
-        "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field"
-      },
-      "duration": "[duration]",
-      "enqueuedAt": "[date]",
-      "startedAt": "[date]",
-      "finishedAt": "[date]"
-    }
-    "###);
 }

 #[actix_rt::test]
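The tests removed above pinned down issue #4333: a document whose `_geo.lat` or `_geo.lng` is `null` must fail, either at document addition or at a later `sortableAttributes: ["_geo"]` settings update. For contrast, a sketch of a `_geo` payload these checks accept — both coordinates as finite numbers:

```rust
// Sketch: the well-formed counterpart of the rejected documents above.
let valid_document = serde_json::json!({
    "id": "12",
    "_geo": { "lat": 67.0, "lng": 35.0 } // finite numbers, never null
});
```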
@@ -2158,49 +1982,3 @@ async fn batch_several_documents_addition() {
     assert_eq!(code, 200, "failed with `{}`", response);
     assert_eq!(response["results"].as_array().unwrap().len(), 120);
 }
-
-#[actix_rt::test]
-async fn dry_register_file() {
-    let temp = tempfile::tempdir().unwrap();
-
-    let options =
-        Opt { experimental_replication_parameters: true, ..default_settings(temp.path()) };
-    let server = Server::new_with_options(options).await.unwrap();
-    let index = server.index("tamo");
-
-    let documents = r#"
-        {
-            "id": "12",
-            "doggo": "kefir"
-        }
-    "#;
-
-    let (response, code) = index
-        .raw_add_documents(
-            documents,
-            vec![("Content-Type", "application/json"), ("DryRun", "true")],
-            "",
-        )
-        .await;
-    snapshot!(response, @r###"
-    {
-      "taskUid": 0,
-      "indexUid": "tamo",
-      "status": "enqueued",
-      "type": "documentAdditionOrUpdate",
-      "enqueuedAt": "[date]"
-    }
-    "###);
-    snapshot!(code, @"202 Accepted");
-
-    let (response, code) = index.get_task(response.uid()).await;
-    snapshot!(response, @r###"
-    {
-      "message": "Task `0` not found.",
-      "code": "task_not_found",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#task_not_found"
-    }
-    "###);
-    snapshot!(code, @"404 Not Found");
-}
@@ -209,8 +209,7 @@ async fn replace_documents_missing_payload() {
     let server = Server::new().await;
     let index = server.index("test");

-    let (response, code) =
-        index.raw_add_documents("", vec![("Content-Type", "application/json")], "").await;
+    let (response, code) = index.raw_add_documents("", Some("application/json"), "").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -221,8 +220,7 @@ async fn replace_documents_missing_payload() {
     }
     "###);

-    let (response, code) =
-        index.raw_add_documents("", vec![("Content-Type", "application/x-ndjson")], "").await;
+    let (response, code) = index.raw_add_documents("", Some("application/x-ndjson"), "").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -233,8 +231,7 @@ async fn replace_documents_missing_payload() {
     }
     "###);

-    let (response, code) =
-        index.raw_add_documents("", vec![("Content-Type", "text/csv")], "").await;
+    let (response, code) = index.raw_add_documents("", Some("text/csv"), "").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -290,7 +287,7 @@ async fn replace_documents_missing_content_type() {
     let server = Server::new().await;
     let index = server.index("test");

-    let (response, code) = index.raw_add_documents("", Vec::new(), "").await;
+    let (response, code) = index.raw_add_documents("", None, "").await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {
@@ -302,7 +299,7 @@ async fn replace_documents_missing_content_type() {
     "###);

     // even with a csv delimiter specified this error is triggered first
-    let (response, code) = index.raw_add_documents("", Vec::new(), "?csvDelimiter=;").await;
+    let (response, code) = index.raw_add_documents("", None, "?csvDelimiter=;").await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {
@@ -348,7 +345,7 @@ async fn replace_documents_bad_content_type() {
     let server = Server::new().await;
     let index = server.index("test");

-    let (response, code) = index.raw_add_documents("", vec![("Content-Type", "doggo")], "").await;
+    let (response, code) = index.raw_add_documents("", Some("doggo"), "").await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {
@@ -382,9 +379,8 @@ async fn replace_documents_bad_csv_delimiter() {
     let server = Server::new().await;
     let index = server.index("test");

-    let (response, code) = index
-        .raw_add_documents("", vec![("Content-Type", "application/json")], "?csvDelimiter")
-        .await;
+    let (response, code) =
+        index.raw_add_documents("", Some("application/json"), "?csvDelimiter").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -395,9 +391,8 @@ async fn replace_documents_bad_csv_delimiter() {
     }
     "###);

-    let (response, code) = index
-        .raw_add_documents("", vec![("Content-Type", "application/json")], "?csvDelimiter=doggo")
-        .await;
+    let (response, code) =
+        index.raw_add_documents("", Some("application/json"), "?csvDelimiter=doggo").await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -409,11 +404,7 @@ async fn replace_documents_bad_csv_delimiter() {
     "###);

     let (response, code) = index
-        .raw_add_documents(
-            "",
-            vec![("Content-Type", "application/json")],
-            &format!("?csvDelimiter={}", encode("🍰")),
-        )
+        .raw_add_documents("", Some("application/json"), &format!("?csvDelimiter={}", encode("🍰")))
         .await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
@@ -478,9 +469,8 @@ async fn replace_documents_csv_delimiter_with_bad_content_type() {
     let server = Server::new().await;
     let index = server.index("test");

-    let (response, code) = index
-        .raw_add_documents("", vec![("Content-Type", "application/json")], "?csvDelimiter=a")
-        .await;
+    let (response, code) =
+        index.raw_add_documents("", Some("application/json"), "?csvDelimiter=a").await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {
@@ -491,9 +481,8 @@ async fn replace_documents_csv_delimiter_with_bad_content_type() {
     }
     "###);

-    let (response, code) = index
-        .raw_add_documents("", vec![("Content-Type", "application/x-ndjson")], "?csvDelimiter=a")
-        .await;
+    let (response, code) =
+        index.raw_add_documents("", Some("application/x-ndjson"), "?csvDelimiter=a").await;
     snapshot!(code, @"415 Unsupported Media Type");
     snapshot!(json_string!(response), @r###"
     {
@@ -1,4 +1,4 @@
-use meili_snap::{json_string, snapshot};
+use meili_snap::snapshot;

 use crate::common::encoder::Encoder;
 use crate::common::{GetAllDocumentsOptions, Server};
@@ -209,93 +209,3 @@ async fn error_update_documents_missing_document_id() {
         "https://docs.meilisearch.com/errors#missing_document_id"
     );
 }
-
-#[actix_rt::test]
-async fn update_faceted_document() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    let (response, code) = index
-        .update_settings(json!({
-            "rankingRules": ["facet:asc"],
-        }))
-        .await;
-    assert_eq!("202", code.as_str(), "{:?}", response);
-    index.wait_task(0).await;
-
-    let documents: Vec<_> = (0..1000)
-        .map(|id| {
-            json!({
-                "doc_id": id,
-                "facet": (id/3),
-            })
-        })
-        .collect();
-
-    let (_response, code) = index.add_documents(documents.into(), None).await;
-    assert_eq!(code, 202);
-
-    index.wait_task(1).await;
-
-    let documents = json!([
-        {
-            "doc_id": 9,
-            "facet": 1.5,
-        }
-    ]);
-
-    let (response, code) = index.update_documents(documents, None).await;
-    assert_eq!(code, 202, "response: {}", response);
-
-    index.wait_task(2).await;
-
-    index
-        .search(json!({"limit": 10}), |response, code| {
-            snapshot!(code, @"200 OK");
-            snapshot!(json_string!(response["hits"]), @r###"
-            [
-              {
-                "doc_id": 0,
-                "facet": 0
-              },
-              {
-                "doc_id": 1,
-                "facet": 0
-              },
-              {
-                "doc_id": 2,
-                "facet": 0
-              },
-              {
-                "doc_id": 3,
-                "facet": 1
-              },
-              {
-                "doc_id": 4,
-                "facet": 1
-              },
-              {
-                "doc_id": 5,
-                "facet": 1
-              },
-              {
-                "doc_id": 9,
-                "facet": 1.5
-              },
-              {
-                "doc_id": 6,
-                "facet": 2
-              },
-              {
-                "doc_id": 7,
-                "facet": 2
-              },
-              {
-                "doc_id": 8,
-                "facet": 2
-              }
-            ]
-            "###);
-        })
-        .await;
-}
@@ -59,7 +59,7 @@ async fn import_dump_v1_movie_raw() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -77,7 +77,8 @@ async fn import_dump_v1_movie_raw() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -220,7 +221,7 @@ async fn import_dump_v1_movie_with_settings() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -238,7 +239,8 @@ async fn import_dump_v1_movie_with_settings() {
       },
      "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -367,7 +369,7 @@ async fn import_dump_v1_rubygems_with_settings() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -385,7 +387,8 @@ async fn import_dump_v1_rubygems_with_settings() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -500,7 +503,7 @@ async fn import_dump_v2_movie_raw() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -518,7 +521,8 @@ async fn import_dump_v2_movie_raw() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -645,7 +649,7 @@ async fn import_dump_v2_movie_with_settings() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -663,7 +667,8 @@ async fn import_dump_v2_movie_with_settings() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -789,7 +794,7 @@ async fn import_dump_v2_rubygems_with_settings() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -807,7 +812,8 @@ async fn import_dump_v2_rubygems_with_settings() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -922,7 +928,7 @@ async fn import_dump_v3_movie_raw() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -940,7 +946,8 @@ async fn import_dump_v3_movie_raw() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -1067,7 +1074,7 @@ async fn import_dump_v3_movie_with_settings() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -1085,7 +1092,8 @@ async fn import_dump_v3_movie_with_settings() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -1211,7 +1219,7 @@ async fn import_dump_v3_rubygems_with_settings() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -1229,7 +1237,8 @@ async fn import_dump_v3_rubygems_with_settings() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -1344,7 +1353,7 @@ async fn import_dump_v4_movie_raw() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -1362,7 +1371,8 @@ async fn import_dump_v4_movie_raw() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -1489,7 +1499,7 @@ async fn import_dump_v4_movie_with_settings() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
      "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -1507,7 +1517,8 @@ async fn import_dump_v4_movie_with_settings() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
@@ -1633,7 +1644,7 @@ async fn import_dump_v4_rubygems_with_settings() {
       "dictionary": [],
       "synonyms": {},
       "distinctAttribute": null,
-      "proximityPrecision": "byWord",
+      "proximityPrecision": null,
       "typoTolerance": {
         "enabled": true,
         "minWordSizeForTypos": {
@@ -1651,7 +1662,8 @@ async fn import_dump_v4_rubygems_with_settings() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###
     );
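Every dump-import snapshot above changes the same two keys of the `/settings` response: the `+` side expects `proximityPrecision` to serialize as `null` rather than the `"byWord"` default, and expects an `embedders` object appended after `pagination`. A sketch of just the affected fragment, assuming the shape shown in these snapshots:

```rust
// Sketch of the settings fragment these snapshots assert on (`+` side).
let settings_fragment = serde_json::json!({
    "proximityPrecision": null, // the `-` side serializes "byWord" here
    "pagination": { "maxTotalHits": 1000 },
    "embedders": {}             // new trailing key on the `+` side
});
```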
@@ -1845,9 +1857,9 @@ async fn import_dump_v6_containing_experimental_features() {
     meili_snap::snapshot!(code, @"200 OK");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
+      "scoreDetails": false,
       "vectorStore": false,
       "metrics": false,
-      "logsRoute": false,
       "exportPuffinReports": false
     }
     "###);
@@ -1895,7 +1907,8 @@ async fn import_dump_v6_containing_experimental_features() {
       },
       "pagination": {
         "maxTotalHits": 1000
-      }
+      },
+      "embedders": {}
     }
     "###);

@@ -18,9 +18,9 @@ async fn experimental_features() {
     meili_snap::snapshot!(code, @"200 OK");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
+      "scoreDetails": false,
       "vectorStore": false,
       "metrics": false,
-      "logsRoute": false,
       "exportPuffinReports": false
     }
     "###);
@@ -30,9 +30,9 @@ async fn experimental_features() {
     meili_snap::snapshot!(code, @"200 OK");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
+      "scoreDetails": false,
       "vectorStore": true,
       "metrics": false,
-      "logsRoute": false,
       "exportPuffinReports": false
     }
     "###);
@@ -42,9 +42,9 @@ async fn experimental_features() {
     meili_snap::snapshot!(code, @"200 OK");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
+      "scoreDetails": false,
       "vectorStore": true,
       "metrics": false,
-      "logsRoute": false,
       "exportPuffinReports": false
     }
     "###);
@@ -55,9 +55,9 @@ async fn experimental_features() {
     meili_snap::snapshot!(code, @"200 OK");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
+      "scoreDetails": false,
       "vectorStore": true,
       "metrics": false,
-      "logsRoute": false,
       "exportPuffinReports": false
     }
     "###);
@@ -68,9 +68,9 @@ async fn experimental_features() {
     meili_snap::snapshot!(code, @"200 OK");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
+      "scoreDetails": false,
       "vectorStore": true,
       "metrics": false,
-      "logsRoute": false,
       "exportPuffinReports": false
     }
     "###);
@@ -88,9 +88,9 @@ async fn experimental_feature_metrics() {
     meili_snap::snapshot!(code, @"200 OK");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
+      "scoreDetails": false,
       "vectorStore": false,
       "metrics": true,
-      "logsRoute": false,
       "exportPuffinReports": false
     }
     "###);
@@ -146,7 +146,7 @@ async fn errors() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
-      "message": "Unknown field `NotAFeature`: expected one of `vectorStore`, `metrics`, `logsRoute`, `exportPuffinReports`",
+      "message": "Unknown field `NotAFeature`: expected one of `scoreDetails`, `vectorStore`, `metrics`, `exportPuffinReports`",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad_request"
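The flag sets asserted above are toggled through the experimental-features route; these hunks simply swap `logsRoute` for `scoreDetails` in the expected set, and the `errors()` snapshot confirms that any other key is rejected. A hedged sketch of a toggle payload (the `set_features` helper name is an assumption about this test suite, not confirmed by the hunks shown here):

```rust
// Sketch: only the keys listed in the error snapshot above are accepted.
let payload = serde_json::json!({
    "scoreDetails": true, // `+` side's flag set
    "vectorStore": true,
    "metrics": false,
    "exportPuffinReports": false
});
// server.set_features(payload).await; // hypothetical helper name
```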
@@ -2,10 +2,9 @@ use actix_web::http::header::ContentType;
 use actix_web::test;
 use http::header::ACCEPT_ENCODING;
 use meili_snap::{json_string, snapshot};
-use meilisearch::Opt;

 use crate::common::encoder::Encoder;
-use crate::common::{default_settings, Server, Value};
+use crate::common::{Server, Value};
 use crate::json;

 #[actix_rt::test]
@@ -200,79 +199,3 @@ async fn error_create_with_invalid_index_uid() {
     }
     "###);
 }
-
-#[actix_rt::test]
-async fn send_task_id() {
-    let temp = tempfile::tempdir().unwrap();
-
-    let options =
-        Opt { experimental_replication_parameters: true, ..default_settings(temp.path()) };
-    let server = Server::new_with_options(options).await.unwrap();
-
-    let app = server.init_web_app().await;
-    let index = server.index("catto");
-    let (response, code) = index.create(None).await;
-    snapshot!(code, @"202 Accepted");
-    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
-    {
-      "taskUid": 0,
-      "indexUid": "catto",
-      "status": "enqueued",
-      "type": "indexCreation",
-      "enqueuedAt": "[date]"
-    }
-    "###);
-
-    let body = serde_json::to_string(&json!({
-        "uid": "doggo",
-        "primaryKey": None::<&str>,
-    }))
-    .unwrap();
-    let req = test::TestRequest::post()
-        .uri("/indexes")
-        .insert_header(("TaskId", "25"))
-        .insert_header(ContentType::json())
-        .set_payload(body)
-        .to_request();
-
-    let res = test::call_service(&app, req).await;
-    snapshot!(res.status(), @"202 Accepted");
-
-    let bytes = test::read_body(res).await;
-    let response = serde_json::from_slice::<Value>(&bytes).expect("Expecting valid json");
-    snapshot!(json_string!(response, { ".enqueuedAt" => "[date]" }), @r###"
-    {
-      "taskUid": 25,
-      "indexUid": "doggo",
-      "status": "enqueued",
-      "type": "indexCreation",
-      "enqueuedAt": "[date]"
-    }
-    "###);
-
-    let body = serde_json::to_string(&json!({
-        "uid": "girafo",
-        "primaryKey": None::<&str>,
-    }))
-    .unwrap();
-    let req = test::TestRequest::post()
-        .uri("/indexes")
-        .insert_header(("TaskId", "12"))
-        .insert_header(ContentType::json())
-        .set_payload(body)
-        .to_request();
-
-    let res = test::call_service(&app, req).await;
-    snapshot!(res.status(), @"400 Bad Request");
-
-    let bytes = test::read_body(res).await;
-    let response = serde_json::from_slice::<Value>(&bytes).expect("Expecting valid json");
-    snapshot!(json_string!(response), @r###"
-    {
-      "message": "Received bad task id: 12 should be >= to 26.",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-}
@@ -5,7 +5,6 @@ mod documents;
 mod dumps;
 mod features;
 mod index;
-mod logs;
 mod search;
 mod settings;
 mod snapshot;
@@ -1,193 +0,0 @@
-use meili_snap::*;
-
-use crate::common::Server;
-use crate::json;
-
-#[actix_rt::test]
-async fn logs_stream_bad_target() {
-    let server = Server::new().await;
-
-    // Wrong type
-    let (response, code) = server.service.post("/logs/stream", json!({ "target": true })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Invalid value type at `.target`: expected a string, but found a boolean: `true`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-
-    // Wrong type
-    let (response, code) = server.service.post("/logs/stream", json!({ "target": [] })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Invalid value type at `.target`: expected a string, but found an array: `[]`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-
-    // Our help message
-    let (response, code) = server.service.post("/logs/stream", json!({ "target": "" })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Invalid value at `.target`: Empty string is not a valid target. If you want to get no logs use `OFF`. Usage: `info`, `meilisearch=info`, or you can write multiple filters in one target: `index_scheduler=info,milli=trace`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-
-    // An error from the target parser
-    let (response, code) = server.service.post("/logs/stream", json!({ "target": "==" })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Invalid value at `.target`: invalid filter directive: too many '=' in filter directive, expected 0 or 1",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-}
-
-#[actix_rt::test]
-async fn logs_stream_bad_mode() {
-    let server = Server::new().await;
-
-    // Wrong type
-    let (response, code) = server.service.post("/logs/stream", json!({ "mode": true })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Invalid value type at `.mode`: expected a string, but found a boolean: `true`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-
-    // Wrong type
-    let (response, code) = server.service.post("/logs/stream", json!({ "mode": [] })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Invalid value type at `.mode`: expected a string, but found an array: `[]`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-
-    // Wrong value
-    let (response, code) = server.service.post("/logs/stream", json!({ "mode": "tamo" })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Unknown value `tamo` at `.mode`: expected one of `human`, `json`, `profile`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-}
-
-#[actix_rt::test]
-async fn logs_stream_bad_profile_memory() {
-    let server = Server::new().await;
-
-    // Wrong type
-    let (response, code) =
-        server.service.post("/logs/stream", json!({ "profileMemory": "tamo" })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Invalid value type at `.profileMemory`: expected a boolean, but found a string: `\"tamo\"`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-
-    // Wrong type
-    let (response, code) =
-        server.service.post("/logs/stream", json!({ "profileMemory": ["hello", "kefir"] })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Invalid value type at `.profileMemory`: expected a boolean, but found an array: `[\"hello\",\"kefir\"]`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-
-    // Used with default parameters
-    let (response, code) =
-        server.service.post("/logs/stream", json!({ "profileMemory": true })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Invalid value: `profile_memory` can only be used while profiling code and is not compatible with the Human mode.",
-      "code": "invalid_settings_typo_tolerance",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid_settings_typo_tolerance"
-    }
-    "###);
-
-    // Used with an unsupported mode
-    let (response, code) =
-        server.service.post("/logs/stream", json!({ "mode": "fmt", "profileMemory": true })).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Unknown value `fmt` at `.mode`: expected one of `human`, `json`, `profile`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-}
-
-#[actix_rt::test]
-async fn logs_stream_without_enabling_the_route() {
-    let server = Server::new().await;
-
-    let (response, code) = server.service.post("/logs/stream", json!({})).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Modifying logs through the `/logs/*` routes requires enabling the `logs route` experimental feature. See https://github.com/orgs/meilisearch/discussions/721",
-      "code": "feature_not_enabled",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
-    }
-    "###);
-
-    let (response, code) = server.service.delete("/logs/stream").await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Modifying logs through the `/logs/*` routes requires enabling the `logs route` experimental feature. See https://github.com/orgs/meilisearch/discussions/721",
-      "code": "feature_not_enabled",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
-    }
-    "###);
-
-    let (response, code) = server.service.post("/logs/stderr", json!({})).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(response, @r###"
-    {
-      "message": "Modifying logs through the `/logs/*` routes requires enabling the `logs route` experimental feature. See https://github.com/orgs/meilisearch/discussions/721",
-      "code": "feature_not_enabled",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
-    }
-    "###);
-}
@@ -1,99 +0,0 @@
-mod error;
-
-use std::rc::Rc;
-use std::str::FromStr;
-
-use actix_web::http::header::ContentType;
-use meili_snap::snapshot;
-use meilisearch::{analytics, create_app, Opt, SubscriberForSecondLayer};
-use tracing::level_filters::LevelFilter;
-use tracing_subscriber::layer::SubscriberExt;
-use tracing_subscriber::Layer;
-
-use crate::common::{default_settings, Server};
-use crate::json;
-
-#[actix_web::test]
-async fn basic_test_log_stream_route() {
-    let db_path = tempfile::tempdir().unwrap();
-    let server = Server::new_with_options(Opt {
-        experimental_enable_logs_route: true,
-        ..default_settings(db_path.path())
-    })
-    .await
-    .unwrap();
-
-    let (route_layer, route_layer_handle) =
-        tracing_subscriber::reload::Layer::new(None.with_filter(
-            tracing_subscriber::filter::Targets::new().with_target("", LevelFilter::OFF),
-        ));
-    let (_stderr_layer, stderr_layer_handle) = tracing_subscriber::reload::Layer::new(
-        (Box::new(
-            tracing_subscriber::fmt::layer()
-                .with_span_events(tracing_subscriber::fmt::format::FmtSpan::CLOSE),
-        ) as Box<dyn tracing_subscriber::Layer<SubscriberForSecondLayer> + Send + Sync>)
-            .with_filter(tracing_subscriber::filter::Targets::new()),
-    );
-
-    let subscriber = tracing_subscriber::registry().with(route_layer).with(
-        tracing_subscriber::fmt::layer()
-            .with_span_events(tracing_subscriber::fmt::format::FmtSpan::ACTIVE)
-            .with_filter(tracing_subscriber::filter::LevelFilter::from_str("OFF").unwrap()),
-    );
-
-    let app = actix_web::test::init_service(create_app(
-        server.service.index_scheduler.clone().into(),
-        server.service.auth.clone().into(),
-        server.service.options.clone(),
-        (route_layer_handle, stderr_layer_handle),
-        analytics::MockAnalytics::new(&server.service.options),
-        true,
-    ))
-    .await;
-
-    // set the subscriber as the default for the application
-    tracing::subscriber::set_global_default(subscriber).unwrap();
-
-    let app = Rc::new(app);
-
-    // First, we start listening on the `/logs/stream` route
-    let handle_app = app.clone();
-    let handle = tokio::task::spawn_local(async move {
-        let req = actix_web::test::TestRequest::post()
-            .uri("/logs/stream")
-            .insert_header(ContentType::json())
-            .set_payload(
-                serde_json::to_vec(&json!({
-                    "mode": "human",
-                    "target": "info",
-                }))
-                .unwrap(),
-            );
-        let req = req.to_request();
-        let ret = actix_web::test::call_service(&*handle_app, req).await;
-        actix_web::test::read_body(ret).await
-    });
-
-    // We're going to create an index to get at least one info log saying we processed a batch of task
-    let (ret, _code) = server.create_index(json!({ "uid": "tamo" })).await;
-    snapshot!(ret, @r###"
-    {
-      "taskUid": 0,
-      "indexUid": "tamo",
-      "status": "enqueued",
-      "type": "indexCreation",
-      "enqueuedAt": "[date]"
-    }
-    "###);
-    server.wait_task(ret.uid()).await;
-
-    let req = actix_web::test::TestRequest::delete().uri("/logs/stream");
-    let req = req.to_request();
-    let ret = actix_web::test::call_service(&*app, req).await;
-    let code = ret.status();
-    snapshot!(code, @"204 No Content");
-
-    let logs = handle.await.unwrap();
-    let logs = String::from_utf8(logs.to_vec()).unwrap();
-    assert!(logs.contains("INFO"), "{logs}");
-}
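The two files removed above are the whole test module for the experimental `/logs/stream` route. Judging from the deleted assertions, a valid request body pairs a `target` string of `tracing` filter directives with a `mode`; a sketch of a body those tests accepted:

```rust
// Sketch of a `/logs/stream` request body, per the removed tests' own inputs.
let body = serde_json::json!({
    "mode": "human",                              // one of `human`, `json`, `profile`
    "target": "index_scheduler=info,milli=trace"  // tracing filter directives
});
```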
@@ -105,24 +105,6 @@ async fn more_advanced_facet_search() {
     snapshot!(response["facetHits"].as_array().unwrap().len(), @"1");
 }
-
-#[actix_rt::test]
-async fn simple_facet_search_with_max_values() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    let documents = DOCUMENTS.clone();
-    index.update_settings_faceting(json!({ "maxValuesPerFacet": 1 })).await;
-    index.update_settings_filterable_attributes(json!(["genres"])).await;
-    index.add_documents(documents, None).await;
-    index.wait_task(2).await;
-
-    let (response, code) =
-        index.facet_search(json!({"facetName": "genres", "facetQuery": "a"})).await;
-
-    assert_eq!(code, 200, "{}", response);
-    assert_eq!(dbg!(response)["facetHits"].as_array().unwrap().len(), 1);
-}

 #[actix_rt::test]
 async fn non_filterable_facet_search_error() {
     let server = Server::new().await;
@@ -13,17 +13,17 @@ async fn index_with_documents<'a>(server: &'a Server, documents: &Value) -> Inde
     meili_snap::snapshot!(code, @"200 OK");
     meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
     {
+      "scoreDetails": false,
       "vectorStore": true,
       "metrics": false,
-      "logsRoute": false,
       "exportPuffinReports": false
     }
     "###);

     let (response, code) = index
-        .update_settings(json!({ "embedders": {"default": {
-            "source": "userProvided",
-            "dimensions": 2}}} ))
+        .update_settings(
+            json!({ "embedders": {"default": {"source": {"userProvided": {"dimensions": 2}}}} }),
+        )
         .await;
     assert_eq!(202, code, "{:?}", response);
     index.wait_task(response.uid()).await;
@@ -56,15 +56,6 @@ static SIMPLE_SEARCH_DOCUMENTS: Lazy<Value> = Lazy::new(|| {
     }])
 });

-static SINGLE_DOCUMENT: Lazy<Value> = Lazy::new(|| {
-    json!([{
-        "title": "Shazam!",
-        "desc": "a Captain Marvel ersatz",
-        "id": "1",
-        "_vectors": {"default": [1.0, 3.0]},
-    }])
-});
-
 #[actix_rt::test]
 async fn simple_search() {
     let server = Server::new().await;
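The `update_settings` hunk above rewrites the embedder configuration between two JSON shapes: a flat object with sibling `source` and `dimensions` fields on the `-` side, and a nested object where the `userProvided` variant carries its own options on the `+` side. Both shapes, isolated from the test for clarity:

```rust
// `-` side: flat `source`/`dimensions` fields.
let old_shape = serde_json::json!({
    "embedders": { "default": { "source": "userProvided", "dimensions": 2 } }
});

// `+` side: `userProvided` becomes a nested object under `source`.
let new_shape = serde_json::json!({
    "embedders": { "default": { "source": { "userProvided": { "dimensions": 2 } } } }
});
```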
@@ -87,52 +78,6 @@ async fn simple_search() {
     snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_semanticScore":0.99029034},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_semanticScore":0.97434163},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_semanticScore":0.9472136}]"###);
 }
 
-#[actix_rt::test]
-async fn highlighter() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &SIMPLE_SEARCH_DOCUMENTS).await;
-
-    let (response, code) = index
-        .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
-            "hybrid": {"semanticRatio": 0.2},
-            "attributesToHighlight": [
-                "desc"
-            ],
-            "highlightPreTag": "**BEGIN**",
-            "highlightPostTag": "**END**"
-        }))
-        .await;
-    snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_formatted":{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":["2.0","3.0"]}}},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_formatted":{"title":"Shazam!","desc":"a **BEGIN**Captain**END** **BEGIN**Marvel**END** ersatz","id":"1","_vectors":{"default":["1.0","3.0"]}}},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_formatted":{"title":"Captain Planet","desc":"He's not part of the **BEGIN**Marvel**END** Cinematic Universe","id":"2","_vectors":{"default":["1.0","2.0"]}}}]"###);
-
-    let (response, code) = index
-        .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
-            "hybrid": {"semanticRatio": 0.8},
-            "attributesToHighlight": [
-                "desc"
-            ],
-            "highlightPreTag": "**BEGIN**",
-            "highlightPostTag": "**END**"
-        }))
-        .await;
-    snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_formatted":{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":["2.0","3.0"]}},"_semanticScore":0.99029034},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_formatted":{"title":"Captain Planet","desc":"He's not part of the **BEGIN**Marvel**END** Cinematic Universe","id":"2","_vectors":{"default":["1.0","2.0"]}},"_semanticScore":0.97434163},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_formatted":{"title":"Shazam!","desc":"a **BEGIN**Captain**END** **BEGIN**Marvel**END** ersatz","id":"1","_vectors":{"default":["1.0","3.0"]}},"_semanticScore":0.9472136}]"###);
-
-    // no highlighting on full semantic
-    let (response, code) = index
-        .search_post(json!({"q": "Captain Marvel", "vector": [1.0, 1.0],
-            "hybrid": {"semanticRatio": 1.0},
-            "attributesToHighlight": [
-                "desc"
-            ],
-            "highlightPreTag": "**BEGIN**",
-            "highlightPostTag": "**END**"
-        }))
-        .await;
-    snapshot!(code, @"200 OK");
-    snapshot!(response["hits"], @r###"[{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":[2.0,3.0]},"_formatted":{"title":"Captain Marvel","desc":"a Shazam ersatz","id":"3","_vectors":{"default":["2.0","3.0"]}},"_semanticScore":0.99029034},{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":[1.0,2.0]},"_formatted":{"title":"Captain Planet","desc":"He's not part of the Marvel Cinematic Universe","id":"2","_vectors":{"default":["1.0","2.0"]}},"_semanticScore":0.97434163},{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_formatted":{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":["1.0","3.0"]}}}]"###);
-}
-
 #[actix_rt::test]
 async fn invalid_semantic_ratio() {
     let server = Server::new().await;
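The removed `highlighter` test exercises `highlightPreTag`/`highlightPostTag` at semantic ratios 0.2, 0.8, and 1.0, and its final block documents that a pure semantic search (ratio 1.0) yields no highlights, since there is no keyword match to wrap. A toy illustration of tag wrapping follows; the real engine highlights tokenized matches with proper positions, not raw substrings:

// Toy highlighter: wrap each occurrence of every query word in pre/post tags.
fn highlight(text: &str, query_words: &[&str], pre: &str, post: &str) -> String {
    let mut out = text.to_string();
    for word in query_words {
        out = out.replace(word, &format!("{pre}{word}{post}"));
    }
    out
}

fn main() {
    let formatted = highlight(
        "a Captain Marvel ersatz",
        &["Captain", "Marvel"],
        "**BEGIN**",
        "**END**",
    );
    // Matches the `_formatted.desc` value in the snapshots above.
    assert_eq!(formatted, "a **BEGIN**Captain**END** **BEGIN**Marvel**END** ersatz");
}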
@@ -204,18 +149,3 @@ async fn invalid_semantic_ratio() {
     }
     "###);
 }
-
-#[actix_rt::test]
-async fn single_document() {
-    let server = Server::new().await;
-    let index = index_with_documents(&server, &SINGLE_DOCUMENT).await;
-
-    let (response, code) = index
-        .search_post(
-            json!({"vector": [1.0, 3.0], "hybrid": {"semanticRatio": 1.0}, "showRankingScore": true}),
-        )
-        .await;
-
-    snapshot!(code, @"200 OK");
-    snapshot!(response["hits"][0], @r###"{"title":"Shazam!","desc":"a Captain Marvel ersatz","id":"1","_vectors":{"default":[1.0,3.0]},"_rankingScore":1.0,"_semanticScore":1.0}"###);
-}
@@ -766,14 +766,38 @@ async fn faceting_max_values_per_facet() {
 }
 
 #[actix_rt::test]
-async fn test_score_details() {
+async fn experimental_feature_score_details() {
     let server = Server::new().await;
     let index = server.index("test");
 
     let documents = DOCUMENTS.clone();
 
-    let res = index.add_documents(json!(documents), None).await;
-    index.wait_task(res.0.uid()).await;
+    index.add_documents(json!(documents), None).await;
+    index.wait_task(0).await;
 
+    index
+        .search(
+            json!({
+                "q": "train dragon",
+                "showRankingScoreDetails": true,
+            }),
+            |response, code| {
+                meili_snap::snapshot!(code, @"400 Bad Request");
+                meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
+                {
+                  "message": "Computing score details requires enabling the `score details` experimental feature. See https://github.com/meilisearch/product/discussions/674",
+                  "code": "feature_not_enabled",
+                  "type": "invalid_request",
+                  "link": "https://docs.meilisearch.com/errors#feature_not_enabled"
+                }
+                "###);
+            },
+        )
+        .await;
+
+    let (response, code) = server.set_features(json!({"scoreDetails": true})).await;
+    meili_snap::snapshot!(code, @"200 OK");
+    meili_snap::snapshot!(response["scoreDetails"], @"true");
+
     index
         .search(
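The renamed test pins down the gating contract: a search using `showRankingScoreDetails` must fail with `feature_not_enabled` until the `scoreDetails` experimental feature is toggled through `set_features`, after which the same search is expected to succeed. A minimal sketch of such a runtime gate follows; every name here is hypothetical, not the scheduler's real feature-flag API:

// Hypothetical runtime feature flags guarding an experimental search option.
struct RuntimeFeatures {
    score_details: bool,
}

impl RuntimeFeatures {
    // Mirrors the 400 `feature_not_enabled` asserted in the test above.
    fn check_score_details(&self) -> Result<(), String> {
        if self.score_details {
            Ok(())
        } else {
            Err("Computing score details requires enabling the `score details` \
                 experimental feature."
                .to_string())
        }
    }
}

fn main() {
    let mut features = RuntimeFeatures { score_details: false };
    assert!(features.check_score_details().is_err());

    // What a PATCH /experimental-features call would flip server-side.
    features.score_details = true;
    assert!(features.check_score_details().is_ok());
}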
@@ -866,21 +890,13 @@ async fn experimental_feature_vector_store() {
     let (response, code) = index
         .update_settings(json!({"embedders": {
             "manual": {
-                "source": "userProvided",
-                "dimensions": 3,
+                "source": {
+                    "userProvided": {"dimensions": 3}
+                }
             }
         }}))
         .await;
 
-    meili_snap::snapshot!(response, @r###"
-    {
-      "taskUid": 1,
-      "indexUid": "test",
-      "status": "enqueued",
-      "type": "settingsUpdate",
-      "enqueuedAt": "[date]"
-    }
-    "###);
     meili_snap::snapshot!(code, @"202 Accepted");
     let response = index.wait_task(response.uid()).await;
 
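Settings updates are asynchronous: the route answers 202 Accepted with a task, and the test must wait for that task before asserting on index state. Note how the surviving lines derive the uid from the response instead of snapshotting a hard-coded `"taskUid": 1`, which keeps the test independent of task ordering. A sketch of the polling idea behind a `wait_task`-style helper, assuming `reqwest`, `tokio`, and `serde_json` as dependencies — not the suite's actual helper:

use std::time::Duration;

// Poll GET /tasks/{uid} until the task reaches a terminal status.
async fn wait_task(client: &reqwest::Client, base: &str, uid: u64) -> serde_json::Value {
    loop {
        let task: serde_json::Value = client
            .get(format!("{base}/tasks/{uid}"))
            .send()
            .await
            .unwrap()
            .json()
            .await
            .unwrap();
        match task["status"].as_str() {
            Some("succeeded") | Some("failed") | Some("canceled") => return task,
            _ => tokio::time::sleep(Duration::from_millis(50)).await,
        }
    }
}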
@@ -54,7 +54,7 @@ async fn get_settings() {
     let (response, code) = index.settings().await;
     assert_eq!(code, 200);
     let settings = response.as_object().unwrap();
-    assert_eq!(settings.keys().len(), 15);
+    assert_eq!(settings.keys().len(), 16);
     assert_eq!(settings["displayedAttributes"], json!(["*"]));
     assert_eq!(settings["searchableAttributes"], json!(["*"]));
     assert_eq!(settings["filterableAttributes"], json!([]));
@@ -83,7 +83,7 @@ async fn get_settings() {
         "maxTotalHits": 1000,
     })
     );
-    assert_eq!(settings["proximityPrecision"], json!("byWord"));
+    assert_eq!(settings["embedders"], json!({}));
 }
 
 #[actix_rt::test]
@@ -1,5 +1,4 @@
 mod errors;
-mod webhook;
 
 use meili_snap::insta::assert_json_snapshot;
 use time::format_description::well_known::Rfc3339;
@@ -1,129 +0,0 @@
-//! To test the webhook, we need to spawn a new server with a URL listening for
-//! post requests. The webhook handle starts a server and forwards all the
-//! received requests into a channel for you to handle.
-
-use std::sync::Arc;
-
-use actix_http::body::MessageBody;
-use actix_web::dev::{ServiceFactory, ServiceResponse};
-use actix_web::web::{Bytes, Data};
-use actix_web::{post, App, HttpRequest, HttpResponse, HttpServer};
-use meili_snap::{json_string, snapshot};
-use meilisearch::Opt;
-use tokio::sync::mpsc;
-use url::Url;
-
-use crate::common::{default_settings, Server};
-use crate::json;
-
-#[post("/")]
-async fn forward_body(
-    req: HttpRequest,
-    sender: Data<mpsc::UnboundedSender<Vec<u8>>>,
-    body: Bytes,
-) -> HttpResponse {
-    let headers = req.headers();
-    assert_eq!(headers.get("content-type").unwrap(), "application/x-ndjson");
-    assert_eq!(headers.get("transfer-encoding").unwrap(), "chunked");
-    assert_eq!(headers.get("accept-encoding").unwrap(), "gzip");
-    assert_eq!(headers.get("content-encoding").unwrap(), "gzip");
-
-    let body = body.to_vec();
-    sender.send(body).unwrap();
-    HttpResponse::Ok().into()
-}
-
-fn create_app(
-    sender: Arc<mpsc::UnboundedSender<Vec<u8>>>,
-) -> actix_web::App<
-    impl ServiceFactory<
-        actix_web::dev::ServiceRequest,
-        Config = (),
-        Response = ServiceResponse<impl MessageBody>,
-        Error = actix_web::Error,
-        InitError = (),
-    >,
-> {
-    App::new().service(forward_body).app_data(Data::from(sender))
-}
-
-struct WebhookHandle {
-    pub server_handle: tokio::task::JoinHandle<Result<(), std::io::Error>>,
-    pub url: String,
-    pub receiver: mpsc::UnboundedReceiver<Vec<u8>>,
-}
-
-async fn create_webhook_server() -> WebhookHandle {
-    let (sender, receiver) = mpsc::unbounded_channel();
-    let sender = Arc::new(sender);
-
-    // By listening on the port 0, the system will give us any available port.
-    let server =
-        HttpServer::new(move || create_app(sender.clone())).bind(("127.0.0.1", 0)).unwrap();
-    let (ip, scheme) = server.addrs_with_scheme()[0];
-    let url = format!("{scheme}://{ip}/");
-
-    let server_handle = tokio::spawn(server.run());
-    WebhookHandle { server_handle, url, receiver }
-}
-
-#[actix_web::test]
-async fn test_basic_webhook() {
-    let WebhookHandle { server_handle, url, mut receiver } = create_webhook_server().await;
-
-    let db_path = tempfile::tempdir().unwrap();
-    let server = Server::new_with_options(Opt {
-        task_webhook_url: Some(Url::parse(&url).unwrap()),
-        ..default_settings(db_path.path())
-    })
-    .await
-    .unwrap();
-
-    let index = server.index("tamo");
-    // May be flaky: we're relying on the fact that while the first document addition is processed, the other
-    // operations will be received and will be batched together. If it doesn't happen it's not a problem
-    // the rest of the test won't assume anything about the number of tasks per batch.
-    for i in 0..5 {
-        let (_, _status) = index.add_documents(json!({ "id": i, "doggo": "bone" }), None).await;
-    }
-
-    let mut nb_tasks = 0;
-    while let Some(payload) = receiver.recv().await {
-        let payload = String::from_utf8(payload).unwrap();
-        let jsonl = payload.split('\n');
-        for json in jsonl {
-            if json.is_empty() {
-                break; // we reached EOF
-            }
-            nb_tasks += 1;
-            let json: serde_json::Value = serde_json::from_str(json).unwrap();
-            snapshot!(
-                json_string!(json, { ".uid" => "[uid]", ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" }),
-                @r###"
-            {
-              "uid": "[uid]",
-              "indexUid": "tamo",
-              "status": "succeeded",
-              "type": "documentAdditionOrUpdate",
-              "canceledBy": null,
-              "details": {
-                "receivedDocuments": 1,
-                "indexedDocuments": 1
-              },
-              "error": null,
-              "duration": "[duration]",
-              "enqueuedAt": "[date]",
-              "startedAt": "[date]",
-              "finishedAt": "[date]"
-            }
-            "###);
-        }
-        if nb_tasks == 5 {
-            break;
-        }
-    }
-
-    assert!(nb_tasks == 5, "We should have received the 5 tasks but only received {nb_tasks}");
-
-    server_handle.abort();
-}
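The deleted webhook test hinges on two techniques worth noting: binding the receiving server to port 0 so the OS assigns a free ephemeral port (which makes parallel test runs safe), and forwarding each received body through an mpsc channel so assertions can run from async context. The port-0 trick in plain std, as a standalone sketch:

use std::net::TcpListener;

fn main() -> std::io::Result<()> {
    // Port 0 asks the OS for any free port; local_addr() reveals the pick.
    let listener = TcpListener::bind("127.0.0.1:0")?;
    let addr = listener.local_addr()?;
    println!("a webhook target could be http://{addr}/");
    Ok(())
}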
@@ -9,11 +9,11 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.79"
-clap = { version = "4.4.17", features = ["derive"] }
+anyhow = "1.0.75"
+clap = { version = "4.2.1", features = ["derive"] }
 dump = { path = "../dump" }
 file-store = { path = "../file-store" }
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
-time = { version = "0.3.31", features = ["formatting"] }
-uuid = { version = "1.6.1", features = ["v4"], default-features = false }
+time = { version = "0.3.30", features = ["formatting"] }
+uuid = { version = "1.5.0", features = ["v4"], default-features = false }
@@ -1,4 +1,5 @@
 use std::borrow::Cow;
+use std::convert::TryInto;
 
 use meilisearch_types::heed::{BoxedError, BytesDecode, BytesEncode};
 use uuid::Uuid;
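The added `use std::convert::TryInto;` matters on editions before 2021, where `TryInto` is not in the prelude; a codec like this one needs it to turn a byte slice into a fixed-size array. A self-contained sketch of a UUID byte codec in that spirit — written as plain functions rather than heed's `BytesEncode`/`BytesDecode` traits, so the names and shape are illustrative, not the file's exact contents:

use std::borrow::Cow;
use std::convert::TryInto;

use uuid::Uuid;

struct UuidCodec;

impl UuidCodec {
    // Store a UUID as its raw 16 bytes; borrowing avoids a copy.
    fn bytes_encode(item: &Uuid) -> Cow<'_, [u8]> {
        Cow::Borrowed(item.as_bytes())
    }

    // `try_into` converts &[u8] into [u8; 16], failing on any other length.
    fn bytes_decode(bytes: &[u8]) -> Option<Uuid> {
        let array: [u8; 16] = bytes.try_into().ok()?;
        Some(Uuid::from_bytes(array))
    }
}

fn main() {
    let id = Uuid::new_v4();
    let encoded = UuidCodec::bytes_encode(&id);
    let decoded = UuidCodec::bytes_decode(&encoded).unwrap();
    assert_eq!(id, decoded);
}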
Some files were not shown because too many files have changed in this diff.