Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-07-20 13:30:38 +00:00)

Compare commits: embedding-... to chat-route (36 commits)
Commits (SHA1):

6c1739218c
72d4998dce
fde11573da
41220f786b
4d59fdb65d
3e51c0a4c1
91c6ab8392
beff6adeb1
18eab165a7
5c6b63df65
7266aed770
bae6c98aa3
42c95cf3c4
4f919db344
295840d07a
c0c3bddda8
10b5fcd4ba
8113d4a52e
5964289284
6b81854d48
9e5b466426
b43ffd8fac
43da2bcb8c
5e3b126d73
6c034754ca
6329cf7ed6
e0c8c11a94
6e8b371111
da7d651f4b
24050f06e4
af482d8ee9
7d62307739
3a71df7b5a
ac39a436d9
e5c963a170
9baf2ce1a6
.github/workflows/db-change-missing.yml (vendored): 10 changes

```diff
@@ -4,22 +4,22 @@ on:
   pull_request:
     types: [opened, synchronize, reopened, labeled, unlabeled]
 
+env:
+  GH_TOKEN: ${{ secrets.MEILI_BOT_GH_PAT }}
+
 jobs:
   check-labels:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
       - name: Check db change labels
         id: check_labels
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
           URL=/repos/meilisearch/meilisearch/pulls/${{ github.event.pull_request.number }}/labels
           echo ${{ github.event.pull_request.number }}
           echo $URL
-          LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
-          echo "Labels: $LABELS"
+          LABELS=$(gh api -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" /repos/meilisearch/meilisearch/issues/${{ github.event.pull_request.number }}/labels -q .[].name)
           if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then
             echo "::error::Pull request must contain either the 'db change' or 'no db change' label."
             exit 1
```
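For context, the step above is plain shell around the `gh` CLI, so it can be exercised locally. A minimal sketch, assuming an authenticated `gh`; the PR number `123` is a placeholder standing in for `github.event.pull_request.number`:

```sh
# Sketch of the label check above; PR number 123 is a placeholder.
PR=123
LABELS=$(gh api -H "Accept: application/vnd.github+json" \
  -H "X-GitHub-Api-Version: 2022-11-28" \
  "/repos/meilisearch/meilisearch/issues/$PR/labels" -q '.[].name')
# Quoted right-hand sides of =~ make bash do literal substring matches,
# exactly as in the workflow's run script.
if [[ ! "$LABELS" =~ "db change" && ! "$LABELS" =~ "no db change" ]]; then
  echo "PR must carry either the 'db change' or 'no db change' label" >&2
  exit 1
fi
```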
.github/workflows/publish-apt-brew-pkg.yml (vendored): 2 changes

```diff
@@ -32,7 +32,7 @@ jobs:
       - name: Build deb package
         run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/debian/meilisearch.deb
```
.github/workflows/publish-binaries.yml (vendored): 8 changes

```diff
@@ -51,7 +51,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/meilisearch
@@ -81,7 +81,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -113,7 +113,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -178,7 +178,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.11.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
```
.github/workflows/publish-docker-images.yml (vendored): 30 changes

```diff
@@ -106,20 +106,18 @@ jobs:
           client-payload: '{ "meilisearch_version": "${{ github.ref_name }}", "stable": "${{ steps.check-tag-format.outputs.stable }}" }'
 
       # Send notification to Swarmia to notify of a deployment: https://app.swarmia.com
-      # - name: 'Setup jq'
-      #   uses: dcarbone/install-jq-action
-      # - name: Send deployment to Swarmia
-      #   if: github.event_name == 'push' && success()
-      #   run: |
-      #     JSON_STRING=$( jq --null-input --compact-output \
-      #       --arg version "${{ github.ref_name }}" \
-      #       --arg appName "meilisearch" \
-      #       --arg environment "production" \
-      #       --arg commitSha "${{ github.sha }}" \
-      #       --arg repositoryFullName "${{ github.repository }}" \
-      #       '{"version": $version, "appName": $appName, "environment": $environment, "commitSha": $commitSha, "repositoryFullName": $repositoryFullName}' )
+      - name: Send deployment to Swarmia
+        if: github.event_name == 'push' && success()
+        run: |
+          JSON_STRING=$( jq --null-input --compact-output \
+            --arg version "${{ github.ref_name }}" \
+            --arg appName "meilisearch" \
+            --arg environment "production" \
+            --arg commitSha "${{ github.sha }}" \
+            --arg repositoryFullName "${{ github.repository }}" \
+            '{"version": $version, "appName": $appName, "environment": $environment, "commitSha": $commitSha, "repositoryFullName": $repositoryFullName}' )
 
-      #     curl -H "Authorization: ${{ secrets.SWARMIA_DEPLOYMENTS_AUTHORIZATION }}" \
-      #       -H "Content-Type: application/json" \
-      #       -d "$JSON_STRING" \
-      #       https://hook.swarmia.com/deployments
+          curl -H "Authorization: ${{ secrets.SWARMIA_DEPLOYMENTS_AUTHORIZATION }}" \
+            -H "Content-Type: application/json" \
+            -d "$JSON_STRING" \
+            https://hook.swarmia.com/deployments
```
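The step on the `+` side builds its JSON payload with `jq --null-input` before POSTing it. A standalone sketch, with placeholder values substituted for the `${{ ... }}` expressions that Actions resolves at runtime:

```sh
# Placeholder values stand in for ${{ github.ref_name }}, ${{ github.sha }}, etc.
JSON_STRING=$(jq --null-input --compact-output \
  --arg version "v1.15.0" \
  --arg appName "meilisearch" \
  --arg environment "production" \
  --arg commitSha "0000000000000000000000000000000000000000" \
  --arg repositoryFullName "meilisearch/meilisearch" \
  '{"version": $version, "appName": $appName, "environment": $environment, "commitSha": $commitSha, "repositoryFullName": $repositoryFullName}')
echo "$JSON_STRING"
# The workflow then POSTs this payload to https://hook.swarmia.com/deployments,
# sending the SWARMIA_DEPLOYMENTS_AUTHORIZATION secret in the Authorization header.
```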
.github/workflows/test-suite.yml (vendored): 10 changes

```diff
@@ -29,7 +29,7 @@ jobs:
       - name: Setup test with Rust stable
         uses: dtolnay/rust-toolchain@1.85
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.8.0
+        uses: Swatinem/rust-cache@v2.7.8
      - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -51,7 +51,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.8.0
+        uses: Swatinem/rust-cache@v2.7.8
       - uses: dtolnay/rust-toolchain@1.85
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
@@ -155,7 +155,7 @@ jobs:
           apt-get install build-essential -y
       - uses: dtolnay/rust-toolchain@1.85
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.8.0
+        uses: Swatinem/rust-cache@v2.7.8
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
         with:
@@ -172,7 +172,7 @@ jobs:
           profile: minimal
           components: clippy
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.8.0
+        uses: Swatinem/rust-cache@v2.7.8
       - name: Run cargo clippy
         uses: actions-rs/cargo@v1
         with:
@@ -191,7 +191,7 @@ jobs:
           override: true
           components: rustfmt
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.8.0
+        uses: Swatinem/rust-cache@v2.7.8
       - name: Run cargo fmt
         # Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
         # Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
```
.gitignore (vendored): 11 changes

```diff
@@ -11,21 +11,12 @@
 /bench
 /_xtask_benchmark.ms
 /benchmarks
-.DS_Store
 
 # Snapshots
 ## ... large
 *.full.snap
 ## ... unreviewed
 *.snap.new
-## ... pending
-*.pending-snap
-
-# Tmp files
-.tmp*
-
-# Database snapshot
-crates/meilisearch/db.snapshot
 
 # Fuzzcheck data for the facet indexing fuzz test
 crates/milli/fuzz/update::facet::incremental::fuzz::fuzz/
```
````diff
@@ -57,17 +57,9 @@ This command will be triggered to each PR as a requirement for merging it.
 You can set the LINDERA_CACHE environment variable to speed up your successive builds by up to 2 minutes.
 It'll store some built artifacts in the directory of your choice.
 
-We recommend using the $HOME/.cache/meili/lindera directory:
+We recommend using the standard $HOME/.cache/lindera directory:
 ```sh
-export LINDERA_CACHE=$HOME/.cache/meili/lindera
-```
-
-You can set the MILLI_BENCH_DATASETS_PATH environment variable to further speed up your builds.
-It'll store some big files used for the benchmarks in the directory of your choice.
-
-We recommend using the $HOME/.cache/meili/benches directory:
-```sh
-export MILLI_BENCH_DATASETS_PATH=$HOME/.cache/meili/benches
+export LINDERA_CACHE=$HOME/.cache/lindera
 ```
 
 Furthermore, you can improve incremental compilation by setting the MEILI_NO_VERGEN environment variable.
````
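Both sides of the hunk mention MEILI_NO_VERGEN without an example. A sketch, on the assumption (not confirmed by this page) that merely having the variable set is what disables the vergen-generated build metadata:

```sh
# Assumption: presence of MEILI_NO_VERGEN (any value) skips vergen's git
# metadata generation, improving incremental compilation.
export MEILI_NO_VERGEN=1
cargo build --release
```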
Cargo.lock (generated): 3120 changes. File diff suppressed because it is too large.
```diff
@@ -22,7 +22,7 @@ members = [
 ]
 
 [workspace.package]
-version = "1.16.0"
+version = "1.15.0"
 authors = [
     "Quentin de Quelen <quentin@dequelen.me>",
     "Clément Renault <clement@meilisearch.com>",
```
```diff
@@ -11,27 +11,27 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.98"
-bumpalo = "3.18.1"
+anyhow = "1.0.95"
+bumpalo = "3.16.0"
 csv = "1.3.1"
 memmap2 = "0.9.5"
 milli = { path = "../milli" }
-mimalloc = { version = "0.1.47", default-features = false }
-serde_json = { version = "1.0.140", features = ["preserve_order"] }
-tempfile = "3.20.0"
+mimalloc = { version = "0.1.43", default-features = false }
+serde_json = { version = "1.0.135", features = ["preserve_order"] }
+tempfile = "3.15.0"
 
 [dev-dependencies]
-criterion = { version = "0.6.0", features = ["html_reports"] }
+criterion = { version = "0.5.1", features = ["html_reports"] }
 rand = "0.8.5"
 rand_chacha = "0.3.1"
-roaring = "0.10.12"
+roaring = "0.10.10"
 
 [build-dependencies]
-anyhow = "1.0.98"
-bytes = "1.10.1"
-convert_case = "0.8.0"
-flate2 = "1.1.2"
-reqwest = { version = "0.12.20", features = ["blocking", "rustls-tls"], default-features = false }
+anyhow = "1.0.95"
+bytes = "1.9.0"
+convert_case = "0.6.0"
+flate2 = "1.0.35"
+reqwest = { version = "0.12.15", features = ["blocking", "rustls-tls"], default-features = false }
 
 [features]
 default = ["milli/all-tokenizations"]
@@ -51,8 +51,3 @@ harness = false
 [[bench]]
 name = "indexing"
 harness = false
-
-[[bench]]
-name = "sort"
-harness = false
-
```
```diff
@@ -11,7 +11,7 @@ use milli::heed::{EnvOpenOptions, RwTxn};
 use milli::progress::Progress;
 use milli::update::new::indexer;
 use milli::update::{IndexerConfig, Settings};
-use milli::vector::RuntimeEmbedders;
+use milli::vector::EmbeddingConfigs;
 use milli::{FilterableAttributesRule, Index};
 use rand::seq::SliceRandom;
 use rand_chacha::rand_core::SeedableRng;
@@ -65,7 +65,7 @@ fn setup_settings<'t>(
     let sortable_fields = sortable_fields.iter().map(|s| s.to_string()).collect();
     builder.set_sortable_fields(sortable_fields);
 
-    builder.execute(&|| false, &Progress::default(), Default::default()).unwrap();
+    builder.execute(|_| (), || false).unwrap();
 }
 
 fn setup_index_with_settings(
@@ -166,10 +166,9 @@ fn indexing_songs_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -233,10 +232,9 @@ fn reindexing_songs_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -278,10 +276,9 @@ fn reindexing_songs_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -347,10 +344,9 @@ fn deleting_songs_in_batches_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -424,10 +420,9 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -469,10 +464,9 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -510,10 +504,9 @@ fn indexing_songs_in_three_batches_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -578,10 +571,9 @@ fn indexing_songs_without_faceted_numbers(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -645,10 +637,9 @@ fn indexing_songs_without_faceted_fields(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -712,10 +703,9 @@ fn indexing_wiki(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -778,10 +768,9 @@ fn reindexing_wiki(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -823,10 +812,9 @@ fn reindexing_wiki(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -891,10 +879,9 @@ fn deleting_wiki_in_batches_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -968,10 +955,9 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1014,10 +1000,9 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1056,10 +1041,9 @@ fn indexing_wiki_in_three_batches(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1123,10 +1107,9 @@ fn indexing_movies_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1189,10 +1172,9 @@ fn reindexing_movies_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1234,10 +1216,9 @@ fn reindexing_movies_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1302,10 +1283,9 @@ fn deleting_movies_in_batches_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1351,10 +1331,9 @@ fn delete_documents_from_ids(index: Index, document_ids_to_delete: Vec<RoaringBi
                     new_fields_ids_map,
                     Some(primary_key),
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1416,10 +1395,9 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1461,10 +1439,9 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1502,10 +1479,9 @@ fn indexing_movies_in_three_batches(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1592,10 +1568,9 @@ fn indexing_nested_movies_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1683,10 +1658,9 @@ fn deleting_nested_movies_in_batches_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1766,10 +1740,9 @@ fn indexing_nested_movies_without_faceted_fields(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1833,10 +1806,9 @@ fn indexing_geo(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1899,10 +1871,9 @@ fn reindexing_geo(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -1944,10 +1915,9 @@ fn reindexing_geo(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -2012,10 +1982,9 @@ fn deleting_geo_in_batches_default(c: &mut Criterion) {
                     new_fields_ids_map,
                     primary_key,
                     &document_changes,
-                    RuntimeEmbedders::default(),
+                    EmbeddingConfigs::default(),
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
```
```diff
@@ -2,8 +2,7 @@ mod datasets_paths;
 mod utils;
 
 use criterion::{criterion_group, criterion_main};
-use milli::update::Settings;
-use milli::FilterableAttributesRule;
+use milli::{update::Settings, FilterableAttributesRule};
 use utils::Conf;
 
 #[cfg(not(windows))]
```

The same change appears in a second benchmark file:

```diff
@@ -2,8 +2,7 @@ mod datasets_paths;
 mod utils;
 
 use criterion::{criterion_group, criterion_main};
-use milli::update::Settings;
-use milli::FilterableAttributesRule;
+use milli::{update::Settings, FilterableAttributesRule};
 use utils::Conf;
 
 #[cfg(not(windows))]
```
```diff
@@ -1,114 +0,0 @@
-//! This benchmark module is used to compare the performance of sorting documents in /search VS /documents
-//!
-//! The tests/benchmarks were designed in the context of a query returning only 20 documents.
-
-mod datasets_paths;
-mod utils;
-
-use criterion::{criterion_group, criterion_main};
-use milli::update::Settings;
-use utils::Conf;
-
-#[cfg(not(windows))]
-#[global_allocator]
-static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
-
-fn base_conf(builder: &mut Settings) {
-    let displayed_fields =
-        ["geonameid", "name", "asciiname", "alternatenames", "_geo", "population"]
-            .iter()
-            .map(|s| s.to_string())
-            .collect();
-    builder.set_displayed_fields(displayed_fields);
-
-    let sortable_fields =
-        ["_geo", "name", "population", "elevation", "timezone", "modification-date"]
-            .iter()
-            .map(|s| s.to_string())
-            .collect();
-    builder.set_sortable_fields(sortable_fields);
-}
-
-#[rustfmt::skip]
-const BASE_CONF: Conf = Conf {
-    dataset: datasets_paths::SMOL_ALL_COUNTRIES,
-    dataset_format: "jsonl",
-    configure: base_conf,
-    primary_key: Some("geonameid"),
-    queries: &[""],
-    offsets: &[
-        Some((0, 20)), // The most common query in the real world
-        Some((0, 500)), // A query that ranges over many documents
-        Some((980, 20)), // The worst query that could happen in the real world
-        Some((800_000, 20)) // The worst query
-    ],
-    get_documents: true,
-    ..Conf::BASE
-};
-
-fn bench_sort(c: &mut criterion::Criterion) {
-    #[rustfmt::skip]
-    let confs = &[
-        utils::Conf {
-            group_name: "without sort",
-            sort: None,
-            ..BASE_CONF
-        },
-
-        utils::Conf {
-            group_name: "sort on many different values",
-            sort: Some(vec!["name:asc"]),
-            ..BASE_CONF
-        },
-
-        utils::Conf {
-            group_name: "sort on many similar values",
-            sort: Some(vec!["timezone:desc"]),
-            ..BASE_CONF
-        },
-
-        utils::Conf {
-            group_name: "sort on many similar then different values",
-            sort: Some(vec!["timezone:desc", "name:asc"]),
-            ..BASE_CONF
-        },
-
-        utils::Conf {
-            group_name: "sort on many different then similar values",
-            sort: Some(vec!["timezone:desc", "name:asc"]),
-            ..BASE_CONF
-        },
-
-        utils::Conf {
-            group_name: "geo sort",
-            sample_size: Some(10),
-            sort: Some(vec!["_geoPoint(45.4777599, 9.1967508):asc"]),
-            ..BASE_CONF
-        },
-
-        utils::Conf {
-            group_name: "sort on many similar values then geo sort",
-            sample_size: Some(50),
-            sort: Some(vec!["timezone:desc", "_geoPoint(45.4777599, 9.1967508):asc"]),
-            ..BASE_CONF
-        },
-
-        utils::Conf {
-            group_name: "sort on many different values then geo sort",
-            sample_size: Some(50),
-            sort: Some(vec!["name:desc", "_geoPoint(45.4777599, 9.1967508):asc"]),
-            ..BASE_CONF
-        },
-
-        utils::Conf {
-            group_name: "sort on many fields",
-            sort: Some(vec!["population:asc", "name:asc", "elevation:asc", "timezone:asc"]),
-            ..BASE_CONF
-        },
-    ];
-
-    utils::run_benches(c, confs);
-}
-
-criterion_group!(benches, bench_sort);
-criterion_main!(benches);
```
```diff
@@ -9,12 +9,11 @@ use anyhow::Context;
 use bumpalo::Bump;
 use criterion::BenchmarkId;
 use memmap2::Mmap;
-use milli::documents::sort::recursive_sort;
 use milli::heed::EnvOpenOptions;
 use milli::progress::Progress;
 use milli::update::new::indexer;
 use milli::update::{IndexerConfig, Settings};
-use milli::vector::RuntimeEmbedders;
+use milli::vector::EmbeddingConfigs;
 use milli::{Criterion, Filter, Index, Object, TermsMatchingStrategy};
 use serde_json::Value;
 
@@ -36,12 +35,6 @@ pub struct Conf<'a> {
 pub configure: fn(&mut Settings),
 pub filter: Option<&'a str>,
 pub sort: Option<Vec<&'a str>>,
-    /// set to skip documents (offset, limit)
-    pub offsets: &'a [Option<(usize, usize)>],
-    /// enable if you want to bench getting documents without querying
-    pub get_documents: bool,
-    /// configure the benchmark sample size
-    pub sample_size: Option<usize>,
     /// enable or disable the optional words on the query
     pub optional_words: bool,
     /// primary key, if there is None we'll auto-generate docids for every documents
@@ -59,9 +52,6 @@ impl Conf<'_> {
         configure: |_| (),
         filter: None,
         sort: None,
-        offsets: &[None],
-        get_documents: false,
-        sample_size: None,
         optional_words: true,
         primary_key: None,
     };
@@ -100,7 +90,7 @@ pub fn base_setup(conf: &Conf) -> Index {
 
     (conf.configure)(&mut builder);
 
-    builder.execute(&|| false, &Progress::default(), Default::default()).unwrap();
+    builder.execute(|_| (), || false).unwrap();
     wtxn.commit().unwrap();
 
     let config = IndexerConfig::default();
@@ -135,10 +125,9 @@
             new_fields_ids_map,
             primary_key,
             &document_changes,
-            RuntimeEmbedders::default(),
+            EmbeddingConfigs::default(),
             &|| false,
             &Progress::default(),
-            &Default::default(),
         )
         .unwrap();
 
@@ -155,79 +144,25 @@ pub fn run_benches(c: &mut criterion::Criterion, confs: &[Conf]) {
         let file_name = Path::new(conf.dataset).file_name().and_then(|f| f.to_str()).unwrap();
         let name = format!("{}: {}", file_name, conf.group_name);
         let mut group = c.benchmark_group(&name);
-        if let Some(sample_size) = conf.sample_size {
-            group.sample_size(sample_size);
-        }
 
         for &query in conf.queries {
-            for offset in conf.offsets {
-                let parameter = match offset {
-                    None => query.to_string(),
-                    Some((offset, limit)) => format!("{query}[{offset}:{limit}]"),
-                };
-                group.bench_with_input(
-                    BenchmarkId::from_parameter(parameter),
-                    &query,
-                    |b, &query| {
-                        b.iter(|| {
-                            let rtxn = index.read_txn().unwrap();
-                            let mut search = index.search(&rtxn);
-                            search
-                                .query(query)
-                                .terms_matching_strategy(TermsMatchingStrategy::default());
-                            if let Some(filter) = conf.filter {
-                                let filter = Filter::from_str(filter).unwrap().unwrap();
-                                search.filter(filter);
-                            }
-                            if let Some(sort) = &conf.sort {
-                                let sort = sort.iter().map(|sort| sort.parse().unwrap()).collect();
-                                search.sort_criteria(sort);
-                            }
-                            if let Some((offset, limit)) = offset {
-                                search.offset(*offset).limit(*limit);
-                            }
-
-                            let _ids = search.execute().unwrap();
-                        });
-                    },
-                );
-            }
+            group.bench_with_input(BenchmarkId::from_parameter(query), &query, |b, &query| {
+                b.iter(|| {
+                    let rtxn = index.read_txn().unwrap();
+                    let mut search = index.search(&rtxn);
+                    search.query(query).terms_matching_strategy(TermsMatchingStrategy::default());
+                    if let Some(filter) = conf.filter {
+                        let filter = Filter::from_str(filter).unwrap().unwrap();
+                        search.filter(filter);
+                    }
+                    if let Some(sort) = &conf.sort {
+                        let sort = sort.iter().map(|sort| sort.parse().unwrap()).collect();
+                        search.sort_criteria(sort);
+                    }
+                    let _ids = search.execute().unwrap();
+                });
+            });
         }
 
-        if conf.get_documents {
-            for offset in conf.offsets {
-                let parameter = match offset {
-                    None => String::from("get_documents"),
-                    Some((offset, limit)) => format!("get_documents[{offset}:{limit}]"),
-                };
-                group.bench_with_input(BenchmarkId::from_parameter(parameter), &(), |b, &()| {
-                    b.iter(|| {
-                        let rtxn = index.read_txn().unwrap();
-                        if let Some(sort) = &conf.sort {
-                            let sort = sort.iter().map(|sort| sort.parse().unwrap()).collect();
-                            let all_docs = index.documents_ids(&rtxn).unwrap();
-                            let facet_sort =
-                                recursive_sort(&index, &rtxn, sort, &all_docs).unwrap();
-                            let iter = facet_sort.iter().unwrap();
-                            if let Some((offset, limit)) = offset {
-                                let _results = iter.skip(*offset).take(*limit).collect::<Vec<_>>();
-                            } else {
-                                let _results = iter.collect::<Vec<_>>();
-                            }
-                        } else {
-                            let all_docs = index.documents_ids(&rtxn).unwrap();
-                            if let Some((offset, limit)) = offset {
-                                let _results =
-                                    all_docs.iter().skip(*offset).take(*limit).collect::<Vec<_>>();
-                            } else {
-                                let _results = all_docs.iter().collect::<Vec<_>>();
-                            }
-                        }
-                    });
-                });
-            }
-        }
-
         group.finish();
 
         index.prepare_for_closing().wait();
```
```diff
@@ -67,7 +67,7 @@ fn main() -> anyhow::Result<()> {
         writeln!(
             &mut manifest_paths_file,
             r#"pub const {}: &str = {:?};"#,
-            dataset.to_case(Case::UpperSnake),
+            dataset.to_case(Case::ScreamingSnake),
             out_file.display(),
         )?;
 
```
```diff
@@ -11,8 +11,8 @@ license.workspace = true
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-time = { version = "0.3.41", features = ["parsing"] }
+time = { version = "0.3.37", features = ["parsing"] }
 
 [build-dependencies]
-anyhow = "1.0.98"
-vergen-git2 = "1.0.7"
+anyhow = "1.0.95"
+vergen-git2 = "1.0.2"
```
```diff
@@ -11,21 +11,21 @@ readme.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.98"
-flate2 = "1.1.2"
-http = "1.3.1"
+anyhow = "1.0.95"
+flate2 = "1.0.35"
+http = "1.2.0"
 meilisearch-types = { path = "../meilisearch-types" }
-once_cell = "1.21.3"
+once_cell = "1.20.2"
 regex = "1.11.1"
-roaring = { version = "0.10.12", features = ["serde"] }
-serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.140", features = ["preserve_order"] }
-tar = "0.4.44"
-tempfile = "3.20.0"
-thiserror = "2.0.12"
-time = { version = "0.3.41", features = ["serde-well-known", "formatting", "parsing", "macros"] }
+roaring = { version = "0.10.10", features = ["serde"] }
+serde = { version = "1.0.217", features = ["derive"] }
+serde_json = { version = "1.0.135", features = ["preserve_order"] }
+tar = "0.4.43"
+tempfile = "3.15.0"
+thiserror = "2.0.9"
+time = { version = "0.3.37", features = ["serde-well-known", "formatting", "parsing", "macros"] }
 tracing = "0.1.41"
-uuid = { version = "1.17.0", features = ["serde", "v4"] }
+uuid = { version = "1.11.0", features = ["serde", "v4"] }
 
 [dev-dependencies]
 big_s = "1.0.2"
```
```diff
@@ -1,17 +1,12 @@
 #![allow(clippy::type_complexity)]
 #![allow(clippy::wrong_self_convention)]
 
-use std::collections::BTreeMap;
-
 use meilisearch_types::batches::BatchId;
-use meilisearch_types::byte_unit::Byte;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::Key;
 use meilisearch_types::milli::update::IndexDocumentsMethod;
 use meilisearch_types::settings::Unchecked;
-use meilisearch_types::tasks::{
-    Details, ExportIndexSettings, IndexSwap, KindWithContent, Status, Task, TaskId,
-};
+use meilisearch_types::tasks::{Details, IndexSwap, KindWithContent, Status, Task, TaskId};
 use meilisearch_types::InstanceUid;
 use roaring::RoaringBitmap;
 use serde::{Deserialize, Serialize};
@@ -146,12 +141,6 @@ pub enum KindDump {
         instance_uid: Option<InstanceUid>,
     },
     SnapshotCreation,
-    Export {
-        url: String,
-        api_key: Option<String>,
-        payload_size: Option<Byte>,
-        indexes: BTreeMap<String, ExportIndexSettings>,
-    },
     UpgradeDatabase {
         from: (u32, u32, u32),
     },
@@ -224,15 +213,6 @@ impl From<KindWithContent> for KindDump {
                 KindDump::DumpCreation { keys, instance_uid }
             }
             KindWithContent::SnapshotCreation => KindDump::SnapshotCreation,
-            KindWithContent::Export { url, api_key, payload_size, indexes } => KindDump::Export {
-                url,
-                api_key,
-                payload_size,
-                indexes: indexes
-                    .into_iter()
-                    .map(|(pattern, settings)| (pattern.to_string(), settings))
-                    .collect(),
-            },
             KindWithContent::UpgradeDatabase { from: version } => {
                 KindDump::UpgradeDatabase { from: version }
             }
@@ -325,7 +305,6 @@ pub(crate) mod test {
             localized_attributes: Setting::NotSet,
             facet_search: Setting::NotSet,
             prefix_search: Setting::NotSet,
-            chat: Setting::NotSet,
             _kind: std::marker::PhantomData,
         };
         settings.check()
@@ -349,7 +328,6 @@ pub(crate) mod test {
                 write_channel_congestion: None,
                 internal_database_sizes: Default::default(),
             },
-            embedder_stats: Default::default(),
             enqueued_at: Some(BatchEnqueuedAt {
                 earliest: datetime!(2022-11-11 0:00 UTC),
                 oldest: datetime!(2022-11-11 0:00 UTC),
```
```diff
@@ -1,4 +1,3 @@
-use std::num::NonZeroUsize;
 use std::str::FromStr;
 
 use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
@@ -389,13 +388,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
             },
             pagination: match settings.pagination {
                 v5::Setting::Set(pagination) => v6::Setting::Set(v6::PaginationSettings {
-                    max_total_hits: match pagination.max_total_hits {
-                        v5::Setting::Set(max_total_hits) => v6::Setting::Set(
-                            max_total_hits.try_into().unwrap_or(NonZeroUsize::new(1).unwrap()),
-                        ),
-                        v5::Setting::Reset => v6::Setting::Reset,
-                        v5::Setting::NotSet => v6::Setting::NotSet,
-                    },
+                    max_total_hits: pagination.max_total_hits.into(),
                 }),
                 v5::Setting::Reset => v6::Setting::Reset,
                 v5::Setting::NotSet => v6::Setting::NotSet,
```
```diff
@@ -116,15 +116,6 @@ impl DumpReader {
         }
     }
 
-    pub fn chat_completions_settings(
-        &mut self,
-    ) -> Result<Box<dyn Iterator<Item = Result<(String, v6::ChatCompletionSettings)>> + '_>> {
-        match self {
-            DumpReader::Current(current) => current.chat_completions_settings(),
-            DumpReader::Compat(_compat) => Ok(Box::new(std::iter::empty())),
-        }
-    }
-
     pub fn features(&self) -> Result<Option<v6::RuntimeTogglableFeatures>> {
         match self {
             DumpReader::Current(current) => Ok(current.features()),
```
```diff
@@ -1,4 +1,3 @@
-use std::ffi::OsStr;
 use std::fs::{self, File};
 use std::io::{BufRead, BufReader, ErrorKind};
 use std::path::Path;
@@ -22,7 +21,6 @@ pub type Unchecked = meilisearch_types::settings::Unchecked;
 pub type Task = crate::TaskDump;
 pub type Batch = meilisearch_types::batches::Batch;
 pub type Key = meilisearch_types::keys::Key;
-pub type ChatCompletionSettings = meilisearch_types::features::ChatCompletionSettings;
 pub type RuntimeTogglableFeatures = meilisearch_types::features::RuntimeTogglableFeatures;
 pub type Network = meilisearch_types::features::Network;
 
@@ -194,34 +192,6 @@ impl V6Reader {
         )
     }
 
-    pub fn chat_completions_settings(
-        &mut self,
-    ) -> Result<Box<dyn Iterator<Item = Result<(String, ChatCompletionSettings)>> + '_>> {
-        let entries = match fs::read_dir(self.dump.path().join("chat-completions-settings")) {
-            Ok(entries) => entries,
-            Err(e) if e.kind() == ErrorKind::NotFound => return Ok(Box::new(std::iter::empty())),
-            Err(e) => return Err(e.into()),
-        };
-        Ok(Box::new(
-            entries
-                .map(|entry| -> Result<Option<_>> {
-                    let entry = entry?;
-                    let file_name = entry.file_name();
-                    let path = Path::new(&file_name);
-                    if entry.file_type()?.is_file() && path.extension() == Some(OsStr::new("json"))
-                    {
-                        let name = path.file_stem().unwrap().to_str().unwrap().to_string();
-                        let file = File::open(entry.path())?;
-                        let settings = serde_json::from_reader(file)?;
-                        Ok(Some((name, settings)))
-                    } else {
-                        Ok(None)
-                    }
-                })
-                .filter_map(|entry| entry.transpose()),
-        ))
-    }
-
     pub fn features(&self) -> Option<RuntimeTogglableFeatures> {
         self.features
     }
```
```diff
@@ -5,7 +5,7 @@ use std::path::PathBuf;
 use flate2::write::GzEncoder;
 use flate2::Compression;
 use meilisearch_types::batches::Batch;
-use meilisearch_types::features::{ChatCompletionSettings, Network, RuntimeTogglableFeatures};
+use meilisearch_types::features::{Network, RuntimeTogglableFeatures};
 use meilisearch_types::keys::Key;
 use meilisearch_types::settings::{Checked, Settings};
 use serde_json::{Map, Value};
@@ -51,10 +51,6 @@ impl DumpWriter {
         KeyWriter::new(self.dir.path().to_path_buf())
     }
 
-    pub fn create_chat_completions_settings(&self) -> Result<ChatCompletionsSettingsWriter> {
-        ChatCompletionsSettingsWriter::new(self.dir.path().join("chat-completions-settings"))
-    }
-
     pub fn create_tasks_queue(&self) -> Result<TaskWriter> {
         TaskWriter::new(self.dir.path().join("tasks"))
     }
@@ -108,24 +104,6 @@ impl KeyWriter {
     }
 }
 
-pub struct ChatCompletionsSettingsWriter {
-    path: PathBuf,
-}
-
-impl ChatCompletionsSettingsWriter {
-    pub(crate) fn new(path: PathBuf) -> Result<Self> {
-        std::fs::create_dir(&path)?;
-        Ok(ChatCompletionsSettingsWriter { path })
-    }
-
-    pub fn push_settings(&mut self, name: &str, settings: &ChatCompletionSettings) -> Result<()> {
-        let mut settings_file = File::create(self.path.join(name).with_extension("json"))?;
-        serde_json::to_writer(&mut settings_file, &settings)?;
-        settings_file.flush()?;
-        Ok(())
-    }
-}
-
 pub struct TaskWriter {
     queue: BufWriter<File>,
     update_files: PathBuf,
```
|
@ -11,7 +11,7 @@ edition.workspace = true
|
|||||||
license.workspace = true
|
license.workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
tempfile = "3.20.0"
|
tempfile = "3.15.0"
|
||||||
thiserror = "2.0.12"
|
thiserror = "2.0.9"
|
||||||
tracing = "0.1.41"
|
tracing = "0.1.41"
|
||||||
uuid = { version = "1.17.0", features = ["serde", "v4"] }
|
uuid = { version = "1.11.0", features = ["serde", "v4"] }
|
||||||
|
@ -14,7 +14,7 @@ license.workspace = true
|
|||||||
[dependencies]
|
[dependencies]
|
||||||
nom = "7.1.3"
|
nom = "7.1.3"
|
||||||
nom_locate = "4.2.0"
|
nom_locate = "4.2.0"
|
||||||
unescaper = "0.1.6"
|
unescaper = "0.1.5"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
# fixed version due to format breakages in v1.40
|
# fixed version due to format breakages in v1.40
|
||||||
|
@ -16,7 +16,7 @@ license.workspace = true
|
|||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
|
|
||||||
[dev-dependencies]
|
[dev-dependencies]
|
||||||
criterion = { version = "0.6.0", features = ["html_reports"] }
|
criterion = { version = "0.5.1", features = ["html_reports"] }
|
||||||
|
|
||||||
[[bench]]
|
[[bench]]
|
||||||
name = "benchmarks"
|
name = "benchmarks"
|
||||||
|
@ -12,11 +12,11 @@ license.workspace = true
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
arbitrary = { version = "1.4.1", features = ["derive"] }
|
arbitrary = { version = "1.4.1", features = ["derive"] }
|
||||||
bumpalo = "3.18.1"
|
bumpalo = "3.16.0"
|
||||||
clap = { version = "4.5.40", features = ["derive"] }
|
clap = { version = "4.5.24", features = ["derive"] }
|
||||||
either = "1.15.0"
|
either = "1.13.0"
|
||||||
fastrand = "2.3.0"
|
fastrand = "2.3.0"
|
||||||
milli = { path = "../milli" }
|
milli = { path = "../milli" }
|
||||||
serde = { version = "1.0.219", features = ["derive"] }
|
serde = { version = "1.0.217", features = ["derive"] }
|
||||||
serde_json = { version = "1.0.140", features = ["preserve_order"] }
|
serde_json = { version = "1.0.135", features = ["preserve_order"] }
|
||||||
tempfile = "3.20.0"
|
tempfile = "3.15.0"
|
||||||
|
@@ -13,7 +13,7 @@ use milli::heed::EnvOpenOptions;
 use milli::progress::Progress;
 use milli::update::new::indexer;
 use milli::update::IndexerConfig;
-use milli::vector::RuntimeEmbedders;
+use milli::vector::EmbeddingConfigs;
 use milli::Index;
 use serde_json::Value;
 use tempfile::TempDir;
@@ -89,7 +89,7 @@ fn main() {
             let mut new_fields_ids_map = db_fields_ids_map.clone();
 
             let indexer_alloc = Bump::new();
-            let embedders = RuntimeEmbedders::default();
+            let embedders = EmbeddingConfigs::default();
             let mut indexer = indexer::DocumentOperation::new();
 
             let mut operations = Vec::new();
@@ -144,7 +144,6 @@ fn main() {
                     embedders,
                     &|| false,
                     &Progress::default(),
-                    &Default::default(),
                 )
                 .unwrap();
 
@@ -11,31 +11,31 @@ edition.workspace = true
 license.workspace = true
 
 [dependencies]
-anyhow = "1.0.98"
+anyhow = "1.0.95"
 bincode = "1.3.3"
 byte-unit = "5.1.6"
-bumpalo = "3.18.1"
+bumpalo = "3.16.0"
 bumparaw-collections = "0.1.4"
-convert_case = "0.8.0"
+convert_case = "0.6.0"
 csv = "1.3.1"
 derive_builder = "0.20.2"
 dump = { path = "../dump" }
 enum-iterator = "2.1.0"
 file-store = { path = "../file-store" }
-flate2 = "1.1.2"
-indexmap = "2.9.0"
+flate2 = "1.0.35"
+indexmap = "2.7.0"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 memmap2 = "0.9.5"
 page_size = "0.6.0"
 rayon = "1.10.0"
-roaring = { version = "0.10.12", features = ["serde"] }
-serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.140", features = ["preserve_order"] }
+roaring = { version = "0.10.10", features = ["serde"] }
+serde = { version = "1.0.217", features = ["derive"] }
+serde_json = { version = "1.0.138", features = ["preserve_order"] }
 synchronoise = "1.0.1"
-tempfile = "3.20.0"
-thiserror = "2.0.12"
-time = { version = "0.3.41", features = [
+tempfile = "3.15.0"
+thiserror = "2.0.9"
+time = { version = "0.3.37", features = [
     "serde-well-known",
     "formatting",
     "parsing",
@@ -43,8 +43,7 @@ time = { version = "0.3.41", features = [
 ] }
 tracing = "0.1.41"
 ureq = "2.12.1"
-uuid = { version = "1.17.0", features = ["serde", "v4"] }
-backoff = "0.4.0"
+uuid = { version = "1.11.0", features = ["serde", "v4"] }
 
 [dev-dependencies]
 big_s = "1.0.2"
@@ -4,7 +4,6 @@ use std::io;
 use dump::{KindDump, TaskDump, UpdateFile};
 use meilisearch_types::batches::{Batch, BatchId};
 use meilisearch_types::heed::RwTxn;
-use meilisearch_types::index_uid_pattern::IndexUidPattern;
 use meilisearch_types::milli;
 use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
 use roaring::RoaringBitmap;
@@ -212,23 +211,6 @@ impl<'a> Dump<'a> {
                     KindWithContent::DumpCreation { keys, instance_uid }
                 }
                 KindDump::SnapshotCreation => KindWithContent::SnapshotCreation,
-                KindDump::Export { url, api_key, payload_size, indexes } => {
-                    KindWithContent::Export {
-                        url,
-                        api_key,
-                        payload_size,
-                        indexes: indexes
-                            .into_iter()
-                            .map(|(pattern, settings)| {
-                                Ok((
-                                    IndexUidPattern::try_from(pattern)
-                                        .map_err(|_| Error::CorruptedDump)?,
-                                    settings,
-                                ))
-                            })
-                            .collect::<Result<_, Error>>()?,
-                    }
-                }
                 KindDump::UpgradeDatabase { from } => KindWithContent::UpgradeDatabase { from },
             },
         };
@@ -151,10 +151,6 @@ pub enum Error {
     CorruptedTaskQueue,
     #[error(transparent)]
    DatabaseUpgrade(Box<Self>),
-    #[error(transparent)]
-    Export(Box<Self>),
-    #[error("Failed to export documents to remote server {code} ({type}): {message} <{link}>")]
-    FromRemoteWhenExporting { message: String, code: String, r#type: String, link: String },
    #[error("Failed to rollback for index `{index}`: {rollback_outcome} ")]
     RollbackFailed { index: String, rollback_outcome: RollbackOutcome },
     #[error(transparent)]
@@ -216,7 +212,6 @@ impl Error {
             | Error::BatchNotFound(_)
             | Error::TaskDeletionWithEmptyQuery
             | Error::TaskCancelationWithEmptyQuery
-            | Error::FromRemoteWhenExporting { .. }
             | Error::AbortedTask
             | Error::Dump(_)
             | Error::Heed(_)
@@ -226,7 +221,6 @@ impl Error {
             | Error::IoError(_)
             | Error::Persist(_)
             | Error::FeatureNotEnabled(_)
-            | Error::Export(_)
             | Error::Anyhow(_) => true,
             Error::CreateBatch(_)
             | Error::CorruptedTaskQueue
@@ -288,7 +282,6 @@ impl ErrorCode for Error {
             Error::Dump(e) => e.error_code(),
             Error::Milli { error, .. } => error.error_code(),
             Error::ProcessBatchPanicked(_) => Code::Internal,
-            Error::FromRemoteWhenExporting { .. } => Code::Internal,
             Error::Heed(e) => e.error_code(),
             Error::HeedTransaction(e) => e.error_code(),
             Error::FileStore(e) => e.error_code(),
@@ -301,7 +294,6 @@ impl ErrorCode for Error {
             Error::CorruptedTaskQueue => Code::Internal,
             Error::CorruptedDump => Code::Internal,
             Error::DatabaseUpgrade(_) => Code::Internal,
-            Error::Export(_) => Code::Internal,
             Error::RollbackFailed { .. } => Code::Internal,
             Error::UnrecoverableError(_) => Code::Internal,
             Error::IndexSchedulerVersionMismatch { .. } => Code::Internal,
@@ -131,32 +131,6 @@ impl RoFeatures {
             .into())
         }
     }
-
-    pub fn check_chat_completions(&self, disabled_action: &'static str) -> Result<()> {
-        if self.runtime.chat_completions {
-            Ok(())
-        } else {
-            Err(FeatureNotEnabledError {
-                disabled_action,
-                feature: "chat completions",
-                issue_link: "https://github.com/orgs/meilisearch/discussions/835",
-            }
-            .into())
-        }
-    }
-
-    pub fn check_multimodal(&self, disabled_action: &'static str) -> Result<()> {
-        if self.runtime.multimodal {
-            Ok(())
-        } else {
-            Err(FeatureNotEnabledError {
-                disabled_action,
-                feature: "multimodal",
-                issue_link: "https://github.com/orgs/meilisearch/discussions/846",
-            }
-            .into())
-        }
-    }
 }
 
 impl FeatureData {
@@ -34,7 +34,6 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
         planned_failures: _,
         run_loop_iteration: _,
         embedders: _,
-        chat_settings: _,
     } = scheduler;
 
     let rtxn = env.read_txn().unwrap();
@@ -289,9 +288,6 @@ fn snapshot_details(d: &Details) -> String {
         Details::IndexSwap { swaps } => {
             format!("{{ swaps: {swaps:?} }}")
         }
-        Details::Export { url, api_key, payload_size, indexes } => {
-            format!("{{ url: {url:?}, api_key: {api_key:?}, payload_size: {payload_size:?}, indexes: {indexes:?} }}")
-        }
         Details::UpgradeDatabase { from, to } => {
             format!("{{ from: {from:?}, to: {to:?} }}")
         }
@@ -346,7 +342,6 @@ pub fn snapshot_batch(batch: &Batch) -> String {
         uid,
         details,
         stats,
-        embedder_stats,
         started_at,
         finished_at,
         progress: _,
@@ -370,12 +365,6 @@ pub fn snapshot_batch(batch: &Batch) -> String {
     snap.push_str(&format!("uid: {uid}, "));
     snap.push_str(&format!("details: {}, ", serde_json::to_string(details).unwrap()));
     snap.push_str(&format!("stats: {}, ", serde_json::to_string(&stats).unwrap()));
-    if !embedder_stats.skip_serializing() {
-        snap.push_str(&format!(
-            "embedder stats: {}, ",
-            serde_json::to_string(&embedder_stats).unwrap()
-        ));
-    }
     snap.push_str(&format!("stop reason: {}, ", serde_json::to_string(&stop_reason).unwrap()));
     snap.push('}');
     snap
@@ -28,6 +28,7 @@ mod lru;
 mod processing;
 mod queue;
 mod scheduler;
+mod settings;
 #[cfg(test)]
 mod test_utils;
 pub mod upgrade;
@@ -51,21 +52,16 @@ pub use features::RoFeatures;
 use flate2::bufread::GzEncoder;
 use flate2::Compression;
 use meilisearch_types::batches::Batch;
-use meilisearch_types::features::{
-    ChatCompletionSettings, InstanceTogglableFeatures, Network, RuntimeTogglableFeatures,
-};
+use meilisearch_types::features::{InstanceTogglableFeatures, Network, RuntimeTogglableFeatures};
 use meilisearch_types::heed::byteorder::BE;
-use meilisearch_types::heed::types::{DecodeIgnore, SerdeJson, Str, I128};
-use meilisearch_types::heed::{self, Database, Env, RoTxn, WithoutTls};
+use meilisearch_types::heed::types::{SerdeJson, Str, I128};
+use meilisearch_types::heed::{self, Database, Env, RoTxn, Unspecified, WithoutTls};
+use meilisearch_types::milli::index::IndexEmbeddingConfig;
 use meilisearch_types::milli::update::IndexerConfig;
-use meilisearch_types::milli::vector::json_template::JsonTemplate;
-use meilisearch_types::milli::vector::{
-    Embedder, EmbedderOptions, RuntimeEmbedder, RuntimeEmbedders, RuntimeFragment,
-};
+use meilisearch_types::milli::vector::{Embedder, EmbedderOptions, EmbeddingConfigs};
 use meilisearch_types::milli::{self, Index};
 use meilisearch_types::task_view::TaskView;
 use meilisearch_types::tasks::{KindWithContent, Task};
-use milli::vector::db::IndexEmbeddingConfig;
 use processing::ProcessingTasks;
 pub use queue::Query;
 use queue::Queue;
@@ -80,7 +76,6 @@ use crate::utils::clamp_to_page_size;
 pub(crate) type BEI128 = I128<BE>;
 
 const TASK_SCHEDULER_SIZE_THRESHOLD_PERCENT_INT: u64 = 40;
-const CHAT_SETTINGS_DB_NAME: &str = "chat-settings";
 
 #[derive(Debug)]
 pub struct IndexSchedulerOptions {
@@ -137,8 +132,6 @@ pub struct IndexSchedulerOptions {
     ///
     /// 0 disables the cache.
     pub embedding_cache_cap: usize,
-    /// Snapshot compaction status.
-    pub experimental_no_snapshot_compaction: bool,
 }
 
 /// Structure which holds meilisearch's indexes and schedules the tasks
@@ -150,6 +143,8 @@ pub struct IndexScheduler {
     /// The list of tasks currently processing
     pub(crate) processing_tasks: Arc<RwLock<ProcessingTasks>>,
 
+    /// The main database that also has the chat settings.
+    pub main: Database<Str, Unspecified>,
     /// A database containing only the version of the index-scheduler
     pub version: versioning::Versioning,
     /// The queue containing both the tasks and the batches.
@@ -159,9 +154,6 @@ pub struct IndexScheduler {
     /// In charge of fetching and setting the status of experimental features.
     features: features::FeatureData,
 
-    /// Stores the custom chat prompts and other settings of the indexes.
-    pub(crate) chat_settings: Database<Str, SerdeJson<ChatCompletionSettings>>,
-
     /// Everything related to the processing of the tasks
     pub scheduler: scheduler::Scheduler,
 
@@ -207,7 +199,7 @@ impl IndexScheduler {
             version: self.version.clone(),
             queue: self.queue.private_clone(),
             scheduler: self.scheduler.private_clone(),
+            main: self.main.clone(),
             index_mapper: self.index_mapper.clone(),
             cleanup_enabled: self.cleanup_enabled,
             webhook_url: self.webhook_url.clone(),
@@ -220,16 +212,11 @@ impl IndexScheduler {
             #[cfg(test)]
             run_loop_iteration: self.run_loop_iteration.clone(),
             features: self.features.clone(),
-            chat_settings: self.chat_settings,
         }
     }
 
     pub(crate) const fn nb_db() -> u32 {
-        Versioning::nb_db()
-            + Queue::nb_db()
-            + IndexMapper::nb_db()
-            + features::FeatureData::nb_db()
-            + 1 // chat-prompts
+        Versioning::nb_db() + Queue::nb_db() + IndexMapper::nb_db() + features::FeatureData::nb_db()
     }
 
     /// Create an index scheduler and start its run loop.
@@ -283,7 +270,7 @@ impl IndexScheduler {
         let features = features::FeatureData::new(&env, &mut wtxn, options.instance_features)?;
         let queue = Queue::new(&env, &mut wtxn, &options)?;
         let index_mapper = IndexMapper::new(&env, &mut wtxn, &options, budget)?;
-        let chat_settings = env.create_database(&mut wtxn, Some(CHAT_SETTINGS_DB_NAME))?;
+        let chat_settings = env.create_database(&mut wtxn, Some("chat-settings"))?;
         wtxn.commit()?;
 
         // allow unreachable_code to get rids of the warning in the case of a test build.
@@ -314,10 +301,6 @@ impl IndexScheduler {
         Ok(this)
     }
 
-    fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
-        self.env.read_txn().map_err(|e| e.into())
-    }
-
     /// Return `Ok(())` if the index scheduler is able to access one of its database.
     pub fn health(&self) -> Result<()> {
         let rtxn = self.env.read_txn()?;
@@ -394,16 +377,15 @@ impl IndexScheduler {
         }
     }
 
+    pub fn read_txn(&self) -> Result<RoTxn<WithoutTls>> {
+        self.env.read_txn().map_err(|e| e.into())
+    }
+
     /// Start the run loop for the given index scheduler.
     ///
     /// This function will execute in a different thread and must be called
     /// only once per index scheduler.
     fn run(&self) {
-        // If the number of batched tasks is 0, we don't need to run the scheduler at all.
-        // It will never be able to process any tasks.
-        if self.scheduler.max_number_of_batched_tasks == 0 {
-            return;
-        }
         let run = self.private_clone();
         std::thread::Builder::new()
             .name(String::from("scheduler"))
@@ -511,7 +493,7 @@ impl IndexScheduler {
 
     /// Returns the total number of indexes available for the specified filter.
     /// And a `Vec` of the index_uid + its stats
-    pub fn paginated_indexes_stats(
+    pub fn get_paginated_indexes_stats(
         &self,
         filters: &meilisearch_auth::AuthFilter,
         from: usize,
@@ -552,24 +534,6 @@ impl IndexScheduler {
         ret.map(|ret| (total, ret))
     }
 
-    /// Returns the total number of chat workspaces available ~~for the specified filter~~.
-    /// And a `Vec` of the workspace_uids
-    pub fn paginated_chat_workspace_uids(
-        &self,
-        from: usize,
-        limit: usize,
-    ) -> Result<(usize, Vec<String>)> {
-        let rtxn = self.read_txn()?;
-        let total = self.chat_settings.len(&rtxn)?;
-        let mut iter = self.chat_settings.iter(&rtxn)?.skip(from);
-        iter.by_ref()
-            .take(limit)
-            .map(|ret| ret.map_err(Error::from))
-            .map(|ret| ret.map(|(uid, _)| uid.to_string()))
-            .collect::<Result<Vec<_>, Error>>()
-            .map(|ret| (total as usize, ret))
-    }
-
     /// The returned structure contains:
     /// 1. The name of the property being observed can be `statuses`, `types`, or `indexes`.
     /// 2. The name of the specific data related to the property can be `enqueued` for the `statuses`, `settingsUpdate` for the `types`, or the name of the index for the `indexes`, for example.
@@ -854,42 +818,29 @@
         &self,
         index_uid: String,
         embedding_configs: Vec<IndexEmbeddingConfig>,
-    ) -> Result<RuntimeEmbedders> {
+    ) -> Result<EmbeddingConfigs> {
         let res: Result<_> = embedding_configs
             .into_iter()
             .map(
                 |IndexEmbeddingConfig {
                      name,
                      config: milli::vector::EmbeddingConfig { embedder_options, prompt, quantized },
-                     fragments,
-                 }|
-                 -> Result<(String, Arc<RuntimeEmbedder>)> {
-                    let document_template = prompt
+                     ..
+                 }| {
+                    let prompt = Arc::new(
+                        prompt
                             .try_into()
                             .map_err(meilisearch_types::milli::Error::from)
-                            .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;
-                    let fragments = fragments
-                        .into_inner()
-                        .into_iter()
-                        .map(|fragment| {
-                            let value = embedder_options.fragment(&fragment.name).unwrap();
-                            let template = JsonTemplate::new(value.clone()).unwrap();
-                            RuntimeFragment { name: fragment.name, id: fragment.id, template }
-                        })
-                        .collect();
+                            .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?,
+                    );
                     // optimistically return existing embedder
                     {
                         let embedders = self.embedders.read().unwrap();
                         if let Some(embedder) = embedders.get(&embedder_options) {
-                            let runtime = Arc::new(RuntimeEmbedder::new(
-                                embedder.clone(),
-                                document_template,
-                                fragments,
-                                quantized.unwrap_or_default(),
+                            return Ok((
+                                name,
+                                (embedder.clone(), prompt, quantized.unwrap_or_default()),
                             ));
-
-                            return Ok((name, runtime));
                         }
                     }
-
@@ -905,45 +856,24 @@
                         let mut embedders = self.embedders.write().unwrap();
                         embedders.insert(embedder_options, embedder.clone());
                     }
-                    let runtime = Arc::new(RuntimeEmbedder::new(
-                        embedder.clone(),
-                        document_template,
-                        fragments,
-                        quantized.unwrap_or_default(),
-                    ));
-
-                    Ok((name, runtime))
+                    Ok((name, (embedder, prompt, quantized.unwrap_or_default())))
                 },
             )
             .collect();
-        res.map(RuntimeEmbedders::new)
+        res.map(EmbeddingConfigs::new)
     }
 
-    pub fn chat_settings(&self, uid: &str) -> Result<Option<ChatCompletionSettings>> {
-        let rtxn = self.env.read_txn()?;
-        self.chat_settings.get(&rtxn, uid).map_err(Into::into)
+    pub fn chat_settings(&self) -> Result<Option<serde_json::Value>> {
+        let rtxn = self.env.read_txn().map_err(Error::HeedTransaction)?;
+        self.chat_settings.get(&rtxn, "main").map_err(Into::into)
     }
 
-    /// Return true if chat workspace exists.
-    pub fn chat_workspace_exists(&self, name: &str) -> Result<bool> {
-        let rtxn = self.env.read_txn()?;
-        Ok(self.chat_settings.remap_data_type::<DecodeIgnore>().get(&rtxn, name)?.is_some())
-    }
-
-    pub fn put_chat_settings(&self, uid: &str, settings: &ChatCompletionSettings) -> Result<()> {
-        let mut wtxn = self.env.write_txn()?;
-        self.chat_settings.put(&mut wtxn, uid, settings)?;
-        wtxn.commit()?;
+    pub fn put_chat_settings(&self, settings: &serde_json::Value) -> Result<()> {
+        let mut wtxn = self.env.write_txn().map_err(Error::HeedTransaction)?;
+        self.chat_settings.put(&mut wtxn, "main", settings)?;
+        wtxn.commit().map_err(Error::HeedTransaction)?;
         Ok(())
     }
-
-    pub fn delete_chat_settings(&self, uid: &str) -> Result<bool> {
-        let mut wtxn = self.env.write_txn()?;
-        let deleted = self.chat_settings.delete(&mut wtxn, uid)?;
-        wtxn.commit()?;
-        Ok(deleted)
-    }
 }
 
 /// The outcome of calling the [`IndexScheduler::tick`] function.
@@ -103,7 +103,6 @@ make_enum_progress! {
     pub enum DumpCreationProgress {
         StartTheDumpCreation,
         DumpTheApiKeys,
-        DumpTheChatCompletionSettings,
         DumpTheTasks,
         DumpTheBatches,
         DumpTheIndexes,
@@ -176,17 +175,8 @@ make_enum_progress! {
     }
 }
 
-make_enum_progress! {
-    pub enum Export {
-        EnsuringCorrectnessOfTheTarget,
-        ExportingTheSettings,
-        ExportingTheDocuments,
-    }
-}
-
 make_atomic_progress!(Task alias AtomicTaskStep => "task" );
 make_atomic_progress!(Document alias AtomicDocumentStep => "document" );
-make_atomic_progress!(Index alias AtomicIndexStep => "index" );
 make_atomic_progress!(Batch alias AtomicBatchStep => "batch" );
 make_atomic_progress!(UpdateFile alias AtomicUpdateFileStep => "update file" );
 
@@ -179,7 +179,6 @@ impl BatchQueue {
             progress: None,
             details: batch.details,
             stats: batch.stats,
-            embedder_stats: batch.embedder_stats.as_ref().into(),
             started_at: batch.started_at,
             finished_at: batch.finished_at,
             enqueued_at: batch.enqueued_at,
@@ -127,7 +127,7 @@ fn query_batches_simple() {
         "startedAt": "1970-01-01T00:00:00Z",
         "finishedAt": null,
         "enqueuedAt": null,
-        "stopReason": "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task."
+        "stopReason": "task with id 0 of type `indexCreation` cannot be batched"
     }
     "###);
 
@@ -48,8 +48,8 @@ catto: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [1,2,3,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
-1 {uid: 1, details: {"primaryKey":"sheep","matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query","swaps":[{"indexes":["catto","doggo"]}]}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"indexCreation":1,"indexSwap":1,"taskCancelation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `taskCancelation` that cannot be batched with any other task.", }
+0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
+1 {uid: 1, details: {"primaryKey":"sheep","matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query","swaps":[{"indexes":["catto","doggo"]}]}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"indexCreation":1,"indexSwap":1,"taskCancelation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 3 of type `taskCancelation` cannot be batched", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -47,9 +47,9 @@ whalo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [2,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
-1 {uid: 1, details: {"primaryKey":"plankton"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"whalo":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
-2 {uid: 2, details: {"primaryKey":"his_own_vomit"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
+0 {uid: 0, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
+1 {uid: 1, details: {"primaryKey":"plankton"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"whalo":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
+2 {uid: 2, details: {"primaryKey":"his_own_vomit"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -4,7 +4,7 @@ source: crates/index-scheduler/src/queue/batches_test.rs
 ### Autobatching Enabled = true
 ### Processing batch Some(1):
 [1,]
-{uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
+{uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
@@ -42,7 +42,7 @@ catto: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
+0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -47,9 +47,9 @@ doggo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [2,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
-1 {uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
-2 {uid: 2, details: {"primaryKey":"fish"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"whalo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
+0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
+1 {uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
+2 {uid: 2, details: {"primaryKey":"fish"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"whalo":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -52,10 +52,10 @@ doggo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [3,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
-1 {uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
-2 {uid: 2, details: {"swaps":[{"indexes":["catto","doggo"]}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 2 of type `indexSwap` that cannot be batched with any other task.", }
-3 {uid: 3, details: {"swaps":[{"indexes":["catto","whalo"]}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 3 of type `indexSwap` that cannot be batched with any other task.", }
+0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
+1 {uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
+2 {uid: 2, details: {"swaps":[{"indexes":["catto","doggo"]}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "task with id 2 of type `indexSwap` cannot be batched", }
+3 {uid: 3, details: {"swaps":[{"indexes":["catto","whalo"]}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "task with id 3 of type `indexSwap` cannot be batched", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -48,8 +48,8 @@ catto: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [1,2,3,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
-1 {uid: 1, details: {"primaryKey":"sheep","matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query","swaps":[{"indexes":["catto","doggo"]}]}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"indexCreation":1,"indexSwap":1,"taskCancelation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `taskCancelation` that cannot be batched with any other task.", }
+0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
+1 {uid: 1, details: {"primaryKey":"sheep","matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query","swaps":[{"indexes":["catto","doggo"]}]}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"indexCreation":1,"indexSwap":1,"taskCancelation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 3 of type `taskCancelation` cannot be batched", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -47,9 +47,9 @@ whalo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [2,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
-1 {uid: 1, details: {"primaryKey":"plankton"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"whalo":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
-2 {uid: 2, details: {"primaryKey":"his_own_vomit"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
+0 {uid: 0, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
+1 {uid: 1, details: {"primaryKey":"plankton"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"whalo":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
+2 {uid: 2, details: {"primaryKey":"his_own_vomit"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -47,9 +47,9 @@ doggo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [2,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
-1 {uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
-2 {uid: 2, details: {"primaryKey":"fish"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"whalo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
+0 {uid: 0, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
+1 {uid: 1, details: {"primaryKey":"sheep"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
+2 {uid: 2, details: {"primaryKey":"fish"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"whalo":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -71,7 +71,6 @@ impl From<KindWithContent> for AutobatchKind {
             KindWithContent::TaskCancelation { .. }
             | KindWithContent::TaskDeletion { .. }
            | KindWithContent::DumpCreation { .. }
-            | KindWithContent::Export { .. }
            | KindWithContent::UpgradeDatabase { .. }
            | KindWithContent::SnapshotCreation => {
                 panic!("The autobatcher should never be called with tasks that don't apply to an index.")
|
|||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::io::ErrorKind;
|
|
||||||
|
|
||||||
use meilisearch_types::heed::RoTxn;
|
use meilisearch_types::heed::RoTxn;
|
||||||
use meilisearch_types::milli::update::IndexDocumentsMethod;
|
use meilisearch_types::milli::update::IndexDocumentsMethod;
|
||||||
@ -48,9 +47,6 @@ pub(crate) enum Batch {
|
|||||||
IndexSwap {
|
IndexSwap {
|
||||||
task: Task,
|
task: Task,
|
||||||
},
|
},
|
||||||
Export {
|
|
||||||
task: Task,
|
|
||||||
},
|
|
||||||
UpgradeDatabase {
|
UpgradeDatabase {
|
||||||
tasks: Vec<Task>,
|
tasks: Vec<Task>,
|
||||||
},
|
},
|
||||||
@ -107,7 +103,6 @@ impl Batch {
|
|||||||
Batch::TaskCancelation { task, .. }
|
Batch::TaskCancelation { task, .. }
|
||||||
| Batch::Dump(task)
|
| Batch::Dump(task)
|
||||||
| Batch::IndexCreation { task, .. }
|
| Batch::IndexCreation { task, .. }
|
||||||
| Batch::Export { task }
|
|
||||||
| Batch::IndexUpdate { task, .. } => {
|
| Batch::IndexUpdate { task, .. } => {
|
||||||
RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
|
RoaringBitmap::from_sorted_iter(std::iter::once(task.uid)).unwrap()
|
||||||
}
|
}
|
||||||
@ -147,7 +142,6 @@ impl Batch {
|
|||||||
| TaskDeletions(_)
|
| TaskDeletions(_)
|
||||||
| SnapshotCreation(_)
|
| SnapshotCreation(_)
|
||||||
| Dump(_)
|
| Dump(_)
|
||||||
| Export { .. }
|
|
||||||
| UpgradeDatabase { .. }
|
| UpgradeDatabase { .. }
|
||||||
| IndexSwap { .. } => None,
|
| IndexSwap { .. } => None,
|
||||||
IndexOperation { op, .. } => Some(op.index_uid()),
|
IndexOperation { op, .. } => Some(op.index_uid()),
|
||||||
@ -173,7 +167,6 @@ impl fmt::Display for Batch {
|
|||||||
Batch::IndexUpdate { .. } => f.write_str("IndexUpdate")?,
|
Batch::IndexUpdate { .. } => f.write_str("IndexUpdate")?,
|
||||||
Batch::IndexDeletion { .. } => f.write_str("IndexDeletion")?,
|
Batch::IndexDeletion { .. } => f.write_str("IndexDeletion")?,
|
||||||
Batch::IndexSwap { .. } => f.write_str("IndexSwap")?,
|
Batch::IndexSwap { .. } => f.write_str("IndexSwap")?,
|
||||||
Batch::Export { .. } => f.write_str("Export")?,
|
|
||||||
Batch::UpgradeDatabase { .. } => f.write_str("UpgradeDatabase")?,
|
Batch::UpgradeDatabase { .. } => f.write_str("UpgradeDatabase")?,
|
||||||
};
|
};
|
||||||
match index_uid {
|
match index_uid {
|
||||||
@ -433,10 +426,9 @@ impl IndexScheduler {
|
|||||||
/// 0. We get the *last* task to cancel.
|
/// 0. We get the *last* task to cancel.
|
||||||
/// 1. We get the tasks to upgrade.
|
/// 1. We get the tasks to upgrade.
|
||||||
/// 2. We get the *next* task to delete.
|
/// 2. We get the *next* task to delete.
|
||||||
/// 3. We get the *next* export to process.
|
/// 3. We get the *next* snapshot to process.
|
||||||
/// 4. We get the *next* snapshot to process.
|
/// 4. We get the *next* dump to process.
|
||||||
/// 5. We get the *next* dump to process.
|
/// 5. We get the *next* tasks to process for a specific index.
|
||||||
/// 6. We get the *next* tasks to process for a specific index.
|
|
||||||
 #[tracing::instrument(level = "trace", skip(self, rtxn), target = "indexing::scheduler")]
 pub(crate) fn create_next_batch(
     &self,
@@ -508,17 +500,7 @@ impl IndexScheduler {
             return Ok(Some((Batch::TaskDeletions(tasks), current_batch)));
         }

-        // 3. we batch the export.
-        let to_export = self.queue.tasks.get_kind(rtxn, Kind::Export)? & enqueued;
-        if !to_export.is_empty() {
-            let task_id = to_export.iter().next().expect("There must be at least one export task");
-            let mut task = self.queue.tasks.get_task(rtxn, task_id)?.unwrap();
-            current_batch.processing([&mut task]);
-            current_batch.reason(BatchStopReason::TaskKindCannotBeBatched { kind: Kind::Export });
-            return Ok(Some((Batch::Export { task }, current_batch)));
-        }
-
-        // 4. we batch the snapshot.
+        // 3. we batch the snapshot.
         let to_snapshot = self.queue.tasks.get_kind(rtxn, Kind::SnapshotCreation)? & enqueued;
         if !to_snapshot.is_empty() {
             let mut tasks = self.queue.tasks.get_existing_tasks(rtxn, to_snapshot)?;
@@ -528,7 +510,7 @@ impl IndexScheduler {
             return Ok(Some((Batch::SnapshotCreation(tasks), current_batch)));
         }

-        // 5. we batch the dumps.
+        // 4. we batch the dumps.
         let to_dump = self.queue.tasks.get_kind(rtxn, Kind::DumpCreation)? & enqueued;
         if let Some(to_dump) = to_dump.min() {
             let mut task =
@@ -541,7 +523,7 @@ impl IndexScheduler {
             return Ok(Some((Batch::Dump(task), current_batch)));
         }

-        // 6. We make a batch from the unprioritised tasks. Start by taking the next enqueued task.
+        // 5. We make a batch from the unprioritised tasks. Start by taking the next enqueued task.
         let task_id = if let Some(task_id) = enqueued.min() { task_id } else { return Ok(None) };
         let mut task =
             self.queue.tasks.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
@@ -595,11 +577,7 @@ impl IndexScheduler {
             .and_then(|task| task.ok_or(Error::CorruptedTaskQueue))?;

         if let Some(uuid) = task.content_uuid() {
-            let content_size = match self.queue.file_store.compute_size(uuid) {
-                Ok(content_size) => content_size,
-                Err(file_store::Error::IoError(err)) if err.kind() == ErrorKind::NotFound => 0,
-                Err(otherwise) => return Err(otherwise.into()),
-            };
+            let content_size = self.queue.file_store.compute_size(uuid)?;
             total_size = total_size.saturating_add(content_size);
         }

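The last hunk above is a behavior change, not just cleanup: the left-hand side sizes each task's content file but tolerates a payload that has already been deleted (a NotFound error counts as zero bytes), while the right-hand side propagates the error. A minimal sketch of the tolerant variant, with `std::fs` standing in for the scheduler's file store:

```rust
use std::io::{self, ErrorKind};
use std::path::Path;

// Sketch only: `std::fs::metadata` stands in for `file_store.compute_size`.
// A task whose payload file was already cleaned up contributes zero bytes.
fn content_size_or_zero(path: &Path) -> io::Result<u64> {
    match std::fs::metadata(path) {
        Ok(meta) => Ok(meta.len()),
        Err(err) if err.kind() == ErrorKind::NotFound => Ok(0),
        Err(err) => Err(err),
    }
}
```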
@@ -4,7 +4,6 @@ mod autobatcher_test;
 mod create_batch;
 mod process_batch;
 mod process_dump_creation;
-mod process_export;
 mod process_index_operation;
 mod process_snapshot_creation;
 mod process_upgrade;
@@ -84,9 +83,6 @@ pub struct Scheduler {
     ///
     /// 0 disables the cache.
     pub(crate) embedding_cache_cap: usize,
-
-    /// Snapshot compaction status.
-    pub(crate) experimental_no_snapshot_compaction: bool,
 }

 impl Scheduler {
@@ -102,7 +98,6 @@ impl Scheduler {
             auth_env: self.auth_env.clone(),
             version_file_path: self.version_file_path.clone(),
             embedding_cache_cap: self.embedding_cache_cap,
-            experimental_no_snapshot_compaction: self.experimental_no_snapshot_compaction,
         }
     }

@@ -119,7 +114,6 @@ impl Scheduler {
             auth_env,
             version_file_path: options.version_file_path.clone(),
             embedding_cache_cap: options.embedding_cache_cap,
-            experimental_no_snapshot_compaction: options.experimental_no_snapshot_compaction,
         }
     }
 }
@@ -376,11 +370,9 @@ impl IndexScheduler {
             post_commit_dabases_sizes
                 .get(dbname)
                 .map(|post_size| {
+                    use byte_unit::{Byte, UnitType::Binary};
                     use std::cmp::Ordering::{Equal, Greater, Less};

-                    use byte_unit::Byte;
-                    use byte_unit::UnitType::Binary;
-
                     let post = Byte::from_u64(*post_size as u64).get_appropriate_unit(Binary);
                     let diff_size = post_size.abs_diff(*pre_size) as u64;
                     let diff = Byte::from_u64(diff_size).get_appropriate_unit(Binary);
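The final hunk above only merges two imports, but the calls it surrounds are worth a gloss: `Byte::from_u64(..).get_appropriate_unit(Binary)` picks a human-readable binary unit (KiB, MiB, GiB, ...) for logging database sizes. A small sketch, assuming the byte_unit 5.x API these imports come from:

```rust
use byte_unit::{Byte, UnitType};

// Formats a raw byte count the way the scheduler logs database sizes,
// e.g. 1_572_864 becomes "1.50 MiB" with two digits of precision.
fn human_readable(size_in_bytes: u64) -> String {
    let adjusted = Byte::from_u64(size_in_bytes).get_appropriate_unit(UnitType::Binary);
    format!("{adjusted:.2}")
}
```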
@@ -162,13 +162,8 @@ impl IndexScheduler {
                     .set_currently_updating_index(Some((index_uid.clone(), index.clone())));

                 let pre_commit_dabases_sizes = index.database_sizes(&index_wtxn)?;
-                let (tasks, congestion) = self.apply_index_operation(
-                    &mut index_wtxn,
-                    &index,
-                    op,
-                    &progress,
-                    current_batch.embedder_stats.clone(),
-                )?;
+                let (tasks, congestion) =
+                    self.apply_index_operation(&mut index_wtxn, &index, op, &progress)?;

                 {
                     progress.update_progress(FinalizingIndexStep::Committing);
@@ -243,12 +238,10 @@ impl IndexScheduler {
                 );
                 builder.set_primary_key(primary_key);
                 let must_stop_processing = self.scheduler.must_stop_processing.clone();
-
                 builder
                     .execute(
-                        &|| must_stop_processing.get(),
-                        &progress,
-                        current_batch.embedder_stats.clone(),
+                        |indexing_step| tracing::debug!(update = ?indexing_step),
+                        || must_stop_processing.get(),
                     )
                     .map_err(|e| Error::from_milli(e, Some(index_uid.to_string())))?;
                 index_wtxn.commit()?;
@@ -368,46 +361,6 @@ impl IndexScheduler {
                 task.status = Status::Succeeded;
                 Ok((vec![task], ProcessBatchInfo::default()))
             }
-            Batch::Export { mut task } => {
-                let KindWithContent::Export { url, api_key, payload_size, indexes } = &task.kind
-                else {
-                    unreachable!()
-                };
-
-                let ret = catch_unwind(AssertUnwindSafe(|| {
-                    self.process_export(
-                        url,
-                        api_key.as_deref(),
-                        payload_size.as_ref(),
-                        indexes,
-                        progress,
-                    )
-                }));
-
-                let stats = match ret {
-                    Ok(Ok(stats)) => stats,
-                    Ok(Err(Error::AbortedTask)) => return Err(Error::AbortedTask),
-                    Ok(Err(e)) => return Err(Error::Export(Box::new(e))),
-                    Err(e) => {
-                        let msg = match e.downcast_ref::<&'static str>() {
-                            Some(s) => *s,
-                            None => match e.downcast_ref::<String>() {
-                                Some(s) => &s[..],
-                                None => "Box<dyn Any>",
-                            },
-                        };
-                        return Err(Error::Export(Box::new(Error::ProcessBatchPanicked(
-                            msg.to_string(),
-                        ))));
-                    }
-                };
-
-                task.status = Status::Succeeded;
-                if let Some(Details::Export { indexes, .. }) = task.details.as_mut() {
-                    *indexes = stats;
-                }
-                Ok((vec![task], ProcessBatchInfo::default()))
-            }
             Batch::UpgradeDatabase { mut tasks } => {
                 let KindWithContent::UpgradeDatabase { from } = tasks.last().unwrap().kind else {
                     unreachable!();
@@ -755,11 +708,9 @@ impl IndexScheduler {
                     from.1,
                     from.2
                 );
-                let ret = catch_unwind(std::panic::AssertUnwindSafe(|| {
+                match std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
                     self.process_rollback(from, progress)
-                }));
-
-                match ret {
+                })) {
                     Ok(Ok(())) => {}
                     Ok(Err(err)) => return Err(Error::DatabaseUpgrade(Box::new(err))),
                     Err(e) => {
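Both the removed `Batch::Export` arm and the rollback hunk wrap their work in `catch_unwind`, so a panic poisons only the current batch instead of unwinding through the scheduler, and the panic payload is downcast back into a readable message. A reduced sketch of that pattern, using plain `String` errors instead of the scheduler's error type:

```rust
use std::panic::{catch_unwind, AssertUnwindSafe};

// Runs a fallible job and converts a panic into an error instead of
// unwinding through the caller. A `panic!` payload is usually either a
// `&'static str` or a `String`; anything else gets a generic label.
fn run_guarded<T>(job: impl FnOnce() -> Result<T, String>) -> Result<T, String> {
    match catch_unwind(AssertUnwindSafe(job)) {
        Ok(result) => result,
        Err(payload) => {
            let msg = match payload.downcast_ref::<&'static str>() {
                Some(s) => *s,
                None => match payload.downcast_ref::<String>() {
                    Some(s) => s.as_str(),
                    None => "Box<dyn Any>",
                },
            };
            Err(format!("job panicked: {msg}"))
        }
    }
}
```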
@@ -43,16 +43,7 @@ impl IndexScheduler {

         let rtxn = self.env.read_txn()?;

-        // 2. dump the chat completion settings
-        // TODO should I skip the export if the chat completion has been disabled?
-        progress.update_progress(DumpCreationProgress::DumpTheChatCompletionSettings);
-        let mut dump_chat_completion_settings = dump.create_chat_completions_settings()?;
-        for result in self.chat_settings.iter(&rtxn)? {
-            let (name, chat_settings) = result?;
-            dump_chat_completion_settings.push_settings(name, &chat_settings)?;
-        }
-
-        // 3. dump the tasks
+        // 2. dump the tasks
         progress.update_progress(DumpCreationProgress::DumpTheTasks);
         let mut dump_tasks = dump.create_tasks_queue()?;

@@ -90,7 +81,7 @@ impl IndexScheduler {

             let mut dump_content_file = dump_tasks.push_task(&t.into())?;

-            // 3.1. Dump the `content_file` associated with the task if there is one and the task is not finished yet.
+            // 2.1. Dump the `content_file` associated with the task if there is one and the task is not finished yet.
             if let Some(content_file) = content_file {
                 if self.scheduler.must_stop_processing.get() {
                     return Err(Error::AbortedTask);
@@ -114,7 +105,7 @@ impl IndexScheduler {
         }
         dump_tasks.flush()?;

-        // 4. dump the batches
+        // 3. dump the batches
         progress.update_progress(DumpCreationProgress::DumpTheBatches);
         let mut dump_batches = dump.create_batches_queue()?;

@@ -147,7 +138,7 @@ impl IndexScheduler {
         }
         dump_batches.flush()?;

-        // 5. Dump the indexes
+        // 4. Dump the indexes
         progress.update_progress(DumpCreationProgress::DumpTheIndexes);
         let nb_indexes = self.index_mapper.index_mapping.len(&rtxn)? as u32;
         let mut count = 0;
@@ -174,6 +165,9 @@ impl IndexScheduler {

             let fields_ids_map = index.fields_ids_map(&rtxn)?;
             let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
+            let embedding_configs = index
+                .embedding_configs(&rtxn)
+                .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;

             let nb_documents = index
                 .number_of_documents(&rtxn)
@@ -184,7 +178,7 @@ impl IndexScheduler {
             let documents = index
                 .all_documents(&rtxn)
                 .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
-            // 5.1. Dump the documents
+            // 4.1. Dump the documents
             for ret in documents {
                 if self.scheduler.must_stop_processing.get() {
                     return Err(Error::AbortedTask);
@@ -227,12 +221,16 @@ impl IndexScheduler {
                     return Err(Error::from_milli(user_err, Some(uid.to_string())));
                 };

-                for (embedder_name, (embeddings, regenerate)) in embeddings {
+                for (embedder_name, embeddings) in embeddings {
+                    let user_provided = embedding_configs
+                        .iter()
+                        .find(|conf| conf.name == embedder_name)
+                        .is_some_and(|conf| conf.user_provided.contains(id));
                     let embeddings = ExplicitVectors {
                         embeddings: Some(VectorOrArrayOfVectors::from_array_of_vectors(
                             embeddings,
                         )),
-                        regenerate,
+                        regenerate: !user_provided,
                     };
                     vectors.insert(embedder_name, serde_json::to_value(embeddings).unwrap());
                 }
@@ -242,7 +240,7 @@ impl IndexScheduler {
                 atomic.fetch_add(1, Ordering::Relaxed);
             }

-            // 5.2. Dump the settings
+            // 4.2. Dump the settings
             let settings = meilisearch_types::settings::settings(
                 index,
                 &rtxn,
@@ -253,7 +251,7 @@ impl IndexScheduler {
             Ok(())
         })?;

-        // 6. Dump experimental feature settings
+        // 5. Dump experimental feature settings
         progress.update_progress(DumpCreationProgress::DumpTheExperimentalFeatures);
         let features = self.features().runtime_features();
         dump.create_experimental_features(features)?;
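The `@@ -227` hunk changes where the `regenerate` flag comes from: on the left it arrives with each embedding, on the right it is derived from the index's embedder configs, so a document's vectors regenerate unless they were user-provided. A simplified sketch of that lookup; `EmbeddingConfig` here is a stand-in for the milli type, which tracks user-provided documents in a RoaringBitmap rather than a BTreeSet:

```rust
use std::collections::BTreeSet;

// Stand-in for milli's embedder config: which documents carry
// user-provided vectors for this embedder.
struct EmbeddingConfig {
    name: String,
    user_provided: BTreeSet<u32>,
}

// A document's vectors are regenerated unless the user provided them.
fn regenerate_for(configs: &[EmbeddingConfig], embedder_name: &str, docid: u32) -> bool {
    let user_provided = configs
        .iter()
        .find(|conf| conf.name == embedder_name)
        .is_some_and(|conf| conf.user_provided.contains(&docid));
    !user_provided
}
```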
@@ -1,367 +0,0 @@
-use std::collections::BTreeMap;
-use std::io::{self, Write as _};
-use std::sync::atomic;
-use std::time::Duration;
-
-use backoff::ExponentialBackoff;
-use byte_unit::Byte;
-use flate2::write::GzEncoder;
-use flate2::Compression;
-use meilisearch_types::index_uid_pattern::IndexUidPattern;
-use meilisearch_types::milli::constants::RESERVED_VECTORS_FIELD_NAME;
-use meilisearch_types::milli::progress::{Progress, VariableNameStep};
-use meilisearch_types::milli::update::{request_threads, Setting};
-use meilisearch_types::milli::vector::parsed_vectors::{ExplicitVectors, VectorOrArrayOfVectors};
-use meilisearch_types::milli::{self, obkv_to_json, Filter, InternalError};
-use meilisearch_types::settings::{self, SecretPolicy};
-use meilisearch_types::tasks::{DetailsExportIndexSettings, ExportIndexSettings};
-use serde::Deserialize;
-use ureq::{json, Response};
-
-use super::MustStopProcessing;
-use crate::processing::AtomicDocumentStep;
-use crate::{Error, IndexScheduler, Result};
-
-impl IndexScheduler {
-    pub(super) fn process_export(
-        &self,
-        base_url: &str,
-        api_key: Option<&str>,
-        payload_size: Option<&Byte>,
-        indexes: &BTreeMap<IndexUidPattern, ExportIndexSettings>,
-        progress: Progress,
-    ) -> Result<BTreeMap<IndexUidPattern, DetailsExportIndexSettings>> {
-        #[cfg(test)]
-        self.maybe_fail(crate::test_utils::FailureLocation::ProcessExport)?;
-
-        let indexes: Vec<_> = self
-            .index_names()?
-            .into_iter()
-            .flat_map(|uid| {
-                indexes
-                    .iter()
-                    .find(|(pattern, _)| pattern.matches_str(&uid))
-                    .map(|(pattern, settings)| (pattern, uid, settings))
-            })
-            .collect();
-
-        let mut output = BTreeMap::new();
-        let agent = ureq::AgentBuilder::new().timeout(Duration::from_secs(5)).build();
-        let must_stop_processing = self.scheduler.must_stop_processing.clone();
-        for (i, (_pattern, uid, export_settings)) in indexes.iter().enumerate() {
-            if must_stop_processing.get() {
-                return Err(Error::AbortedTask);
-            }
-
-            progress.update_progress(VariableNameStep::<ExportIndex>::new(
-                format!("Exporting index `{uid}`"),
-                i as u32,
-                indexes.len() as u32,
-            ));
-
-            let ExportIndexSettings { filter, override_settings } = export_settings;
-            let index = self.index(uid)?;
-            let index_rtxn = index.read_txn()?;
-            let bearer = api_key.map(|api_key| format!("Bearer {api_key}"));
-
-            // First, check if the index already exists
-            let url = format!("{base_url}/indexes/{uid}");
-            let response = retry(&must_stop_processing, || {
-                let mut request = agent.get(&url);
-                if let Some(bearer) = &bearer {
-                    request = request.set("Authorization", bearer);
-                }
-
-                request.send_bytes(Default::default()).map_err(into_backoff_error)
-            });
-            let index_exists = match response {
-                Ok(response) => response.status() == 200,
-                Err(Error::FromRemoteWhenExporting { code, .. }) if code == "index_not_found" => {
-                    false
-                }
-                Err(e) => return Err(e),
-            };
-
-            let primary_key = index
-                .primary_key(&index_rtxn)
-                .map_err(|e| Error::from_milli(e.into(), Some(uid.to_string())))?;
-
-            // Create the index
-            if !index_exists {
-                let url = format!("{base_url}/indexes");
-                retry(&must_stop_processing, || {
-                    let mut request = agent.post(&url);
-                    if let Some(bearer) = &bearer {
-                        request = request.set("Authorization", bearer);
-                    }
-                    let index_param = json!({ "uid": uid, "primaryKey": primary_key });
-                    request.send_json(&index_param).map_err(into_backoff_error)
-                })?;
-            }
-
-            // Patch the index primary key
-            if index_exists && *override_settings {
-                let url = format!("{base_url}/indexes/{uid}");
-                retry(&must_stop_processing, || {
-                    let mut request = agent.patch(&url);
-                    if let Some(bearer) = &bearer {
-                        request = request.set("Authorization", bearer);
-                    }
-                    let index_param = json!({ "primaryKey": primary_key });
-                    request.send_json(&index_param).map_err(into_backoff_error)
-                })?;
-            }
-
-            // Send the index settings
-            if !index_exists || *override_settings {
-                let mut settings =
-                    settings::settings(&index, &index_rtxn, SecretPolicy::RevealSecrets)
-                        .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
-                // Remove the experimental chat setting if not enabled
-                if self.features().check_chat_completions("exporting chat settings").is_err() {
-                    settings.chat = Setting::NotSet;
-                }
-                // Retry logic for sending settings
-                let url = format!("{base_url}/indexes/{uid}/settings");
-                retry(&must_stop_processing, || {
-                    let mut request = agent.patch(&url);
-                    if let Some(bearer) = bearer.as_ref() {
-                        request = request.set("Authorization", bearer);
-                    }
-                    request.send_json(settings.clone()).map_err(into_backoff_error)
-                })?;
-            }
-
-            let filter = filter
-                .as_ref()
-                .map(Filter::from_json)
-                .transpose()
-                .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?
-                .flatten();
-
-            let filter_universe = filter
-                .map(|f| f.evaluate(&index_rtxn, &index))
-                .transpose()
-                .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
-            let whole_universe = index
-                .documents_ids(&index_rtxn)
-                .map_err(|e| Error::from_milli(e.into(), Some(uid.to_string())))?;
-            let universe = filter_universe.unwrap_or(whole_universe);
-
-            let fields_ids_map = index.fields_ids_map(&index_rtxn)?;
-            let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
-
-            // We don't need to keep this one alive as we will
-            // spawn many threads to process the documents
-            drop(index_rtxn);
-
-            let total_documents = universe.len() as u32;
-            let (step, progress_step) = AtomicDocumentStep::new(total_documents);
-            progress.update_progress(progress_step);
-
-            output.insert(
-                IndexUidPattern::new_unchecked(uid.clone()),
-                DetailsExportIndexSettings {
-                    settings: (*export_settings).clone(),
-                    matched_documents: Some(total_documents as u64),
-                },
-            );
-
-            let limit = payload_size.map(|ps| ps.as_u64() as usize).unwrap_or(20 * 1024 * 1024); // defaults to 20 MiB
-            let documents_url = format!("{base_url}/indexes/{uid}/documents");
-
-            let results = request_threads()
-                .broadcast(|ctx| {
-                    let index_rtxn = index
-                        .read_txn()
-                        .map_err(|e| Error::from_milli(e.into(), Some(uid.to_string())))?;
-
-                    let mut buffer = Vec::new();
-                    let mut tmp_buffer = Vec::new();
-                    let mut compressed_buffer = Vec::new();
-                    for (i, docid) in universe.iter().enumerate() {
-                        if i % ctx.num_threads() != ctx.index() {
-                            continue;
-                        }
-
-                        let document = index
-                            .document(&index_rtxn, docid)
-                            .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
-
-                        let mut document = obkv_to_json(&all_fields, &fields_ids_map, document)
-                            .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
-
-                        // TODO definitely factorize this code
-                        'inject_vectors: {
-                            let embeddings = index
-                                .embeddings(&index_rtxn, docid)
-                                .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
-
-                            if embeddings.is_empty() {
-                                break 'inject_vectors;
-                            }
-
-                            let vectors = document
-                                .entry(RESERVED_VECTORS_FIELD_NAME)
-                                .or_insert(serde_json::Value::Object(Default::default()));
-
-                            let serde_json::Value::Object(vectors) = vectors else {
-                                return Err(Error::from_milli(
-                                    milli::Error::UserError(
-                                        milli::UserError::InvalidVectorsMapType {
-                                            document_id: {
-                                                if let Ok(Some(Ok(index))) = index
-                                                    .external_id_of(
-                                                        &index_rtxn,
-                                                        std::iter::once(docid),
-                                                    )
-                                                    .map(|it| it.into_iter().next())
-                                                {
-                                                    index
-                                                } else {
-                                                    format!("internal docid={docid}")
-                                                }
-                                            },
-                                            value: vectors.clone(),
-                                        },
-                                    ),
-                                    Some(uid.to_string()),
-                                ));
-                            };
-
-                            for (embedder_name, (embeddings, regenerate)) in embeddings {
-                                let embeddings = ExplicitVectors {
-                                    embeddings: Some(
-                                        VectorOrArrayOfVectors::from_array_of_vectors(embeddings),
-                                    ),
-                                    regenerate,
-                                };
-                                vectors.insert(
-                                    embedder_name,
-                                    serde_json::to_value(embeddings).unwrap(),
-                                );
-                            }
-                        }
-
-                        tmp_buffer.clear();
-                        serde_json::to_writer(&mut tmp_buffer, &document)
-                            .map_err(milli::InternalError::from)
-                            .map_err(|e| Error::from_milli(e.into(), Some(uid.to_string())))?;
-
-                        // Make sure we put at least one document in the buffer even
-                        // though we might go above the buffer limit before sending
-                        if !buffer.is_empty() && buffer.len() + tmp_buffer.len() > limit {
-                            // We compress the documents before sending them
-                            let mut encoder =
-                                GzEncoder::new(&mut compressed_buffer, Compression::default());
-                            encoder
-                                .write_all(&buffer)
-                                .map_err(|e| Error::from_milli(e.into(), Some(uid.clone())))?;
-                            encoder
-                                .finish()
-                                .map_err(|e| Error::from_milli(e.into(), Some(uid.clone())))?;
-
-                            retry(&must_stop_processing, || {
-                                let mut request = agent.post(&documents_url);
-                                request = request.set("Content-Type", "application/x-ndjson");
-                                request = request.set("Content-Encoding", "gzip");
-                                if let Some(bearer) = &bearer {
-                                    request = request.set("Authorization", bearer);
-                                }
-                                request.send_bytes(&compressed_buffer).map_err(into_backoff_error)
-                            })?;
-                            buffer.clear();
-                            compressed_buffer.clear();
-                        }
-                        buffer.extend_from_slice(&tmp_buffer);
-
-                        if i > 0 && i % 100 == 0 {
-                            step.fetch_add(100, atomic::Ordering::Relaxed);
-                        }
-                    }
-
-                    retry(&must_stop_processing, || {
-                        let mut request = agent.post(&documents_url);
-                        request = request.set("Content-Type", "application/x-ndjson");
-                        if let Some(bearer) = &bearer {
-                            request = request.set("Authorization", bearer);
-                        }
-                        request.send_bytes(&buffer).map_err(into_backoff_error)
-                    })?;
-
-                    Ok(())
-                })
-                .map_err(|e| {
-                    Error::from_milli(
-                        milli::Error::InternalError(InternalError::PanicInThreadPool(e)),
-                        Some(uid.to_string()),
-                    )
-                })?;
-            for result in results {
-                result?;
-            }
-
-            step.store(total_documents, atomic::Ordering::Relaxed);
-        }
-
-        Ok(output)
-    }
-}
-
-fn retry<F>(must_stop_processing: &MustStopProcessing, send_request: F) -> Result<ureq::Response>
-where
-    F: Fn() -> Result<ureq::Response, backoff::Error<ureq::Error>>,
-{
-    match backoff::retry(ExponentialBackoff::default(), || {
-        if must_stop_processing.get() {
-            return Err(backoff::Error::Permanent(ureq::Error::Status(
-                u16::MAX,
-                // 444: Connection Closed Without Response
-                Response::new(444, "Abort", "Aborted task").unwrap(),
-            )));
-        }
-        send_request()
-    }) {
-        Ok(response) => Ok(response),
-        Err(backoff::Error::Permanent(e)) => Err(ureq_error_into_error(e)),
-        Err(backoff::Error::Transient { err, retry_after: _ }) => Err(ureq_error_into_error(err)),
-    }
-}
-
-fn into_backoff_error(err: ureq::Error) -> backoff::Error<ureq::Error> {
-    match err {
-        // Those code status must trigger an automatic retry
-        // <https://www.restapitutorial.com/advanced/responses/retries>
-        ureq::Error::Status(408 | 429 | 500 | 502 | 503 | 504, _) => {
-            backoff::Error::Transient { err, retry_after: None }
-        }
-        ureq::Error::Status(_, _) => backoff::Error::Permanent(err),
-        ureq::Error::Transport(_) => backoff::Error::Transient { err, retry_after: None },
-    }
-}
-
-/// Converts a `ureq::Error` into an `Error`.
-fn ureq_error_into_error(error: ureq::Error) -> Error {
-    #[derive(Deserialize)]
-    struct MeiliError {
-        message: String,
-        code: String,
-        r#type: String,
-        link: String,
-    }
-
-    match error {
-        // This is a workaround to handle task abortion - the error propagation path
-        // makes it difficult to cleanly surface the abortion at this level.
-        ureq::Error::Status(u16::MAX, _) => Error::AbortedTask,
-        ureq::Error::Status(_, response) => match response.into_json() {
-            Ok(MeiliError { message, code, r#type, link }) => {
-                Error::FromRemoteWhenExporting { message, code, r#type, link }
-            }
-            Err(e) => e.into(),
-        },
-        ureq::Error::Transport(transport) => io::Error::new(io::ErrorKind::Other, transport).into(),
-    }
-}
-
-enum ExportIndex {}
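The deleted file's `retry`/`into_backoff_error` helpers encode a conventional HTTP retry policy: 408, 429, and 5xx responses, plus transport failures, are transient and retried with exponential backoff, while everything else is permanent. A runnable reduction using the same backoff crate, with a bare status code standing in for `ureq::Error` (note that `retry` sleeps between attempts):

```rust
use backoff::{retry, Error as BackoffError, ExponentialBackoff};

// Same classification as the deleted `into_backoff_error`:
// only these statuses are worth retrying.
fn classify(status: u16) -> BackoffError<u16> {
    match status {
        408 | 429 | 500 | 502 | 503 | 504 => BackoffError::transient(status),
        _ => BackoffError::permanent(status),
    }
}

fn main() {
    let mut calls = 0;
    // Pretend the remote answers 503 twice, then succeeds on the third try.
    let result = retry(ExponentialBackoff::default(), || {
        calls += 1;
        if calls < 3 { Err(classify(503)) } else { Ok(calls) }
    });
    assert_eq!(result.unwrap(), 3);
}
```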
@@ -1,13 +1,11 @@
-use std::sync::Arc;
-
 use bumpalo::collections::CollectIn;
 use bumpalo::Bump;
 use meilisearch_types::heed::RwTxn;
 use meilisearch_types::milli::documents::PrimaryKey;
-use meilisearch_types::milli::progress::{EmbedderStats, Progress};
+use meilisearch_types::milli::progress::Progress;
 use meilisearch_types::milli::update::new::indexer::{self, UpdateByFunction};
 use meilisearch_types::milli::update::DocumentAdditionResult;
-use meilisearch_types::milli::{self, ChannelCongestion, Filter};
+use meilisearch_types::milli::{self, ChannelCongestion, Filter, ThreadPoolNoAbortBuilder};
 use meilisearch_types::settings::apply_settings_to_builder;
 use meilisearch_types::tasks::{Details, KindWithContent, Status, Task};
 use meilisearch_types::Index;
@@ -26,7 +24,7 @@ impl IndexScheduler {
     /// The list of processed tasks.
     #[tracing::instrument(
         level = "trace",
-        skip(self, index_wtxn, index, progress, embedder_stats),
+        skip(self, index_wtxn, index, progress),
         target = "indexing::scheduler"
     )]
     pub(crate) fn apply_index_operation<'i>(
@@ -35,7 +33,6 @@ impl IndexScheduler {
         index: &'i Index,
         operation: IndexOperation,
         progress: &Progress,
-        embedder_stats: Arc<EmbedderStats>,
     ) -> Result<(Vec<Task>, Option<ChannelCongestion>)> {
         let indexer_alloc = Bump::new();
         let started_processing_at = std::time::Instant::now();
@@ -89,9 +86,8 @@ impl IndexScheduler {
                 let mut content_files_iter = content_files.iter();
                 let mut indexer = indexer::DocumentOperation::new();
                 let embedders = index
-                    .embedding_configs()
                     .embedding_configs(index_wtxn)
-                    .map_err(|e| Error::from_milli(e.into(), Some(index_uid.clone())))?;
+                    .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;
                 let embedders = self.embedders(index_uid.clone(), embedders)?;
                 for operation in operations {
                     match operation {
@@ -117,8 +113,18 @@ impl IndexScheduler {
                     }
                 }

+                let local_pool;
                 let indexer_config = self.index_mapper.indexer_config();
-                let pool = &indexer_config.thread_pool;
+                let pool = match &indexer_config.thread_pool {
+                    Some(pool) => pool,
+                    None => {
+                        local_pool = ThreadPoolNoAbortBuilder::new()
+                            .thread_name(|i| format!("indexing-thread-{i}"))
+                            .build()
+                            .unwrap();
+                        &local_pool
+                    }
+                };

                 progress.update_progress(DocumentOperationProgress::ComputingDocumentChanges);
                 let (document_changes, operation_stats, primary_key) = indexer
@@ -181,7 +187,6 @@ impl IndexScheduler {
                             embedders,
                             &|| must_stop_processing.get(),
                             progress,
-                            &embedder_stats,
                         )
                         .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?,
                 );
@@ -261,8 +266,18 @@ impl IndexScheduler {

                 let mut congestion = None;
                 if task.error.is_none() {
+                    let local_pool;
                     let indexer_config = self.index_mapper.indexer_config();
-                    let pool = &indexer_config.thread_pool;
+                    let pool = match &indexer_config.thread_pool {
+                        Some(pool) => pool,
+                        None => {
+                            local_pool = ThreadPoolNoAbortBuilder::new()
+                                .thread_name(|i| format!("indexing-thread-{i}"))
+                                .build()
+                                .unwrap();
+                            &local_pool
+                        }
+                    };

                     let candidates_count = candidates.len();
                     progress.update_progress(DocumentEditionProgress::ComputingDocumentChanges);
@@ -275,9 +290,8 @@ impl IndexScheduler {
                     })
                     .unwrap()?;
                     let embedders = index
-                        .embedding_configs()
                         .embedding_configs(index_wtxn)
-                        .map_err(|err| Error::from_milli(err.into(), Some(index_uid.clone())))?;
+                        .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;
                     let embedders = self.embedders(index_uid.clone(), embedders)?;

                     progress.update_progress(DocumentEditionProgress::Indexing);
@@ -294,7 +308,6 @@ impl IndexScheduler {
                             embedders,
                             &|| must_stop_processing.get(),
                             progress,
-                            &embedder_stats,
                         )
                         .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?,
                     );
@@ -416,8 +429,18 @@ impl IndexScheduler {

                 let mut congestion = None;
                 if !tasks.iter().all(|res| res.error.is_some()) {
+                    let local_pool;
                     let indexer_config = self.index_mapper.indexer_config();
-                    let pool = &indexer_config.thread_pool;
+                    let pool = match &indexer_config.thread_pool {
+                        Some(pool) => pool,
+                        None => {
+                            local_pool = ThreadPoolNoAbortBuilder::new()
+                                .thread_name(|i| format!("indexing-thread-{i}"))
+                                .build()
+                                .unwrap();
+                            &local_pool
+                        }
+                    };

                     progress.update_progress(DocumentDeletionProgress::DeleteDocuments);
                     let mut indexer = indexer::DocumentDeletion::new();
@@ -425,9 +448,8 @@ impl IndexScheduler {
                     indexer.delete_documents_by_docids(to_delete);
                     let document_changes = indexer.into_changes(&indexer_alloc, primary_key);
                     let embedders = index
-                        .embedding_configs()
                         .embedding_configs(index_wtxn)
-                        .map_err(|err| Error::from_milli(err.into(), Some(index_uid.clone())))?;
+                        .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;
                     let embedders = self.embedders(index_uid.clone(), embedders)?;

                     progress.update_progress(DocumentDeletionProgress::Indexing);
@@ -444,7 +466,6 @@ impl IndexScheduler {
                             embedders,
                             &|| must_stop_processing.get(),
                             progress,
-                            &embedder_stats,
                         )
                         .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?,
                     );
@@ -477,11 +498,14 @@ impl IndexScheduler {
                 }

                 progress.update_progress(SettingsProgress::ApplyTheSettings);
-                let congestion = builder
-                    .execute(&|| must_stop_processing.get(), progress, embedder_stats)
+                builder
+                    .execute(
+                        |indexing_step| tracing::debug!(update = ?indexing_step),
+                        || must_stop_processing.get(),
+                    )
                     .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;

-                Ok((tasks, congestion))
+                Ok((tasks, None))
             }
             IndexOperation::DocumentClearAndSetting {
                 index_uid,
@@ -497,7 +521,6 @@ impl IndexScheduler {
                         tasks: cleared_tasks,
                     },
                     progress,
-                    embedder_stats.clone(),
                 )?;

                 let (settings_tasks, _congestion) = self.apply_index_operation(
@@ -505,7 +528,6 @@ impl IndexScheduler {
                     index,
                     IndexOperation::Settings { index_uid, settings, tasks: settings_tasks },
                     progress,
-                    embedder_stats,
                 )?;

                 let mut tasks = settings_tasks;
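The three identical `local_pool` hunks add a deferred-initialization pattern: when the indexer config carries no thread pool, a fallback pool is built on the spot, and declaring `let local_pool;` before the `match` lets the reference created in the `None` arm outlive the arm itself. A sketch with a plain rayon pool standing in for milli's `ThreadPoolNoAbort`:

```rust
use rayon::{ThreadPool, ThreadPoolBuilder};

// `local_pool` is only assigned in the `None` arm, but because it is
// declared in the enclosing scope, `&local_pool` stays valid after the match.
fn run_on_pool(configured: Option<&ThreadPool>) {
    let local_pool;
    let pool = match configured {
        Some(pool) => pool,
        None => {
            local_pool = ThreadPoolBuilder::new()
                .thread_name(|i| format!("indexing-thread-{i}"))
                .build()
                .expect("failed to build the fallback indexing pool");
            &local_pool
        }
    };
    pool.install(|| { /* indexing work runs inside the chosen pool */ });
}
```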
@@ -41,12 +41,7 @@ impl IndexScheduler {
         progress.update_progress(SnapshotCreationProgress::SnapshotTheIndexScheduler);
         let dst = temp_snapshot_dir.path().join("tasks");
         fs::create_dir_all(&dst)?;
-        let compaction_option = if self.scheduler.experimental_no_snapshot_compaction {
-            CompactionOption::Disabled
-        } else {
-            CompactionOption::Enabled
-        };
-        self.env.copy_to_path(dst.join("data.mdb"), compaction_option)?;
+        self.env.copy_to_path(dst.join("data.mdb"), CompactionOption::Disabled)?;

         // 2.2 Create a read transaction on the index-scheduler
         let rtxn = self.env.read_txn()?;
@@ -85,7 +80,7 @@ impl IndexScheduler {
             let dst = temp_snapshot_dir.path().join("indexes").join(uuid.to_string());
             fs::create_dir_all(&dst)?;
             index
-                .copy_to_path(dst.join("data.mdb"), compaction_option)
+                .copy_to_path(dst.join("data.mdb"), CompactionOption::Disabled)
                 .map_err(|e| Error::from_milli(e, Some(name.to_string())))?;
         }

@@ -95,7 +90,7 @@ impl IndexScheduler {
         progress.update_progress(SnapshotCreationProgress::SnapshotTheApiKeys);
         let dst = temp_snapshot_dir.path().join("auth");
         fs::create_dir_all(&dst)?;
-        self.scheduler.auth_env.copy_to_path(dst.join("data.mdb"), compaction_option)?;
+        self.scheduler.auth_env.copy_to_path(dst.join("data.mdb"), CompactionOption::Disabled)?;

         // 5. Copy and tarball the flat snapshot
         progress.update_progress(SnapshotCreationProgress::CreateTheTarball);
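These hunks replace a configurable compaction choice with a hard-coded `CompactionOption::Disabled`. For reference, the removed toggle boils down to the function below; compaction defragments the LMDB file while copying, trading CPU time for a smaller snapshot (the import path is assumed from the surrounding crate):

```rust
use meilisearch_types::heed::CompactionOption; // path assumed

// Mirror of the removed `compaction_option` binding: the experimental flag
// turns snapshot compaction off.
fn compaction_for(experimental_no_snapshot_compaction: bool) -> CompactionOption {
    if experimental_no_snapshot_compaction {
        CompactionOption::Disabled
    } else {
        CompactionOption::Enabled
    }
}
```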
@@ -1,17 +0,0 @@
----
-source: crates/index-scheduler/src/scheduler/test.rs
-expression: config.embedder_options
----
-{
-  "Rest": {
-    "api_key": "My super secret",
-    "distribution": null,
-    "dimensions": 4,
-    "url": "http://localhost:7777",
-    "request": "{{text}}",
-    "search_fragments": {},
-    "indexing_fragments": {},
-    "response": "{{embedding}}",
-    "headers": {}
-  }
-}
@@ -1,12 +0,0 @@
----
-source: crates/index-scheduler/src/scheduler/test_embedders.rs
-expression: simple_hf_config.embedder_options
----
-{
-  "HuggingFace": {
-    "model": "sentence-transformers/all-MiniLM-L6-v2",
-    "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
-    "distribution": null,
-    "pooling": "useModel"
-  }
-}
@@ -1,15 +0,0 @@
----
-source: crates/index-scheduler/src/scheduler/test_embedders.rs
-expression: doc
----
-{
-  "doggo": "Intel",
-  "breed": "beagle",
-  "_vectors": {
-    "noise": [
-      0.1,
-      0.2,
-      0.3
-    ]
-  }
-}
@@ -1,15 +0,0 @@
----
-source: crates/index-scheduler/src/scheduler/test_embedders.rs
-expression: doc
----
-{
-  "doggo": "kefir",
-  "breed": "patou",
-  "_vectors": {
-    "noise": [
-      0.1,
-      0.2,
-      0.3
-    ]
-  }
-}
@@ -1,17 +1,12 @@
 ---
 source: crates/index-scheduler/src/scheduler/test_embedders.rs
-expression: fakerest_config.embedder_options
+expression: simple_hf_config.embedder_options
 ---
 {
-  "Rest": {
-    "api_key": "My super secret",
+  "HuggingFace": {
+    "model": "sentence-transformers/all-MiniLM-L6-v2",
+    "revision": "e4ce9877abf3edfe10b0d82785e83bdcb973e22e",
     "distribution": null,
-    "dimensions": 384,
-    "url": "http://localhost:7777",
-    "request": "{{text}}",
-    "search_fragments": {},
-    "indexing_fragments": {},
-    "response": "{{embedding}}",
-    "headers": {}
+    "pooling": "useModel"
   }
 }
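The four deleted files and the rewritten one are insta snapshots; the `source:`/`expression:` header names the test that produced each of them. A minimal sketch of how such a snapshot is asserted, assuming insta with its `json` feature (the test name is hypothetical and the values are copied from the rewritten snapshot above); `cargo insta review` then accepts or rejects the resulting diff:

```rust
#[test]
fn embedder_options_are_stable() {
    // Values copied from the rewritten snapshot above.
    let embedder_options = serde_json::json!({
        "HuggingFace": {
            "model": "sentence-transformers/all-MiniLM-L6-v2",
            "pooling": "useModel"
        }
    });
    insta::assert_json_snapshot!(embedder_options);
}
```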
@ -39,7 +39,7 @@ catto [0,]
|
|||||||
[timestamp] [0,1,]
|
[timestamp] [0,1,]
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### All Batches:
|
### All Batches:
|
||||||
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":0,"matchedTasks":1,"canceledTasks":1,"originalFilter":"test_query"}, stats: {"totalNbTasks":2,"status":{"succeeded":1,"canceled":1},"types":{"documentAdditionOrUpdate":1,"taskCancelation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `taskCancelation` that cannot be batched with any other task.", }
|
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":0,"matchedTasks":1,"canceledTasks":1,"originalFilter":"test_query"}, stats: {"totalNbTasks":2,"status":{"succeeded":1,"canceled":1},"types":{"documentAdditionOrUpdate":1,"taskCancelation":1},"indexUids":{"catto":1}}, stop reason: "task with id 1 of type `taskCancelation` cannot be batched", }
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### Batch to tasks mapping:
|
### Batch to tasks mapping:
|
||||||
0 [0,1,]
|
0 [0,1,]
|
||||||
|
@ -50,7 +50,7 @@ catto: { number_of_documents: 1, field_distribution: {"id": 1} }
|
|||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### All Batches:
|
### All Batches:
|
||||||
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":1}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"catto":1}}, stop reason: "batched all enqueued tasks for index `catto`", }
|
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":1}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"catto":1}}, stop reason: "batched all enqueued tasks for index `catto`", }
|
||||||
1 {uid: 1, details: {"receivedDocuments":2,"indexedDocuments":0,"matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query"}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"documentAdditionOrUpdate":2,"taskCancelation":1},"indexUids":{"beavero":1,"wolfo":1}}, stop reason: "created batch containing only task with id 3 of type `taskCancelation` that cannot be batched with any other task.", }
|
1 {uid: 1, details: {"receivedDocuments":2,"indexedDocuments":0,"matchedTasks":3,"canceledTasks":2,"originalFilter":"test_query"}, stats: {"totalNbTasks":3,"status":{"succeeded":1,"canceled":2},"types":{"documentAdditionOrUpdate":2,"taskCancelation":1},"indexUids":{"beavero":1,"wolfo":1}}, stop reason: "task with id 3 of type `taskCancelation` cannot be batched", }
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### Batch to tasks mapping:
|
### Batch to tasks mapping:
|
||||||
0 [0,]
|
0 [0,]
|
||||||
|
@ -38,7 +38,7 @@ canceled [0,]
|
|||||||
[timestamp] [0,1,]
|
[timestamp] [0,1,]
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### All Batches:
|
### All Batches:
|
||||||
0 {uid: 0, details: {"matchedTasks":1,"canceledTasks":1,"originalFilter":"cancel dump"}, stats: {"totalNbTasks":2,"status":{"succeeded":1,"canceled":1},"types":{"taskCancelation":1,"dumpCreation":1},"indexUids":{}}, stop reason: "created batch containing only task with id 1 of type `taskCancelation` that cannot be batched with any other task.", }
|
0 {uid: 0, details: {"matchedTasks":1,"canceledTasks":1,"originalFilter":"cancel dump"}, stats: {"totalNbTasks":2,"status":{"succeeded":1,"canceled":1},"types":{"taskCancelation":1,"dumpCreation":1},"indexUids":{}}, stop reason: "task with id 1 of type `taskCancelation` cannot be batched", }
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### Batch to tasks mapping:
|
### Batch to tasks mapping:
|
||||||
0 [0,1,]
|
0 [0,1,]
|
||||||
|
@ -4,7 +4,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
|
|||||||
### Autobatching Enabled = true
|
### Autobatching Enabled = true
|
||||||
### Processing batch Some(0):
|
### Processing batch Some(0):
|
||||||
[0,]
|
[0,]
|
||||||
{uid: 0, details: {"dumpUid":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"dumpCreation":1},"indexUids":{}}, stop reason: "created batch containing only task with id 0 of type `dumpCreation` that cannot be batched with any other task.", }
|
{uid: 0, details: {"dumpUid":null}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"dumpCreation":1},"indexUids":{}}, stop reason: "task with id 0 of type `dumpCreation` cannot be batched", }
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### All Tasks:
|
### All Tasks:
|
||||||
0 {uid: 0, status: enqueued, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
|
0 {uid: 0, status: enqueued, details: { dump_uid: None }, kind: DumpCreation { keys: [], instance_uid: None }}
|
||||||
|
@ -40,7 +40,7 @@ catto: { number_of_documents: 0, field_distribution: {} }
|
|||||||
[timestamp] [0,1,]
|
[timestamp] [0,1,]
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### All Batches:
|
### All Batches:
|
||||||
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":0,"matchedTasks":1,"canceledTasks":1,"originalFilter":"test_query"}, stats: {"totalNbTasks":2,"status":{"succeeded":1,"canceled":1},"types":{"documentAdditionOrUpdate":1,"taskCancelation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `taskCancelation` that cannot be batched with any other task.", }
|
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":0,"matchedTasks":1,"canceledTasks":1,"originalFilter":"test_query"}, stats: {"totalNbTasks":2,"status":{"succeeded":1,"canceled":1},"types":{"documentAdditionOrUpdate":1,"taskCancelation":1},"indexUids":{"catto":1}}, stop reason: "task with id 1 of type `taskCancelation` cannot be batched", }
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### Batch to tasks mapping:
|
### Batch to tasks mapping:
|
||||||
0 [0,1,]
|
0 [0,1,]
|
||||||
|
@ -41,7 +41,7 @@ catto: { number_of_documents: 1, field_distribution: {"id": 1} }
|
|||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### All Batches:
|
### All Batches:
|
||||||
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":1}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"catto":1}}, stop reason: "batched all enqueued tasks", }
|
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":1}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentAdditionOrUpdate":1},"indexUids":{"catto":1}}, stop reason: "batched all enqueued tasks", }
|
||||||
1 {uid: 1, details: {"matchedTasks":1,"canceledTasks":0,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"taskCancelation":1},"indexUids":{}}, stop reason: "created batch containing only task with id 1 of type `taskCancelation` that cannot be batched with any other task.", }
|
1 {uid: 1, details: {"matchedTasks":1,"canceledTasks":0,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"taskCancelation":1},"indexUids":{}}, stop reason: "task with id 1 of type `taskCancelation` cannot be batched", }
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### Batch to tasks mapping:
|
### Batch to tasks mapping:
|
||||||
0 [0,]
|
0 [0,]
|
||||||
|
@ -60,9 +60,9 @@ girafos: { number_of_documents: 0, field_distribution: {} }
|
|||||||
[timestamp] [5,]
|
[timestamp] [5,]
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### All Batches:
|
### All Batches:
|
||||||
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
|
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
|
||||||
1 {uid: 1, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"cattos":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
|
1 {uid: 1, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"cattos":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
|
||||||
2 {uid: 2, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"girafos":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
|
2 {uid: 2, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"girafos":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
|
||||||
3 {uid: 3, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "batched all enqueued tasks for index `doggos`", }
|
3 {uid: 3, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "batched all enqueued tasks for index `doggos`", }
|
||||||
4 {uid: 4, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"cattos":1}}, stop reason: "batched all enqueued tasks for index `cattos`", }
|
4 {uid: 4, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"cattos":1}}, stop reason: "batched all enqueued tasks for index `cattos`", }
|
||||||
5 {uid: 5, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"girafos":1}}, stop reason: "batched all enqueued tasks", }
|
5 {uid: 5, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"girafos":1}}, stop reason: "batched all enqueued tasks", }
|
||||||
|
@ -41,7 +41,7 @@ doggos: { number_of_documents: 0, field_distribution: {} }
|
|||||||
[timestamp] [0,]
|
[timestamp] [0,]
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### All Batches:
|
### All Batches:
|
||||||
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
|
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### Batch to tasks mapping:
|
### Batch to tasks mapping:
|
||||||
0 [0,]
|
0 [0,]
|
||||||
|
@ -42,8 +42,8 @@ doggos [0,1,2,]
[timestamp] [1,2,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"receivedDocuments":1,"indexedDocuments":0,"deletedDocuments":0}, stats: {"totalNbTasks":2,"status":{"succeeded":2},"types":{"documentAdditionOrUpdate":1,"indexDeletion":1},"indexUids":{"doggos":2}}, stop reason: "stopped after task with id 2 because it deletes the index", }
1 {uid: 1, details: {"receivedDocuments":1,"indexedDocuments":0,"deletedDocuments":0}, stats: {"totalNbTasks":2,"status":{"succeeded":2},"types":{"documentAdditionOrUpdate":1,"indexDeletion":1},"indexUids":{"doggos":2}}, stop reason: "task with id 2 deletes the index", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -37,7 +37,7 @@ doggos [0,1,]
[timestamp] [0,1,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":0,"deletedDocuments":0}, stats: {"totalNbTasks":2,"status":{"succeeded":2},"types":{"documentAdditionOrUpdate":1,"indexDeletion":1},"indexUids":{"doggos":2}}, stop reason: "stopped after task with id 1 because it deletes the index", }
0 {uid: 0, details: {"receivedDocuments":1,"indexedDocuments":0,"deletedDocuments":0}, stats: {"totalNbTasks":2,"status":{"succeeded":2},"types":{"documentAdditionOrUpdate":1,"indexDeletion":1},"indexUids":{"doggos":2}}, stop reason: "task with id 1 deletes the index", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,1,]
@ -4,7 +4,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
### Autobatching Enabled = true
### Processing batch Some(0):
[0,]
{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
@ -4,7 +4,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
### Autobatching Enabled = true
### Processing batch Some(0):
[0,]
{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
@ -4,7 +4,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
### Autobatching Enabled = true
### Processing batch Some(0):
[0,]
{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
{uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"indexCreation":1},"indexUids":{"index_a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
@ -41,7 +41,7 @@ doggos: { number_of_documents: 0, field_distribution: {} }
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -44,8 +44,8 @@ doggos: { number_of_documents: 0, field_distribution: {} }
[timestamp] [1,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"cattos":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"cattos":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -45,9 +45,9 @@ cattos: { number_of_documents: 0, field_distribution: {} }
[timestamp] [2,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"cattos":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"cattos":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexDeletion":1},"indexUids":{"doggos":1}}, stop reason: "stopped after task with id 2 because it deletes the index", }
2 {uid: 2, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexDeletion":1},"indexUids":{"doggos":1}}, stop reason: "task with id 2 deletes the index", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -42,7 +42,7 @@ doggos: { number_of_documents: 0, field_distribution: {} }
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -48,9 +48,9 @@ doggos: { number_of_documents: 0, field_distribution: {} }
[timestamp] [3,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "batched up to configured batch limit of 1 tasks", }
1 {uid: 1, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "reached configured batch limit of 1 tasks", }
2 {uid: 2, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "batched up to configured batch limit of 1 tasks", }
2 {uid: 2, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "reached configured batch limit of 1 tasks", }
3 {uid: 3, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "batched all enqueued tasks", }
----------------------------------------------------------------------
### Batch to tasks mapping:
@ -44,8 +44,8 @@ doggos: { number_of_documents: 0, field_distribution: {} }
[timestamp] [1,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "batched up to configured batch limit of 1 tasks", }
1 {uid: 1, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "reached configured batch limit of 1 tasks", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -46,9 +46,9 @@ doggos: { number_of_documents: 0, field_distribution: {} }
[timestamp] [2,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "batched up to configured batch limit of 1 tasks", }
1 {uid: 1, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "reached configured batch limit of 1 tasks", }
2 {uid: 2, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "batched up to configured batch limit of 1 tasks", }
2 {uid: 2, details: {"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "reached configured batch limit of 1 tasks", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -44,7 +44,7 @@ a: { number_of_documents: 0, field_distribution: {} }
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -47,8 +47,8 @@ b: { number_of_documents: 0, field_distribution: {} }
[timestamp] [1,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -50,9 +50,9 @@ c: { number_of_documents: 0, field_distribution: {} }
[timestamp] [2,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -53,10 +53,10 @@ d: { number_of_documents: 0, field_distribution: {} }
[timestamp] [3,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -60,11 +60,11 @@ d: { number_of_documents: 0, field_distribution: {} }
[timestamp] [4,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }
4 {uid: 4, details: {"swaps":[{"indexes":["a","b"]},{"indexes":["c","d"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 4 of type `indexSwap` that cannot be batched with any other task.", }
4 {uid: 4, details: {"swaps":[{"indexes":["a","b"]},{"indexes":["c","d"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "task with id 4 of type `indexSwap` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -56,10 +56,10 @@ d: { number_of_documents: 0, field_distribution: {} }
[timestamp] [3,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -62,12 +62,12 @@ d: { number_of_documents: 0, field_distribution: {} }
[timestamp] [5,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }
4 {uid: 4, details: {"swaps":[{"indexes":["a","b"]},{"indexes":["c","d"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 4 of type `indexSwap` that cannot be batched with any other task.", }
4 {uid: 4, details: {"swaps":[{"indexes":["a","b"]},{"indexes":["c","d"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "task with id 4 of type `indexSwap` cannot be batched", }
5 {uid: 5, details: {"swaps":[{"indexes":["a","c"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 5 of type `indexSwap` that cannot be batched with any other task.", }
5 {uid: 5, details: {"swaps":[{"indexes":["a","c"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "task with id 5 of type `indexSwap` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -66,13 +66,13 @@ d: { number_of_documents: 0, field_distribution: {} }
[timestamp] [6,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }
4 {uid: 4, details: {"swaps":[{"indexes":["a","b"]},{"indexes":["c","d"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 4 of type `indexSwap` that cannot be batched with any other task.", }
4 {uid: 4, details: {"swaps":[{"indexes":["a","b"]},{"indexes":["c","d"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "task with id 4 of type `indexSwap` cannot be batched", }
5 {uid: 5, details: {"swaps":[{"indexes":["a","c"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 5 of type `indexSwap` that cannot be batched with any other task.", }
5 {uid: 5, details: {"swaps":[{"indexes":["a","c"]}]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "task with id 5 of type `indexSwap` cannot be batched", }
6 {uid: 6, details: {"swaps":[]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 6 of type `indexSwap` that cannot be batched with any other task.", }
6 {uid: 6, details: {"swaps":[]}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "task with id 6 of type `indexSwap` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -58,10 +58,10 @@ d: { number_of_documents: 0, field_distribution: {} }
[timestamp] [3,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -53,10 +53,10 @@ d: { number_of_documents: 0, field_distribution: {} }
[timestamp] [3,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -61,11 +61,11 @@ d: { number_of_documents: 0, field_distribution: {} }
[timestamp] [4,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }
4 {uid: 4, details: {"swaps":[{"indexes":["a","b"]},{"indexes":["c","e"]},{"indexes":["d","f"]}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "created batch containing only task with id 4 of type `indexSwap` that cannot be batched with any other task.", }
4 {uid: 4, details: {"swaps":[{"indexes":["a","b"]},{"indexes":["c","e"]},{"indexes":["d","f"]}]}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexSwap":1},"indexUids":{}}, stop reason: "task with id 4 of type `indexSwap` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -53,10 +53,10 @@ d: { number_of_documents: 0, field_distribution: {} }
[timestamp] [3,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
0 {uid: 0, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"a":1}}, stop reason: "task with id 0 of type `indexCreation` cannot be batched", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
1 {uid: 1, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"b":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
2 {uid: 2, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"c":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
3 {uid: 3, details: {"primaryKey":"id"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"d":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@ -40,7 +40,7 @@ catto: { number_of_documents: 1, field_distribution: {"id": 1} }
[timestamp] [2,3,]
----------------------------------------------------------------------
### All Batches:
1 {uid: 1, details: {"matchedTasks":2,"deletedTasks":1,"originalFilter":"test_query&test_query"}, stats: {"totalNbTasks":2,"status":{"succeeded":2},"types":{"taskDeletion":2},"indexUids":{}}, stop reason: "stopped after the last task of type `taskDeletion` because they cannot be batched with tasks of any other type.", }
1 {uid: 1, details: {"matchedTasks":2,"deletedTasks":1,"originalFilter":"test_query&test_query"}, stats: {"totalNbTasks":2,"status":{"succeeded":2},"types":{"taskDeletion":2},"indexUids":{}}, stop reason: "a batch of tasks of type `taskDeletion` cannot be batched with any other type of task", }
----------------------------------------------------------------------
### Batch to tasks mapping:
1 [2,3,]
@ -38,7 +38,7 @@ catto: { number_of_documents: 1, field_distribution: {"id": 1} }
[timestamp] [2,]
----------------------------------------------------------------------
### All Batches:
1 {uid: 1, details: {"matchedTasks":1,"deletedTasks":1,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"taskDeletion":1},"indexUids":{}}, stop reason: "stopped after the last task of type `taskDeletion` because they cannot be batched with tasks of any other type.", }
1 {uid: 1, details: {"matchedTasks":1,"deletedTasks":1,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"taskDeletion":1},"indexUids":{}}, stop reason: "a batch of tasks of type `taskDeletion` cannot be batched with any other type of task", }
----------------------------------------------------------------------
### Batch to tasks mapping:
1 [2,]
@ -43,7 +43,7 @@ doggo [2,]
[timestamp] [3,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"matchedTasks":2,"deletedTasks":0,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"taskDeletion":1},"indexUids":{}}, stop reason: "stopped after the last task of type `taskDeletion` because they cannot be batched with tasks of any other type.", }
0 {uid: 0, details: {"matchedTasks":2,"deletedTasks":0,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"taskDeletion":1},"indexUids":{}}, stop reason: "a batch of tasks of type `taskDeletion` cannot be batched with any other type of task", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [3,]
@ -4,7 +4,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
### Autobatching Enabled = true
### Processing batch Some(0):
[3,]
{uid: 0, details: {"matchedTasks":2,"deletedTasks":null,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"taskDeletion":1},"indexUids":{}}, stop reason: "stopped after the last task of type `taskDeletion` because they cannot be batched with tasks of any other type.", }
{uid: 0, details: {"matchedTasks":2,"deletedTasks":null,"originalFilter":"test_query"}, stats: {"totalNbTasks":1,"status":{"processing":1},"types":{"taskDeletion":1},"indexUids":{}}, stop reason: "a batch of tasks of type `taskDeletion` cannot be batched with any other type of task", }
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
@ -1,63 +0,0 @@
---
source: crates/index-scheduler/src/scheduler/test.rs
---
### Autobatching Enabled = true
### Processing batch None:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [0,]
----------------------------------------------------------------------
### Kind:
"indexCreation" [0,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,]
----------------------------------------------------------------------
### Index Mapper:
doggos: { number_of_documents: 0, field_distribution: {} }

----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggos":1}}, stop reason: "created batch containing only task with id 0 of type `indexCreation` that cannot be batched with any other task.", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
----------------------------------------------------------------------
### Batches Status:
succeeded [0,]
----------------------------------------------------------------------
### Batches Kind:
"indexCreation" [0,]
----------------------------------------------------------------------
### Batches Index Tasks:
doggos [0,]
----------------------------------------------------------------------
### Batches Enqueued At:
[timestamp] [0,]
----------------------------------------------------------------------
### Batches Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Batches Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@ -1,51 +0,0 @@
---
source: crates/index-scheduler/src/scheduler/test.rs
---
### Autobatching Enabled = true
### Processing batch None:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { primary_key: None }, kind: IndexCreation { index_uid: "doggos", primary_key: None }}
----------------------------------------------------------------------
### Status:
enqueued [0,]
----------------------------------------------------------------------
### Kind:
"indexCreation" [0,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,]
----------------------------------------------------------------------
### Index Mapper:

----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### All Batches:
----------------------------------------------------------------------
### Batch to tasks mapping:
----------------------------------------------------------------------
### Batches Status:
----------------------------------------------------------------------
### Batches Kind:
----------------------------------------------------------------------
### Batches Index Tasks:
----------------------------------------------------------------------
### Batches Enqueued At:
----------------------------------------------------------------------
### Batches Started At:
----------------------------------------------------------------------
### Batches Finished At:
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, status: enqueued, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), indexing_fragments: NotSet, search_fragments: NotSet, request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, chat: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: NotSet, sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: Set({"default": Set(EmbeddingSettings { source: Set(Rest), model: NotSet, revision: NotSet, pooling: NotSet, api_key: Set("My super secret"), dimensions: Set(4), binary_quantized: NotSet, document_template: NotSet, document_template_max_bytes: NotSet, url: Set("http://localhost:7777"), request: Set(String("{{text}}")), response: Set(String("{{embedding}}")), headers: NotSet, search_embedder: NotSet, indexing_embedder: NotSet, distribution: NotSet })}), search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@@ -37,7 +37,7 @@ doggos [0,1,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"providedIds":2,"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "stopped batching before task with id 1 because its index creation rules differ from the ones from the batch", }
+0 {uid: 0, details: {"providedIds":2,"deletedDocuments":0}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"documentDeletion":1},"indexUids":{"doggos":1}}, stop reason: "task with id 1 has different index creation rules as in the batch", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
Some files were not shown because too many files have changed in this diff.