Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-07-21 05:41:01 +00:00)

Compare commits: prototype- ... prototype- (155 commits)
Commits (SHA1):

6243422ff4 3b69233906 e9b62aacb3 456960d2c7 3dda176723 af0f6f0bf0 ccf3ba3f32 65528a3e06 cdb4b3e024 8c0ebd1331
5130e06b41 08e27ef73f 914b125c5f 717b069907 7ea154673a b947f3bb9d 4c35817c5f c53841e166 fd81945597 794e491152
cab27c2ab4 624fa9052f 359ede4862 60c11dbdbd dacee40ebc 6089083a8e cc2c19d4c3 a5c56fac8a e4e49e63d0 00bd7bd19a
ef3d098b4d 8084cf29f3 5a7c1bde84 6b2d671be7 43c13faeda 29adfc2f68 064ee95b1c 604d533b31 44c1900f36 04671d0751
4f4c669d50 35758db9ec 4988199bb9 83991ee770 9d061cec26 4a21fecf67 ae8e69c030 fe819a9d80 e338ceb97f 75c87d5391
dd57873f8e 3dda93d50f 117146ec4e 884b4d47b1 023cb0c2de f391039a6f fcdd20b533 b45c36cd71 151c31c18f a8ad0902d3
e917dbdebb ba919b6123 9d5e3457e5 04694071fe 5b0157c6c6 3b9a87c790 3a3414270d d06e0905db 939b2fc6fd fae61372be
d8b47b689e b0c1a9504a be72be7c0d 88559a2d54 59201a7852 9e3e69373e d57026cd96 29ab54b259 41c9e8856a 86d8bb3a3e
d4ff59fcf5 9c485f8563 0e2a5951b4 691a536893 d8d12d5979 df528b41d8 2452ec55b4 54ae1b5a67 0597a97c84 3070a20580
0497f93494 2dfbb6813a 8f589a5cce 0b8bbd8750 eef95de30e d5ab750627 2afd10f96d 2d2619bd90 516d2df862 c76b488ab1
d383afc82b f9d94c5845 7745cc9d3c 657f24ec5f c106906f8f 9c0691156f 359b90288d 13e3f8faae fd7c66fd62 183f23f40d
16c8437b28 4310928803 74315b4ea8 177e6e27f9 50afe724ae 012c960fad 76f6d3357e d59e969c16 eb7a1aa7af c30a14cb97
a3ca8412ce 106f98aa72 40fa59d64c bb40ce6e35 0c8dbf6fa6 dd6519b64f da02a9cf32 ff192bb480 22762808ab 86b834c9e4
2d3cec11a7 76e1ee9988 222615d3df 11d024c613 886c8bb647 b422e5fdc3 d727ebee05 da39a7b29e 377fe33aac 55cd7738b9
48409c9183 82650eaae1 b8ca09c13f a442af6a7c e7f8daaf86 d1ff631df8 202183adf8 aae099e330 5387cf1718 71500a4e15
9859e65d2f 3bdf01bc1c a5a31667b0 e3fc7112bc 6d4981ec25
2  .github/workflows/publish-apt-brew-pkg.yml (vendored)

@@ -35,7 +35,7 @@ jobs:
       - name: Build deb package
         run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/debian/meilisearch.deb
8  .github/workflows/publish-binaries.yml (vendored)

@@ -54,7 +54,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/meilisearch
@@ -87,7 +87,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -121,7 +121,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -183,7 +183,7 @@ jobs:
      - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.6.1
+        uses: svenstaro/upload-release-action@2.7.0
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
20  .github/workflows/test-suite.yml (vendored)

@@ -37,13 +37,13 @@ jobs:
           toolchain: stable
           override: true
       - name: Setup test with Rust nightly
-        if: github.event_name == 'schedule'
+        if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
         uses: actions-rs/toolchain@v1
         with:
           toolchain: nightly
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.5.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -65,7 +65,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.5.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -78,12 +78,12 @@ jobs:
           args: --locked --release --all

   test-all-features:
-    name: Tests all features on cron schedule only
+    name: Tests all features
     runs-on: ubuntu-latest
     container:
       # Use ubuntu-18.04 to compile with glibc 2.27, which are the production expectations
       image: ubuntu:18.04
-    if: github.event_name == 'schedule'
+    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
       - uses: actions/checkout@v3
       - name: Install needed dependencies
@@ -110,7 +110,7 @@ jobs:
     runs-on: ubuntu-latest
     container:
       image: ubuntu:18.04
-    if: github.event_name == 'schedule'
+    if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     steps:
       - uses: actions/checkout@v3
       - name: Install needed dependencies
@@ -146,7 +146,7 @@ jobs:
           toolchain: stable
           override: true
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.5.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
         with:
@@ -161,11 +161,11 @@ jobs:
       - uses: actions-rs/toolchain@v1
         with:
           profile: minimal
-          toolchain: 1.69.0
+          toolchain: 1.71.1
           override: true
           components: clippy
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.5.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run cargo clippy
         uses: actions-rs/cargo@v1
         with:
@@ -184,7 +184,7 @@ jobs:
           override: true
           components: rustfmt
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.5.0
+        uses: Swatinem/rust-cache@v2.6.2
       - name: Run cargo fmt
         # Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
         # Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
1006  Cargo.lock (generated)

File diff suppressed because it is too large.
@@ -18,7 +18,7 @@ members = [
 ]

 [workspace.package]
-version = "1.3.0"
+version = "1.4.0"
 authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
 description = "Meilisearch HTTP server"
 homepage = "https://meilisearch.com"
19  PROFILING.md (new file)

@@ -0,0 +1,19 @@
+# Profiling Meilisearch
+
+Search engine technologies are complex pieces of software that require thorough profiling tools. We chose to use [Puffin](https://github.com/EmbarkStudios/puffin), which the Rust gaming industry uses extensively. You can export and import the profiling reports using the top bar's _File_ menu options.
+
+
+
+## Profiling the Indexing Process
+
+When you enable the `profile-with-puffin` feature of Meilisearch, a Puffin HTTP server will run on Meilisearch and listen on the default _0.0.0.0:8585_ address. This server will record a "frame" whenever it executes the `IndexScheduler::tick` method.
+
+Once your Meilisearch is running and awaiting new indexation operations, you must [install and run the `puffin_viewer` tool](https://github.com/EmbarkStudios/puffin/tree/main/puffin_viewer) to see the profiling results. We advise running the viewer with the `RUST_LOG=puffin_http::client=debug` environment variable to see the client trying to connect to your server.
+
+One more piece of advice on the Puffin viewer UI: be careful with the _Merge children with same ID_ option, as it can hide the exact timings at which events were sent. Turn it off when you see strange gaps in the flamegraph.
+
+## Profiling the Search Process
+
+We still need to take the time to profile the search side of the engine with Puffin. It would require time to profile the filtering phase, query parsing, creation, and execution. We could even profile the Actix HTTP server.
+
+The only issue we see is the framing system. Puffin requires a global frame-based profiling phase, which collides with Meilisearch's ability to accept and answer multiple requests on different threads simultaneously.
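To make the frame-per-tick mechanism described in PROFILING.md concrete, here is a minimal sketch of the same wiring using the `puffin` and `puffin_http` crates. The `do_tick` function and the bounded loop are hypothetical stand-ins for `IndexScheduler::tick`, not Meilisearch's actual code:

```rust
// Minimal sketch, assuming the `puffin` and `puffin_http` crates.

fn do_tick() {
    // Records this function as a scope inside the current frame.
    puffin::profile_function!();
    // ... indexing work would happen here ...
}

fn main() {
    // Start the Puffin HTTP server that `puffin_viewer` connects to.
    let _server = puffin_http::Server::new("0.0.0.0:8585").expect("cannot start puffin server");
    // Scope collection is off by default; turn it on to record anything.
    puffin::set_scopes_on(true);

    for _ in 0..10 {
        do_tick();
        // Finish the current frame so the viewer receives one "frame" per tick.
        puffin::GlobalProfiler::lock().new_frame();
    }
}
```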
@@ -65,7 +65,7 @@ You may also want to check out [Meilisearch 101](https://www.meilisearch.com/doc

 ## ⚡ Supercharge your Meilisearch experience

-Say goodbye to server deployment and manual updates with [Meilisearch Cloud](https://www.meilisearch.com/pricing?utm_campaign=oss&utm_source=engine&utm_medium=meilisearch). Get started with a 14-day free trial! No credit card required.
+Say goodbye to server deployment and manual updates with [Meilisearch Cloud](https://www.meilisearch.com/cloud?utm_campaign=oss&utm_source=github&utm_medium=meilisearch). No credit card required.

 ## 🧰 SDKs & integration tools

@@ -87,7 +87,7 @@ Finally, for more in-depth information, refer to our articles explaining fundame

 Meilisearch collects **anonymized** data from users to help us improve our product. You can [deactivate this](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) whenever you want.

-To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.
+To request deletion of collected data, please write to us at [privacy@meilisearch.com](mailto:privacy@meilisearch.com). Don't forget to include your `Instance UID` in the message, as this helps us quickly find and delete your data.

 If you want to know more about the kind of data we collect and what we use it for, check the [telemetry section](https://www.meilisearch.com/docs/learn/what_is_meilisearch/telemetry?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=telemetry#how-to-disable-data-collection) of our documentation.
BIN  assets/profiling-example.png (new file)

Binary file not shown (size after: 1.2 MiB).
@@ -14,11 +14,11 @@ license.workspace = true
 anyhow = "1.0.70"
 csv = "1.2.1"
 milli = { path = "../milli" }
-mimalloc = { version = "0.1.36", default-features = false }
+mimalloc = { version = "0.1.37", default-features = false }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }

 [dev-dependencies]
-criterion = { version = "0.4.0", features = ["html_reports"] }
+criterion = { version = "0.5.1", features = ["html_reports"] }
 rand = "0.8.5"
 rand_chacha = "0.3.1"
 roaring = "0.10.1"
@@ -7,7 +7,7 @@ use meilisearch_types::milli::update::IndexDocumentsMethod;
 use meilisearch_types::settings::Unchecked;
 use meilisearch_types::tasks::{Details, IndexSwap, KindWithContent, Status, Task, TaskId};
 use meilisearch_types::InstanceUid;
-use roaring::RoaringBitmap;
+use roaring::RoaringTreemap;
 use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;

@@ -121,11 +121,11 @@ pub enum KindDump {
     },
     TaskCancelation {
         query: String,
-        tasks: RoaringBitmap,
+        tasks: RoaringTreemap,
     },
     TasksDeletion {
         query: String,
-        tasks: RoaringBitmap,
+        tasks: RoaringTreemap,
     },
     DumpCreation {
         keys: Vec<Key>,
@@ -210,6 +210,7 @@ pub(crate) mod test {
     use big_s::S;
     use maplit::{btreemap, btreeset};
     use meilisearch_types::facet_values_sort::FacetValuesSort;
+    use meilisearch_types::features::RuntimeTogglableFeatures;
     use meilisearch_types::index_uid_pattern::IndexUidPattern;
     use meilisearch_types::keys::{Action, Key};
     use meilisearch_types::milli;
@@ -261,6 +262,9 @@ pub(crate) mod test {
         sortable_attributes: Setting::Set(btreeset! { S("age") }),
         ranking_rules: Setting::NotSet,
         stop_words: Setting::NotSet,
+        non_separator_tokens: Setting::NotSet,
+        separator_tokens: Setting::NotSet,
+        dictionary: Setting::NotSet,
         synonyms: Setting::NotSet,
         distinct_attribute: Setting::NotSet,
         typo_tolerance: Setting::NotSet,
@@ -418,7 +422,10 @@ pub(crate) mod test {
         }
         keys.flush().unwrap();

-        // ========== TODO: create features here
+        // ========== experimental features
+        let features = create_test_features();
+
+        dump.create_experimental_features(features).unwrap();

         // create the dump
         let mut file = tempfile::tempfile().unwrap();
@@ -428,6 +435,10 @@ pub(crate) mod test {
         file
     }

+    fn create_test_features() -> RuntimeTogglableFeatures {
+        RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
+    }
+
     #[test]
     fn test_creating_and_read_dump() {
         let mut file = create_test_dump();
@@ -472,5 +483,9 @@ pub(crate) mod test {
         for (key, expected) in dump.keys().unwrap().zip(create_test_api_keys()) {
             assert_eq!(key.unwrap(), expected);
         }
+
+        // ==== checking the features
+        let expected = create_test_features();
+        assert_eq!(dump.features().unwrap().unwrap(), expected);
     }
 }
@@ -69,7 +69,7 @@ impl CompatV5ToV6 {
         }

         let task = v6::Task {
-            uid: task_view.uid,
+            uid: task_view.uid as u64,
             index_uid: task_view.index_uid,
             status: match task_view.status {
                 v5::Status::Enqueued => v6::Status::Enqueued,
@@ -340,6 +340,9 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
                 }
             },
             stop_words: settings.stop_words.into(),
+            non_separator_tokens: v6::Setting::NotSet,
+            separator_tokens: v6::Setting::NotSet,
+            dictionary: v6::Setting::NotSet,
             synonyms: settings.synonyms.into(),
             distinct_attribute: settings.distinct_attribute.into(),
             typo_tolerance: match settings.typo_tolerance {
@@ -195,8 +195,53 @@ pub(crate) mod test {
     use meili_snap::insta;

     use super::*;
+    use crate::reader::v6::RuntimeTogglableFeatures;

-    // TODO: add `features` to tests
+    #[test]
+    fn import_dump_v6_experimental() {
+        let dump = File::open("tests/assets/v6-with-experimental.dump").unwrap();
+        let mut dump = DumpReader::open(dump).unwrap();
+
+        // top level infos
+        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-07-06 7:10:27.21958 +00:00:00");
+        insta::assert_debug_snapshot!(dump.instance_uid().unwrap(), @"None");
+
+        // tasks
+        let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"d45cd8571703e58ae53c7bd7ce3f5c22");
+        assert_eq!(update_files.len(), 2);
+        assert!(update_files[0].is_none()); // the dump creation
+        assert!(update_files[1].is_none()); // the processed document addition
+
+        // keys
+        let keys = dump.keys().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        meili_snap::snapshot_hash!(meili_snap::json_string!(keys), @"13c2da155e9729c2344688cab29af71d");
+
+        // indexes
+        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        // the index are not ordered in any way by default
+        indexes.sort_by_key(|index| index.metadata().uid.to_string());
+
+        let mut test = indexes.pop().unwrap();
+        assert!(indexes.is_empty());
+
+        insta::assert_json_snapshot!(test.metadata(), @r###"
+        {
+          "uid": "test",
+          "primaryKey": "id",
+          "createdAt": "2023-07-06T07:07:41.364694Z",
+          "updatedAt": "2023-07-06T07:07:41.396114Z"
+        }
+        "###);
+
+        assert_eq!(test.documents().unwrap().count(), 1);
+
+        assert_eq!(
+            dump.features().unwrap().unwrap(),
+            RuntimeTogglableFeatures { vector_store: true, ..Default::default() }
+        );
+    }

     #[test]
     fn import_dump_v5() {
@@ -274,6 +319,8 @@ pub(crate) mod test {
         let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
+
+        assert_eq!(dump.features().unwrap(), None);
     }

     #[test]
@@ -0,0 +1,24 @@
+---
+source: dump/src/reader/mod.rs
+expression: spells.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@@ -0,0 +1,38 @@
+---
+source: dump/src/reader/mod.rs
+expression: products.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {
+    "android": [
+      "phone",
+      "smartphone"
+    ],
+    "iphone": [
+      "phone",
+      "smartphone"
+    ],
+    "phone": [
+      "android",
+      "iphone",
+      "smartphone"
+    ]
+  },
+  "distinctAttribute": null
+}
@@ -0,0 +1,31 @@
+---
+source: dump/src/reader/mod.rs
+expression: movies.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [
+    "genres",
+    "id"
+  ],
+  "sortableAttributes": [
+    "genres",
+    "id"
+  ],
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "exactness",
+    "release_date:asc"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@@ -292,6 +292,7 @@ pub(crate) mod test {
 │ ├---- update_files/
 │ │ └---- 1.jsonl
 │ └---- queue.jsonl
+├---- experimental-features.json
 ├---- instance_uid.uuid
 ├---- keys.jsonl
 └---- metadata.json
BIN  dump/tests/assets/v6-with-experimental.dump (new file)

Binary file not shown.
@@ -472,6 +472,77 @@ pub fn parse_filter(input: Span) -> IResult<FilterCondition> {
     terminated(|input| parse_expression(input, 0), eof)(input)
 }

+impl<'a> std::fmt::Display for FilterCondition<'a> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            FilterCondition::Not(filter) => {
+                write!(f, "NOT ({filter})")
+            }
+            FilterCondition::Condition { fid, op } => {
+                write!(f, "{fid} {op}")
+            }
+            FilterCondition::In { fid, els } => {
+                write!(f, "{fid} IN[")?;
+                for el in els {
+                    write!(f, "{el}, ")?;
+                }
+                write!(f, "]")
+            }
+            FilterCondition::Or(els) => {
+                write!(f, "OR[")?;
+                for el in els {
+                    write!(f, "{el}, ")?;
+                }
+                write!(f, "]")
+            }
+            FilterCondition::And(els) => {
+                write!(f, "AND[")?;
+                for el in els {
+                    write!(f, "{el}, ")?;
+                }
+                write!(f, "]")
+            }
+            FilterCondition::GeoLowerThan { point, radius } => {
+                write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
+            }
+            FilterCondition::GeoBoundingBox {
+                top_right_point: top_left_point,
+                bottom_left_point: bottom_right_point,
+            } => {
+                write!(
+                    f,
+                    "_geoBoundingBox([{}, {}], [{}, {}])",
+                    top_left_point[0],
+                    top_left_point[1],
+                    bottom_right_point[0],
+                    bottom_right_point[1]
+                )
+            }
+        }
+    }
+}
+impl<'a> std::fmt::Display for Condition<'a> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Condition::GreaterThan(token) => write!(f, "> {token}"),
+            Condition::GreaterThanOrEqual(token) => write!(f, ">= {token}"),
+            Condition::Equal(token) => write!(f, "= {token}"),
+            Condition::NotEqual(token) => write!(f, "!= {token}"),
+            Condition::Null => write!(f, "IS NULL"),
+            Condition::Empty => write!(f, "IS EMPTY"),
+            Condition::Exists => write!(f, "EXISTS"),
+            Condition::LowerThan(token) => write!(f, "< {token}"),
+            Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"),
+            Condition::Between { from, to } => write!(f, "{from} TO {to}"),
+        }
+    }
+}
+impl<'a> std::fmt::Display for Token<'a> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{{{}}}", self.value())
+    }
+}
+
 #[cfg(test)]
 pub mod tests {
     use super::*;
@@ -852,74 +923,3 @@ pub mod tests {
         assert_eq!(token.value(), s);
     }
 }
-
-impl<'a> std::fmt::Display for FilterCondition<'a> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            FilterCondition::Not(filter) => {
-                write!(f, "NOT ({filter})")
-            }
-            FilterCondition::Condition { fid, op } => {
-                write!(f, "{fid} {op}")
-            }
-            FilterCondition::In { fid, els } => {
-                write!(f, "{fid} IN[")?;
-                for el in els {
-                    write!(f, "{el}, ")?;
-                }
-                write!(f, "]")
-            }
-            FilterCondition::Or(els) => {
-                write!(f, "OR[")?;
-                for el in els {
-                    write!(f, "{el}, ")?;
-                }
-                write!(f, "]")
-            }
-            FilterCondition::And(els) => {
-                write!(f, "AND[")?;
-                for el in els {
-                    write!(f, "{el}, ")?;
-                }
-                write!(f, "]")
-            }
-            FilterCondition::GeoLowerThan { point, radius } => {
-                write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
-            }
-            FilterCondition::GeoBoundingBox {
-                top_right_point: top_left_point,
-                bottom_left_point: bottom_right_point,
-            } => {
-                write!(
-                    f,
-                    "_geoBoundingBox([{}, {}], [{}, {}])",
-                    top_left_point[0],
-                    top_left_point[1],
-                    bottom_right_point[0],
-                    bottom_right_point[1]
-                )
-            }
-        }
-    }
-}
-impl<'a> std::fmt::Display for Condition<'a> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Condition::GreaterThan(token) => write!(f, "> {token}"),
-            Condition::GreaterThanOrEqual(token) => write!(f, ">= {token}"),
-            Condition::Equal(token) => write!(f, "= {token}"),
-            Condition::NotEqual(token) => write!(f, "!= {token}"),
-            Condition::Null => write!(f, "IS NULL"),
-            Condition::Empty => write!(f, "IS EMPTY"),
-            Condition::Exists => write!(f, "EXISTS"),
-            Condition::LowerThan(token) => write!(f, "< {token}"),
-            Condition::LowerThanOrEqual(token) => write!(f, "<= {token}"),
-            Condition::Between { from, to } => write!(f, "{from} TO {to}"),
-        }
-    }
-}
-impl<'a> std::fmt::Display for Token<'a> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{{{}}}", self.value())
-    }
-}
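The two hunks above move the `Display` implementations out of the test module so parsed filters can be printed anywhere in the crate. As a rough usage sketch of the round trip, assuming the crate exposes a `FilterCondition::parse` entry point that behaves as it does in its tests (an assumption, not a confirmed public API):

```rust
// Hypothetical round trip through the Display impls added above.
use filter_parser::FilterCondition;

fn main() {
    let input = "NOT genres = horror AND release_date >= 1577836800";
    // `parse` is assumed to return Ok(Some(condition)) for a non-empty filter.
    let condition = FilterCondition::parse(input).unwrap().unwrap();

    // Tokens print between braces, so the tree structure is easy to eyeball,
    // e.g. AND[NOT ({genres} = {horror}), {release_date} >= {1577836800}, ]
    println!("{condition}");
}
```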
@@ -16,7 +16,7 @@ license.workspace = true
 serde_json = "1.0"

 [dev-dependencies]
-criterion = { version = "0.4.0", features = ["html_reports"] }
+criterion = { version = "0.5.1", features = ["html_reports"] }

 [[bench]]
 name = "benchmarks"
@@ -13,7 +13,7 @@ license.workspace = true
 [dependencies]
 arbitrary = { version = "1.3.0", features = ["derive"] }
 clap = { version = "4.3.0", features = ["derive"] }
-fastrand = "1.9.0"
+fastrand = "2.0.0"
 milli = { path = "../milli" }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
@@ -22,6 +22,7 @@ log = "0.4.17"
 meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 page_size = "0.5.0"
+puffin = "0.16.0"
 roaring = { version = "0.10.1", features = ["serde"] }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }
@@ -32,11 +32,11 @@ use meilisearch_types::milli::update::{
     DeleteDocuments, DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod,
     Settings as MilliSettings,
 };
-use meilisearch_types::milli::{self, Filter, BEU32};
+use meilisearch_types::milli::{self, Filter, BEU64};
 use meilisearch_types::settings::{apply_settings_to_builder, Settings, Unchecked};
 use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status, Task};
 use meilisearch_types::{compression, Index, VERSION_FILE_NAME};
-use roaring::RoaringBitmap;
+use roaring::RoaringTreemap;
 use time::macros::format_description;
 use time::OffsetDateTime;
 use uuid::Uuid;
@@ -58,7 +58,7 @@ pub(crate) enum Batch {
         /// The date and time at which the previously processing tasks started.
         previous_started_at: OffsetDateTime,
         /// The list of tasks that were processing when this task cancelation appeared.
-        previous_processing_tasks: RoaringBitmap,
+        previous_processing_tasks: RoaringTreemap,
     },
     TaskDeletion(Task),
     SnapshotCreation(Vec<Task>),
@@ -471,6 +471,8 @@ impl IndexScheduler {
         #[cfg(test)]
         self.maybe_fail(crate::tests::FailureLocation::InsideCreateBatch)?;

+        puffin::profile_function!();
+
         let enqueued = &self.get_status(rtxn, Status::Enqueued)?;
         let to_cancel = self.get_kind(rtxn, Kind::TaskCancelation)? & enqueued;

@@ -575,6 +577,9 @@ impl IndexScheduler {
             self.maybe_fail(crate::tests::FailureLocation::PanicInsideProcessBatch)?;
             self.breakpoint(crate::Breakpoint::InsideProcessBatch);
         }
+
+        puffin::profile_function!(format!("{:?}", batch));
+
         match batch {
             Batch::TaskCancelation { mut task, previous_started_at, previous_processing_tasks } => {
                 // 1. Retrieve the tasks that matched the query at enqueue-time.
@@ -1060,7 +1065,13 @@ impl IndexScheduler {
     }

     /// Swap the index `lhs` with the index `rhs`.
-    fn apply_index_swap(&self, wtxn: &mut RwTxn, task_id: u32, lhs: &str, rhs: &str) -> Result<()> {
+    fn apply_index_swap(
+        &self,
+        wtxn: &mut RwTxn,
+        task_id: TaskId,
+        lhs: &str,
+        rhs: &str,
+    ) -> Result<()> {
         // 1. Verify that both lhs and rhs are existing indexes
         let index_lhs_exists = self.index_mapper.index_exists(wtxn, lhs)?;
         if !index_lhs_exists {
@@ -1081,7 +1092,7 @@ impl IndexScheduler {
         for task_id in &index_lhs_task_ids | &index_rhs_task_ids {
             let mut task = self.get_task(wtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
             swap_index_uid_in_task(&mut task, (lhs, rhs));
-            self.all_tasks.put(wtxn, &BEU32::new(task_id), &task)?;
+            self.all_tasks.put(wtxn, &BEU64::new(task_id), &task)?;
         }

         // 4. remove the task from indexuid = before_name
@@ -1111,6 +1122,8 @@ impl IndexScheduler {
         index: &'i Index,
         operation: IndexOperation,
     ) -> Result<Vec<Task>> {
+        puffin::profile_function!();
+
         match operation {
             IndexOperation::DocumentClear { mut tasks, .. } => {
                 let count = milli::update::ClearDocuments::new(index_wtxn, index).execute()?;
@@ -1382,7 +1395,11 @@ impl IndexScheduler {
     /// Delete each given task from all the databases (if it is deleteable).
     ///
     /// Return the number of tasks that were actually deleted.
-    fn delete_matched_tasks(&self, wtxn: &mut RwTxn, matched_tasks: &RoaringBitmap) -> Result<u64> {
+    fn delete_matched_tasks(
+        &self,
+        wtxn: &mut RwTxn,
+        matched_tasks: &RoaringTreemap,
+    ) -> Result<u64> {
         // 1. Remove from this list the tasks that we are not allowed to delete
         let enqueued_tasks = self.get_status(wtxn, Status::Enqueued)?;
         let processing_tasks = &self.processing_tasks.read().unwrap().processing.clone();
@@ -1397,7 +1414,7 @@ impl IndexScheduler {
         let mut affected_indexes = HashSet::new();
         let mut affected_statuses = HashSet::new();
         let mut affected_kinds = HashSet::new();
-        let mut affected_canceled_by = RoaringBitmap::new();
+        let mut affected_canceled_by = RoaringTreemap::new();

         for task_id in to_delete_tasks.iter() {
             let task = self.get_task(wtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
@@ -1434,10 +1451,10 @@ impl IndexScheduler {
         }

         for task in to_delete_tasks.iter() {
-            self.all_tasks.delete(wtxn, &BEU32::new(task))?;
+            self.all_tasks.delete(wtxn, &BEU64::new(task))?;
         }
         for canceled_by in affected_canceled_by {
-            let canceled_by = BEU32::new(canceled_by);
+            let canceled_by = BEU64::new(canceled_by);
             if let Some(mut tasks) = self.canceled_by.get(wtxn, &canceled_by)? {
                 tasks -= &to_delete_tasks;
                 if tasks.is_empty() {
@@ -1458,9 +1475,9 @@ impl IndexScheduler {
         &self,
         wtxn: &mut RwTxn,
         cancel_task_id: TaskId,
-        matched_tasks: &RoaringBitmap,
+        matched_tasks: &RoaringTreemap,
         previous_started_at: OffsetDateTime,
-        previous_processing_tasks: &RoaringBitmap,
+        previous_processing_tasks: &RoaringTreemap,
     ) -> Result<Vec<Uuid>> {
         let now = OffsetDateTime::now_utc();

@@ -1485,7 +1502,7 @@ impl IndexScheduler {
             task.details = task.details.map(|d| d.to_failed());
             self.update_task(wtxn, &task)?;
         }
-        self.canceled_by.put(wtxn, &BEU32::new(cancel_task_id), &tasks_to_cancel)?;
+        self.canceled_by.put(wtxn, &BEU64::new(cancel_task_id), &tasks_to_cancel)?;

         Ok(content_files_to_delete)
 }
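The recurring `RoaringBitmap` → `RoaringTreemap` and `BEU32` → `BEU64` substitutions in this file widen task ids from 32 to 64 bits. A small self-contained sketch of what the treemap buys, using the `roaring` crate directly (the task ids are made up):

```rust
use roaring::RoaringTreemap;

fn main() {
    // RoaringTreemap stores u64 values, so task ids beyond u32::MAX still fit.
    let mut tasks = RoaringTreemap::new();
    tasks.insert(0);
    tasks.insert(42);
    tasks.insert(u32::MAX as u64 + 1); // would not fit in a RoaringBitmap's u32 ids

    assert!(tasks.contains(42));
    assert_eq!(tasks.len(), 3);

    // The set algebra the scheduler relies on (e.g. `enqueued & to_cancel`)
    // works the same as with RoaringBitmap.
    let enqueued: RoaringTreemap = [0u64, 42].into_iter().collect();
    assert_eq!((&tasks & &enqueued).len(), 2);
}
```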
@@ -48,6 +48,8 @@ impl From<DateField> for Code {
 pub enum Error {
     #[error("{1}")]
     WithCustomErrorCode(Code, Box<Self>),
+    #[error("Received bad task id: {received} should be >= to {expected}.")]
+    BadTaskId { received: TaskId, expected: TaskId },
     #[error("Index `{0}` not found.")]
     IndexNotFound(String),
     #[error("Index `{0}` already exists.")]
@@ -159,6 +161,7 @@ impl Error {
         match self {
             Error::IndexNotFound(_)
             | Error::WithCustomErrorCode(_, _)
+            | Error::BadTaskId { .. }
             | Error::IndexAlreadyExists(_)
             | Error::SwapDuplicateIndexFound(_)
             | Error::SwapDuplicateIndexesFound(_)
@@ -202,6 +205,7 @@ impl ErrorCode for Error {
     fn error_code(&self) -> Code {
         match self {
             Error::WithCustomErrorCode(code, _) => *code,
+            Error::BadTaskId { .. } => Code::BadRequest,
             Error::IndexNotFound(_) => Code::IndexNotFound,
             Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
             Error::SwapDuplicateIndexesFound(_) => Code::InvalidSwapDuplicateIndexFound,
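For context on the new `BadTaskId` variant above: the `#[error(...)]` attribute comes from the `thiserror` crate and derives `Display` from the format string. A minimal standalone sketch, with an illustrative `TaskId` alias rather than the crate's own definition:

```rust
use thiserror::Error;

// Illustrative alias; the real crate defines its own TaskId type.
type TaskId = u64;

#[derive(Debug, Error)]
pub enum Error {
    #[error("Received bad task id: {received} should be >= to {expected}.")]
    BadTaskId { received: TaskId, expected: TaskId },
}

fn main() {
    let err = Error::BadTaskId { received: 3, expected: 7 };
    // Display is derived from the #[error] format string.
    assert_eq!(err.to_string(), "Received bad task id: 3 should be >= to 7.");
}
```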
@@ -3,9 +3,10 @@ use std::fmt::Write;

 use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
 use meilisearch_types::heed::{Database, RoTxn};
-use meilisearch_types::milli::{CboRoaringBitmapCodec, RoaringBitmapCodec, BEU32};
+use meilisearch_types::milli::heed_codec::{CboRoaringTreemapCodec, RoaringTreemapCodec};
+use meilisearch_types::milli::BEU64;
 use meilisearch_types::tasks::{Details, Task};
-use roaring::RoaringBitmap;
+use roaring::RoaringTreemap;

 use crate::index_mapper::IndexMapper;
 use crate::{IndexScheduler, Kind, Status, BEI128};
@@ -47,7 +48,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
     let processing_tasks = processing_tasks.read().unwrap().processing.clone();
     snap.push_str(&format!("### Autobatching Enabled = {autobatching_enabled}\n"));
     snap.push_str("### Processing Tasks:\n");
-    snap.push_str(&snapshot_bitmap(&processing_tasks));
+    snap.push_str(&snapshot_treemap(&processing_tasks));
     snap.push_str("\n----------------------------------------------------------------------\n");

     snap.push_str("### All Tasks:\n");
@@ -103,7 +104,7 @@ pub fn snapshot_file_store(file_store: &file_store::FileStore) -> String {
     snap
 }

-pub fn snapshot_bitmap(r: &RoaringBitmap) -> String {
+pub fn snapshot_treemap(r: &RoaringTreemap) -> String {
     let mut snap = String::new();
     snap.push('[');
     for x in r {
@@ -113,7 +114,7 @@ pub fn snapshot_treemap(r: &RoaringTreemap) -> String {
     snap
 }

-pub fn snapshot_all_tasks(rtxn: &RoTxn, db: Database<OwnedType<BEU32>, SerdeJson<Task>>) -> String {
+pub fn snapshot_all_tasks(rtxn: &RoTxn, db: Database<OwnedType<BEU64>, SerdeJson<Task>>) -> String {
     let mut snap = String::new();
     let iter = db.iter(rtxn).unwrap();
     for next in iter {
@@ -125,13 +126,13 @@ pub fn snapshot_all_tasks(rtxn: &RoTxn, db: Database<OwnedType<BEU64>, SerdeJson

 pub fn snapshot_date_db(
     rtxn: &RoTxn,
-    db: Database<OwnedType<BEI128>, CboRoaringBitmapCodec>,
+    db: Database<OwnedType<BEI128>, CboRoaringTreemapCodec>,
 ) -> String {
     let mut snap = String::new();
     let iter = db.iter(rtxn).unwrap();
     for next in iter {
         let (_timestamp, task_ids) = next.unwrap();
-        snap.push_str(&format!("[timestamp] {}\n", snapshot_bitmap(&task_ids)));
+        snap.push_str(&format!("[timestamp] {}\n", snapshot_treemap(&task_ids)));
     }
     snap
 }
@@ -216,45 +217,48 @@ fn snapshot_details(d: &Details) -> String {

 pub fn snapshot_status(
     rtxn: &RoTxn,
-    db: Database<SerdeBincode<Status>, RoaringBitmapCodec>,
+    db: Database<SerdeBincode<Status>, RoaringTreemapCodec>,
 ) -> String {
     let mut snap = String::new();
     let iter = db.iter(rtxn).unwrap();
     for next in iter {
         let (status, task_ids) = next.unwrap();
-        writeln!(snap, "{status} {}", snapshot_bitmap(&task_ids)).unwrap();
+        writeln!(snap, "{status} {}", snapshot_treemap(&task_ids)).unwrap();
     }
     snap
 }
-pub fn snapshot_kind(rtxn: &RoTxn, db: Database<SerdeBincode<Kind>, RoaringBitmapCodec>) -> String {
-    let mut snap = String::new();
-    let iter = db.iter(rtxn).unwrap();
-    for next in iter {
-        let (kind, task_ids) = next.unwrap();
-        let kind = serde_json::to_string(&kind).unwrap();
-        writeln!(snap, "{kind} {}", snapshot_bitmap(&task_ids)).unwrap();
-    }
-    snap
-}
-
-pub fn snapshot_index_tasks(rtxn: &RoTxn, db: Database<Str, RoaringBitmapCodec>) -> String {
-    let mut snap = String::new();
-    let iter = db.iter(rtxn).unwrap();
-    for next in iter {
-        let (index, task_ids) = next.unwrap();
-        writeln!(snap, "{index} {}", snapshot_bitmap(&task_ids)).unwrap();
-    }
-    snap
-}
-pub fn snapshot_canceled_by(
+pub fn snapshot_kind(
     rtxn: &RoTxn,
-    db: Database<OwnedType<BEU32>, RoaringBitmapCodec>,
+    db: Database<SerdeBincode<Kind>, RoaringTreemapCodec>,
 ) -> String {
     let mut snap = String::new();
     let iter = db.iter(rtxn).unwrap();
     for next in iter {
         let (kind, task_ids) = next.unwrap();
-        writeln!(snap, "{kind} {}", snapshot_bitmap(&task_ids)).unwrap();
+        let kind = serde_json::to_string(&kind).unwrap();
+        writeln!(snap, "{kind} {}", snapshot_treemap(&task_ids)).unwrap();
     }
     snap
 }
+
+pub fn snapshot_index_tasks(rtxn: &RoTxn, db: Database<Str, RoaringTreemapCodec>) -> String {
+    let mut snap = String::new();
+    let iter = db.iter(rtxn).unwrap();
+    for next in iter {
+        let (index, task_ids) = next.unwrap();
+        writeln!(snap, "{index} {}", snapshot_treemap(&task_ids)).unwrap();
+    }
+    snap
+}
+pub fn snapshot_canceled_by(
+    rtxn: &RoTxn,
+    db: Database<OwnedType<BEU64>, RoaringTreemapCodec>,
+) -> String {
+    let mut snap = String::new();
+    let iter = db.iter(rtxn).unwrap();
+    for next in iter {
+        let (kind, task_ids) = next.unwrap();
+        writeln!(snap, "{kind} {}", snapshot_treemap(&task_ids)).unwrap();
+    }
+    snap
+}
File diff suppressed because it is too large.
@@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: canceled, canceled_by: 1, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
-1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(1), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }}
+1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(1), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []

@@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
-1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }}
+1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,]

@@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs
 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "beavero", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "wolfo", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
-3 {uid: 3, status: enqueued, details: { matched_tasks: 3, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }}
+3 {uid: 3, status: enqueued, details: { matched_tasks: 3, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0, 1, 2]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [1,2,3,]

@@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs
 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 1 {uid: 1, status: canceled, canceled_by: 3, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "beavero", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: canceled, canceled_by: 3, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "wolfo", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
-3 {uid: 3, status: succeeded, details: { matched_tasks: 3, canceled_tasks: Some(2), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }}
+3 {uid: 3, status: succeeded, details: { matched_tasks: 3, canceled_tasks: Some(2), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0, 1, 2]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []

@@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs
 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "beavero", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "wolfo", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
-3 {uid: 3, status: enqueued, details: { matched_tasks: 3, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }}
+3 {uid: 3, status: enqueued, details: { matched_tasks: 3, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0, 1, 2]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [1,2,3,]

@@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
-1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }}
+1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,]

@@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: canceled, canceled_by: 1, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
-1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(1), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }}
+1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(1), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []

@@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
-1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }}
+1 {uid: 1, status: enqueued, details: { matched_tasks: 1, canceled_tasks: None, original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,]

@@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs
 ----------------------------------------------------------------------
 ### All Tasks:
 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
-1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(0), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0]> }}
+1 {uid: 1, status: succeeded, details: { matched_tasks: 1, canceled_tasks: Some(0), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []

@@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs
 0 {uid: 0, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 1 {uid: 1, status: canceled, canceled_by: 3, details: { primary_key: Some("sheep") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("sheep") }}
 2 {uid: 2, status: canceled, canceled_by: 3, details: { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("catto", "doggo") }] }}
-3 {uid: 3, status: succeeded, details: { matched_tasks: 3, canceled_tasks: Some(0), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringBitmap<[0, 1, 2]> }}
+3 {uid: 3, status: succeeded, details: { matched_tasks: 3, canceled_tasks: Some(0), original_filter: "test_query" }, kind: TaskCancelation { query: "test_query", tasks: RoaringTreemap<[0, 1, 2]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []

@@ -7,8 +7,8 @@ source: index-scheduler/src/lib.rs
 ----------------------------------------------------------------------
 ### All Tasks:
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
-2 {uid: 2, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(1), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }}
-3 {uid: 3, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(0), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }}
+2 {uid: 2, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(1), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringTreemap<[0]> }}
+3 {uid: 3, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(0), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringTreemap<[0]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [1,]

@@ -8,7 +8,7 @@ source: index-scheduler/src/lib.rs
 ### All Tasks:
 0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
-2 {uid: 2, status: enqueued, details: { matched_tasks: 1, deleted_tasks: None, original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }}
+2 {uid: 2, status: enqueued, details: { matched_tasks: 1, deleted_tasks: None, original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringTreemap<[0]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [1,2,]

@@ -7,7 +7,7 @@ source: index-scheduler/src/lib.rs
 ----------------------------------------------------------------------
 ### All Tasks:
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
-2 {uid: 2, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(1), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0]> }}
+2 {uid: 2, status: succeeded, details: { matched_tasks: 1, deleted_tasks: Some(1), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringTreemap<[0]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [1,]

@@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs
 0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
-3 {uid: 3, status: succeeded, details: { matched_tasks: 2, deleted_tasks: Some(0), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0, 1]> }}
+3 {uid: 3, status: succeeded, details: { matched_tasks: 2, deleted_tasks: Some(0), original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringTreemap<[0, 1]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,2,]

@@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs
 0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
-3 {uid: 3, status: enqueued, details: { matched_tasks: 2, deleted_tasks: None, original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0, 1]> }}
+3 {uid: 3, status: enqueued, details: { matched_tasks: 2, deleted_tasks: None, original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringTreemap<[0, 1]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,2,3,]

@@ -9,7 +9,7 @@ source: index-scheduler/src/lib.rs
 0 {uid: 0, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "catto", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggo", primary_key: Some("bone"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
-3 {uid: 3, status: enqueued, details: { matched_tasks: 2, deleted_tasks: None, original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringBitmap<[0, 1]> }}
+3 {uid: 3, status: enqueued, details: { matched_tasks: 2, deleted_tasks: None, original_filter: "test_query" }, kind: TaskDeletion { query: "test_query", tasks: RoaringTreemap<[0, 1]> }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,1,2,3,]
@ -41,6 +41,18 @@ source: index-scheduler/src/lib.rs
"taskDeletion": {
"query": "[query]",
"tasks": [
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
58,
48,
0,

@ -21,6 +21,18 @@ source: index-scheduler/src/lib.rs
"taskDeletion": {
"query": "[query]",
"tasks": [
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
58,
48,
0,

@ -106,6 +106,18 @@ source: index-scheduler/src/lib.rs
"taskDeletion": {
"query": "[query]",
"tasks": [
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
58,
48,
0,

@ -61,6 +61,18 @@ source: index-scheduler/src/lib.rs
"taskDeletion": {
"query": "[query]",
"tasks": [
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
58,
48,
0,
@ -0,0 +1,36 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "index_a", primary_key: Some("id") }}
----------------------------------------------------------------------
### Status:
enqueued [0,]
----------------------------------------------------------------------
### Kind:
"indexCreation" [0,]
----------------------------------------------------------------------
### Index Tasks:
index_a [0,]
----------------------------------------------------------------------
### Index Mapper:

----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
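The dumps above are insta snapshots of the scheduler's internal state, stored as `.snap` files. As a minimal sketch (not from this diff), such a dump could be asserted like this, where `scheduler_state` is a hypothetical helper rendering the `### All Tasks / ### Status / ...` sections; the real tests go through the `snapshot!` macro from meili-snap shown further below:

use insta::assert_snapshot;

// Hypothetical helper: renders the scheduler dump shown above as one string.
fn scheduler_state() -> String {
    String::from("### All Tasks:\n0 {uid: 0, status: enqueued, ...}")
}

#[test]
fn index_creation_is_registered() {
    // On the first run insta records the string; later runs diff against it.
    assert_snapshot!(scheduler_state());
}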
@ -5,15 +5,16 @@ use std::ops::Bound;

use meilisearch_types::heed::types::{DecodeIgnore, OwnedType};
use meilisearch_types::heed::{Database, RoTxn, RwTxn};
use meilisearch_types::milli::{CboRoaringBitmapCodec, BEU32};
use meilisearch_types::milli::heed_codec::CboRoaringTreemapCodec;
use meilisearch_types::milli::BEU64;
use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status};
use roaring::{MultiOps, RoaringBitmap};
use roaring::{MultiOps, RoaringTreemap};
use time::OffsetDateTime;

use crate::{Error, IndexScheduler, Result, Task, TaskId, BEI128};

impl IndexScheduler {
pub(crate) fn all_task_ids(&self, rtxn: &RoTxn) -> Result<RoaringBitmap> {
pub(crate) fn all_task_ids(&self, rtxn: &RoTxn) -> Result<RoaringTreemap> {
enum_iterator::all().map(|s| self.get_status(rtxn, s)).union()
}

@ -26,7 +27,7 @@ impl IndexScheduler {
}

pub(crate) fn get_task(&self, rtxn: &RoTxn, task_id: TaskId) -> Result<Option<Task>> {
Ok(self.all_tasks.get(rtxn, &BEU32::new(task_id))?)
Ok(self.all_tasks.get(rtxn, &BEU64::new(task_id))?)
}

/// Convert an iterator to a `Vec` of tasks. The tasks MUST exist or a

@ -88,12 +89,12 @@ impl IndexScheduler {
}
}

self.all_tasks.put(wtxn, &BEU32::new(task.uid), task)?;
self.all_tasks.put(wtxn, &BEU64::new(task.uid), task)?;
Ok(())
}

/// Returns the whole set of tasks that belongs to this index.
pub(crate) fn index_tasks(&self, rtxn: &RoTxn, index: &str) -> Result<RoaringBitmap> {
pub(crate) fn index_tasks(&self, rtxn: &RoTxn, index: &str) -> Result<RoaringTreemap> {
Ok(self.index_tasks.get(rtxn, index)?.unwrap_or_default())
}

@ -101,7 +102,7 @@ impl IndexScheduler {
&self,
wtxn: &mut RwTxn,
index: &str,
f: impl Fn(&mut RoaringBitmap),
f: impl Fn(&mut RoaringTreemap),
) -> Result<()> {
let mut tasks = self.index_tasks(wtxn, index)?;
f(&mut tasks);

@ -114,7 +115,7 @@ impl IndexScheduler {
Ok(())
}

pub(crate) fn get_status(&self, rtxn: &RoTxn, status: Status) -> Result<RoaringBitmap> {
pub(crate) fn get_status(&self, rtxn: &RoTxn, status: Status) -> Result<RoaringTreemap> {
Ok(self.status.get(rtxn, &status)?.unwrap_or_default())
}

@ -122,7 +123,7 @@ impl IndexScheduler {
&self,
wtxn: &mut RwTxn,
status: Status,
bitmap: &RoaringBitmap,
bitmap: &RoaringTreemap,
) -> Result<()> {
Ok(self.status.put(wtxn, &status, bitmap)?)
}

@ -131,7 +132,7 @@ impl IndexScheduler {
&self,
wtxn: &mut RwTxn,
status: Status,
f: impl Fn(&mut RoaringBitmap),
f: impl Fn(&mut RoaringTreemap),
) -> Result<()> {
let mut tasks = self.get_status(wtxn, status)?;
f(&mut tasks);

@ -140,7 +141,7 @@ impl IndexScheduler {
Ok(())
}

pub(crate) fn get_kind(&self, rtxn: &RoTxn, kind: Kind) -> Result<RoaringBitmap> {
pub(crate) fn get_kind(&self, rtxn: &RoTxn, kind: Kind) -> Result<RoaringTreemap> {
Ok(self.kind.get(rtxn, &kind)?.unwrap_or_default())
}

@ -148,7 +149,7 @@ impl IndexScheduler {
&self,
wtxn: &mut RwTxn,
kind: Kind,
bitmap: &RoaringBitmap,
bitmap: &RoaringTreemap,
) -> Result<()> {
Ok(self.kind.put(wtxn, &kind, bitmap)?)
}

@ -157,7 +158,7 @@ impl IndexScheduler {
&self,
wtxn: &mut RwTxn,
kind: Kind,
f: impl Fn(&mut RoaringBitmap),
f: impl Fn(&mut RoaringTreemap),
) -> Result<()> {
let mut tasks = self.get_kind(wtxn, kind)?;
f(&mut tasks);

@ -169,20 +170,20 @@ impl IndexScheduler {

pub(crate) fn insert_task_datetime(
wtxn: &mut RwTxn,
database: Database<OwnedType<BEI128>, CboRoaringBitmapCodec>,
database: Database<OwnedType<BEI128>, CboRoaringTreemapCodec>,
time: OffsetDateTime,
task_id: TaskId,
) -> Result<()> {
let timestamp = BEI128::new(time.unix_timestamp_nanos());
let mut task_ids = database.get(wtxn, &timestamp)?.unwrap_or_default();
task_ids.insert(task_id);
database.put(wtxn, &timestamp, &RoaringBitmap::from_iter(task_ids))?;
database.put(wtxn, &timestamp, &RoaringTreemap::from_iter(task_ids))?;
Ok(())
}

pub(crate) fn remove_task_datetime(
wtxn: &mut RwTxn,
database: Database<OwnedType<BEI128>, CboRoaringBitmapCodec>,
database: Database<OwnedType<BEI128>, CboRoaringTreemapCodec>,
time: OffsetDateTime,
task_id: TaskId,
) -> Result<()> {

@ -192,7 +193,7 @@ pub(crate) fn remove_task_datetime(
if existing.is_empty() {
database.delete(wtxn, &timestamp)?;
} else {
database.put(wtxn, &timestamp, &RoaringBitmap::from_iter(existing))?;
database.put(wtxn, &timestamp, &RoaringTreemap::from_iter(existing))?;
}
}

@ -201,8 +202,8 @@ pub(crate) fn remove_task_datetime(

pub(crate) fn keep_tasks_within_datetimes(
rtxn: &RoTxn,
tasks: &mut RoaringBitmap,
database: Database<OwnedType<BEI128>, CboRoaringBitmapCodec>,
tasks: &mut RoaringTreemap,
database: Database<OwnedType<BEI128>, CboRoaringTreemapCodec>,
after: Option<OffsetDateTime>,
before: Option<OffsetDateTime>,
) -> Result<()> {

@ -212,7 +213,7 @@ pub(crate) fn keep_tasks_within_datetimes(
(Some(after), None) => (Bound::Excluded(*after), Bound::Unbounded),
(Some(after), Some(before)) => (Bound::Excluded(*after), Bound::Excluded(*before)),
};
let mut collected_task_ids = RoaringBitmap::new();
let mut collected_task_ids = RoaringTreemap::new();
let start = map_bound(start, |b| BEI128::new(b.unix_timestamp_nanos()));
let end = map_bound(end, |b| BEI128::new(b.unix_timestamp_nanos()));
let iter = database.range(rtxn, &(start, end))?;
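These hunks swap RoaringBitmap, which can only hold 32-bit integers, for RoaringTreemap, the roaring crate's 64-bit variant, so the stored sets can hold the widened u64 task ids. A self-contained sketch of the difference, using only the public roaring API:

use roaring::RoaringTreemap;

fn main() {
    let mut tasks = RoaringTreemap::new();
    tasks.insert(7); // small ids behave just like with RoaringBitmap
    tasks.insert(u32::MAX as u64 + 1); // ids past u32::MAX only fit in a treemap
    assert!(tasks.contains(4_294_967_296));
    assert_eq!(tasks.len(), 2);
}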
@ -15,7 +15,7 @@ license.workspace = true
serde_json = "1.0"

[dev-dependencies]
criterion = "0.4.0"
criterion = "0.5.1"

[[bench]]
name = "depth"
@ -167,7 +167,9 @@ macro_rules! snapshot {
let (settings, snap_name, _) = $crate::default_snapshot_settings_for_test(test_name, Some(&snap_name));
settings.bind(|| {
let snap = format!("{}", $value);
meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
insta::allow_duplicates! {
meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
}
});
};
($value:expr, @$inline:literal) => {

@ -176,7 +178,9 @@ macro_rules! snapshot {
let (settings, _, _) = $crate::default_snapshot_settings_for_test("", Some("_dummy_argument"));
settings.bind(|| {
let snap = format!("{}", $value);
meili_snap::insta::assert_snapshot!(snap, @$inline);
insta::allow_duplicates! {
meili_snap::insta::assert_snapshot!(snap, @$inline);
}
});
};
($value:expr) => {

@ -194,11 +198,37 @@ macro_rules! snapshot {
let (settings, snap_name, _) = $crate::default_snapshot_settings_for_test(test_name, None);
settings.bind(|| {
let snap = format!("{}", $value);
meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
insta::allow_duplicates! {
meili_snap::insta::assert_snapshot!(format!("{}", snap_name), snap);
}
});
};
}

/// Create a string from the value by serializing it as Json, optionally
/// redacting some parts of it.
///
/// The second argument to the macro can be an object expression for redaction.
/// It's in the form { selector => replacement }. For more information about redactions
/// refer to the redactions feature in the `insta` guide.
#[macro_export]
macro_rules! json_string {
($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
{
let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
snap
}
};
($value:expr) => {{
let value = meili_snap::insta::_macro_support::serialize_value(
&$value,
meili_snap::insta::_macro_support::SerializationFormat::Json,
meili_snap::insta::_macro_support::SnapshotLocation::File
);
value
}};
}

#[cfg(test)]
mod tests {
use crate as meili_snap;

@ -250,27 +280,3 @@ mod tests {
}
}
}

/// Create a string from the value by serializing it as Json, optionally
/// redacting some parts of it.
///
/// The second argument to the macro can be an object expression for redaction.
/// It's in the form { selector => replacement }. For more information about redactions
/// refer to the redactions feature in the `insta` guide.
#[macro_export]
macro_rules! json_string {
($value:expr, {$($k:expr => $v:expr),*$(,)?}) => {
{
let (_, snap) = meili_snap::insta::_prepare_snapshot_for_redaction!($value, {$($k => $v),*}, Json, File);
snap
}
};
($value:expr) => {{
let value = meili_snap::insta::_macro_support::serialize_value(
&$value,
meili_snap::insta::_macro_support::SerializationFormat::Json,
meili_snap::insta::_macro_support::SnapshotLocation::File
);
value
}};
}
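The `snapshot!` arms above now wrap each assertion in insta::allow_duplicates!, which lets a single assertion site run more than once (for example inside a loop) as long as every execution produces the same value. A small standalone sketch of that insta feature:

use insta::{allow_duplicates, assert_snapshot};

#[test]
fn same_snapshot_asserted_twice() {
    allow_duplicates! {
        for _ in 0..2 {
            // insta normally refuses to assert the same snapshot more than
            // once per test; allow_duplicates! permits it when values agree.
            assert_snapshot!(1 + 1, @"2");
        }
    }
}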
@ -15,13 +15,13 @@ actix-web = { version = "4.3.1", default-features = false }
anyhow = "1.0.70"
convert_case = "0.6.0"
csv = "1.2.1"
deserr = "0.5.0"
deserr = { version = "0.6.0", features = ["actix-web"]}
either = { version = "1.8.1", features = ["serde"] }
enum-iterator = "1.4.0"
file-store = { path = "../file-store" }
flate2 = "1.0.25"
fst = "0.4.7"
memmap2 = "0.5.10"
memmap2 = "0.7.1"
milli = { path = "../milli" }
roaring = { version = "0.10.1", features = ["serde"] }
serde = { version = "1.0.160", features = ["derive"] }
@ -104,6 +104,7 @@ macro_rules! impl_from_query_param_wrap_original_value_in_error {
}
impl_from_query_param_wrap_original_value_in_error!(usize, DeserrParseIntError);
impl_from_query_param_wrap_original_value_in_error!(u32, DeserrParseIntError);
impl_from_query_param_wrap_original_value_in_error!(u64, DeserrParseIntError);
impl_from_query_param_wrap_original_value_in_error!(bool, DeserrParseBoolError);

impl FromQueryParameter for String {
@ -1,4 +1,3 @@
use std::borrow::Borrow;
use std::fmt::{self, Debug, Display};
use std::fs::File;
use std::io::{self, Seek, Write};

@ -42,7 +41,7 @@ impl Display for DocumentFormatError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Io(e) => write!(f, "{e}"),
Self::MalformedPayload(me, b) => match me.borrow() {
Self::MalformedPayload(me, b) => match me {
Error::Json(se) => {
let mut message = match se.classify() {
Category::Data => {
@ -259,6 +259,9 @@ InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSortableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsStopWords , InvalidRequest , BAD_REQUEST ;
InvalidSettingsNonSeparatorTokens , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSeparatorTokens , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDictionary , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ;
InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ;
InvalidState , Internal , INTERNAL_SERVER_ERROR ;
@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, Clone, Copy, Default)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy, Default, PartialEq, Eq)]
#[serde(rename_all = "camelCase", default)]
pub struct RuntimeTogglableFeatures {
pub score_details: bool,
@ -171,6 +171,15 @@ pub struct Settings<T> {
#[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
pub stop_words: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsNonSeparatorTokens>)]
pub non_separator_tokens: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSeparatorTokens>)]
pub separator_tokens: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsDictionary>)]
pub dictionary: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]

@ -201,6 +210,9 @@ impl Settings<Checked> {
ranking_rules: Setting::Reset,
stop_words: Setting::Reset,
synonyms: Setting::Reset,
non_separator_tokens: Setting::Reset,
separator_tokens: Setting::Reset,
dictionary: Setting::Reset,
distinct_attribute: Setting::Reset,
typo_tolerance: Setting::Reset,
faceting: Setting::Reset,

@ -217,6 +229,9 @@ impl Settings<Checked> {
sortable_attributes,
ranking_rules,
stop_words,
non_separator_tokens,
separator_tokens,
dictionary,
synonyms,
distinct_attribute,
typo_tolerance,

@ -232,6 +247,9 @@ impl Settings<Checked> {
sortable_attributes,
ranking_rules,
stop_words,
non_separator_tokens,
separator_tokens,
dictionary,
synonyms,
distinct_attribute,
typo_tolerance,

@ -274,6 +292,9 @@ impl Settings<Unchecked> {
ranking_rules: self.ranking_rules,
stop_words: self.stop_words,
synonyms: self.synonyms,
non_separator_tokens: self.non_separator_tokens,
separator_tokens: self.separator_tokens,
dictionary: self.dictionary,
distinct_attribute: self.distinct_attribute,
typo_tolerance: self.typo_tolerance,
faceting: self.faceting,

@ -335,6 +356,28 @@ pub fn apply_settings_to_builder(
Setting::NotSet => (),
}

match settings.non_separator_tokens {
Setting::Set(ref non_separator_tokens) => {
builder.set_non_separator_tokens(non_separator_tokens.clone())
}
Setting::Reset => builder.reset_non_separator_tokens(),
Setting::NotSet => (),
}

match settings.separator_tokens {
Setting::Set(ref separator_tokens) => {
builder.set_separator_tokens(separator_tokens.clone())
}
Setting::Reset => builder.reset_separator_tokens(),
Setting::NotSet => (),
}

match settings.dictionary {
Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
Setting::Reset => builder.reset_dictionary(),
Setting::NotSet => (),
}

match settings.synonyms {
Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
Setting::Reset => builder.reset_synonyms(),

@ -459,15 +502,14 @@ pub fn settings(
})
.transpose()?
.unwrap_or_default();

let non_separator_tokens = index.non_separator_tokens(rtxn)?.unwrap_or_default();
let separator_tokens = index.separator_tokens(rtxn)?.unwrap_or_default();
let dictionary = index.dictionary(rtxn)?.unwrap_or_default();

let distinct_field = index.distinct_field(rtxn)?.map(String::from);

// in milli each word in the synonyms map were split on their separator. Since we lost
// this information we are going to put space between words.
let synonyms = index
.synonyms(rtxn)?
.iter()
.map(|(key, values)| (key.join(" "), values.iter().map(|value| value.join(" ")).collect()))
.collect();
let synonyms = index.user_defined_synonyms(rtxn)?;

let min_typo_word_len = MinWordSizeTyposSetting {
one_typo: Setting::Set(index.min_word_len_one_typo(rtxn)?),

@ -520,6 +562,9 @@ pub fn settings(
sortable_attributes: Setting::Set(sortable_attributes),
ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
stop_words: Setting::Set(stop_words),
non_separator_tokens: Setting::Set(non_separator_tokens),
separator_tokens: Setting::Set(separator_tokens),
dictionary: Setting::Set(dictionary),
distinct_attribute: match distinct_field {
Some(field) => Setting::Set(field),
None => Setting::Reset,

@ -642,6 +687,9 @@ pub(crate) mod test {
sortable_attributes: Setting::NotSet,
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,
separator_tokens: Setting::NotSet,
dictionary: Setting::NotSet,
synonyms: Setting::NotSet,
distinct_attribute: Setting::NotSet,
typo_tolerance: Setting::NotSet,

@ -663,6 +711,9 @@ pub(crate) mod test {
sortable_attributes: Setting::NotSet,
ranking_rules: Setting::NotSet,
stop_words: Setting::NotSet,
non_separator_tokens: Setting::NotSet,
separator_tokens: Setting::NotSet,
dictionary: Setting::NotSet,
synonyms: Setting::NotSet,
distinct_attribute: Setting::NotSet,
typo_tolerance: Setting::NotSet,
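Each of the new tokenizer settings follows the crate's three-state Setting<T> convention that apply_settings_to_builder dispatches on: Set carries a new value, Reset restores the default, NotSet leaves the stored value alone. A reduced standalone model of the pattern (the real Setting type lives in meilisearch-types; this stand-in is for illustration only):

// Illustration only: a minimal stand-in for meilisearch-types' Setting<T>.
#[derive(Debug, Clone, Default)]
enum Setting<T> {
    Set(T),
    Reset,
    #[default]
    NotSet,
}

fn apply(current: &mut Option<Vec<String>>, update: Setting<Vec<String>>) {
    match update {
        Setting::Set(v) => *current = Some(v), // overwrite with the new value
        Setting::Reset => *current = None,     // fall back to the default
        Setting::NotSet => (),                 // leave the field untouched
    }
}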
@ -5,7 +5,7 @@ use std::str::FromStr;

use enum_iterator::Sequence;
use milli::update::IndexDocumentsMethod;
use roaring::RoaringBitmap;
use roaring::RoaringTreemap;
use serde::{Deserialize, Serialize, Serializer};
use time::{Duration, OffsetDateTime};
use uuid::Uuid;

@ -15,7 +15,7 @@ use crate::keys::Key;
use crate::settings::{Settings, Unchecked};
use crate::InstanceUid;

pub type TaskId = u32;
pub type TaskId = u64;

#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]

@ -127,11 +127,11 @@ pub enum KindWithContent {
},
TaskCancelation {
query: String,
tasks: RoaringBitmap,
tasks: RoaringTreemap,
},
TaskDeletion {
query: String,
tasks: RoaringBitmap,
tasks: RoaringTreemap,
},
DumpCreation {
keys: Vec<Key>,
@ -19,6 +19,7 @@ actix-http = { version = "3.3.1", default-features = false, features = [
"compress-gzip",
"rustls",
] }
actix-utils = "3.0.1"
actix-web = { version = "4.3.1", default-features = false, features = [
"macros",
"compress-brotli",

@ -38,7 +39,7 @@ byte-unit = { version = "4.0.19", default-features = false, features = [
bytes = "1.4.0"
clap = { version = "4.2.1", features = ["derive", "env"] }
crossbeam-channel = "0.5.8"
deserr = "0.5.0"
deserr = { version = "0.6.0", features = ["actix-web"]}
dump = { path = "../dump" }
either = "1.8.1"
env_logger = "0.10.0"

@ -49,14 +50,15 @@ futures = "0.3.28"
futures-util = "0.3.28"
http = "0.2.9"
index-scheduler = { path = "../index-scheduler" }
indexmap = { version = "1.9.3", features = ["serde-1"] }
itertools = "0.10.5"
indexmap = { version = "2.0.0", features = ["serde"] }
is-terminal = "0.4.8"
itertools = "0.11.0"
jsonwebtoken = "8.3.0"
lazy_static = "1.4.0"
log = "0.4.17"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
mimalloc = { version = "0.1.36", default-features = false }
mimalloc = { version = "0.1.37", default-features = false }
mime = "0.3.17"
num_cpus = "1.15.0"
obkv = "0.2.0"

@ -67,6 +69,8 @@ permissive-json-pointer = { path = "../permissive-json-pointer" }
pin-project-lite = "0.2.9"
platform-dirs = "0.3.0"
prometheus = { version = "0.13.3", features = ["process"] }
puffin = "0.16.0"
puffin_http = { version = "0.13.0", optional = true }
rand = "0.8.5"
rayon = "1.7.0"
regex = "1.7.3"

@ -83,7 +87,7 @@ sha2 = "0.10.6"
siphasher = "0.3.10"
slice-group-by = "0.3.0"
static-files = { version = "0.2.3", optional = true }
sysinfo = "0.28.4"
sysinfo = "0.29.7"
tar = "0.4.38"
tempfile = "3.5.0"
thiserror = "1.0.40"

@ -100,8 +104,6 @@ uuid = { version = "1.3.1", features = ["serde", "v4"] }
walkdir = "2.3.3"
yaup = "0.2.1"
serde_urlencoded = "0.7.1"
actix-utils = "3.0.1"
atty = "0.2.14"
termcolor = "1.2.0"

[dev-dependencies]

@ -133,6 +135,7 @@ zip = { version = "0.6.4", optional = true }
[features]
default = ["analytics", "meilisearch-types/all-tokenizations", "mini-dashboard"]
analytics = ["segment"]
profile-with-puffin = ["dep:puffin_http"]
mini-dashboard = [
"actix-web-static-files",
"static-files",

@ -151,5 +154,5 @@ thai = ["meilisearch-types/thai"]
greek = ["meilisearch-types/greek"]

[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.7/build.zip"
sha1 = "28b45bf772c84f9a6e16bc1689b393bfce8da7d6"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.11/build.zip"
sha1 = "83cd44ed1e5f97ecb581dc9f958a63f4ccc982d9"
@ -20,7 +20,7 @@ pub struct SearchAggregator;
#[allow(dead_code)]
impl SearchAggregator {
pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
Self::default()
Self
}

pub fn succeed(&mut self, _: &dyn Any) {}

@ -32,7 +32,7 @@ pub struct MultiSearchAggregator;
#[allow(dead_code)]
impl MultiSearchAggregator {
pub fn from_queries(_: &dyn Any, _: &dyn Any) -> Self {
Self::default()
Self
}

pub fn succeed(&mut self) {}

@ -44,7 +44,7 @@ pub struct FacetSearchAggregator;
#[allow(dead_code)]
impl FacetSearchAggregator {
pub fn from_query(_: &dyn Any, _: &dyn Any) -> Self {
Self::default()
Self
}

pub fn succeed(&mut self, _: &dyn Any) {}
@ -574,6 +574,10 @@ pub struct SearchAggregator {
filter_total_number_of_criteria: usize,
used_syntax: HashMap<String, usize>,

// attributes_to_search_on
// every time a search is done using attributes_to_search_on
attributes_to_search_on_total_number_of_uses: usize,

// q
// The maximum number of terms in a q request
max_terms_number: usize,

@ -647,6 +651,11 @@ impl SearchAggregator {
ret.filter_sum_of_criteria_terms = RE.split(&stringified_filters).count();
}

// attributes_to_search_on
if let Some(_) = query.attributes_to_search_on {
ret.attributes_to_search_on_total_number_of_uses = 1;
}

if let Some(ref q) = query.q {
ret.max_terms_number = q.split_whitespace().count();
}

@ -720,9 +729,18 @@ impl SearchAggregator {
let used_syntax = self.used_syntax.entry(key).or_insert(0);
*used_syntax = used_syntax.saturating_add(value);
}

// attributes_to_search_on
self.attributes_to_search_on_total_number_of_uses = self
.attributes_to_search_on_total_number_of_uses
.saturating_add(other.attributes_to_search_on_total_number_of_uses);

// q
self.max_terms_number = self.max_terms_number.max(other.max_terms_number);

// vector
self.max_vector_size = self.max_vector_size.max(other.max_vector_size);

// pagination
self.max_limit = self.max_limit.max(other.max_limit);
self.max_offset = self.max_offset.max(other.max_offset);

@ -761,17 +779,17 @@ impl SearchAggregator {
if self.total_received == 0 {
None
} else {
// the index of the 99th percentage of value
let percentile_99th = 0.99 * (self.total_succeeded as f64 - 1.) + 1.;
// we get all the values in a sorted manner
let time_spent = self.time_spent.into_sorted_vec();
// the index of the 99th percentage of value
let percentile_99th = time_spent.len() * 99 / 100;
// We are only interested by the slowest value of the 99th fastest results
let time_spent = time_spent.get(percentile_99th as usize);
let time_spent = time_spent.get(percentile_99th);

let properties = json!({
"user-agent": self.user_agents,
"requests": {
"99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
"99th_response_time": time_spent.map(|t| format!("{:.2}", t)),
"total_succeeded": self.total_succeeded,
"total_failed": self.total_received.saturating_sub(self.total_succeeded), // just to be sure we never panics
"total_received": self.total_received,

@ -786,9 +804,15 @@ impl SearchAggregator {
"avg_criteria_number": format!("{:.2}", self.filter_sum_of_criteria_terms as f64 / self.filter_total_number_of_criteria as f64),
"most_used_syntax": self.used_syntax.iter().max_by_key(|(_, v)| *v).map(|(k, _)| json!(k)).unwrap_or_else(|| json!(null)),
},
"attributes_to_search_on": {
"total_number_of_uses": self.attributes_to_search_on_total_number_of_uses,
},
"q": {
"max_terms_number": self.max_terms_number,
},
"vector": {
"max_vector_size": self.max_vector_size,
},
"pagination": {
"max_limit": self.max_limit,
"max_offset": self.max_offset,

@ -843,6 +867,10 @@ pub struct MultiSearchAggregator {
// sum of the number of search queries in the requests, use with total_received to compute an average
total_search_count: usize,

// scoring
show_ranking_score: bool,
show_ranking_score_details: bool,

// context
user_agents: HashSet<String>,
}

@ -856,6 +884,9 @@ impl MultiSearchAggregator {
let distinct_indexes: HashSet<_> =
query.iter().map(|query| query.index_uid.as_str()).collect();

let show_ranking_score = query.iter().any(|query| query.show_ranking_score);
let show_ranking_score_details = query.iter().any(|query| query.show_ranking_score_details);

Self {
timestamp,
total_received: 1,

@ -863,6 +894,8 @@ impl MultiSearchAggregator {
total_distinct_index_count: distinct_indexes.len(),
total_single_index: if distinct_indexes.len() == 1 { 1 } else { 0 },
total_search_count: query.len(),
show_ranking_score,
show_ranking_score_details,
user_agents,
}
}

@ -884,6 +917,9 @@ impl MultiSearchAggregator {
this.total_distinct_index_count.saturating_add(other.total_distinct_index_count);
let total_single_index = this.total_single_index.saturating_add(other.total_single_index);
let total_search_count = this.total_search_count.saturating_add(other.total_search_count);
let show_ranking_score = this.show_ranking_score || other.show_ranking_score;
let show_ranking_score_details =
this.show_ranking_score_details || other.show_ranking_score_details;
let mut user_agents = this.user_agents;

for user_agent in other.user_agents.into_iter() {

@ -899,6 +935,8 @@ impl MultiSearchAggregator {
total_single_index,
total_search_count,
user_agents,
show_ranking_score,
show_ranking_score_details,
// do not add _ or ..Default::default() here
};

@ -925,6 +963,10 @@ impl MultiSearchAggregator {
"searches": {
"total_search_count": self.total_search_count,
"avg_search_count": (self.total_search_count as f64) / (self.total_received as f64),
},
"scoring": {
"show_ranking_score": self.show_ranking_score,
"show_ranking_score_details": self.show_ranking_score_details,
}
});

@ -1145,6 +1187,7 @@ pub struct DocumentsDeletionAggregator {
#[serde(rename = "user-agent")]
user_agents: HashSet<String>,

#[serde(rename = "requests.total_received")]
total_received: usize,
per_document_id: bool,
clear_all: bool,

@ -1295,6 +1338,7 @@ pub struct HealthAggregator {
#[serde(rename = "user-agent")]
user_agents: HashSet<String>,

#[serde(rename = "requests.total_received")]
total_received: usize,
}

@ -1345,7 +1389,7 @@ pub struct DocumentsFetchAggregator {
#[serde(rename = "user-agent")]
user_agents: HashSet<String>,

#[serde(rename = "requests.max_limit")]
#[serde(rename = "requests.total_received")]
total_received: usize,

// a call on ../documents/:doc_id
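The percentile rewrite above replaces a float formula based on total_succeeded, which could yield an index past the end of time_spent, with an index derived from the sorted vector's own length. A standalone sketch of the new rule:

fn p99(mut samples: Vec<u64>) -> Option<u64> {
    samples.sort_unstable();
    let idx = samples.len() * 99 / 100; // same integer rule as the new code
    samples.get(idx).copied() // get() turns the empty case into None instead of a panic
}

fn main() {
    assert_eq!(p99(vec![]), None);
    assert_eq!(p99((1..=100).collect()), Some(100));
}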
@ -203,7 +203,7 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc<IndexScheduler>, Arc<
.name(String::from("register-snapshot-tasks"))
.spawn(move || loop {
thread::sleep(snapshot_delay);
if let Err(e) = index_scheduler.register(KindWithContent::SnapshotCreation) {
if let Err(e) = index_scheduler.register(KindWithContent::SnapshotCreation, None) {
error!("Error while registering snapshot: {}", e);
}
})
@ -1,5 +1,5 @@
use std::env;
use std::io::Write;
use std::io::{stderr, Write};
use std::path::PathBuf;
use std::sync::Arc;

@ -7,6 +7,7 @@ use actix_web::http::KeepAlive;
use actix_web::web::Data;
use actix_web::HttpServer;
use index_scheduler::IndexScheduler;
use is_terminal::IsTerminal;
use meilisearch::analytics::Analytics;
use meilisearch::{analytics, create_app, prototype_name, setup_meilisearch, Opt};
use meilisearch_auth::{generate_master_key, AuthController, MASTER_KEY_MIN_SIZE};

@ -29,6 +30,10 @@ fn setup(opt: &Opt) -> anyhow::Result<()> {
async fn main() -> anyhow::Result<()> {
let (opt, config_read_from) = Opt::try_build()?;

#[cfg(feature = "profile-with-puffin")]
let _server = puffin_http::Server::new(&format!("0.0.0.0:{}", puffin_http::DEFAULT_PORT))?;
puffin::set_scopes_on(cfg!(feature = "profile-with-puffin"));

anyhow::ensure!(
!(cfg!(windows) && opt.experimental_reduce_indexing_memory_usage),
"The `experimental-reduce-indexing-memory-usage` flag is not supported on Windows"

@ -186,7 +191,7 @@ Anonymous telemetry:\t\"Enabled\""
}

eprintln!();
eprintln!("Check out Meilisearch Cloud!\thttps://cloud.meilisearch.com/login?utm_campaign=oss&utm_source=engine&utm_medium=cli");
eprintln!("Check out Meilisearch Cloud!\thttps://www.meilisearch.com/cloud?utm_campaign=oss&utm_source=engine&utm_medium=cli");
eprintln!("Documentation:\t\t\thttps://www.meilisearch.com/docs");
eprintln!("Source code:\t\t\thttps://github.com/meilisearch/meilisearch");
eprintln!("Discord:\t\t\thttps://discord.meilisearch.com");

@ -197,8 +202,7 @@ const WARNING_BG_COLOR: Option<Color> = Some(Color::Ansi256(178));
const WARNING_FG_COLOR: Option<Color> = Some(Color::Ansi256(0));

fn print_master_key_too_short_warning() {
let choice =
if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
let choice = if stderr().is_terminal() { ColorChoice::Auto } else { ColorChoice::Never };
let mut stderr = StandardStream::stderr(choice);
stderr
.set_color(

@ -223,8 +227,7 @@ fn print_master_key_too_short_warning() {
}

fn print_missing_master_key_warning() {
let choice =
if atty::is(atty::Stream::Stderr) { ColorChoice::Auto } else { ColorChoice::Never };
let choice = if stderr().is_terminal() { ColorChoice::Auto } else { ColorChoice::Never };
let mut stderr = StandardStream::stderr(choice);
stderr
.set_color(
@ -50,4 +50,10 @@ lazy_static! {
&["kind", "value"]
)
.expect("Can't create a metric");
pub static ref MEILISEARCH_LAST_UPDATE: IntGauge =
register_int_gauge!(opts!("meilisearch_last_update", "Meilisearch Last Update"))
.expect("Can't create a metric");
pub static ref MEILISEARCH_IS_INDEXING: IntGauge =
register_int_gauge!(opts!("meilisearch_is_indexing", "Meilisearch Is Indexing"))
.expect("Can't create a metric");
}
@ -11,7 +11,7 @@ use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::SummarizedTaskView;
use crate::routes::{get_task_id, SummarizedTaskView};

pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(create_dump))));

@ -29,8 +29,9 @@ pub async fn create_dump(
keys: auth_controller.list_keys()?,
instance_uid: analytics.instance_uid().cloned(),
};
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();

debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@ -64,7 +64,20 @@ async fn patch_features(
vector_store: new_features.0.vector_store.unwrap_or(old_features.vector_store),
};

analytics.publish("Experimental features Updated".to_string(), json!(new_features), Some(&req));
// explicitly destructure for analytics rather than using the `Serialize` implementation, because
// the it renames to camelCase, which we don't want for analytics.
// **Do not** ignore fields with `..` or `_` here, because we want to add them in the future.
let meilisearch_types::features::RuntimeTogglableFeatures { score_details, vector_store } =
new_features;

analytics.publish(
"Experimental features Updated".to_string(),
json!({
"score_details": score_details,
"vector_store": vector_store,
}),
Some(&req),
);
index_scheduler.put_runtime_features(new_features)?;
Ok(HttpResponse::Ok().json(new_features))
}
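The explicit destructuring above is a compile-time guard: because the pattern names every field and uses no `..`, adding a field to RuntimeTogglableFeatures forces this call site to be revisited, so the analytics payload cannot silently go stale. A reduced sketch of the technique:

struct Features {
    score_details: bool,
    vector_store: bool,
}

fn publish(f: &Features) {
    // No `..` here: a new field in `Features` turns this into a compile error.
    let Features { score_details, vector_store } = *f;
    println!("score_details={score_details} vector_store={vector_store}");
}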
@ -7,7 +7,7 @@ use bstr::ByteSlice;
use deserr::actix_web::{AwebJson, AwebQueryParameter};
use deserr::Deserr;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
use index_scheduler::{IndexScheduler, TaskId};
use log::debug;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};

@ -36,7 +36,7 @@ use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::payload::Payload;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use crate::routes::{get_task_id, PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use crate::search::parse_filter;

static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {

@ -129,8 +129,9 @@ pub async fn delete_document(
index_uid: index_uid.to_string(),
documents_ids: vec![document_id],
};
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}

@ -277,6 +278,7 @@ pub async fn replace_documents(
analytics.add_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);

let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid);
let uid = get_task_id(&req)?;
let task = document_addition(
extract_mime_type(&req)?,
index_scheduler,

@ -285,6 +287,7 @@ pub async fn replace_documents(
params.csv_delimiter,
body,
IndexDocumentsMethod::ReplaceDocuments,
uid,
allow_index_creation,
)
.await?;

@ -308,6 +311,7 @@ pub async fn update_documents(
analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);

let allow_index_creation = index_scheduler.filters().allow_index_creation(&index_uid);
let uid = get_task_id(&req)?;
let task = document_addition(
extract_mime_type(&req)?,
index_scheduler,

@ -316,6 +320,7 @@ pub async fn update_documents(
params.csv_delimiter,
body,
IndexDocumentsMethod::UpdateDocuments,
uid,
allow_index_creation,
)
.await?;

@ -332,6 +337,7 @@ async fn document_addition(
csv_delimiter: Option<u8>,
mut body: Payload,
method: IndexDocumentsMethod,
task_id: Option<TaskId>,
allow_index_creation: bool,
) -> Result<SummarizedTaskView, MeilisearchHttpError> {
let format = match (

@ -445,7 +451,7 @@ async fn document_addition(
};

let scheduler = index_scheduler.clone();
let task = match tokio::task::spawn_blocking(move || scheduler.register(task)).await? {
let task = match tokio::task::spawn_blocking(move || scheduler.register(task, task_id)).await? {
Ok(task) => task,
Err(e) => {
index_scheduler.delete_update_file(uuid)?;

@ -476,8 +482,9 @@ pub async fn delete_documents_batch(

let task =
KindWithContent::DocumentDeletion { index_uid: index_uid.to_string(), documents_ids: ids };
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();

debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))

@ -512,8 +519,9 @@ pub async fn delete_documents_by_filter(
.map_err(|err| ResponseError::from_msg(err.message, Code::InvalidDocumentFilter))?;
let task = KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr: filter };

let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();

debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))

@ -529,8 +537,9 @@ pub async fn clear_all_documents(
analytics.delete_documents(DocumentDeletionKind::ClearAll, &req);

let task = KindWithContent::DocumentClear { index_uid: index_uid.to_string() };
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();

debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@ -17,7 +17,7 @@ use serde::Serialize;
use serde_json::json;
use time::OffsetDateTime;

use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use super::{get_task_id, Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};

@ -135,8 +135,9 @@ pub async fn create_index(
);

let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();

Ok(HttpResponse::Accepted().json(task))
} else {

@ -203,8 +204,9 @@ pub async fn update_index(
primary_key: body.primary_key,
};

let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();

debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))

@ -213,11 +215,13 @@ pub async fn update_index(
pub async fn delete_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();

Ok(HttpResponse::Accepted().json(task))
}
@ -35,7 +35,7 @@ pub struct SearchQueryGet {
#[deserr(default, error = DeserrQueryParamError<InvalidSearchQ>)]
q: Option<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchVector>)]
vector: Option<Vec<f32>>,
vector: Option<CS<f32>>,
#[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError<InvalidSearchOffset>)]
offset: Param<usize>,
#[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError<InvalidSearchLimit>)]

@ -88,7 +88,7 @@ impl From<SearchQueryGet> for SearchQuery {

Self {
q: other.q,
vector: other.vector,
vector: other.vector.map(CS::into_inner),
offset: other.offset.0,
limit: other.limit.0,
page: other.page.as_deref().copied(),
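Declaring vector as Option<CS<f32>> lets the GET route accept a comma-separated list such as ?vector=0.1,0.2,0.3, and From<SearchQueryGet> unwraps it back to a Vec<f32> with CS::into_inner. A sketch assuming CS here is serde_cs::vec::CS, the comma-separated wrapper this codebase already uses for other query parameters:

use std::str::FromStr;

use serde_cs::vec::CS;

fn main() {
    // "0.1,0.2,0.3" is split on commas and each piece is parsed as an f32.
    let vector: CS<f32> = CS::from_str("0.1,0.2,0.3").unwrap();
    let values: Vec<f32> = vector.into_inner();
    assert_eq!(values, vec![0.1, 0.2, 0.3]);
}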
@ -5,6 +5,7 @@ use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::facet_values_sort::FacetValuesSort;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
use meilisearch_types::tasks::KindWithContent;

@ -13,7 +14,7 @@ use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::routes::SummarizedTaskView;
use crate::routes::{get_task_id, SummarizedTaskView};

#[macro_export]
macro_rules! make_setting_route {

@ -32,7 +33,7 @@ macro_rules! make_setting_route {
use $crate::extractors::authentication::policies::*;
use $crate::extractors::authentication::GuardedData;
use $crate::extractors::sequential_extractor::SeqHandler;
use $crate::routes::SummarizedTaskView;
use $crate::routes::{get_task_id, SummarizedTaskView};

pub async fn delete(
index_scheduler: GuardedData<

@ -40,6 +41,7 @@ macro_rules! make_setting_route {
Data<IndexScheduler>,
>,
index_uid: web::Path<String>,
req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;

@ -54,8 +56,9 @@ macro_rules! make_setting_route {
is_deletion: true,
allow_index_creation,
};
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid))
.await??
.into();

@ -96,8 +99,9 @@ macro_rules! make_setting_route {
is_deletion: false,
allow_index_creation,
};
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task))
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid))
.await??
.into();

@ -309,6 +313,81 @@ make_setting_route!(
}
);

make_setting_route!(
"/non-separator-tokens",
put,
std::collections::BTreeSet<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsNonSeparatorTokens,
>,
non_separator_tokens,
"nonSeparatorTokens",
analytics,
|non_separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
use serde_json::json;

analytics.publish(
"nonSeparatorTokens Updated".to_string(),
json!({
"non_separator_tokens": {
"total": non_separator_tokens.as_ref().map(|non_separator_tokens| non_separator_tokens.len()),
},
}),
Some(req),
);
}
);

make_setting_route!(
"/separator-tokens",
put,
std::collections::BTreeSet<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsSeparatorTokens,
>,
separator_tokens,
"separatorTokens",
analytics,
|separator_tokens: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
use serde_json::json;

analytics.publish(
"separatorTokens Updated".to_string(),
json!({
"separator_tokens": {
"total": separator_tokens.as_ref().map(|separator_tokens| separator_tokens.len()),
},
}),
Some(req),
);
}
);

make_setting_route!(
"/dictionary",
put,
std::collections::BTreeSet<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsDictionary,
>,
dictionary,
"dictionary",
analytics,
|dictionary: &Option<std::collections::BTreeSet<String>>, req: &HttpRequest| {
use serde_json::json;

analytics.publish(
"dictionary Updated".to_string(),
json!({
"dictionary": {
"total": dictionary.as_ref().map(|dictionary| dictionary.len()),
},
}),
Some(req),
);
}
);

make_setting_route!(
"/synonyms",
put,

@ -465,6 +544,9 @@ generate_configure!(
searchable_attributes,
distinct_attribute,
stop_words,
separator_tokens,
non_separator_tokens,
dictionary,
synonyms,
ranking_rules,
typo_tolerance,

@ -550,10 +632,16 @@ pub async fn update_all(
.as_ref()
.set()
.and_then(|s| s.max_values_per_facet.as_ref().set()),
"sort_facet_values_by": new_settings.faceting
"sort_facet_values_by_star_count": new_settings.faceting
.as_ref()
.set()
.and_then(|s| s.sort_facet_values_by.as_ref().set()),
.and_then(|s| {
s.sort_facet_values_by.as_ref().set().map(|s| s.iter().any(|(k, v)| k == "*" && v == &FacetValuesSort::Count))
}),
"sort_facet_values_by_total": new_settings.faceting
.as_ref()
.set()
.and_then(|s| s.sort_facet_values_by.as_ref().set().map(|s| s.len())),
},
"pagination": {
"max_total_hits": new_settings.pagination

@ -579,8 +667,9 @@ pub async fn update_all(
is_deletion: false,
allow_index_creation,
};
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();

debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))

@ -602,6 +691,7 @@ pub async fn get_all(
pub async fn delete_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;

@ -615,8 +705,9 @@ pub async fn delete_all(
is_deletion: true,
allow_index_creation,
};
let uid = get_task_id(&req)?;
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();

debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
@ -49,6 +49,11 @@ pub async fn get_metrics(
}
}

if let Some(last_update) = response.last_update {
crate::metrics::MEILISEARCH_LAST_UPDATE.set(last_update.unix_timestamp());
}
crate::metrics::MEILISEARCH_IS_INDEXING.set(index_scheduler.is_task_processing()? as i64);

let encoder = TextEncoder::new();
let mut buffer = vec![];
encoder.encode(&prometheus::gather(), &mut buffer).expect("Failed to encode metrics");
@ -5,7 +5,7 @@ use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use index_scheduler::IndexScheduler;
|
||||
use log::debug;
|
||||
use meilisearch_auth::AuthController;
|
||||
use meilisearch_types::error::ResponseError;
|
||||
use meilisearch_types::error::{Code, ResponseError};
|
||||
use meilisearch_types::settings::{Settings, Unchecked};
|
||||
use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -41,6 +41,34 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::scope("/experimental-features").configure(features::configure));
 }
 
+pub fn get_task_id(req: &HttpRequest) -> Result<Option<TaskId>, ResponseError> {
+    let task_id = req
+        .headers()
+        .get("TaskId")
+        .map(|header| {
+            header.to_str().map_err(|e| {
+                ResponseError::from_msg(
+                    format!("TaskId is not a valid utf-8 string: {e}"),
+                    Code::BadRequest,
+                )
+            })
+        })
+        .transpose()?
+        .map(|s| {
+            s.parse::<TaskId>().map_err(|e| {
+                ResponseError::from_msg(
+                    format!(
+                        "Could not parse the TaskId as a {}: {e}",
+                        std::any::type_name::<TaskId>(),
+                    ),
+                    Code::BadRequest,
+                )
+            })
+        })
+        .transpose()?;
+    Ok(task_id)
+}
+
 #[derive(Debug, Serialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SummarizedTaskView {
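
A minimal test sketch of the contract get_task_id establishes, assuming actix-web's TestRequest helpers; it is not part of the diff:

use actix_web::test::TestRequest;

#[test]
fn task_id_header_parsing() {
    // A well-formed header yields Some(uid).
    let req = TestRequest::default().insert_header(("TaskId", "42")).to_http_request();
    assert_eq!(get_task_id(&req).unwrap(), Some(42));

    // A missing header yields None: the scheduler picks the uid itself.
    let req = TestRequest::default().to_http_request();
    assert_eq!(get_task_id(&req).unwrap(), None);

    // A non-numeric value is rejected with a Code::BadRequest error.
    let req = TestRequest::default().insert_header(("TaskId", "nope")).to_http_request();
    assert!(get_task_id(&req).is_err());
}
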
@@ -284,9 +312,6 @@ pub fn create_all_stats(
     used_database_size += index_scheduler.used_size()?;
     database_size += auth_controller.size()?;
     used_database_size += auth_controller.used_size()?;
-    let update_file_size = index_scheduler.compute_update_file_size()?;
-    database_size += update_file_size;
-    used_database_size += update_file_size;
 
     let stats = Stats { database_size, used_database_size, last_update: last_task, indexes };
     Ok(stats)
@@ -10,7 +10,7 @@ use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::tasks::{IndexSwap, KindWithContent};
 use serde_json::json;
 
-use super::SummarizedTaskView;
+use super::{get_task_id, SummarizedTaskView};
 use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::policies::*;
@@ -61,7 +61,8 @@ pub async fn swap_indexes(
 
     let task = KindWithContent::IndexSwap { swaps };
 
-    let task = index_scheduler.register(task)?;
-    let task: SummarizedTaskView = task.into();
+    let uid = get_task_id(&req)?;
+    let task: SummarizedTaskView =
+        tokio::task::spawn_blocking(move || index_scheduler.register(task, uid)).await??.into();
     Ok(HttpResponse::Accepted().json(task))
 }
@@ -20,13 +20,13 @@ use time::macros::format_description;
 use time::{Date, Duration, OffsetDateTime, Time};
 use tokio::task;
 
-use super::SummarizedTaskView;
+use super::{get_task_id, SummarizedTaskView};
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
 use crate::extractors::sequential_extractor::SeqHandler;
 
-const DEFAULT_LIMIT: u32 = 20;
+const DEFAULT_LIMIT: u64 = 20;
 
 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(
@@ -175,14 +175,14 @@ impl From<Details> for DetailsView {
 #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct TasksFilterQuery {
     #[deserr(default = Param(DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidTaskLimit>)]
-    pub limit: Param<u32>,
+    pub limit: Param<TaskId>,
     #[deserr(default, error = DeserrQueryParamError<InvalidTaskFrom>)]
     pub from: Option<Param<TaskId>>,
 
     #[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
-    pub uids: OptionStarOrList<u32>,
+    pub uids: OptionStarOrList<TaskId>,
     #[deserr(default, error = DeserrQueryParamError<InvalidTaskCanceledBy>)]
-    pub canceled_by: OptionStarOrList<u32>,
+    pub canceled_by: OptionStarOrList<TaskId>,
     #[deserr(default, error = DeserrQueryParamError<InvalidTaskTypes>)]
     pub types: OptionStarOrList<Kind>,
     #[deserr(default, error = DeserrQueryParamError<InvalidTaskStatuses>)]
|
||||
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
|
||||
pub struct TaskDeletionOrCancelationQuery {
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
|
||||
pub uids: OptionStarOrList<u32>,
|
||||
pub uids: OptionStarOrList<TaskId>,
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskCanceledBy>)]
|
||||
pub canceled_by: OptionStarOrList<u32>,
|
||||
pub canceled_by: OptionStarOrList<TaskId>,
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskTypes>)]
|
||||
pub types: OptionStarOrList<Kind>,
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskStatuses>)]
|
||||
@ -325,7 +325,7 @@ async fn cancel_tasks(
|
||||
|
||||
let query = params.into_query();
|
||||
|
||||
let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
|
||||
let (tasks, _) = index_scheduler.get_task_ids_from_authorized_indexes(
|
||||
&index_scheduler.read_txn()?,
|
||||
&query,
|
||||
index_scheduler.filters(),
|
||||
@ -333,7 +333,9 @@ async fn cancel_tasks(
|
||||
let task_cancelation =
|
||||
KindWithContent::TaskCancelation { query: format!("?{}", req.query_string()), tasks };
|
||||
|
||||
let task = task::spawn_blocking(move || index_scheduler.register(task_cancelation)).await??;
|
||||
let uid = get_task_id(&req)?;
|
||||
let task =
|
||||
task::spawn_blocking(move || index_scheduler.register(task_cancelation, uid)).await??;
|
||||
let task: SummarizedTaskView = task.into();
|
||||
|
||||
Ok(HttpResponse::Ok().json(task))
|
||||
@ -370,7 +372,7 @@ async fn delete_tasks(
|
||||
);
|
||||
let query = params.into_query();
|
||||
|
||||
let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
|
||||
let (tasks, _) = index_scheduler.get_task_ids_from_authorized_indexes(
|
||||
&index_scheduler.read_txn()?,
|
||||
&query,
|
||||
index_scheduler.filters(),
|
||||
@ -378,7 +380,8 @@ async fn delete_tasks(
|
||||
let task_deletion =
|
||||
KindWithContent::TaskDeletion { query: format!("?{}", req.query_string()), tasks };
|
||||
|
||||
let task = task::spawn_blocking(move || index_scheduler.register(task_deletion)).await??;
|
||||
let uid = get_task_id(&req)?;
|
||||
let task = task::spawn_blocking(move || index_scheduler.register(task_deletion, uid)).await??;
|
||||
let task: SummarizedTaskView = task.into();
|
||||
|
||||
Ok(HttpResponse::Ok().json(task))
|
||||
@ -387,9 +390,10 @@ async fn delete_tasks(
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct AllTasks {
|
||||
results: Vec<TaskView>,
|
||||
limit: u32,
|
||||
from: Option<u32>,
|
||||
next: Option<u32>,
|
||||
total: u64,
|
||||
limit: TaskId,
|
||||
from: Option<TaskId>,
|
||||
next: Option<TaskId>,
|
||||
}
|
||||
|
||||
async fn get_tasks(
|
||||
@ -406,23 +410,17 @@ async fn get_tasks(
|
||||
let limit = params.limit.0;
|
||||
let query = params.into_query();
|
||||
|
||||
let mut tasks_results: Vec<TaskView> = index_scheduler
|
||||
.get_tasks_from_authorized_indexes(query, index_scheduler.filters())?
|
||||
.into_iter()
|
||||
.map(|t| TaskView::from_task(&t))
|
||||
.collect();
|
||||
let filters = index_scheduler.filters();
|
||||
let (tasks, total) = index_scheduler.get_tasks_from_authorized_indexes(query, filters)?;
|
||||
let mut results: Vec<_> = tasks.iter().map(TaskView::from_task).collect();
|
||||
|
||||
// If we were able to fetch the number +1 tasks we asked
|
||||
// it means that there is more to come.
|
||||
let next = if tasks_results.len() == limit as usize {
|
||||
tasks_results.pop().map(|t| t.uid)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let next = if results.len() == limit as usize { results.pop().map(|t| t.uid) } else { None };
|
||||
|
||||
let from = tasks_results.first().map(|t| t.uid);
|
||||
let from = results.first().map(|t| t.uid);
|
||||
let tasks = AllTasks { results, limit: limit.saturating_sub(1), total, from, next };
|
||||
|
||||
let tasks = AllTasks { results: tasks_results, limit: limit.saturating_sub(1), from, next };
|
||||
Ok(HttpResponse::Ok().json(tasks))
|
||||
}
|
||||
|
||||
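
A standalone sketch of the limit+1 cursor trick the handler relies on: the scheduler is queried for one more task than the caller asked, so a full page proves more results exist, and the surplus task's uid becomes the `next` cursor. Names and types here are illustrative:

fn next_cursor(results: &mut Vec<u32>, limit_plus_one: usize) -> Option<u32> {
    if results.len() == limit_plus_one {
        // The extra task is removed from the page; its uid is the cursor.
        results.pop()
    } else {
        // Fewer than limit+1 results: this is the last page.
        None
    }
}

// With uids returned newest-first, next_cursor(&mut vec![9, 8, 7], 3) == Some(7),
// leaving [9, 8] as the page and 7 as the `from` of the next request.
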
@@ -444,10 +442,10 @@ async fn get_task(
     analytics.publish("Tasks Seen".to_string(), json!({ "per_task_uid": true }), Some(&req));
 
     let query = index_scheduler::Query { uids: Some(vec![task_uid]), ..Query::default() };
+    let filters = index_scheduler.filters();
+    let (tasks, _) = index_scheduler.get_tasks_from_authorized_indexes(query, filters)?;
 
-    if let Some(task) =
-        index_scheduler.get_tasks_from_authorized_indexes(query, index_scheduler.filters())?.first()
-    {
+    if let Some(task) = tasks.first() {
         let task_view = TaskView::from_task(task);
         Ok(HttpResponse::Ok().json(task_view))
     } else {
@@ -491,6 +491,20 @@ pub fn perform_search(
         tokenizer_builder.allow_list(&script_lang_map);
     }
 
+    let separators = index.allowed_separators(&rtxn)?;
+    let separators: Option<Vec<_>> =
+        separators.as_ref().map(|x| x.iter().map(String::as_str).collect());
+    if let Some(ref separators) = separators {
+        tokenizer_builder.separators(separators);
+    }
+
+    let dictionary = index.dictionary(&rtxn)?;
+    let dictionary: Option<Vec<_>> =
+        dictionary.as_ref().map(|x| x.iter().map(String::as_str).collect());
+    if let Some(ref dictionary) = dictionary {
+        tokenizer_builder.words_dict(dictionary);
+    }
+
     let mut formatter_builder = MatcherBuilder::new(matching_words, tokenizer_builder.build());
     formatter_builder.crop_marker(query.crop_marker);
     formatter_builder.highlight_prefix(query.highlight_pre_tag);
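
For context, a hedged sketch of what the two new settings do at tokenization time, assuming charabia's builder API as called in the hunk (exact signatures are version-dependent); the separator and dictionary values are illustrative:

use charabia::TokenizerBuilder;

fn main() {
    let mut builder = TokenizerBuilder::default();
    builder.separators(&["|"]);        // split tokens on the custom separator
    builder.words_dict(&["J. R. R."]); // keep dictionary entries as single tokens
    let tokenizer = builder.build();
    for token in tokenizer.tokenize("J. R. R.|Tolkien") {
        println!("{:?}", token.lemma());
    }
}
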
@@ -666,6 +680,7 @@ fn compute_semantic_score(query: &[f32], vectors: Value) -> milli::Result<Option<f32>> {
         .map_err(InternalError::SerdeJson)?;
     Ok(vectors
         .into_iter()
+        .flatten()
         .map(|v| OrderedFloat(dot_product_similarity(query, &v)))
         .max()
         .map(OrderedFloat::into_inner))
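
The added `.flatten()` matters because a document can carry several embeddings; a hedged standalone reading of the logic (the nested-Vec shape is an assumption about the deserialized value):

use ordered_float::OrderedFloat;

fn dot_product_similarity(a: &[f32], b: &[f32]) -> f32 {
    a.iter().zip(b.iter()).map(|(x, y)| x * y).sum()
}

fn best_semantic_score(query: &[f32], vectors: Vec<Vec<Vec<f32>>>) -> Option<f32> {
    vectors
        .into_iter()
        .flatten() // collapse per-document groups into one stream of vectors
        .map(|v| OrderedFloat(dot_product_similarity(query, &v)))
        .max() // keep the best-matching embedding
        .map(OrderedFloat::into_inner)
}
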
@@ -61,6 +61,8 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
         ("DELETE", "/keys/mykey/") => hashset!{"keys.delete", "*"},
         ("POST", "/keys") => hashset!{"keys.create", "*"},
         ("GET", "/keys") => hashset!{"keys.get", "*"},
+        ("GET", "/experimental-features") => hashset!{"experimental.get", "*"},
+        ("PATCH", "/experimental-features") => hashset!{"experimental.update", "*"},
     };
 
     authorizations
@@ -189,6 +189,14 @@ impl Server {
         let url = format!("/tasks/{}", update_id);
         self.service.get(url).await
     }
 
+    pub async fn get_features(&self) -> (Value, StatusCode) {
+        self.service.get("/experimental-features").await
+    }
+
+    pub async fn set_features(&self, value: Value) -> (Value, StatusCode) {
+        self.service.patch("/experimental-features", value).await
+    }
 }
 
 pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
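
A hedged usage sketch of the two new test helpers; `Server::new()`, the status codes, and the `scoreDetails` flag are assumptions borrowed from the surrounding test suite's style rather than part of this hunk:

use serde_json::json;

#[actix_rt::test]
async fn toggle_experimental_features() {
    let server = Server::new().await;

    // Read the current feature flags.
    let (_response, code) = server.get_features().await;
    assert_eq!(code, 200);

    // Flip one flag and check it is echoed back.
    let (response, code) = server.set_features(json!({ "scoreDetails": true })).await;
    assert_eq!(code, 200);
    assert_eq!(response["scoreDetails"], true);
}
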
(One file's diff is suppressed because it is too large.)
@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:08:54.713227Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:08:54.713227Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:08:54.713227Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:08:54.713227Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:08:54.713227Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:08:54.713227Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:08:54.713227Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:34:25.351927Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:34:25.351927Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -40,6 +40,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:34:48.1719Z"
     }
   ],
+  "total": 2,
   "limit": 1,
   "from": 1,
   "next": 0

@@ -40,6 +40,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:34:48.1719Z"
     }
   ],
+  "total": 2,
   "limit": 1,
   "from": 1,
   "next": 0

@@ -40,6 +40,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:34:48.1719Z"
     }
   ],
+  "total": 2,
   "limit": 1,
   "from": 1,
   "next": 0

@@ -40,6 +40,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:34:48.1719Z"
     }
   ],
+  "total": 2,
   "limit": 1,
   "from": 1,
   "next": 0

@@ -40,6 +40,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:34:48.1719Z"
     }
   ],
+  "total": 2,
   "limit": 1,
   "from": 1,
   "next": 0

@@ -45,6 +45,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:26:57.319083Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:28:46.369971Z"
     }
   ],
+  "total": 92,
   "limit": 1,
   "from": 92,
   "next": 91

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:28:46.369971Z"
     }
   ],
+  "total": 93,
   "limit": 1,
   "from": 92,
   "next": 91

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:28:46.369971Z"
     }
   ],
+  "total": 93,
   "limit": 1,
   "from": 92,
   "next": 91

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:28:46.369971Z"
     }
   ],
+  "total": 93,
   "limit": 1,
   "from": 92,
   "next": 91

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:28:46.369971Z"
     }
   ],
+  "total": 93,
   "limit": 1,
   "from": 92,
   "next": 91

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T09:28:46.369971Z"
     }
   ],
+  "total": 93,
   "limit": 1,
   "from": 92,
   "next": 91

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T08:31:12.304168Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T08:31:12.304168Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null

@@ -20,6 +20,7 @@ source: meilisearch/tests/dumps/mod.rs
       "finishedAt": "2021-09-08T08:31:12.304168Z"
     }
   ],
+  "total": 1,
   "limit": 1,
   "from": 0,
   "next": null
Some files were not shown because too many files have changed in this diff.