Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-01 18:25:37 +00:00)

Compare commits: 3 commits, v1.0.1 ... with_rate_
| Author | SHA1 | Date |
|---|---|---|
| | 6678491212 | |
| | a82f8aacde | |
| | 5cf71c6014 | |
.github/workflows/latest-git-tag.yml (3 changes, vendored)

@@ -3,7 +3,7 @@ name: Update latest git tag
 on:
   workflow_dispatch:
   release:
-    types: [released]
+    types: [published]

 jobs:
   check-version:

@@ -17,7 +17,6 @@ jobs:

   update-latest-tag:
     runs-on: ubuntu-latest
     needs: check-version
     steps:
      - uses: actions/checkout@v3
      - uses: rickstaa/action-create-tag@v1
.github/workflows/publish-deb-brew-pkg.yml (2 changes, vendored)

@@ -2,7 +2,7 @@ name: Publish to APT repository & Homebrew

 on:
   release:
-    types: [released]
+    types: [published]

 jobs:
   check-version:
.github/workflows/publish-docker-images.yml (3 changes, vendored)

@@ -52,9 +52,6 @@ jobs:
      - name: Set build-args for Docker buildx
        id: build-metadata
        run: |
-          # Define ownership
-          git config --global --add safe.directory /home/meili/actions-runner/_work/meilisearch/meilisearch
-
          # Extract commit date
          commit_date=$(git show -s --format=%cd --date=iso-strict ${{ github.sha }})
.github/workflows/rust.yml (2 changes, vendored)

@@ -108,7 +108,7 @@ jobs:
        uses: actions-rs/cargo@v1
        with:
          command: clippy
-          args: --all-targets -- --deny warnings --allow clippy::uninlined_format_args
+          args: --all-targets -- --deny warnings

  fmt:
    name: Run Rustfmt

@@ -44,5 +44,5 @@ jobs:
          --title "Update version for the next release ($NEW_VERSION) in Cargo.toml files" \
          --body '⚠️ This PR is automatically generated. Check the new version is the expected one before merging.' \
          --label 'skip changelog' \
-          --milestone $NEW_VERSION \
-          --base $GITHUB_REF_NAME
+          --milestone $NEW_VERSION
Cargo.lock (1345 changes, generated)

File diff suppressed because it is too large.
config.toml (86 changes)

@@ -28,9 +28,17 @@ http_payload_size_limit = "100 MB"

 log_level = "INFO"
 # Defines how much detail should be present in Meilisearch's logs.
-# Meilisearch currently supports six log levels, listed in order of increasing verbosity: `OFF`, `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`
+# Meilisearch currently supports five log levels, listed in order of increasing verbosity: `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#log-level

+max_index_size = "100 GiB"
+# Sets the maximum size of the index.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#max-index-size
+
+max_task_db_size = "100 GiB"
+# Sets the maximum size of the task database.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#max-task-db-size
+
 # max_indexing_memory = "2 GiB"
 # Sets the maximum amount of RAM Meilisearch can use when indexing.
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#max-indexing-memory

@@ -39,6 +47,11 @@ log_level = "INFO"
 # Sets the maximum number of threads Meilisearch can use during indexing.
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#max-indexing-threads

+disable_auto_batching = false
+# Deactivates auto-batching when provided.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#disable-auto-batching
+
+
 #############
 ### DUMPS ###
 #############

@@ -60,20 +73,85 @@ ignore_dump_if_db_exists = false
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#ignore-dump-if-db-exists


+#####################
+### RATE LIMITING ###
+#####################
+
+rate_limiting_disable_all = false
+# Prevents a Meilisearch instance from performing any rate limiting.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-all
+
+rate_limiting_disable_global = false
+# Prevents a Meilisearch instance from performing rate limiting globally, across all queries.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-global
+
+rate_limiting_global_pool = 100000
+# The maximum pool of search requests that can be performed before further requests are rejected.
+#
+# The pool starts full at the provided value, then each search request diminishes the pool by 1.
+# When the pool is empty, search requests are rejected.
+# The pool is replenished by 1 after each cooldown period.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-global-pool
+
+rate_limiting_global_cooldown_ns = 50000
+# The amount of time, in nanoseconds, before the pool of available search requests is replenished by 1.
+#
+# The maximum number of available search requests is given by `rate_limiting_global_pool`.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-global-cooldown-ns
+
+rate_limiting_disable_ip = false
+# Prevents a Meilisearch instance from performing rate limiting per IP address.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-ip
+
+rate_limiting_ip_pool = 200
+# The maximum pool of search requests that can be performed from a specific IP address before further requests are rejected.
+#
+# The pool starts full at the provided value, then each search request from the same IP address diminishes the pool by 1.
+# When the pool is empty, search requests are rejected.
+# The pool is replenished by 1 after each cooldown period.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-ip-pool
+
+rate_limiting_ip_cooldown_ns = 50000000
+# The amount of time, in nanoseconds, before the pool of available search requests for a specific IP address is replenished by 1.
+#
+# The maximum number of available search requests for a specific IP address is given by `rate_limiting_ip_pool`.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-ip-cooldown-ns
+
+rate_limiting_disable_api_key = false
+# Prevents a Meilisearch instance from performing rate limiting per API key.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-api-key
+
+rate_limiting_api_key_pool = 10000
+# The maximum pool of search requests that can be performed using a specific API key before further requests are rejected.
+#
+# The pool starts full at the provided value, then each search request using the same API key diminishes the pool by 1.
+# When the pool is empty, search requests are rejected.
+# The pool is replenished by 1 after each cooldown period.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-api-key-pool
+
+rate_limiting_api_key_cooldown_ns = 500000
+# The amount of time, in nanoseconds, before the pool of available search requests using a specific API key is replenished by 1.
+#
+# The maximum number of available search requests using a specific API key is given by `rate_limiting_api_key_pool`.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-api-key-cooldown-ns
+
+
 #################
 ### SNAPSHOTS ###
 #################

 schedule_snapshot = false
-# Enables scheduled snapshots when true, disabled when false (the default).
-# If the value is given as an integer, then it enables the scheduled snapshot with the passed value as the interval
-# between each snapshot, in seconds.
+# Activates scheduled snapshots when provided.
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#schedule-snapshot-creation

 snapshot_dir = "snapshots/"
 # Sets the directory where Meilisearch will store snapshots.
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#snapshot-destination

+snapshot_interval_sec = 86400
+# Defines the interval between each snapshot. Value must be given in seconds.
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#snapshot-interval
+
 # import_snapshot = "./path/to/my/snapshot"
 # Launches Meilisearch after importing a previously-generated snapshot at the given filepath.
 # https://docs.meilisearch.com/learn/configuration/instance_options.html#import-snapshot
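The pool-and-cooldown scheme described in the rate-limiting comments above is essentially a token bucket: the pool starts full, each search request consumes one slot, and one slot is credited back per cooldown period. With the defaults, the 50,000 ns global cooldown refills one slot every 50 µs, which sustains 1e9 / 50,000 = 20,000 requests per second once the 100,000-slot burst pool is drained. A minimal sketch of that accounting, with illustrative names rather than the branch's actual types:

```rust
use std::time::{Duration, Instant};

/// Illustrative names; not the branch's actual types.
struct RateLimitPool {
    capacity: u64,        // e.g. rate_limiting_global_pool = 100000
    cooldown: Duration,   // e.g. rate_limiting_global_cooldown_ns = 50000 (must be non-zero)
    available: u64,       // slots currently left in the pool
    last_refill: Instant, // when slots were last credited back
}

impl RateLimitPool {
    fn new(capacity: u64, cooldown: Duration) -> Self {
        Self { capacity, cooldown, available: capacity, last_refill: Instant::now() }
    }

    /// Returns true if the search request may proceed, false if it is rejected.
    fn try_acquire(&mut self) -> bool {
        // Credit one slot back per fully elapsed cooldown period, capped at capacity.
        let periods = (self.last_refill.elapsed().as_nanos() / self.cooldown.as_nanos()) as u64;
        if periods > 0 {
            self.available = (self.available + periods).min(self.capacity);
            self.last_refill += self.cooldown * periods as u32;
        }
        if self.available > 0 {
            self.available -= 1;
            true
        } else {
            false
        }
    }
}

fn main() {
    // Global pool with the defaults above: a 100,000-slot burst, one slot back every 50 µs.
    let mut pool = RateLimitPool::new(100_000, Duration::from_nanos(50_000));
    assert!(pool.try_acquire());
}
```

Per-IP and per-API-key limiting would keep one such pool per client, for example keyed in a HashMap, using the ip/api_key pool and cooldown values above.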
@@ -1,6 +1,6 @@
 [package]
 name = "dump"
-version = "1.0.1"
+version = "1.0.0"
 edition = "2021"

 [dependencies]
@@ -198,7 +198,7 @@ impl From<KindWithContent> for KindDump {
 #[cfg(test)]
 pub(crate) mod test {
     use std::fs::File;
-    use std::io::Seek;
+    use std::io::{Seek, SeekFrom};
     use std::str::FromStr;

     use big_s::S;

@@ -410,7 +410,7 @@ pub(crate) mod test {
     // create the dump
     let mut file = tempfile::tempfile().unwrap();
     dump.persist_to(&mut file).unwrap();
-    file.rewind().unwrap();
+    file.seek(SeekFrom::Start(0)).unwrap();

     file
 }
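Both spellings in this hunk are equivalent: `Seek::rewind` is a convenience wrapper that seeks back to offset 0, so the branch simply predates its use here. A quick illustration, assuming the `tempfile` crate as in the test above:

```rust
use std::io::{Read, Seek, SeekFrom, Write};

fn main() -> std::io::Result<()> {
    let mut file = tempfile::tempfile()?; // same crate the test above uses
    file.write_all(b"dump contents")?;

    file.seek(SeekFrom::Start(0))?; // the older spelling on the branch...
    file.rewind()?;                 // ...and the v1.0.1 shorthand; both land at offset 0

    let mut buf = String::new();
    file.read_to_string(&mut buf)?;
    assert_eq!(buf, "dump contents");
    Ok(())
}
```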
@@ -1,5 +1,5 @@
 ---
-source: dump/src/reader/v2/mod.rs
+source: dump/src/reader/compat/v1_to_v2.rs
 expression: spells.settings().unwrap()
 ---
 {

@@ -10,13 +10,11 @@ expression: spells.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
   "sortableAttributes": [],
   "rankingRules": [
-    "words",
     "typo",
+    "words",
     "proximity",
     "attribute",
-    "sort",
     "exactness"
   ],
   "stopWords": [],

@@ -10,7 +10,6 @@ expression: products.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
   "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",

@@ -13,17 +13,13 @@ expression: movies.settings().unwrap()
     "genres",
     "id"
   ],
-  "sortableAttributes": [
-    "genres",
-    "id"
-  ],
   "rankingRules": [
     "typo",
     "words",
     "proximity",
     "attribute",
     "exactness",
-    "release_date:asc"
+    "asc(release_date)"
   ],
   "stopWords": [],
   "synonyms": {},

@@ -1,5 +1,5 @@
 ---
-source: dump/src/reader/v2/mod.rs
+source: dump/src/reader/compat/v1_to_v2.rs
 expression: movies.settings().unwrap()
 ---
 {

@@ -13,16 +13,13 @@ expression: movies.settings().unwrap()
     "genres",
     "id"
   ],
-  "sortableAttributes": [
-    "release_date"
-  ],
   "rankingRules": [
-    "words",
     "typo",
+    "words",
     "proximity",
     "attribute",
     "exactness",
-    "release_date:asc"
+    "asc(release_date)"
   ],
   "stopWords": [],
   "synonyms": {},

@@ -10,7 +10,6 @@ expression: spells.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
   "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",
@@ -1,3 +1,4 @@
+use std::collections::BTreeSet;
 use std::str::FromStr;

 use super::v2_to_v3::CompatV2ToV3;

@@ -101,15 +102,14 @@ impl CompatIndexV1ToV2 {

 impl From<v1::settings::Settings> for v2::Settings<v2::Unchecked> {
     fn from(source: v1::settings::Settings) -> Self {
-        Self {
-            displayed_attributes: option_to_setting(source.displayed_attributes)
-                .map(|displayed| displayed.into_iter().collect()),
-            searchable_attributes: option_to_setting(source.searchable_attributes),
-            filterable_attributes: option_to_setting(source.attributes_for_faceting.clone())
-                .map(|filterable| filterable.into_iter().collect()),
-            sortable_attributes: option_to_setting(source.attributes_for_faceting)
-                .map(|sortable| sortable.into_iter().collect()),
-            ranking_rules: option_to_setting(source.ranking_rules).map(|ranking_rules| {
+        let displayed_attributes = source
+            .displayed_attributes
+            .map(|opt| opt.map(|displayed_attributes| displayed_attributes.into_iter().collect()));
+        let attributes_for_faceting = source.attributes_for_faceting.map(|opt| {
+            opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect())
+        });
+        let ranking_rules = source.ranking_rules.map(|opt| {
+            opt.map(|ranking_rules| {
                 ranking_rules
                     .into_iter()
                     .filter_map(|ranking_rule| {

@@ -119,33 +119,26 @@ impl From<v1::settings::Settings> for v2::Settings<v2::Unchecked> {
                             ranking_rule.into();
                         criterion.as_ref().map(ToString::to_string)
                     }
-                    Err(()) => {
-                        log::warn!(
-                            "Could not import the following ranking rule: `{}`.",
-                            ranking_rule
-                        );
-                        None
-                    }
+                    Err(()) => Some(ranking_rule),
                 }
             })
             .collect()
-            }),
-            stop_words: option_to_setting(source.stop_words),
-            synonyms: option_to_setting(source.synonyms),
-            distinct_attribute: option_to_setting(source.distinct_attribute),
+            })
+        });
+
+        Self {
+            displayed_attributes,
+            searchable_attributes: source.searchable_attributes,
+            filterable_attributes: attributes_for_faceting,
+            ranking_rules,
+            stop_words: source.stop_words,
+            synonyms: source.synonyms,
+            distinct_attribute: source.distinct_attribute,
             _kind: std::marker::PhantomData,
         }
     }
 }

-fn option_to_setting<T>(opt: Option<Option<T>>) -> v2::Setting<T> {
-    match opt {
-        Some(Some(t)) => v2::Setting::Set(t),
-        None => v2::Setting::NotSet,
-        Some(None) => v2::Setting::Reset,
-    }
-}
-
 impl From<v1::update::UpdateStatus> for Option<v2::updates::UpdateStatus> {
     fn from(source: v1::update::UpdateStatus) -> Self {
         use v1::update::UpdateStatus as UpdateStatusV1;

@@ -258,27 +251,38 @@ impl From<v1::update::UpdateType> for Option<v2::updates::UpdateMeta> {

 impl From<v1::settings::SettingsUpdate> for v2::Settings<v2::Unchecked> {
     fn from(source: v1::settings::SettingsUpdate) -> Self {
-        let ranking_rules = v2::Setting::from(source.ranking_rules);
+        let displayed_attributes: Option<Option<BTreeSet<String>>> =
+            source.displayed_attributes.into();
+
+        let attributes_for_faceting: Option<Option<Vec<String>>> =
+            source.attributes_for_faceting.into();
+
+        let ranking_rules: Option<Option<Vec<v1::settings::RankingRule>>> =
+            source.ranking_rules.into();
+
         // go from the concrete types of v1 (RankingRule) to the concrete type of v2 (Criterion),
         // and then back to string as this is what the settings manipulate
-        let ranking_rules = ranking_rules.map(|ranking_rules| {
-            ranking_rules
-                .into_iter()
-                // filter out the WordsPosition ranking rule that exists in v1 but not v2
-                .filter_map(Option::<v2::settings::Criterion>::from)
-                .map(|criterion| criterion.to_string())
-                .collect()
+        let ranking_rules = ranking_rules.map(|opt| {
+            opt.map(|ranking_rules| {
+                ranking_rules
+                    .into_iter()
+                    // filter out the WordsPosition ranking rule that exists in v1 but not v2
+                    .filter_map(|ranking_rule| {
+                        Option::<v2::settings::Criterion>::from(ranking_rule)
+                    })
+                    .map(|criterion| criterion.to_string())
+                    .collect()
+            })
         });

         Self {
-            displayed_attributes: v2::Setting::from(source.displayed_attributes)
-                .map(|displayed_attributes| displayed_attributes.into_iter().collect()),
+            displayed_attributes: displayed_attributes.map(|opt| {
+                opt.map(|displayed_attributes| displayed_attributes.into_iter().collect())
+            }),
             searchable_attributes: source.searchable_attributes.into(),
-            filterable_attributes: v2::Setting::from(source.attributes_for_faceting.clone())
-                .map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()),
-            sortable_attributes: v2::Setting::from(source.attributes_for_faceting)
-                .map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()),
+            filterable_attributes: attributes_for_faceting.map(|opt| {
+                opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect())
+            }),
             ranking_rules,
             stop_words: source.stop_words.into(),
             synonyms: source.synonyms.into(),

@@ -310,12 +314,12 @@ impl From<v1::settings::RankingRule> for Option<v2::settings::Criterion> {
     }
 }

-impl<T> From<v1::settings::UpdateState<T>> for v2::Setting<T> {
+impl<T> From<v1::settings::UpdateState<T>> for Option<Option<T>> {
     fn from(source: v1::settings::UpdateState<T>) -> Self {
         match source {
-            v1::settings::UpdateState::Update(new_value) => v2::Setting::Set(new_value),
-            v1::settings::UpdateState::Clear => v2::Setting::Reset,
-            v1::settings::UpdateState::Nothing => v2::Setting::NotSet,
+            v1::settings::UpdateState::Update(new_value) => Some(Some(new_value)),
+            v1::settings::UpdateState::Clear => Some(None),
+            v1::settings::UpdateState::Nothing => None,
         }
     }
 }

@@ -348,7 +352,7 @@ pub(crate) mod test {
     // tasks
     let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
     let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2298010973ee98cf4670787314176a3a");
+    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"ad6245d98d1a8e30535f3339a9a8d223");
     assert_eq!(update_files.len(), 9);
     assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dumps v1
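The `Option<Option<T>>` encoding that this branch uses in place of `Setting<T>` carries the same three states, which is why the `option_to_setting` helper on the v1.0.1 side of the diff is a pure one-to-one match. A standalone sketch of the correspondence (the `Setting` enum here is a local stand-in for the dump reader's type):

```rust
// Local stand-in for the dump reader's Setting<T> enum.
#[derive(Debug, PartialEq)]
enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}

fn option_to_setting<T>(opt: Option<Option<T>>) -> Setting<T> {
    match opt {
        Some(Some(t)) => Setting::Set(t), // a value was provided
        Some(None) => Setting::Reset,     // explicitly cleared (a JSON null)
        None => Setting::NotSet,          // field absent entirely
    }
}

fn main() {
    assert_eq!(option_to_setting(Some(Some(1))), Setting::Set(1));
    assert_eq!(option_to_setting::<u8>(Some(None)), Setting::Reset);
    assert_eq!(option_to_setting::<u8>(None), Setting::NotSet);
}
```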
@@ -361,29 +361,28 @@ impl From<String> for v3::Code {
     }
 }

-impl<A> From<v2::Setting<A>> for v3::Setting<A> {
-    fn from(setting: v2::Setting<A>) -> Self {
-        match setting {
-            v2::settings::Setting::Set(a) => v3::settings::Setting::Set(a),
-            v2::settings::Setting::Reset => v3::settings::Setting::Reset,
-            v2::settings::Setting::NotSet => v3::settings::Setting::NotSet,
-        }
+fn option_to_setting<T>(opt: Option<Option<T>>) -> v3::Setting<T> {
+    match opt {
+        Some(Some(t)) => v3::Setting::Set(t),
+        None => v3::Setting::NotSet,
+        Some(None) => v3::Setting::Reset,
     }
 }

 impl<T> From<v2::Settings<T>> for v3::Settings<v3::Unchecked> {
     fn from(settings: v2::Settings<T>) -> Self {
         v3::Settings {
-            displayed_attributes: settings.displayed_attributes.into(),
-            searchable_attributes: settings.searchable_attributes.into(),
-            filterable_attributes: settings.filterable_attributes.into(),
-            sortable_attributes: settings.sortable_attributes.into(),
-            ranking_rules: v3::Setting::from(settings.ranking_rules).map(|criteria| {
+            displayed_attributes: option_to_setting(settings.displayed_attributes),
+            searchable_attributes: option_to_setting(settings.searchable_attributes),
+            filterable_attributes: option_to_setting(settings.filterable_attributes)
+                .map(|f| f.into_iter().collect()),
+            sortable_attributes: v3::Setting::NotSet,
+            ranking_rules: option_to_setting(settings.ranking_rules).map(|criteria| {
                 criteria.into_iter().map(|criterion| patch_ranking_rules(&criterion)).collect()
             }),
-            stop_words: settings.stop_words.into(),
-            synonyms: settings.synonyms.into(),
-            distinct_attribute: settings.distinct_attribute.into(),
+            stop_words: option_to_setting(settings.stop_words),
+            synonyms: option_to_setting(settings.synonyms),
+            distinct_attribute: option_to_setting(settings.distinct_attribute),
             _kind: std::marker::PhantomData,
         }
     }

@@ -395,7 +394,6 @@ fn patch_ranking_rules(ranking_rule: &str) -> String {
     Ok(v2::settings::Criterion::Typo) => String::from("typo"),
     Ok(v2::settings::Criterion::Proximity) => String::from("proximity"),
     Ok(v2::settings::Criterion::Attribute) => String::from("attribute"),
-    Ok(v2::settings::Criterion::Sort) => String::from("sort"),
     Ok(v2::settings::Criterion::Exactness) => String::from("exactness"),
     Ok(v2::settings::Criterion::Asc(name)) => format!("{name}:asc"),
     Ok(v2::settings::Criterion::Desc(name)) => format!("{name}:desc"),
@@ -1,5 +1,3 @@
-use std::str::FromStr;
-
 use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
 use crate::reader::{v5, v6, Document, UpdateFile};
 use crate::Result;

@@ -256,50 +254,51 @@ impl<T> From<v5::Setting<T>> for v6::Setting<T> {
 impl From<v5::ResponseError> for v6::ResponseError {
     fn from(error: v5::ResponseError) -> Self {
         let code = match error.error_code.as_ref() {
-            "index_creation_failed" => v6::Code::IndexCreationFailed,
+            "index_creation_failed" => v6::Code::CreateIndex,
             "index_already_exists" => v6::Code::IndexAlreadyExists,
             "index_not_found" => v6::Code::IndexNotFound,
             "invalid_index_uid" => v6::Code::InvalidIndexUid,
-            "invalid_min_word_length_for_typo" => v6::Code::InvalidSettingsTypoTolerance,
+            "invalid_min_word_length_for_typo" => v6::Code::InvalidMinWordLengthForTypo,
             "invalid_state" => v6::Code::InvalidState,
-            "primary_key_inference_failed" => v6::Code::IndexPrimaryKeyNoCandidateFound,
-            "index_primary_key_already_exists" => v6::Code::IndexPrimaryKeyAlreadyExists,
+            "primary_key_inference_failed" => v6::Code::MissingPrimaryKey,
+            "index_primary_key_already_exists" => v6::Code::PrimaryKeyAlreadyPresent,
             "max_fields_limit_exceeded" => v6::Code::MaxFieldsLimitExceeded,
             "missing_document_id" => v6::Code::MissingDocumentId,
             "invalid_document_id" => v6::Code::InvalidDocumentId,
-            "invalid_filter" => v6::Code::InvalidSettingsFilterableAttributes,
-            "invalid_sort" => v6::Code::InvalidSettingsSortableAttributes,
+            "invalid_filter" => v6::Code::Filter,
+            "invalid_sort" => v6::Code::Sort,
             "bad_parameter" => v6::Code::BadParameter,
             "bad_request" => v6::Code::BadRequest,
             "database_size_limit_reached" => v6::Code::DatabaseSizeLimitReached,
             "document_not_found" => v6::Code::DocumentNotFound,
             "internal" => v6::Code::Internal,
-            "invalid_geo_field" => v6::Code::InvalidDocumentGeoField,
-            "invalid_ranking_rule" => v6::Code::InvalidSettingsRankingRules,
-            "invalid_store_file" => v6::Code::InvalidStoreFile,
-            "invalid_api_key" => v6::Code::InvalidApiKey,
+            "invalid_geo_field" => v6::Code::InvalidGeoField,
+            "invalid_ranking_rule" => v6::Code::InvalidRankingRule,
+            "invalid_store_file" => v6::Code::InvalidStore,
+            "invalid_api_key" => v6::Code::InvalidToken,
             "missing_authorization_header" => v6::Code::MissingAuthorizationHeader,
             "no_space_left_on_device" => v6::Code::NoSpaceLeftOnDevice,
             "dump_not_found" => v6::Code::DumpNotFound,
             "task_not_found" => v6::Code::TaskNotFound,
             "payload_too_large" => v6::Code::PayloadTooLarge,
-            "unretrievable_document" => v6::Code::UnretrievableDocument,
+            "unretrievable_document" => v6::Code::RetrieveDocument,
             "search_error" => v6::Code::SearchDocuments,
             "unsupported_media_type" => v6::Code::UnsupportedMediaType,
-            "dump_already_processing" => v6::Code::DumpAlreadyProcessing,
+            "dump_already_processing" => v6::Code::DumpAlreadyInProgress,
             "dump_process_failed" => v6::Code::DumpProcessFailed,
             "invalid_content_type" => v6::Code::InvalidContentType,
             "missing_content_type" => v6::Code::MissingContentType,
             "malformed_payload" => v6::Code::MalformedPayload,
             "missing_payload" => v6::Code::MissingPayload,
             "api_key_not_found" => v6::Code::ApiKeyNotFound,
-            "missing_parameter" => v6::Code::BadRequest,
+            "missing_parameter" => v6::Code::MissingParameter,
             "invalid_api_key_actions" => v6::Code::InvalidApiKeyActions,
             "invalid_api_key_indexes" => v6::Code::InvalidApiKeyIndexes,
             "invalid_api_key_expires_at" => v6::Code::InvalidApiKeyExpiresAt,
             "invalid_api_key_description" => v6::Code::InvalidApiKeyDescription,
             "invalid_api_key_name" => v6::Code::InvalidApiKeyName,
             "invalid_api_key_uid" => v6::Code::InvalidApiKeyUid,
-            "immutable_field" => v6::Code::BadRequest,
+            "immutable_field" => v6::Code::ImmutableField,
             "api_key_already_exists" => v6::Code::ApiKeyAlreadyExists,
             other => {
                 log::warn!("Unknown error code {}", other);

@@ -317,26 +316,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
         searchable_attributes: settings.searchable_attributes.into(),
         filterable_attributes: settings.filterable_attributes.into(),
         sortable_attributes: settings.sortable_attributes.into(),
-        ranking_rules: {
-            match settings.ranking_rules {
-                v5::settings::Setting::Set(ranking_rules) => {
-                    let mut new_ranking_rules = vec![];
-                    for rule in ranking_rules {
-                        match v6::RankingRuleView::from_str(&rule) {
-                            Ok(new_rule) => {
-                                new_ranking_rules.push(new_rule);
-                            }
-                            Err(_) => {
-                                log::warn!("Error while importing settings. The ranking rule `{rule}` does not exist anymore.")
-                            }
-                        }
-                    }
-                    v6::Setting::Set(new_ranking_rules)
-                }
-                v5::settings::Setting::Reset => v6::Setting::Reset,
-                v5::settings::Setting::NotSet => v6::Setting::NotSet,
-            }
-        },
+        ranking_rules: settings.ranking_rules.into(),
         stop_words: settings.stop_words.into(),
         synonyms: settings.synonyms.into(),
         distinct_attribute: settings.distinct_attribute.into(),

@@ -439,7 +419,7 @@ pub(crate) mod test {
     // tasks
     let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
     let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c");
+    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5");
     assert_eq!(update_files.len(), 22);
     assert!(update_files[0].is_none()); // the dump creation
     assert!(update_files[1].is_some()); // the enqueued document addition
@@ -201,7 +201,7 @@ pub(crate) mod test {
     // tasks
     let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
     let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c");
+    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5");
     assert_eq!(update_files.len(), 22);
     assert!(update_files[0].is_none()); // the dump creation
     assert!(update_files[1].is_some()); // the enqueued document addition

@@ -222,12 +222,12 @@ pub(crate) mod test {
     assert!(indexes.is_empty());

     // products
-    insta::assert_json_snapshot!(products.metadata(), @r###"
+    insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
     {
       "uid": "products",
       "primaryKey": "sku",
-      "createdAt": "2022-10-04T15:51:35.939396731Z",
-      "updatedAt": "2022-10-04T15:55:01.897325373Z"
+      "createdAt": "[now]",
+      "updatedAt": "[now]"
     }
     "###);

@@ -237,12 +237,12 @@ pub(crate) mod test {
     meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");

     // movies
-    insta::assert_json_snapshot!(movies.metadata(), @r###"
+    insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
     {
       "uid": "movies",
       "primaryKey": "id",
-      "createdAt": "2022-10-04T15:51:35.291992167Z",
-      "updatedAt": "2022-10-04T15:55:10.33561842Z"
+      "createdAt": "[now]",
+      "updatedAt": "[now]"
     }
     "###);

@@ -252,12 +252,12 @@ pub(crate) mod test {
     meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");

     // spells
-    insta::assert_json_snapshot!(spells.metadata(), @r###"
+    insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
     {
       "uid": "dnd_spells",
       "primaryKey": "index",
-      "createdAt": "2022-10-04T15:51:37.381094632Z",
-      "updatedAt": "2022-10-04T15:55:02.394503431Z"
+      "createdAt": "[now]",
+      "updatedAt": "[now]"
     }
     "###);

@@ -279,7 +279,7 @@ pub(crate) mod test {
     // tasks
     let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
     let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"c2445ddd1785528b80f2ba534d3bd00c");
+    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"491e244a80a19fe2a900b809d310c24a");
     assert_eq!(update_files.len(), 10);
     assert!(update_files[0].is_some()); // the enqueued document addition
     assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed

@@ -356,7 +356,7 @@ pub(crate) mod test {
     // tasks
     let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
     let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"cd12efd308fe3ed226356a727ab42ed3");
+    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"7cacce2e21702be696b866808c726946");
     assert_eq!(update_files.len(), 10);
     assert!(update_files[0].is_some()); // the enqueued document addition
     assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed

@@ -449,7 +449,7 @@ pub(crate) mod test {
     // tasks
     let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
     let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"bc616290adfe7d09a624cf6065ca9069");
+    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6cabec4e252b74c8f3a2c8517622e85f");
    assert_eq!(update_files.len(), 9);
    assert!(update_files[0].is_some()); // the enqueued document addition
    assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed

@@ -530,82 +530,6 @@ pub(crate) mod test {
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }

-    #[test]
-    fn import_dump_v2_from_meilisearch_v0_22_0_issue_3435() {
-        let dump = File::open("tests/assets/v2-v0.22.0.dump").unwrap();
-        let mut dump = DumpReader::open(dump).unwrap();
-
-        // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
-        assert_eq!(dump.instance_uid().unwrap(), None);
-
-        // tasks
-        let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2db37756d8af1fb7623436b76e8956a6");
-        assert_eq!(update_files.len(), 8);
-        assert!(update_files[0..].iter().all(|u| u.is_none())); // everything already processed
-
-        // keys
-        let keys = dump.keys().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(keys), @"d751713988987e9331980363e24189ce");
-
-        // indexes
-        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        // the index are not ordered in any way by default
-        indexes.sort_by_key(|index| index.metadata().uid.to_string());
-
-        let mut products = indexes.pop().unwrap();
-        let mut movies = indexes.pop().unwrap();
-        let mut spells = indexes.pop().unwrap();
-        assert!(indexes.is_empty());
-
-        // products
-        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
-        {
-          "uid": "products",
-          "primaryKey": "sku",
-          "createdAt": "[now]",
-          "updatedAt": "[now]"
-        }
-        "###);
-
-        insta::assert_json_snapshot!(products.settings().unwrap());
-        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        assert_eq!(documents.len(), 10);
-        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
-
-        // movies
-        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
-        {
-          "uid": "movies",
-          "primaryKey": "id",
-          "createdAt": "[now]",
-          "updatedAt": "[now]"
-        }
-        "###);
-
-        insta::assert_json_snapshot!(movies.settings().unwrap());
-        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        assert_eq!(documents.len(), 10);
-        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");
-
-        // spells
-        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
-        {
-          "uid": "dnd_spells",
-          "primaryKey": "index",
-          "createdAt": "[now]",
-          "updatedAt": "[now]"
-        }
-        "###);
-
-        insta::assert_json_snapshot!(spells.settings().unwrap());
-        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        assert_eq!(documents.len(), 10);
-        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
-    }

     #[test]
     fn import_dump_v1() {
         let dump = File::open("tests/assets/v1.dump").unwrap();

@@ -618,7 +542,7 @@ pub(crate) mod test {
     // tasks
     let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
     let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"8df6eab075a44b3c1af6b726f9fd9a43");
+    meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"b3e3652bfc10a76670be157d2507d761");
    assert_eq!(update_files.len(), 9);
    assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dump v1
@@ -13,12 +13,9 @@ expression: movies.settings().unwrap()
     "genres",
     "id"
   ],
-  "sortableAttributes": [
-    "release_date"
-  ],
   "rankingRules": [
-    "words",
     "typo",
+    "words",
     "proximity",
     "attribute",
     "exactness",

@@ -10,7 +10,6 @@ expression: spells.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
   "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",

@@ -10,13 +10,11 @@ expression: spells.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
   "sortableAttributes": [],
   "rankingRules": [
-    "words",
     "typo",
+    "words",
     "proximity",
     "attribute",
-    "sort",
     "exactness"
   ],
   "stopWords": [],

@@ -10,7 +10,6 @@ expression: products.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
   "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",

@@ -10,13 +10,11 @@ expression: products.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
   "sortableAttributes": [],
   "rankingRules": [
-    "words",
     "typo",
+    "words",
     "proximity",
     "attribute",
-    "sort",
     "exactness"
   ],
   "stopWords": [],

@@ -13,10 +13,6 @@ expression: movies.settings().unwrap()
     "genres",
     "id"
   ],
-  "sortableAttributes": [
-    "genres",
-    "id"
-  ],
   "rankingRules": [
     "typo",
     "words",
@@ -0,0 +1,24 @@
+---
+source: dump/src/reader/v1/mod.rs
+expression: dnd_spells.settings().unwrap()
+---
+{
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "wordsPosition",
+    "exactness"
+  ],
+  "distinctAttribute": null,
+  "searchableAttributes": [
+    "*"
+  ],
+  "displayedAttributes": [
+    "*"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "attributesForFaceting": []
+}

@@ -0,0 +1,28 @@
+---
+source: dump/src/reader/v1/mod.rs
+expression: movies.settings().unwrap()
+---
+{
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "wordsPosition",
+    "exactness",
+    "asc(release_date)"
+  ],
+  "distinctAttribute": null,
+  "searchableAttributes": [
+    "*"
+  ],
+  "displayedAttributes": [
+    "*"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "attributesForFaceting": [
+    "id",
+    "genres"
+  ]
+}

@@ -0,0 +1,28 @@
+---
+source: dump/src/reader/v1/mod.rs
+expression: movies.settings().unwrap()
+---
+{
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "wordsPosition",
+    "exactness",
+    "asc(release_date)"
+  ],
+  "distinctAttribute": null,
+  "searchableAttributes": [
+    "*"
+  ],
+  "displayedAttributes": [
+    "*"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "attributesForFaceting": [
+    "id",
+    "genres"
+  ]
+}

@@ -0,0 +1,24 @@
+---
+source: dump/src/reader/v1/mod.rs
+expression: dnd_spells.settings().unwrap()
+---
+{
+  "rankingRules": [
+    "typo",
+    "words",
+    "proximity",
+    "attribute",
+    "wordsPosition",
+    "exactness"
+  ],
+  "distinctAttribute": null,
+  "searchableAttributes": [
+    "*"
+  ],
+  "displayedAttributes": [
+    "*"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "attributesForFaceting": []
+}
@@ -41,7 +41,6 @@ use super::Document;
 use crate::{IndexMetadata, Result, Version};

 pub type Settings<T> = settings::Settings<T>;
-pub type Setting<T> = settings::Setting<T>;
 pub type Checked = settings::Checked;
 pub type Unchecked = settings::Unchecked;

@@ -307,81 +306,4 @@ pub(crate) mod test {
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
-
-    #[test]
-    fn read_dump_v2_from_meilisearch_v0_22_0_issue_3435() {
-        let dump = File::open("tests/assets/v2-v0.22.0.dump").unwrap();
-        let dir = TempDir::new().unwrap();
-        let mut dump = BufReader::new(dump);
-        let gz = GzDecoder::new(&mut dump);
-        let mut archive = tar::Archive::new(gz);
-        archive.unpack(dir.path()).unwrap();
-
-        let mut dump = V2Reader::open(dir).unwrap();
-
-        // top level infos
-        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
-
-        // tasks
-        let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
-        let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"aca8ba13046272664eb3ea2da3031633");
-        assert_eq!(update_files.len(), 8);
-        assert!(update_files[0..].iter().all(|u| u.is_none())); // everything has already been processed
-
-        // indexes
-        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        // the index are not ordered in any way by default
-        indexes.sort_by_key(|index| index.metadata().uid.to_string());
-
-        let mut products = indexes.pop().unwrap();
-        let mut movies = indexes.pop().unwrap();
-        let mut spells = indexes.pop().unwrap();
-        assert!(indexes.is_empty());
-
-        // products
-        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
-        {
-          "uid": "products",
-          "primaryKey": "sku",
-          "createdAt": "[now]",
-          "updatedAt": "[now]"
-        }
-        "###);
-
-        insta::assert_json_snapshot!(products.settings().unwrap());
-        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        assert_eq!(documents.len(), 10);
-        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
-
-        // movies
-        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
-        {
-          "uid": "movies",
-          "primaryKey": "id",
-          "createdAt": "[now]",
-          "updatedAt": "[now]"
-        }
-        "###);
-
-        insta::assert_json_snapshot!(movies.settings().unwrap());
-        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        assert_eq!(documents.len(), 10);
-        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");
-
-        // spells
-        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
-        {
-          "uid": "dnd_spells",
-          "primaryKey": "index",
-          "createdAt": "[now]",
-          "updatedAt": "[now]"
-        }
-        "###);
-
-        insta::assert_json_snapshot!(spells.settings().unwrap());
-        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
-        assert_eq!(documents.len(), 10);
-        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
-    }
 }
@@ -1,33 +1,35 @@
 use std::collections::{BTreeMap, BTreeSet};
-use std::fmt;
+use std::fmt::Display;
 use std::marker::PhantomData;
 use std::str::FromStr;

+use once_cell::sync::Lazy;
+use regex::Regex;
 use serde::{Deserialize, Deserializer};

 #[cfg(test)]
 fn serialize_with_wildcard<S>(
-    field: &Setting<Vec<String>>,
+    field: &Option<Option<Vec<String>>>,
     s: S,
 ) -> std::result::Result<S::Ok, S::Error>
 where
     S: serde::Serializer,
 {
-    use serde::Serialize;
-
     let wildcard = vec!["*".to_string()];
-    match field {
-        Setting::Set(value) => Some(value),
-        Setting::Reset => Some(&wildcard),
-        Setting::NotSet => None,
-    }
-    .serialize(s)
+    s.serialize_some(&field.as_ref().map(|o| o.as_ref().unwrap_or(&wildcard)))
 }

+fn deserialize_some<'de, T, D>(deserializer: D) -> std::result::Result<Option<T>, D::Error>
+where
+    T: Deserialize<'de>,
+    D: Deserializer<'de>,
+{
+    Deserialize::deserialize(deserializer).map(Some)
+}
+
 #[derive(Clone, Default, Debug)]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub struct Checked;

 #[derive(Clone, Default, Debug, Deserialize)]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub struct Unchecked;
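The `deserialize_some` helper combined with `#[serde(default)]` is what lets `Option<Option<T>>` tell an absent field apart from an explicit `null`: exactly the `NotSet` versus `Reset` distinction the `Setting<T>` enum expresses on the other side of the diff. A self-contained sketch of the pattern, assuming serde with the derive feature plus serde_json, and a hypothetical field name:

```rust
use serde::{Deserialize, Deserializer};

fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where
    T: Deserialize<'de>,
    D: Deserializer<'de>,
{
    // Only called when the field is present; wraps even `null` in `Some`.
    Deserialize::deserialize(deserializer).map(Some)
}

#[derive(Deserialize, Debug)]
struct Settings {
    // `default` covers the absent case (outer None);
    // `deserialize_some` turns a present `null` into Some(None).
    #[serde(default, deserialize_with = "deserialize_some")]
    distinct_attribute: Option<Option<String>>,
}

fn main() {
    let absent: Settings = serde_json::from_str("{}").unwrap();
    let cleared: Settings = serde_json::from_str(r#"{"distinct_attribute": null}"#).unwrap();
    let set: Settings = serde_json::from_str(r#"{"distinct_attribute": "sku"}"#).unwrap();

    assert_eq!(absent.distinct_attribute, None);                  // field missing: NotSet
    assert_eq!(cleared.distinct_attribute, Some(None));           // explicit null: Reset
    assert_eq!(set.distinct_attribute, Some(Some("sku".into()))); // value: Set
}
```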
@@ -40,54 +42,75 @@ pub struct Unchecked;
 pub struct Settings<T> {
     #[serde(
         default,
+        deserialize_with = "deserialize_some",
         serialize_with = "serialize_with_wildcard",
-        skip_serializing_if = "Setting::is_not_set"
+        skip_serializing_if = "Option::is_none"
     )]
-    pub displayed_attributes: Setting<Vec<String>>,
+    pub displayed_attributes: Option<Option<Vec<String>>>,

     #[serde(
         default,
+        deserialize_with = "deserialize_some",
         serialize_with = "serialize_with_wildcard",
-        skip_serializing_if = "Setting::is_not_set"
+        skip_serializing_if = "Option::is_none"
     )]
-    pub searchable_attributes: Setting<Vec<String>>,
+    pub searchable_attributes: Option<Option<Vec<String>>>,

-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    pub filterable_attributes: Setting<BTreeSet<String>>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    pub sortable_attributes: Setting<BTreeSet<String>>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    pub ranking_rules: Setting<Vec<String>>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    pub stop_words: Setting<BTreeSet<String>>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
-    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
-    pub distinct_attribute: Setting<String>,
+    #[serde(
+        default,
+        deserialize_with = "deserialize_some",
+        skip_serializing_if = "Option::is_none"
+    )]
+    pub filterable_attributes: Option<Option<BTreeSet<String>>>,
+    #[serde(
+        default,
+        deserialize_with = "deserialize_some",
+        skip_serializing_if = "Option::is_none"
+    )]
+    pub ranking_rules: Option<Option<Vec<String>>>,
+    #[serde(
+        default,
+        deserialize_with = "deserialize_some",
+        skip_serializing_if = "Option::is_none"
+    )]
+    pub stop_words: Option<Option<BTreeSet<String>>>,
+    #[serde(
+        default,
+        deserialize_with = "deserialize_some",
+        skip_serializing_if = "Option::is_none"
+    )]
+    pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
+    #[serde(
+        default,
+        deserialize_with = "deserialize_some",
+        skip_serializing_if = "Option::is_none"
+    )]
+    pub distinct_attribute: Option<Option<String>>,

     #[serde(skip)]
     pub _kind: PhantomData<T>,
 }

 impl Settings<Unchecked> {
-    pub fn check(self) -> Settings<Checked> {
-        let displayed_attributes = match self.displayed_attributes {
-            Setting::Set(fields) => {
+    pub fn check(mut self) -> Settings<Checked> {
+        let displayed_attributes = match self.displayed_attributes.take() {
+            Some(Some(fields)) => {
                 if fields.iter().any(|f| f == "*") {
-                    Setting::Reset
+                    Some(None)
                 } else {
-                    Setting::Set(fields)
+                    Some(Some(fields))
                 }
             }
             otherwise => otherwise,
         };

-        let searchable_attributes = match self.searchable_attributes {
-            Setting::Set(fields) => {
+        let searchable_attributes = match self.searchable_attributes.take() {
+            Some(Some(fields)) => {
                 if fields.iter().any(|f| f == "*") {
-                    Setting::Reset
+                    Some(None)
                 } else {
-                    Setting::Set(fields)
+                    Some(Some(fields))
                 }
             }
             otherwise => otherwise,

@@ -97,7 +120,6 @@ impl Settings<Unchecked> {
             displayed_attributes,
             searchable_attributes,
             filterable_attributes: self.filterable_attributes,
-            sortable_attributes: self.sortable_attributes,
             ranking_rules: self.ranking_rules,
             stop_words: self.stop_words,
             synonyms: self.synonyms,

@@ -107,61 +129,10 @@ impl Settings<Unchecked> {
     }
 }

-#[derive(Debug, Clone, PartialEq)]
-pub enum Setting<T> {
-    Set(T),
-    Reset,
-    NotSet,
-}
-
-impl<T> Default for Setting<T> {
-    fn default() -> Self {
-        Self::NotSet
-    }
-}
-
-impl<T> Setting<T> {
-    pub const fn is_not_set(&self) -> bool {
-        matches!(self, Self::NotSet)
-    }
-
-    pub fn map<A>(self, f: fn(T) -> A) -> Setting<A> {
-        match self {
-            Setting::Set(a) => Setting::Set(f(a)),
-            Setting::Reset => Setting::Reset,
-            Setting::NotSet => Setting::NotSet,
-        }
-    }
-}
-
-#[cfg(test)]
-impl<T: serde::Serialize> serde::Serialize for Setting<T> {
-    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
-    where
-        S: serde::Serializer,
-    {
-        match self {
-            Self::Set(value) => Some(value),
-            // Usually not_set isn't serialized by setting skip_serializing_if field attribute
-            Self::NotSet | Self::Reset => None,
-        }
-        .serialize(serializer)
-    }
-}
-
-impl<'de, T: Deserialize<'de>> Deserialize<'de> for Setting<T> {
-    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        Deserialize::deserialize(deserializer).map(|x| match x {
-            Some(x) => Self::Set(x),
-            None => Self::Reset, // Reset is forced by sending null value
-        })
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
+static ASC_DESC_REGEX: Lazy<Regex> =
+    Lazy::new(|| Regex::new(r#"(asc|desc)\(([\w_-]+)\)"#).unwrap());
+
+#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
 pub enum Criterion {
     /// Sorted by decreasing number of matched query terms.
     /// Query words at the front of an attribute is considered better than if it was at the back.

@@ -171,11 +142,8 @@ pub enum Criterion {
     /// Sorted by increasing distance between matched query terms.
     Proximity,
     /// Documents with quey words contained in more important
-    /// attributes are considered better.
+    /// attributes are considred better.
     Attribute,
-    /// Dynamically sort at query time the documents. None, one or multiple Asc/Desc sortable
-    /// attributes can be used in place of this criterion at query time.
-    Sort,
     /// Sorted by the similarity of the matched words with the query words.
     Exactness,
     /// Sorted by the increasing value of the field specified.

@@ -184,86 +152,40 @@ pub enum Criterion {
     Desc(String),
 }

-impl Criterion {
-    /// Returns the field name parameter of this criterion.
-    pub fn field_name(&self) -> Option<&str> {
-        match self {
-            Criterion::Asc(name) | Criterion::Desc(name) => Some(name),
-            _otherwise => None,
-        }
-    }
-}
-
 impl FromStr for Criterion {
     // since we're not going to show the custom error message we can override the
     // error type.
     type Err = ();

-    fn from_str(text: &str) -> Result<Criterion, Self::Err> {
-        match text {
+    fn from_str(txt: &str) -> Result<Criterion, Self::Err> {
+        match txt {
             "words" => Ok(Criterion::Words),
             "typo" => Ok(Criterion::Typo),
             "proximity" => Ok(Criterion::Proximity),
             "attribute" => Ok(Criterion::Attribute),
-            "sort" => Ok(Criterion::Sort),
             "exactness" => Ok(Criterion::Exactness),
-            text => match AscDesc::from_str(text) {
-                Ok(AscDesc::Asc(field)) => Ok(Criterion::Asc(field)),
-                Ok(AscDesc::Desc(field)) => Ok(Criterion::Desc(field)),
-                Err(_) => Err(()),
-            },
+            text => {
+                let caps = ASC_DESC_REGEX.captures(text).ok_or(())?;
+                let order = caps.get(1).unwrap().as_str();
+                let field_name = caps.get(2).unwrap().as_str();
+                match order {
+                    "asc" => Ok(Criterion::Asc(field_name.to_string())),
+                    "desc" => Ok(Criterion::Desc(field_name.to_string())),
+                    _text => Err(()),
+                }
+            }
         }
     }
 }

-#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
-pub enum AscDesc {
-    Asc(String),
-    Desc(String),
-}
-
-impl FromStr for AscDesc {
-    type Err = ();
-
-    // since we don't know if this comes from the old or new syntax we need to check
-    // for both syntax.
-    // WARN: this code doesn't come from the original meilisearch v0.22.0 but was
-    // written specifically to be able to import the dump of meilisearch v0.21.0 AND
-    // meilisearch v0.22.0.
-    fn from_str(text: &str) -> Result<AscDesc, Self::Err> {
-        if let Some((field_name, asc_desc)) = text.rsplit_once(':') {
-            match asc_desc {
-                "asc" => Ok(AscDesc::Asc(field_name.to_string())),
-                "desc" => Ok(AscDesc::Desc(field_name.to_string())),
-                _ => Err(()),
-            }
-        } else if text.starts_with("asc(") && text.ends_with(')') {
-            Ok(AscDesc::Asc(
-                text.strip_prefix("asc(").unwrap().strip_suffix(')').unwrap().to_string(),
-            ))
-        } else if text.starts_with("desc(") && text.ends_with(')') {
-            Ok(AscDesc::Desc(
-                text.strip_prefix("desc(").unwrap().strip_suffix(')').unwrap().to_string(),
-            ))
-        } else {
-            Err(())
-        }
-    }
-}
-
-impl fmt::Display for Criterion {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        use Criterion::*;
-
+impl Display for Criterion {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Words => f.write_str("words"),
-            Typo => f.write_str("typo"),
-            Proximity => f.write_str("proximity"),
-            Attribute => f.write_str("attribute"),
-            Sort => f.write_str("sort"),
-            Exactness => f.write_str("exactness"),
-            Asc(attr) => write!(f, "{}:asc", attr),
-            Desc(attr) => write!(f, "{}:desc", attr),
+            Criterion::Words => write!(f, "words"),
+            Criterion::Typo => write!(f, "typo"),
+            Criterion::Proximity => write!(f, "proximity"),
+            Criterion::Attribute => write!(f, "attribute"),
+            Criterion::Exactness => write!(f, "exactness"),
+            Criterion::Asc(field_name) => write!(f, "asc({})", field_name),
+            Criterion::Desc(field_name) => write!(f, "desc({})", field_name),
         }
     }
 }
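The `AscDesc` parser removed on the v1.0.1 side accepts both ranking-rule spellings its WARN comment mentions, the older `asc(release_date)` form and the newer `release_date:asc` form, while the branch's regex matches only the parenthesized one. A standalone sketch of the dual-syntax parse (a re-implementation for illustration, not the crate's code):

```rust
use std::str::FromStr;

#[derive(Debug, PartialEq)]
enum AscDesc {
    Asc(String),
    Desc(String),
}

impl FromStr for AscDesc {
    type Err = ();

    // Accept both the old `asc(field)` and the new `field:asc` spelling,
    // mirroring the dual-syntax parser on the v1.0.1 side of the diff.
    fn from_str(text: &str) -> Result<Self, ()> {
        if let Some((field, order)) = text.rsplit_once(':') {
            return match order {
                "asc" => Ok(AscDesc::Asc(field.to_string())),
                "desc" => Ok(AscDesc::Desc(field.to_string())),
                _ => Err(()),
            };
        }
        if let Some(inner) = text.strip_prefix("asc(").and_then(|t| t.strip_suffix(')')) {
            return Ok(AscDesc::Asc(inner.to_string()));
        }
        if let Some(inner) = text.strip_prefix("desc(").and_then(|t| t.strip_suffix(')')) {
            return Ok(AscDesc::Desc(inner.to_string()));
        }
        Err(())
    }
}

fn main() {
    // Both dump generations parse to the same criterion.
    assert_eq!("release_date:asc".parse(), Ok(AscDesc::Asc("release_date".into())));
    assert_eq!("asc(release_date)".parse(), Ok(AscDesc::Asc("release_date".into())));
}
```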
@@ -1,39 +0,0 @@
----
-source: dump/src/reader/v2/mod.rs
-expression: products.settings().unwrap()
----
-{
-  "displayedAttributes": [
-    "*"
-  ],
-  "searchableAttributes": [
-    "*"
-  ],
-  "filterableAttributes": [],
-  "sortableAttributes": [],
-  "rankingRules": [
-    "words",
-    "typo",
-    "proximity",
-    "attribute",
-    "sort",
-    "exactness"
-  ],
-  "stopWords": [],
-  "synonyms": {
-    "android": [
-      "phone",
-      "smartphone"
-    ],
-    "iphone": [
-      "phone",
-      "smartphone"
-    ],
-    "phone": [
-      "android",
-      "iphone",
-      "smartphone"
-    ]
-  },
-  "distinctAttribute": null
-}
@@ -5,8 +5,10 @@ use serde::{Deserialize, Serialize};

 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
+#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
 pub struct ResponseError {
     #[serde(skip)]
+    #[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
     pub code: StatusCode,
     pub message: String,
     #[serde(rename = "code")]

@@ -5,6 +5,7 @@ use serde::Deserialize;

 #[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
+#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub struct ResponseError {
     #[serde(skip)]
@@ -33,7 +33,7 @@
|
||||
//!
|
||||
|
||||
use std::fs::{self, File};
|
||||
use std::io::{BufRead, BufReader, ErrorKind, Seek};
|
||||
use std::io::{BufRead, BufReader, ErrorKind, Seek, SeekFrom};
|
||||
use std::path::Path;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -141,10 +141,6 @@ impl V5Reader {
|
||||
V5IndexReader::new(
|
||||
index.uid.clone(),
|
||||
&self.dump.path().join("indexes").join(index.index_meta.uuid.to_string()),
|
||||
&index.index_meta,
|
||||
BufReader::new(
|
||||
File::open(self.dump.path().join("updates").join("data.jsonl")).unwrap(),
|
||||
),
|
||||
)
|
||||
}))
|
||||
}
|
||||
@@ -178,7 +174,7 @@ impl V5Reader {
|
||||
}
|
||||
|
||||
pub fn keys(&mut self) -> Result<Box<dyn Iterator<Item = Result<Key>> + '_>> {
|
||||
self.keys.rewind()?;
|
||||
self.keys.seek(SeekFrom::Start(0))?;
|
||||
Ok(Box::new(
|
||||
(&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
|
||||
))
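The `keys()` change replaces `seek(SeekFrom::Start(0))` with the equivalent `rewind()` helper from `std::io::Seek`, so the key file can be iterated again from the top. A minimal sketch of why the cursor reset is needed (file path illustrative):

```rust
use std::fs::File;
use std::io::{BufRead, BufReader, Read, Seek};

fn main() -> std::io::Result<()> {
    // A tiny JSONL file to iterate over twice. (Path is illustrative.)
    let path = std::env::temp_dir().join("keys.jsonl");
    std::fs::write(&path, "{\"uid\":1}\n{\"uid\":2}\n")?;

    let mut reader = BufReader::new(File::open(&path)?);

    // The first pass advances the cursor to the end of the file...
    assert_eq!(reader.by_ref().lines().count(), 2);

    // ...so a second pass needs the cursor back at the start.
    // `rewind()` is shorthand for `seek(SeekFrom::Start(0))`.
    reader.rewind()?;
    assert_eq!(reader.by_ref().lines().count(), 2);
    Ok(())
}
```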
@@ -193,39 +189,16 @@ pub struct V5IndexReader {
}

impl V5IndexReader {
    pub fn new(
        name: String,
        path: &Path,
        index_metadata: &meta::IndexMeta,
        tasks: BufReader<File>,
    ) -> Result<Self> {
    pub fn new(name: String, path: &Path) -> Result<Self> {
        let meta = File::open(path.join("meta.json"))?;
        let meta: meta::DumpMeta = serde_json::from_reader(meta)?;

        let mut created_at = None;
        let mut updated_at = None;

        for line in tasks.lines() {
            let task: Task = serde_json::from_str(&line?)?;

            if *task.index_uid().unwrap_or_default().to_string() == name {
                if updated_at.is_none() {
                    updated_at = task.processed_at()
                }

                if task.id as usize == index_metadata.creation_task_id {
                    created_at = task.created_at();

                    break;
                }
            }
        }

        let metadata = IndexMetadata {
            uid: name,
            primary_key: meta.primary_key,
            created_at: created_at.unwrap_or_else(OffsetDateTime::now_utc),
            updated_at: updated_at.unwrap_or_else(OffsetDateTime::now_utc),
            // FIXME: Iterate over the whole task queue to find the creation and last update date.
            created_at: OffsetDateTime::now_utc(),
            updated_at: OffsetDateTime::now_utc(),
        };

        let ret = V5IndexReader {

@@ -329,12 +302,12 @@ pub(crate) mod test {
        assert!(indexes.is_empty());

        // products
        insta::assert_json_snapshot!(products.metadata(), @r###"
        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "products",
          "primaryKey": "sku",
          "createdAt": "2022-10-04T15:51:35.939396731Z",
          "updatedAt": "2022-10-04T15:55:01.897325373Z"
          "createdAt": "[now]",
          "updatedAt": "[now]"
        }
        "###);

@@ -344,12 +317,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");

        // movies
        insta::assert_json_snapshot!(movies.metadata(), @r###"
        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "movies",
          "primaryKey": "id",
          "createdAt": "2022-10-04T15:51:35.291992167Z",
          "updatedAt": "2022-10-04T15:55:10.33561842Z"
          "createdAt": "[now]",
          "updatedAt": "[now]"
        }
        "###);

@@ -359,12 +332,12 @@ pub(crate) mod test {
        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");

        // spells
        insta::assert_json_snapshot!(spells.metadata(), @r###"
        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
        {
          "uid": "dnd_spells",
          "primaryKey": "index",
          "createdAt": "2022-10-04T15:51:37.381094632Z",
          "updatedAt": "2022-10-04T15:55:02.394503431Z"
          "createdAt": "[now]",
          "updatedAt": "[now]"
        }
        "###);

@@ -140,45 +140,6 @@ impl Task {
            TaskContent::Dump { .. } => None,
        }
    }

    pub fn processed_at(&self) -> Option<OffsetDateTime> {
        match self.events.last() {
            Some(TaskEvent::Succeeded { result: _, timestamp }) => Some(*timestamp),
            _ => None,
        }
    }

    pub fn created_at(&self) -> Option<OffsetDateTime> {
        match &self.content {
            TaskContent::IndexCreation { index_uid: _, primary_key: _ } => {
                match self.events.first() {
                    Some(TaskEvent::Created(ts)) => Some(*ts),
                    _ => None,
                }
            }
            TaskContent::DocumentAddition {
                index_uid: _,
                content_uuid: _,
                merge_strategy: _,
                primary_key: _,
                documents_count: _,
                allow_index_creation: _,
            } => match self.events.first() {
                Some(TaskEvent::Created(ts)) => Some(*ts),
                _ => None,
            },
            TaskContent::SettingsUpdate {
                index_uid: _,
                settings: _,
                is_deletion: _,
                allow_index_creation: _,
            } => match self.events.first() {
                Some(TaskEvent::Created(ts)) => Some(*ts),
                _ => None,
            },
            _ => None,
        }
    }
}
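The removed helpers above scan a task's event log: the creation time is the first `Created` event, the processing time the last `Succeeded` event. A stripped-down sketch of that scan, with simplified stand-in types (timestamps reduced to integers for illustration):

```rust
// Illustrative stand-ins for the dump's task types.
#[derive(Clone, Copy)]
enum TaskEvent {
    Created(u64), // timestamp simplified to u64
    Succeeded { timestamp: u64 },
}

struct Task {
    events: Vec<TaskEvent>,
}

impl Task {
    // The creation date is the first `Created` event, if any.
    fn created_at(&self) -> Option<u64> {
        match self.events.first() {
            Some(TaskEvent::Created(ts)) => Some(*ts),
            _ => None,
        }
    }

    // The processing date is the last event, if it is a success.
    fn processed_at(&self) -> Option<u64> {
        match self.events.last() {
            Some(TaskEvent::Succeeded { timestamp }) => Some(*timestamp),
            _ => None,
        }
    }
}

fn main() {
    let task = Task { events: vec![TaskEvent::Created(1), TaskEvent::Succeeded { timestamp: 9 }] };
    assert_eq!(task.created_at(), Some(1));
    assert_eq!(task.processed_at(), Some(9));
}
```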

impl IndexUid {

@@ -40,7 +40,6 @@ pub type IndexUid = meilisearch_types::index_uid::IndexUid;
// everything related to the errors
pub type ResponseError = meilisearch_types::error::ResponseError;
pub type Code = meilisearch_types::error::Code;
pub type RankingRuleView = meilisearch_types::settings::RankingRuleView;

pub struct V6Reader {
    dump: TempDir,

Binary file not shown.
@@ -1,6 +1,6 @@
[package]
name = "file-store"
version = "1.0.1"
version = "1.0.0"
edition = "2021"

[dependencies]

@@ -1,3 +1,4 @@
use std::collections::BTreeSet;
use std::fs::File as StdFile;
use std::ops::{Deref, DerefMut};
use std::path::{Path, PathBuf};
@@ -10,14 +11,10 @@ const UPDATE_FILES_PATH: &str = "updates/updates_files";

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("Could not parse file name as utf-8")]
    CouldNotParseFileNameAsUtf8,
    #[error(transparent)]
    IoError(#[from] std::io::Error),
    #[error(transparent)]
    PersistError(#[from] tempfile::PersistError),
    #[error(transparent)]
    UuidError(#[from] uuid::Error),
}

pub type Result<T> = std::result::Result<T, Error>;
@@ -36,11 +33,13 @@ impl DerefMut for File {
    }
}

#[cfg_attr(test, faux::create)]
#[derive(Clone, Debug)]
pub struct FileStore {
    path: PathBuf,
}

#[cfg(not(test))]
impl FileStore {
    pub fn new(path: impl AsRef<Path>) -> Result<FileStore> {
        let path = path.as_ref().to_path_buf();
@@ -49,6 +48,7 @@ impl FileStore {
    }
}

#[cfg_attr(test, faux::methods)]
impl FileStore {
    /// Creates a new temporary update file.
    /// A call to `persist` is needed to persist the file in the database.
@@ -94,17 +94,7 @@ impl FileStore {
        Ok(())
    }

    /// Compute the size of all the updates contained in the file store.
    pub fn compute_total_size(&self) -> Result<u64> {
        let mut total = 0;
        for uuid in self.all_uuids()? {
            total += self.compute_size(uuid?).unwrap_or_default();
        }
        Ok(total)
    }

    /// Compute the size of one update
    pub fn compute_size(&self, uuid: Uuid) -> Result<u64> {
    pub fn get_size(&self, uuid: Uuid) -> Result<u64> {
        Ok(self.get_update(uuid)?.metadata()?.len())
    }

@@ -115,12 +105,17 @@ impl FileStore {
    }

    /// List the Uuids of the files in the FileStore
    pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>>> {
        Ok(self.path.read_dir()?.map(|entry| {
            Ok(Uuid::from_str(
                entry?.file_name().to_str().ok_or(Error::CouldNotParseFileNameAsUtf8)?,
            )?)
        }))
    ///
    /// This function is meant to be used by tests only.
    #[doc(hidden)]
    pub fn __all_uuids(&self) -> BTreeSet<Uuid> {
        let mut uuids = BTreeSet::new();
        for entry in self.path.read_dir().unwrap() {
            let entry = entry.unwrap();
            let uuid = Uuid::from_str(entry.file_name().to_str().unwrap()).unwrap();
            uuids.insert(uuid);
        }
        uuids
    }
}
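The removed `compute_total_size` is just `compute_size` folded over `all_uuids`, ignoring entries whose size cannot be read. A generic sketch of the same fold over a directory, using only the standard library (path illustrative):

```rust
use std::io;
use std::path::Path;

// Sums the on-disk size of every entry directly inside `dir`,
// skipping entries whose metadata cannot be read, much like the
// original's `unwrap_or_default` did for individual updates.
fn compute_total_size(dir: &Path) -> io::Result<u64> {
    let mut total = 0;
    for entry in dir.read_dir()? {
        let entry = entry?;
        total += entry.metadata().map(|m| m.len()).unwrap_or_default();
    }
    Ok(total)
}

fn main() -> io::Result<()> {
    let size = compute_total_size(Path::new("."))?;
    println!("total: {size} bytes");
    Ok(())
}
```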

@@ -1,6 +1,6 @@
[package]
name = "index-scheduler"
version = "1.0.1"
version = "1.0.0"
edition = "2021"

[dependencies]

@@ -19,16 +19,10 @@ use crate::KindWithContent;
///
/// Only the non-prioritised tasks that can be grouped in a batch have a corresponding [`AutobatchKind`]
enum AutobatchKind {
    DocumentImport {
        method: IndexDocumentsMethod,
        allow_index_creation: bool,
        primary_key: Option<String>,
    },
    DocumentImport { method: IndexDocumentsMethod, allow_index_creation: bool },
    DocumentDeletion,
    DocumentClear,
    Settings {
        allow_index_creation: bool,
    },
    Settings { allow_index_creation: bool },
    IndexCreation,
    IndexDeletion,
    IndexUpdate,
@@ -44,24 +38,14 @@ impl AutobatchKind {
            _ => None,
        }
    }

    fn primary_key(&self) -> Option<Option<&str>> {
        match self {
            AutobatchKind::DocumentImport { primary_key, .. } => Some(primary_key.as_deref()),
            _ => None,
        }
    }
}

impl From<KindWithContent> for AutobatchKind {
    fn from(kind: KindWithContent) -> Self {
        match kind {
            KindWithContent::DocumentAdditionOrUpdate {
                method,
                allow_index_creation,
                primary_key,
                ..
            } => AutobatchKind::DocumentImport { method, allow_index_creation, primary_key },
            KindWithContent::DocumentAdditionOrUpdate { method, allow_index_creation, .. } => {
                AutobatchKind::DocumentImport { method, allow_index_creation }
            }
            KindWithContent::DocumentDeletion { .. } => AutobatchKind::DocumentDeletion,
            KindWithContent::DocumentClear { .. } => AutobatchKind::DocumentClear,
            KindWithContent::SettingsUpdate { allow_index_creation, is_deletion, .. } => {
@@ -91,7 +75,6 @@ pub enum BatchKind {
    DocumentImport {
        method: IndexDocumentsMethod,
        allow_index_creation: bool,
        primary_key: Option<String>,
        import_ids: Vec<TaskId>,
    },
    DocumentDeletion {
@@ -106,7 +89,6 @@ pub enum BatchKind {
        settings_ids: Vec<TaskId>,
        method: IndexDocumentsMethod,
        allow_index_creation: bool,
        primary_key: Option<String>,
        import_ids: Vec<TaskId>,
    },
    Settings {
@@ -138,16 +120,6 @@ impl BatchKind {
            _ => None,
        }
    }

    fn primary_key(&self) -> Option<Option<&str>> {
        match self {
            BatchKind::DocumentImport { primary_key, .. }
            | BatchKind::SettingsAndDocumentImport { primary_key, .. } => {
                Some(primary_key.as_deref())
            }
            _ => None,
        }
    }
}

impl BatchKind {
@@ -159,7 +131,6 @@ impl BatchKind {
    pub fn new(
        task_id: TaskId,
        kind: KindWithContent,
        primary_key: Option<&str>,
    ) -> (ControlFlow<BatchKind, BatchKind>, bool) {
        use AutobatchKind as K;

@@ -169,25 +140,10 @@ impl BatchKind {
            K::IndexUpdate => (Break(BatchKind::IndexUpdate { id: task_id }), false),
            K::IndexSwap => (Break(BatchKind::IndexSwap { id: task_id }), false),
            K::DocumentClear => (Continue(BatchKind::DocumentClear { ids: vec![task_id] }), false),
            K::DocumentImport { method, allow_index_creation, primary_key: pk }
                if primary_key.is_none() || pk.is_none() || primary_key == pk.as_deref() =>
            {
                (
                    Continue(BatchKind::DocumentImport {
                        method,
                        allow_index_creation,
                        primary_key: pk,
                        import_ids: vec![task_id],
                    }),
                    allow_index_creation,
                )
            }
            // if the primary key set in the task was different from ours, we should stop and make this batch fail asap.
            K::DocumentImport { method, allow_index_creation, primary_key } => (
                Break(BatchKind::DocumentImport {
            K::DocumentImport { method, allow_index_creation } => (
                Continue(BatchKind::DocumentImport {
                    method,
                    allow_index_creation,
                    primary_key,
                    import_ids: vec![task_id],
                }),
                allow_index_creation,
@@ -207,7 +163,7 @@ impl BatchKind {
    /// To ease the writing of the code. `true` can be returned when you don't need to create an index
    /// but `false` can't be returned if you need to create an index.
    #[rustfmt::skip]
    fn accumulate(self, id: TaskId, kind: AutobatchKind, index_already_exists: bool, primary_key: Option<&str>) -> ControlFlow<BatchKind, BatchKind> {
    fn accumulate(self, id: TaskId, kind: AutobatchKind, index_already_exists: bool) -> ControlFlow<BatchKind, BatchKind> {
        use AutobatchKind as K;

        match (self, kind) {
@@ -217,39 +173,11 @@ impl BatchKind {
            (this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => {
                Break(this)
            },
            // NOTE: We need to negate the whole condition since we're checking if we need to break instead of continue.
            // I wrote it this way because it's easier to understand than the other way around.
            (this, kind) if !(
                // 1. If neither the batch nor the task interacts with a primary key -> we can continue
                (this.primary_key().is_none() && kind.primary_key().is_none()) ||
                // 2. Else ->
                (
                    // 2.1 If we already have a primary-key ->
                    (
                        primary_key.is_some() &&
                        // 2.1.1 If the task we're trying to accumulate has a pk, it must be equal to our primary key
                        // 2.1.2 If the task doesn't have a primary-key -> we can continue
                        kind.primary_key().map_or(true, |pk| pk == primary_key)
                    ) ||
                    // 2.2 If we don't have a primary-key ->
                    (
                        // 2.2.1 If both the batch and the task have a primary key they should be equal
                        // 2.2.2 If the batch is set to Some(None), the task should be too
                        // 2.2.3 If the batch is set to None -> we can continue
                        this.primary_key().zip(kind.primary_key()).map_or(true, |(this, kind)| this == kind)
                    )
                )

            ) // closing the negation

            => {
                Break(this)
            },
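The negated condition above encodes a small decision table. Restated as a standalone predicate over simplified types, purely as an illustration (`index_pk` stands for the index's current primary key; `batch_pk`/`task_pk` are `Some(inner)` only when the batch or task carries a document import; the scheduler's real check also interacts with `allow_index_creation`):

```rust
/// Returns true when the task may keep accumulating into the batch.
/// `index_pk`: the primary key already set on the index, if any.
/// `batch_pk` / `task_pk`: `Some(inner)` when the batch/task carries a
/// document import, `inner` being the primary key it declares, if any.
fn can_batch(
    index_pk: Option<&str>,
    batch_pk: Option<Option<&str>>,
    task_pk: Option<Option<&str>>,
) -> bool {
    // 1. Neither side touches a primary key: always compatible.
    (batch_pk.is_none() && task_pk.is_none())
        // 2.1 The index already has a primary key: the task's declared
        //     key, when present, must match it.
        || (index_pk.is_some() && task_pk.map_or(true, |pk| pk == index_pk))
        // 2.2 No index primary key yet: when both the batch and the task
        //     declare one, they must agree.
        || batch_pk.zip(task_pk).map_or(true, |(b, t)| b == t)
}

fn main() {
    // Same declared key on both sides: keep accumulating.
    assert!(can_batch(None, Some(Some("id")), Some(Some("id"))));
    // Conflicting keys on an index without one: break the batch.
    assert!(!can_batch(None, Some(Some("id")), Some(Some("sku"))));
}
```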
            // The index deletion can batch with everything but must stop after
            (
                BatchKind::DocumentClear { mut ids }
                | BatchKind::DocumentDeletion { deletion_ids: mut ids }
                | BatchKind::DocumentImport { method: _, allow_index_creation: _, primary_key: _, import_ids: mut ids }
                | BatchKind::DocumentImport { method: _, allow_index_creation: _, import_ids: mut ids }
                | BatchKind::Settings { allow_index_creation: _, settings_ids: mut ids },
                K::IndexDeletion,
            ) => {
@@ -258,7 +186,7 @@ impl BatchKind {
            }
            (
                BatchKind::ClearAndSettings { settings_ids: mut ids, allow_index_creation: _, mut other }
                | BatchKind::SettingsAndDocumentImport { import_ids: mut ids, method: _, allow_index_creation: _, primary_key: _, settings_ids: mut other },
                | BatchKind::SettingsAndDocumentImport { import_ids: mut ids, method: _, allow_index_creation: _, settings_ids: mut other },
                K::IndexDeletion,
            ) => {
                ids.push(id);
@@ -278,7 +206,7 @@ impl BatchKind {
                K::DocumentImport { .. } | K::Settings { .. },
            ) => Break(this),
            (
                BatchKind::DocumentImport { method: _, allow_index_creation: _, primary_key: _, import_ids: mut ids },
                BatchKind::DocumentImport { method: _, allow_index_creation: _, import_ids: mut ids },
                K::DocumentClear,
            ) => {
                ids.push(id);
@@ -287,27 +215,24 @@ impl BatchKind {

            // we can autobatch the same kind of document additions / updates
            (
                BatchKind::DocumentImport { method: ReplaceDocuments, allow_index_creation, primary_key: _, mut import_ids },
                K::DocumentImport { method: ReplaceDocuments, primary_key: pk, .. },
                BatchKind::DocumentImport { method: ReplaceDocuments, allow_index_creation, mut import_ids },
                K::DocumentImport { method: ReplaceDocuments, .. },
            ) => {
                import_ids.push(id);
                Continue(BatchKind::DocumentImport {
                    method: ReplaceDocuments,
                    allow_index_creation,
                    import_ids,
                    primary_key: pk,
                })
            }
            (
                BatchKind::DocumentImport { method: UpdateDocuments, allow_index_creation, primary_key: _, mut import_ids },
                K::DocumentImport { method: UpdateDocuments, primary_key: pk, .. },
                BatchKind::DocumentImport { method: UpdateDocuments, allow_index_creation, mut import_ids },
                K::DocumentImport { method: UpdateDocuments, .. },
            ) => {

                import_ids.push(id);
                Continue(BatchKind::DocumentImport {
                    method: UpdateDocuments,
                    allow_index_creation,
                    primary_key: pk,
                    import_ids,
                })
            }
@@ -320,13 +245,12 @@ impl BatchKind {
            ) => Break(this),

            (
                BatchKind::DocumentImport { method, allow_index_creation, primary_key, import_ids },
                BatchKind::DocumentImport { method, allow_index_creation, import_ids },
                K::Settings { .. },
            ) => Continue(BatchKind::SettingsAndDocumentImport {
                settings_ids: vec![id],
                method,
                allow_index_creation,
                primary_key,
                import_ids,
            }),

@@ -403,7 +327,7 @@ impl BatchKind {
                })
            }
            (
                BatchKind::SettingsAndDocumentImport { settings_ids, method: _, import_ids: mut other, allow_index_creation, primary_key: _ },
                BatchKind::SettingsAndDocumentImport { settings_ids, method: _, import_ids: mut other, allow_index_creation },
                K::DocumentClear,
            ) => {
                other.push(id);
@@ -415,28 +339,26 @@ impl BatchKind {
            }

            (
                BatchKind::SettingsAndDocumentImport { settings_ids, method: ReplaceDocuments, mut import_ids, allow_index_creation, primary_key: _ },
                K::DocumentImport { method: ReplaceDocuments, primary_key: pk2, .. },
                BatchKind::SettingsAndDocumentImport { settings_ids, method: ReplaceDocuments, mut import_ids, allow_index_creation },
                K::DocumentImport { method: ReplaceDocuments, .. },
            ) => {
                import_ids.push(id);
                Continue(BatchKind::SettingsAndDocumentImport {
                    settings_ids,
                    method: ReplaceDocuments,
                    allow_index_creation,
                    primary_key: pk2,
                    import_ids,
                })
            }
            (
                BatchKind::SettingsAndDocumentImport { settings_ids, method: UpdateDocuments, allow_index_creation, primary_key: _, mut import_ids },
                K::DocumentImport { method: UpdateDocuments, primary_key: pk2, .. },
                BatchKind::SettingsAndDocumentImport { settings_ids, method: UpdateDocuments, allow_index_creation, mut import_ids },
                K::DocumentImport { method: UpdateDocuments, .. },
            ) => {
                import_ids.push(id);
                Continue(BatchKind::SettingsAndDocumentImport {
                    settings_ids,
                    method: UpdateDocuments,
                    allow_index_creation,
                    primary_key: pk2,
                    import_ids,
                })
            }
@@ -447,7 +369,7 @@ impl BatchKind {
                K::DocumentDeletion | K::DocumentImport { .. },
            ) => Break(this),
            (
                BatchKind::SettingsAndDocumentImport { mut settings_ids, method, allow_index_creation, primary_key, import_ids },
                BatchKind::SettingsAndDocumentImport { mut settings_ids, method, allow_index_creation, import_ids },
                K::Settings { .. },
            ) => {
                settings_ids.push(id);
@@ -455,7 +377,6 @@ impl BatchKind {
                    settings_ids,
                    method,
                    allow_index_creation,
                    primary_key,
                    import_ids,
                })
            }
@@ -485,7 +406,6 @@ impl BatchKind {
pub fn autobatch(
    enqueued: Vec<(TaskId, KindWithContent)>,
    index_already_exists: bool,
    primary_key: Option<&str>,
) -> Option<(BatchKind, bool)> {
    let mut enqueued = enqueued.into_iter();
    let (id, kind) = enqueued.next()?;
@@ -493,7 +413,7 @@ pub fn autobatch(
    // index_exist keeps track of whether the index should exist at this point, after the tasks we batched.
    let mut index_exist = index_already_exists;

    let (mut acc, must_create_index) = match BatchKind::new(id, kind, primary_key) {
    let (mut acc, must_create_index) = match BatchKind::new(id, kind) {
        (Continue(acc), create) => (acc, create),
        (Break(acc), create) => return Some((acc, create)),
    };
@@ -502,7 +422,7 @@ pub fn autobatch(
    index_exist |= must_create_index;

    for (id, kind) in enqueued {
        acc = match acc.accumulate(id, kind.into(), index_exist, primary_key) {
        acc = match acc.accumulate(id, kind.into(), index_exist) {
            Continue(acc) => acc,
            Break(acc) => return Some((acc, must_create_index)),
        };
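Structurally, `autobatch` is a seeded fold over `ControlFlow`: the first task builds the accumulator, and each following task either extends it (`Continue`) or closes the batch (`Break`). A minimal sketch of that control pattern, with plain integers standing in for tasks:

```rust
use std::ops::ControlFlow::{self, Break, Continue};

// Extend the running batch with `item`, or close the batch when the
// item is incompatible (here: a negative number).
fn accumulate(mut batch: Vec<i32>, item: i32) -> ControlFlow<Vec<i32>, Vec<i32>> {
    if item < 0 {
        Break(batch)
    } else {
        batch.push(item);
        Continue(batch)
    }
}

fn autobatch(enqueued: Vec<i32>) -> Option<Vec<i32>> {
    let mut iter = enqueued.into_iter();
    // The first item seeds the accumulator; an empty queue yields no batch.
    let mut acc = vec![iter.next()?];
    for item in iter {
        acc = match accumulate(acc, item) {
            Continue(acc) => acc,
            Break(acc) => return Some(acc),
        };
    }
    Some(acc)
}

fn main() {
    assert_eq!(autobatch(vec![1, 2, -1, 3]), Some(vec![1, 2]));
    assert_eq!(autobatch(vec![]), None);
}
```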
|
||||
@@ -521,24 +441,18 @@ mod tests {
|
||||
|
||||
fn autobatch_from(
|
||||
index_already_exists: bool,
|
||||
primary_key: Option<&str>,
|
||||
input: impl IntoIterator<Item = KindWithContent>,
|
||||
) -> Option<(BatchKind, bool)> {
|
||||
autobatch(
|
||||
input.into_iter().enumerate().map(|(id, kind)| (id as TaskId, kind)).collect(),
|
||||
index_already_exists,
|
||||
primary_key,
|
||||
)
|
||||
}
|
||||
|
||||
fn doc_imp(
|
||||
method: IndexDocumentsMethod,
|
||||
allow_index_creation: bool,
|
||||
primary_key: Option<&str>,
|
||||
) -> KindWithContent {
|
||||
fn doc_imp(method: IndexDocumentsMethod, allow_index_creation: bool) -> KindWithContent {
|
||||
KindWithContent::DocumentAdditionOrUpdate {
|
||||
index_uid: String::from("doggo"),
|
||||
primary_key: primary_key.map(|pk| pk.to_string()),
|
||||
primary_key: None,
|
||||
method,
|
||||
content_file: Uuid::new_v4(),
|
||||
documents_count: 0,
|
||||
@@ -588,268 +502,226 @@ mod tests {
|
||||
fn autobatch_simple_operation_together() {
|
||||
// we can autobatch one or multiple `ReplaceDocuments` together.
|
||||
// if the index exists.
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, false , None), doc_imp(ReplaceDocuments, false , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, false ), doc_imp(ReplaceDocuments, false )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
|
||||
|
||||
// if it doesn't exists.
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
||||
|
||||
// we can autobatch one or multiple `UpdateDocuments` together.
|
||||
// if the index exists.
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
|
||||
|
||||
// if it doesn't exists.
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
|
||||
|
||||
// we can autobatch one or multiple DocumentDeletion together
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
||||
|
||||
// we can autobatch one or multiple Settings together
|
||||
debug_snapshot!(autobatch_from(true, None, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
||||
|
||||
debug_snapshot!(autobatch_from(false,None, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(false,None, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(false,None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false,None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(false, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(false, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(false, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_document_operation_dont_autobatch_with_other() {
|
||||
// addition, updates and deletion can't batch together
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_swap()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_swap()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn document_addition_batch_with_settings() {
|
||||
// simple case
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
|
||||
// multiple settings and doc addition
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
|
||||
|
||||
// addition and setting unordered
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 2] }, true))");
|
||||
|
||||
// We ensure this kind of batch doesn't batch with forbidden operations
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(UpdateDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(ReplaceDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn clear_and_additions() {
|
||||
// these two doesn't need to batch
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_clr(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentClear { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_clr(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentClear { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_clr(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentClear { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_clr(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentClear { ids: [0] }, false))");
|
||||
|
||||
// Basic use case
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
|
||||
|
||||
// This batch kind doesn't mix with other document addition
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
|
||||
|
||||
// But you can batch multiple clear together
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn clear_and_additions_and_settings() {
|
||||
// A clear don't need to autobatch the settings that happens AFTER there is no documents
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_clr(), settings(true)]), @"Some((DocumentClear { ids: [0] }, false))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_clr(), settings(true)]), @"Some((DocumentClear { ids: [0] }, false))");
|
||||
|
||||
debug_snapshot!(autobatch_from(true, None, [settings(true), doc_clr(), settings(true)]), @"Some((ClearAndSettings { other: [1], allow_index_creation: true, settings_ids: [0, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [settings(true), doc_clr(), settings(true)]), @"Some((ClearAndSettings { other: [1], allow_index_creation: true, settings_ids: [0, 2] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
|
||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
|
||||
}

#[test]
fn anything_and_index_deletion() {
// The `IndexDeletion` doesn't batch with anything that happens AFTER it.
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(ReplaceDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(UpdateDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(ReplaceDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(UpdateDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");

debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(ReplaceDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(UpdateDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(ReplaceDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(UpdateDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");

// The index deletion can accept almost any type of `BatchKind` and transform it to an `IndexDeletion`.
// First, the basic cases
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");

debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false,None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(false,None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(false, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");

// Then the mixed cases.
// The index already exists; whatever the rights of the tasks are, the result shouldn't change.
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");

// When the index doesn't exist yet, it's more complicated.
// Either the first task we encounter creates it, in which case we can create a big batch with everything.
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
// The rights of the following tasks aren't really important.
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
// Or, the second case: the first task doesn't create the index, and thus we want to batch it only with tasks that can't create an index.
// That can be a second task that doesn't have the right to create an index, or anything else that can't create an index, like an index deletion, document deletion, document clear, etc.
// All these tasks are going to throw an `Index doesn't exist` error once the batch is processed.
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
// The third and final case is when the first task doesn't create an index but is directly followed by a task creating an index. In this case we can't batch with what
// follows because we first need to process the erroneous batch.
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
}
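
A compact way to state the invariant pinned down by the cases above: once an `IndexDeletion` arrives, whatever `BatchKind` has accumulated so far is swallowed into a single `IndexDeletion` that inherits every task id seen so far. A hedged sketch, not the real accumulation code (`into_task_ids` is a hypothetical helper named only for illustration):

// Hedged sketch: any already-built batch folds into the deletion.
fn fold_index_deletion(current: BatchKind, deletion_id: TaskId) -> BatchKind {
    let mut ids = current.into_task_ids(); // hypothetical: all ids batched so far
    ids.push(deletion_id);
    BatchKind::IndexDeletion { ids }
}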

#[test]
fn allowed_and_disallowed_index_creation() {
// `DocumentImport`s that are allowed to create an index can't be mixed with those that aren't, except if the index already exists.
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");

debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, import_ids: [0] }, false))");
}

#[test]
fn autobatch_primary_key() {
// ==> If I have a pk
// With a single update
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);

// With multiple updates
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");

debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0, 1] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);

debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);

// ==> If I don't have a pk
// With a single update
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), import_ids: [0] }, true))"###);

// With multiple updates
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, import_ids: [0] }, true))");
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), import_ids: [0] }, true))"###);
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
}
}
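
Taken together, the `autobatch_primary_key` snapshots suggest the following compatibility rule for chaining document imports — a hedged approximation written out for readability, not the actual autobatcher logic:

// Hedged sketch: may a follow-up import join the current batch?
fn can_join(index_pk: Option<&str>, batch_pk: Option<&str>, task_pk: Option<&str>) -> bool {
    match (batch_pk, task_pk) {
        // A batch whose declared key contradicts the index's key runs alone.
        (Some(b), _) if index_pk.is_some() && index_pk != Some(b) => false,
        (None, None) => true,                   // nobody declares anything
        (None, Some(t)) => index_pk == Some(t), // a new declaration must match the index
        (Some(_), None) => false,               // batch declared a key, task stays silent
        (Some(b), Some(t)) => b == t,           // both declared: must be identical
    }
}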

@@ -217,8 +217,7 @@ impl IndexScheduler {

let mut documents_counts = Vec::new();
let mut content_files = Vec::new();

for task in tasks.iter() {
for task in &tasks {
match task.kind {
KindWithContent::DocumentAdditionOrUpdate {
content_file,
@@ -326,7 +325,6 @@ impl IndexScheduler {
settings_ids,
method,
allow_index_creation,
primary_key,
import_ids,
} => {
let settings = self.create_next_batch_index(
@@ -339,12 +337,7 @@ impl IndexScheduler {
let document_import = self.create_next_batch_index(
rtxn,
index_uid.clone(),
BatchKind::DocumentImport {
method,
allow_index_creation,
primary_key,
import_ids,
},
BatchKind::DocumentImport { method, allow_index_creation, import_ids },
must_create_index,
)?;

@@ -474,12 +467,6 @@ impl IndexScheduler {
};

let index_already_exists = self.index_mapper.exists(rtxn, index_name)?;
let mut primary_key = None;
if index_already_exists {
let index = self.index_mapper.index(rtxn, index_name)?;
let rtxn = index.read_txn()?;
primary_key = index.primary_key(&rtxn)?.map(|pk| pk.to_string());
}

let index_tasks = self.index_tasks(rtxn, index_name)? & enqueued;

@@ -497,7 +484,7 @@ impl IndexScheduler {
.collect::<Result<Vec<_>>>()?;

if let Some((batchkind, create_index)) =
autobatcher::autobatch(enqueued, index_already_exists, primary_key.as_deref())
autobatcher::autobatch(enqueued, index_already_exists)
{
return self.create_next_batch_index(
rtxn,
@@ -895,11 +882,11 @@ impl IndexScheduler {
}
if !not_found_indexes.is_empty() {
if not_found_indexes.len() == 1 {
return Err(Error::SwapIndexNotFound(
return Err(Error::IndexNotFound(
not_found_indexes.into_iter().next().unwrap().clone(),
));
} else {
return Err(Error::SwapIndexesNotFound(
return Err(Error::IndexesNotFound(
not_found_indexes.into_iter().cloned().collect(),
));
}
@@ -960,9 +947,9 @@ impl IndexScheduler {
///
/// ## Return
/// The list of processed tasks.
fn apply_index_operation<'i>(
fn apply_index_operation<'txn, 'i>(
&self,
index_wtxn: &mut RwTxn<'i, '_>,
index_wtxn: &'txn mut RwTxn<'i, '_>,
index: &'i Index,
operation: IndexOperation,
) -> Result<Vec<Task>> {
@@ -998,31 +985,17 @@ impl IndexScheduler {
let mut primary_key_has_been_set = false;
let must_stop_processing = self.must_stop_processing.clone();
let indexer_config = self.index_mapper.indexer_config();

// TODO use the code from the IndexCreate operation
if let Some(primary_key) = primary_key {
match index.primary_key(index_wtxn)? {
// if a primary key was set AND had already been defined in the index
// but to a different value, we can make the whole batch fail.
Some(pk) => {
if primary_key != pk {
return Err(milli::Error::from(
milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()),
)
.into());
}
}
// if the primary key was set and there was no primary key set for this index
// we set it to the received value before starting the indexing process.
None => {
let mut builder =
milli::update::Settings::new(index_wtxn, index, indexer_config);
builder.set_primary_key(primary_key);
builder.execute(
|indexing_step| debug!("update: {:?}", indexing_step),
|| must_stop_processing.clone().get(),
)?;
primary_key_has_been_set = true;
}
if index.primary_key(index_wtxn)?.is_none() {
let mut builder =
milli::update::Settings::new(index_wtxn, index, indexer_config);
builder.set_primary_key(primary_key);
builder.execute(
|indexing_step| debug!("update: {:?}", indexing_step),
|| must_stop_processing.clone().get(),
)?;
primary_key_has_been_set = true;
}
}
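
Reading the hunk above side by side: one variant fails the whole batch up front when the declared primary key differs from the one the index already has, while the other keeps the existing key and only adopts the declared one when none is set yet. A condensed, hedged restatement of the stricter branch (plumbing elided; `set_primary_key_via_settings` is a hypothetical shorthand for the `milli::update::Settings` dance shown above):

// Hedged condensation of the stricter behavior in the hunk above.
if let Some(primary_key) = primary_key {
    match index.primary_key(index_wtxn)? {
        // An existing, different key makes the whole batch fail.
        Some(pk) if primary_key != pk => {
            return Err(milli::Error::from(
                milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()),
            )
            .into());
        }
        Some(_) => { /* same key: nothing to do */ }
        // No key yet: adopt the declared one before indexing starts.
        None => set_primary_key_via_settings(index_wtxn, index, primary_key)?,
    }
}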

@@ -1086,8 +1059,7 @@ impl IndexScheduler {
task.status = Status::Failed;
task.details = Some(Details::DocumentAdditionOrUpdate {
received_documents: count,
// if there was an error we indexed 0 documents.
indexed_documents: Some(0),
indexed_documents: Some(count),
});
task.error = Some(error.into())
}

@@ -1,5 +1,3 @@
use std::fmt::Display;

use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::tasks::{Kind, Status};
use meilisearch_types::{heed, milli};
@@ -7,47 +5,16 @@ use thiserror::Error;

use crate::TaskId;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum DateField {
BeforeEnqueuedAt,
AfterEnqueuedAt,
BeforeStartedAt,
AfterStartedAt,
BeforeFinishedAt,
AfterFinishedAt,
}

impl Display for DateField {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DateField::BeforeEnqueuedAt => write!(f, "beforeEnqueuedAt"),
DateField::AfterEnqueuedAt => write!(f, "afterEnqueuedAt"),
DateField::BeforeStartedAt => write!(f, "beforeStartedAt"),
DateField::AfterStartedAt => write!(f, "afterStartedAt"),
DateField::BeforeFinishedAt => write!(f, "beforeFinishedAt"),
DateField::AfterFinishedAt => write!(f, "afterFinishedAt"),
}
}
}

impl From<DateField> for Code {
fn from(date: DateField) -> Self {
match date {
DateField::BeforeEnqueuedAt => Code::InvalidTaskBeforeEnqueuedAt,
DateField::AfterEnqueuedAt => Code::InvalidTaskAfterEnqueuedAt,
DateField::BeforeStartedAt => Code::InvalidTaskBeforeStartedAt,
DateField::AfterStartedAt => Code::InvalidTaskAfterStartedAt,
DateField::BeforeFinishedAt => Code::InvalidTaskBeforeFinishedAt,
DateField::AfterFinishedAt => Code::InvalidTaskAfterFinishedAt,
}
}
}

#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum Error {
#[error("Index `{0}` not found.")]
IndexNotFound(String),
#[error(
"Indexes {} not found.",
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
)]
IndexesNotFound(Vec<String>),
#[error("Index `{0}` already exists.")]
IndexAlreadyExists(String),
#[error(
@@ -59,19 +26,12 @@ pub enum Error {
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
)]
SwapDuplicateIndexesFound(Vec<String>),
#[error("Index `{0}` not found.")]
SwapIndexNotFound(String),
#[error(
"Indexes {} not found.",
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
)]
SwapIndexesNotFound(Vec<String>),
#[error("Corrupted dump.")]
CorruptedDump,
#[error(
"Task `{field}` `{date}` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format."
)]
InvalidTaskDate { field: DateField, date: String },
InvalidTaskDate { field: String, date: String },
#[error("Task uid `{task_uid}` is invalid. It should only contain numeric characters.")]
InvalidTaskUids { task_uid: String },
#[error(
@@ -100,9 +60,9 @@ pub enum Error {
InvalidIndexUid { index_uid: String },
#[error("Task `{0}` not found.")]
TaskNotFound(TaskId),
#[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
#[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
TaskDeletionWithEmptyQuery,
#[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
#[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
TaskCancelationWithEmptyQuery,

#[error(transparent)]
@@ -138,20 +98,19 @@ impl ErrorCode for Error {
fn error_code(&self) -> Code {
match self {
Error::IndexNotFound(_) => Code::IndexNotFound,
Error::IndexesNotFound(_) => Code::IndexNotFound,
Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
Error::SwapDuplicateIndexesFound(_) => Code::InvalidSwapDuplicateIndexFound,
Error::SwapDuplicateIndexFound(_) => Code::InvalidSwapDuplicateIndexFound,
Error::SwapIndexNotFound(_) => Code::IndexNotFound,
Error::SwapIndexesNotFound(_) => Code::IndexNotFound,
Error::InvalidTaskDate { field, .. } => (*field).into(),
Error::InvalidTaskUids { .. } => Code::InvalidTaskUids,
Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatuses,
Error::InvalidTaskTypes { .. } => Code::InvalidTaskTypes,
Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledBy,
Error::SwapDuplicateIndexesFound(_) => Code::DuplicateIndexFound,
Error::SwapDuplicateIndexFound(_) => Code::DuplicateIndexFound,
Error::InvalidTaskDate { .. } => Code::InvalidTaskDateFilter,
Error::InvalidTaskUids { .. } => Code::InvalidTaskUidsFilter,
Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatusesFilter,
Error::InvalidTaskTypes { .. } => Code::InvalidTaskTypesFilter,
Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledByFilter,
Error::InvalidIndexUid { .. } => Code::InvalidIndexUid,
Error::TaskNotFound(_) => Code::TaskNotFound,
Error::TaskDeletionWithEmptyQuery => Code::MissingTaskFilters,
Error::TaskCancelationWithEmptyQuery => Code::MissingTaskFilters,
Error::TaskDeletionWithEmptyQuery => Code::TaskDeletionWithEmptyQuery,
Error::TaskCancelationWithEmptyQuery => Code::TaskCancelationWithEmptyQuery,
Error::Dump(e) => e.error_code(),
Error::Milli(e) => e.error_code(),
Error::ProcessBatchPanicked => Code::Internal,
@@ -160,7 +119,6 @@ impl ErrorCode for Error {
Error::FileStore(e) => e.error_code(),
Error::IoError(e) => e.error_code(),
Error::Persist(e) => e.error_code(),

// Irrecoverable errors
Error::Anyhow(_) => Code::Internal,
Error::CorruptedTaskQueue => Code::Internal,

@@ -1,4 +1,3 @@
use std::collections::BTreeSet;
use std::fmt::Write;

use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
@@ -93,9 +92,7 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {

pub fn snapshot_file_store(file_store: &file_store::FileStore) -> String {
let mut snap = String::new();
// we store the uuids in a `BTreeSet` to keep them ordered.
let all_uuids = file_store.all_uuids().unwrap().collect::<Result<BTreeSet<_>, _>>().unwrap();
for uuid in all_uuids {
for uuid in file_store.__all_uuids() {
snap.push_str(&format!("{uuid}\n"));
}
snap
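
The point of the `BTreeSet` in the new version above is snapshot stability: `all_uuids` presumably yields uuids in arbitrary storage order, and collecting them into a `BTreeSet` sorts them so the snapshot text is deterministic across runs. A self-contained toy illustration of that design choice:

use std::collections::BTreeSet;

fn main() {
    // Arbitrary iteration order in, sorted order out.
    let unordered = ["b", "c", "a"];
    let ordered: BTreeSet<_> = unordered.into_iter().collect();
    assert_eq!(ordered.into_iter().collect::<Vec<_>>(), vec!["a", "b", "c"]);
}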

@@ -227,9 +227,9 @@ pub struct IndexSchedulerOptions {
pub snapshots_path: PathBuf,
/// The path to the folder containing the dumps.
pub dumps_path: PathBuf,
/// The maximum size, in bytes, of the task index.
pub task_db_size: usize,
/// The maximum size, in bytes, of each meilisearch index.
pub task_db_size: usize,
/// The maximum size, in bytes, of the tasks index.
pub index_size: usize,
/// Configuration used during indexing for each meilisearch index.
pub indexer_config: IndexerConfig,
@@ -452,10 +452,6 @@ impl IndexScheduler {
&self.index_mapper.indexer_config
}

pub fn size(&self) -> Result<u64> {
Ok(self.env.real_disk_size()?)
}

/// Return the index corresponding to the name.
///
/// * If the index wasn't opened before, the index will be opened.
@@ -506,22 +502,13 @@ impl IndexScheduler {
}

if let Some(canceled_by) = &query.canceled_by {
let mut all_canceled_tasks = RoaringBitmap::new();
for cancel_task_uid in canceled_by {
if let Some(canceled_by_uid) =
self.canceled_by.get(rtxn, &BEU32::new(*cancel_task_uid))?
{
all_canceled_tasks |= canceled_by_uid;
tasks &= canceled_by_uid;
}
}

// if the `canceled_by` filter has been specified but no task
// matches, then we prefer matching zero tasks rather than all tasks.
if all_canceled_tasks.is_empty() {
return Ok(RoaringBitmap::new());
} else {
tasks &= all_canceled_tasks;
}
}
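
The fix in this hunk changes the `canceledBy` filter from intersecting `tasks` with each matching bitmap in turn — which over-restricts as soon as more than one uid is given — to unioning all matching bitmaps first and intersecting once. A self-contained illustration of the difference using the `roaring` crate:

use roaring::RoaringBitmap;

fn main() {
    // Tasks canceled by cancelation task 1 and by cancelation task 2.
    let by_1: RoaringBitmap = [10u32, 11].into_iter().collect();
    let by_2: RoaringBitmap = [20u32, 21].into_iter().collect();
    let mut tasks: RoaringBitmap = (0u32..100).collect();

    // Union first, intersect once: keeps tasks canceled by *either* uid.
    let mut all_canceled = RoaringBitmap::new();
    all_canceled |= &by_1;
    all_canceled |= &by_2;
    tasks &= &all_canceled;
    assert_eq!(tasks.iter().collect::<Vec<_>>(), vec![10, 11, 20, 21]);
    // Intersecting with by_1 and then by_2 directly would have left nothing.
}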

if let Some(kind) = &query.types {
@@ -902,11 +889,6 @@ impl IndexScheduler {
Ok(self.file_store.new_update_with_uuid(uuid)?)
}

/// The size on disk taken by all the updates files contained in the `IndexScheduler`, in bytes.
pub fn compute_update_file_size(&self) -> Result<u64> {
Ok(self.file_store.compute_total_size()?)
}

/// Delete a file from the index scheduler.
///
/// Counterpart to the [`create_update_file`](IndexScheduler::create_update_file) method.
@@ -2009,7 +1991,7 @@ mod tests {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
snapshot!(serde_json::to_string_pretty(&documents).unwrap());
}

#[test]
@@ -2056,7 +2038,7 @@ mod tests {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
snapshot!(serde_json::to_string_pretty(&documents).unwrap());
}

#[test]
@@ -2108,7 +2090,7 @@ mod tests {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
snapshot!(serde_json::to_string_pretty(&documents).unwrap());
}

#[test]
@@ -2159,7 +2141,7 @@ mod tests {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
snapshot!(serde_json::to_string_pretty(&documents).unwrap());
}

#[test]
@@ -2210,7 +2192,7 @@ mod tests {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
snapshot!(serde_json::to_string_pretty(&documents).unwrap());
}

#[macro_export]
@@ -2849,7 +2831,7 @@ mod tests {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
snapshot!(serde_json::to_string_pretty(&documents).unwrap());
}

#[test]
@@ -2912,7 +2894,7 @@ mod tests {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
snapshot!(serde_json::to_string_pretty(&documents).unwrap());
}

#[test]
@@ -2972,7 +2954,7 @@ mod tests {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
snapshot!(serde_json::to_string_pretty(&documents).unwrap());
}

#[test]
@@ -3029,361 +3011,7 @@ mod tests {
.unwrap()
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
.collect::<Vec<_>>();
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
}

#[test]
|
||||
fn test_document_addition_with_multiple_primary_key() {
|
||||
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
||||
|
||||
for (id, primary_key) in ["id", "bork", "bloup"].iter().enumerate() {
|
||||
let content = format!(
|
||||
r#"{{
|
||||
"id": {id},
|
||||
"doggo": "jean bob"
|
||||
}}"#,
|
||||
);
|
||||
let (uuid, mut file) =
|
||||
index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
|
||||
let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
|
||||
assert_eq!(documents_count, 1);
|
||||
file.persist().unwrap();
|
||||
|
||||
index_scheduler
|
||||
.register(KindWithContent::DocumentAdditionOrUpdate {
|
||||
index_uid: S("doggos"),
|
||||
primary_key: Some(S(primary_key)),
|
||||
method: ReplaceDocuments,
|
||||
content_file: uuid,
|
||||
documents_count,
|
||||
allow_index_creation: true,
|
||||
})
|
||||
.unwrap();
|
||||
index_scheduler.assert_internally_consistent();
|
||||
}
|
||||
|
||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_3_tasks");
|
||||
|
||||
// A first batch should be processed with only the first documentAddition.
|
||||
handle.advance_one_successful_batch();
|
||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "only_first_task_succeed");
|
||||
|
||||
// The second batch should fail.
|
||||
handle.advance_one_failed_batch();
|
||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
|
||||
|
||||
// The second batch should fail.
|
||||
handle.advance_one_failed_batch();
|
||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_fails");
|
||||
|
||||
// Is the primary key still what we expect?
|
||||
let index = index_scheduler.index("doggos").unwrap();
|
||||
let rtxn = index.read_txn().unwrap();
|
||||
let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
|
||||
snapshot!(primary_key, @"id");
|
||||
|
||||
// Is the document still the one we expect?.
|
||||
let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
||||
let field_ids = field_ids_map.ids().collect::<Vec<_>>();
|
||||
let documents = index
|
||||
.all_documents(&rtxn)
|
||||
.unwrap()
|
||||
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_document_addition_with_multiple_primary_key_batch_wrong_key() {
|
||||
let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
|
||||
|
||||
for (id, primary_key) in ["id", "bork", "bork"].iter().enumerate() {
|
||||
let content = format!(
|
||||
r#"{{
|
||||
"id": {id},
|
||||
"doggo": "jean bob"
|
||||
}}"#,
|
||||
);
|
||||
let (uuid, mut file) =
|
||||
index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
|
||||
let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
|
||||
assert_eq!(documents_count, 1);
|
||||
file.persist().unwrap();
|
||||
|
||||
index_scheduler
|
||||
.register(KindWithContent::DocumentAdditionOrUpdate {
|
||||
index_uid: S("doggos"),
|
||||
primary_key: Some(S(primary_key)),
|
||||
method: ReplaceDocuments,
|
||||
content_file: uuid,
|
||||
documents_count,
|
||||
allow_index_creation: true,
|
||||
})
|
||||
.unwrap();
|
||||
index_scheduler.assert_internally_consistent();
|
||||
}
|
||||
|
||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_3_tasks");
|
||||
|
||||
// A first batch should be processed with only the first documentAddition.
|
||||
handle.advance_one_successful_batch();
|
||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "only_first_task_succeed");
|
||||
|
||||
// The second batch should fail and contains two tasks.
|
||||
handle.advance_one_failed_batch();
|
||||
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_and_third_tasks_fails");
|
||||
|
||||
// Is the primary key still what we expect?
|
||||
let index = index_scheduler.index("doggos").unwrap();
|
||||
let rtxn = index.read_txn().unwrap();
|
||||
let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
|
||||
snapshot!(primary_key, @"id");
|
||||
|
||||
// Is the document still the one we expect?.
|
||||
let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
|
||||
let field_ids = field_ids_map.ids().collect::<Vec<_>>();
|
||||
let documents = index
|
||||
.all_documents(&rtxn)
|
||||
.unwrap()
|
||||
.map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
|
||||
.collect::<Vec<_>>();
|
||||
snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
|
||||
}

    #[test]
    fn test_document_addition_with_bad_primary_key() {
        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);

        for (id, primary_key) in ["bork", "bork", "id", "bork", "id"].iter().enumerate() {
            let content = format!(
                r#"{{
                    "id": {id},
                    "doggo": "jean bob"
                }}"#,
            );
            let (uuid, mut file) =
                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
            assert_eq!(documents_count, 1);
            file.persist().unwrap();

            index_scheduler
                .register(KindWithContent::DocumentAdditionOrUpdate {
                    index_uid: S("doggos"),
                    primary_key: Some(S(primary_key)),
                    method: ReplaceDocuments,
                    content_file: uuid,
                    documents_count,
                    allow_index_creation: true,
                })
                .unwrap();
            index_scheduler.assert_internally_consistent();
        }

        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_5_tasks");

        // A first batch should be processed with only the first two document additions.
        // It should fail because those documents don't contain any `bork` field.
        // NOTE: the batch is marked as successful because the batch itself didn't fail; it's the individual tasks that failed.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_and_second_task_fails");

        // The primary key should not be set since the batch's tasks failed.
        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        let primary_key = index.primary_key(&rtxn).unwrap();
        snapshot!(primary_key.is_none(), @"true");

        // The second batch should succeed and contain only one task.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");

        // The primary key should be set to `id` since this batch succeeded.
        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
        snapshot!(primary_key, @"id");

        // We try `bork` again, but now a primary key is already set for this index.
        handle.advance_one_failed_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fourth_task_fails");

        // Finally the last task should succeed since its primary key is the same as the valid one.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fifth_task_succeeds");

        // Is the primary key still what we expect?
        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
        snapshot!(primary_key, @"id");

        // Are the documents still the ones we expect?
        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
        let documents = index
            .all_documents(&rtxn)
            .unwrap()
            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
            .collect::<Vec<_>>();
        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
    }
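
    // A sketch, not part of this diff: every test in this series repeats the
    // same file-creation and registration boilerplate. A hypothetical helper,
    // assuming only the APIs already used above (`create_update_file_with_uuid`,
    // `read_json`, `register`), could deduplicate it.
    #[allow(dead_code)]
    fn register_single_doc_addition(
        index_scheduler: &IndexScheduler,
        id: usize,
        primary_key: Option<&str>,
    ) {
        // One document whose only fields are `id` and `doggo`.
        let content = format!(r#"{{ "id": {id}, "doggo": "jean bob" }}"#);
        let (uuid, mut file) =
            index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
        let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
        assert_eq!(documents_count, 1);
        file.persist().unwrap();
        // Register the addition exactly as the loops above do.
        index_scheduler
            .register(KindWithContent::DocumentAdditionOrUpdate {
                index_uid: S("doggos"),
                primary_key: primary_key.map(|pk| pk.to_string()),
                method: ReplaceDocuments,
                content_file: uuid,
                documents_count,
                allow_index_creation: true,
            })
            .unwrap();
        index_scheduler.assert_internally_consistent();
    }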

    #[test]
    fn test_document_addition_with_set_and_null_primary_key() {
        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);

        for (id, primary_key) in
            [None, Some("bork"), Some("paw"), None, None, Some("paw")].into_iter().enumerate()
        {
            let content = format!(
                r#"{{
                    "paw": {id},
                    "doggo": "jean bob"
                }}"#,
            );
            let (uuid, mut file) =
                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
            assert_eq!(documents_count, 1);
            file.persist().unwrap();

            index_scheduler
                .register(KindWithContent::DocumentAdditionOrUpdate {
                    index_uid: S("doggos"),
                    primary_key: primary_key.map(|pk| pk.to_string()),
                    method: ReplaceDocuments,
                    content_file: uuid,
                    documents_count,
                    allow_index_creation: true,
                })
                .unwrap();
            index_scheduler.assert_internally_consistent();
        }

        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_6_tasks");

        // A first batch should contain only one task; it fails because the primary key can't be inferred.
        // NOTE: the batch is marked as successful because the batch itself didn't fail; it's the individual task that failed.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_task_fails");

        // The second batch should contain only one task; it fails because `bork` is not a valid primary key.
        // NOTE: the batch is marked as successful because the batch itself didn't fail; it's the individual task that failed.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");

        // No primary key should be set at this point.
        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        let primary_key = index.primary_key(&rtxn).unwrap();
        snapshot!(primary_key.is_none(), @"true");

        // The third batch should succeed and contain only one task.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");

        // The primary key should be set to `paw` since this batch succeeded.
        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
        snapshot!(primary_key, @"paw");

        // We should be able to batch together the next two tasks that don't specify any primary key
        // with the last task, which matches the current primary key. Everything should succeed.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_other_tasks_succeeds");

        // Is the primary key still what we expect?
        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
        snapshot!(primary_key, @"paw");

        // Are the documents still the ones we expect?
        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
        let documents = index
            .all_documents(&rtxn)
            .unwrap()
            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
            .collect::<Vec<_>>();
        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
    }

    #[test]
    fn test_document_addition_with_set_and_null_primary_key_inference_works() {
        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);

        for (id, primary_key) in [None, Some("bork"), Some("doggoid"), None, None, Some("doggoid")]
            .into_iter()
            .enumerate()
        {
            let content = format!(
                r#"{{
                    "doggoid": {id},
                    "doggo": "jean bob"
                }}"#,
            );
            let (uuid, mut file) =
                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
            assert_eq!(documents_count, 1);
            file.persist().unwrap();

            index_scheduler
                .register(KindWithContent::DocumentAdditionOrUpdate {
                    index_uid: S("doggos"),
                    primary_key: primary_key.map(|pk| pk.to_string()),
                    method: ReplaceDocuments,
                    content_file: uuid,
                    documents_count,
                    allow_index_creation: true,
                })
                .unwrap();
            index_scheduler.assert_internally_consistent();
        }

        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_6_tasks");

        // A first batch should contain only one task, which succeeds and sets the primary key to `doggoid`.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_task_succeed");

        // Checking the primary key.
        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        let primary_key = index.primary_key(&rtxn).unwrap();
        snapshot!(primary_key.is_none(), @"false");

        // The second batch should contain only one task, which fails because it tries to update the primary key to `bork`.
        handle.advance_one_failed_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");

        // The third batch should succeed and contain only one task.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");

        // We should be able to batch together the next two tasks that don't specify any primary key
        // with the last task, which matches the current primary key. Everything should succeed.
        handle.advance_one_successful_batch();
        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_other_tasks_succeeds");

        // Is the primary key still what we expect?
        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
        snapshot!(primary_key, @"doggoid");

        // Are the documents still the ones we expect?
        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
        let documents = index
            .all_documents(&rtxn)
            .unwrap()
            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
            .collect::<Vec<_>>();
        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
    }
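
    // A sketch, not part of this diff: the `read_txn` + `primary_key` check
    // appears several times in each test above. A hypothetical helper using
    // the same APIs could shrink those checks to one call.
    #[allow(dead_code)]
    fn current_primary_key(index_scheduler: &IndexScheduler) -> Option<String> {
        let index = index_scheduler.index("doggos").unwrap();
        let rtxn = index.read_txn().unwrap();
        // Returns `None` when no primary key has been set yet.
        index.primary_key(&rtxn).unwrap().map(|pk| pk.to_string())
    }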

    #[test]

@@ -1,49 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,3,4,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004

----------------------------------------------------------------------

@@ -1,13 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "id": 2,
    "doggo": "jean bob"
  },
  {
    "id": 4,
    "doggo": "jean bob"
  }
]

@@ -1,54 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [2,4,]
failed [0,1,3,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------

@@ -1,50 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,3,4,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004

----------------------------------------------------------------------

@@ -1,53 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [4,]
succeeded [2,]
failed [0,1,3,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,1,]
[timestamp] [2,]
[timestamp] [3,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,1,]
[timestamp] [2,]
[timestamp] [3,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000004

----------------------------------------------------------------------

@@ -1,52 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [3,4,]
succeeded [2,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,1,]
[timestamp] [2,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004

----------------------------------------------------------------------

@@ -1,43 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------

@@ -1,9 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "id": 0,
    "doggo": "jean bob"
  }
]

@@ -1,45 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,2,]
succeeded [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------

@@ -1,47 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,]
succeeded [0,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------

@@ -1,48 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [0,]
failed [1,2,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------

@@ -1,43 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------

@@ -1,9 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "id": 0,
    "doggo": "jean bob"
  }
]

@@ -1,45 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,2,]
succeeded [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------

@@ -1,47 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,]
succeeded [0,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------

@@ -1,52 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -1,56 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [2,3,4,5,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------

@@ -1,21 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "paw": 2,
    "doggo": "jean bob"
  },
  {
    "paw": 3,
    "doggo": "jean bob"
  },
  {
    "paw": 4,
    "doggo": "jean bob"
  },
  {
    "paw": 5,
    "doggo": "jean bob"
  }
]

@@ -1,54 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,2,3,4,5,]
failed [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------

@@ -1,55 +0,0 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
|
||||
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
|
||||
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
|
||||
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
|
||||
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
|
||||
----------------------------------------------------------------------
|
||||
### Status:
|
||||
enqueued [2,3,4,5,]
|
||||
failed [0,1,]
|
||||
----------------------------------------------------------------------
|
||||
### Kind:
|
||||
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Tasks:
|
||||
doggos [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Mapper:
|
||||
["doggos"]
|
||||
----------------------------------------------------------------------
|
||||
### Canceled By:
|
||||
|
||||
----------------------------------------------------------------------
|
||||
### Enqueued At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
[timestamp] [3,]
|
||||
[timestamp] [4,]
|
||||
[timestamp] [5,]
|
||||
----------------------------------------------------------------------
|
||||
### Started At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
----------------------------------------------------------------------
|
||||
### Finished At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
----------------------------------------------------------------------
|
||||
### File Store:
|
||||
00000000-0000-0000-0000-000000000002
|
||||
00000000-0000-0000-0000-000000000003
|
||||
00000000-0000-0000-0000-000000000004
|
||||
00000000-0000-0000-0000-000000000005
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
---
|
||||
source: index-scheduler/src/lib.rs
|
||||
---
|
||||
### Autobatching Enabled = true
|
||||
### Processing Tasks:
|
||||
[]
|
||||
----------------------------------------------------------------------
|
||||
### All Tasks:
|
||||
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
|
||||
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
|
||||
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
|
||||
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
|
||||
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
|
||||
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
|
||||
----------------------------------------------------------------------
|
||||
### Status:
|
||||
enqueued [3,4,5,]
|
||||
succeeded [2,]
|
||||
failed [0,1,]
|
||||
----------------------------------------------------------------------
|
||||
### Kind:
|
||||
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Tasks:
|
||||
doggos [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Mapper:
|
||||
["doggos"]
|
||||
----------------------------------------------------------------------
|
||||
### Canceled By:
|
||||
|
||||
----------------------------------------------------------------------
|
||||
### Enqueued At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
[timestamp] [3,]
|
||||
[timestamp] [4,]
|
||||
[timestamp] [5,]
|
||||
----------------------------------------------------------------------
|
||||
### Started At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
----------------------------------------------------------------------
|
||||
### Finished At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
----------------------------------------------------------------------
|
||||
### File Store:
|
||||
00000000-0000-0000-0000-000000000003
|
||||
00000000-0000-0000-0000-000000000004
|
||||
00000000-0000-0000-0000-000000000005
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
@@ -1,52 +0,0 @@
|
||||
---
|
||||
source: index-scheduler/src/lib.rs
|
||||
---
|
||||
### Autobatching Enabled = true
|
||||
### Processing Tasks:
|
||||
[]
|
||||
----------------------------------------------------------------------
|
||||
### All Tasks:
|
||||
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
|
||||
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
|
||||
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
|
||||
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
|
||||
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
|
||||
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
|
||||
----------------------------------------------------------------------
|
||||
### Status:
|
||||
enqueued [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Kind:
|
||||
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Tasks:
|
||||
doggos [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Mapper:
|
||||
[]
|
||||
----------------------------------------------------------------------
|
||||
### Canceled By:
|
||||
|
||||
----------------------------------------------------------------------
|
||||
### Enqueued At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
[timestamp] [3,]
|
||||
[timestamp] [4,]
|
||||
[timestamp] [5,]
|
||||
----------------------------------------------------------------------
|
||||
### Started At:
|
||||
----------------------------------------------------------------------
|
||||
### Finished At:
|
||||
----------------------------------------------------------------------
|
||||
### File Store:
|
||||
00000000-0000-0000-0000-000000000000
|
||||
00000000-0000-0000-0000-000000000001
|
||||
00000000-0000-0000-0000-000000000002
|
||||
00000000-0000-0000-0000-000000000003
|
||||
00000000-0000-0000-0000-000000000004
|
||||
00000000-0000-0000-0000-000000000005
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
---
|
||||
source: index-scheduler/src/lib.rs
|
||||
---
|
||||
### Autobatching Enabled = true
|
||||
### Processing Tasks:
|
||||
[]
|
||||
----------------------------------------------------------------------
|
||||
### All Tasks:
|
||||
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
|
||||
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
|
||||
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
|
||||
3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
|
||||
4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
|
||||
5 {uid: 5, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
|
||||
----------------------------------------------------------------------
|
||||
### Status:
|
||||
enqueued []
|
||||
succeeded [0,2,3,4,5,]
|
||||
failed [1,]
|
||||
----------------------------------------------------------------------
|
||||
### Kind:
|
||||
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Tasks:
|
||||
doggos [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Mapper:
|
||||
["doggos"]
|
||||
----------------------------------------------------------------------
|
||||
### Canceled By:
|
||||
|
||||
----------------------------------------------------------------------
|
||||
### Enqueued At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
[timestamp] [3,]
|
||||
[timestamp] [4,]
|
||||
[timestamp] [5,]
|
||||
----------------------------------------------------------------------
|
||||
### Started At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
[timestamp] [3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Finished At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
[timestamp] [3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### File Store:
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
---
|
||||
source: index-scheduler/src/lib.rs
|
||||
---
|
||||
[
|
||||
{
|
||||
"doggoid": 0,
|
||||
"doggo": "jean bob"
|
||||
},
|
||||
{
|
||||
"doggoid": 2,
|
||||
"doggo": "jean bob"
|
||||
},
|
||||
{
|
||||
"doggoid": 3,
|
||||
"doggo": "jean bob"
|
||||
},
|
||||
{
|
||||
"doggoid": 4,
|
||||
"doggo": "jean bob"
|
||||
},
|
||||
{
|
||||
"doggoid": 5,
|
||||
"doggo": "jean bob"
|
||||
}
|
||||
]
|
||||
@@ -1,54 +0,0 @@
|
||||
---
|
||||
source: index-scheduler/src/lib.rs
|
||||
---
|
||||
### Autobatching Enabled = true
|
||||
### Processing Tasks:
|
||||
[]
|
||||
----------------------------------------------------------------------
|
||||
### All Tasks:
|
||||
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
|
||||
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
|
||||
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
|
||||
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
|
||||
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
|
||||
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
|
||||
----------------------------------------------------------------------
|
||||
### Status:
|
||||
enqueued [1,2,3,4,5,]
|
||||
succeeded [0,]
|
||||
----------------------------------------------------------------------
|
||||
### Kind:
|
||||
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Tasks:
|
||||
doggos [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Mapper:
|
||||
["doggos"]
|
||||
----------------------------------------------------------------------
|
||||
### Canceled By:
|
||||
|
||||
----------------------------------------------------------------------
|
||||
### Enqueued At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
[timestamp] [3,]
|
||||
[timestamp] [4,]
|
||||
[timestamp] [5,]
|
||||
----------------------------------------------------------------------
|
||||
### Started At:
|
||||
[timestamp] [0,]
|
||||
----------------------------------------------------------------------
|
||||
### Finished At:
|
||||
[timestamp] [0,]
|
||||
----------------------------------------------------------------------
|
||||
### File Store:
|
||||
00000000-0000-0000-0000-000000000001
|
||||
00000000-0000-0000-0000-000000000002
|
||||
00000000-0000-0000-0000-000000000003
|
||||
00000000-0000-0000-0000-000000000004
|
||||
00000000-0000-0000-0000-000000000005
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
---
|
||||
source: index-scheduler/src/lib.rs
|
||||
---
|
||||
### Autobatching Enabled = true
|
||||
### Processing Tasks:
|
||||
[]
|
||||
----------------------------------------------------------------------
|
||||
### All Tasks:
|
||||
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
|
||||
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
|
||||
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
|
||||
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
|
||||
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
|
||||
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
|
||||
----------------------------------------------------------------------
|
||||
### Status:
|
||||
enqueued [2,3,4,5,]
|
||||
succeeded [0,]
|
||||
failed [1,]
|
||||
----------------------------------------------------------------------
|
||||
### Kind:
|
||||
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Tasks:
|
||||
doggos [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Mapper:
|
||||
["doggos"]
|
||||
----------------------------------------------------------------------
|
||||
### Canceled By:
|
||||
|
||||
----------------------------------------------------------------------
|
||||
### Enqueued At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
[timestamp] [3,]
|
||||
[timestamp] [4,]
|
||||
[timestamp] [5,]
|
||||
----------------------------------------------------------------------
|
||||
### Started At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
----------------------------------------------------------------------
|
||||
### Finished At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
----------------------------------------------------------------------
|
||||
### File Store:
|
||||
00000000-0000-0000-0000-000000000002
|
||||
00000000-0000-0000-0000-000000000003
|
||||
00000000-0000-0000-0000-000000000004
|
||||
00000000-0000-0000-0000-000000000005
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
---
|
||||
source: index-scheduler/src/lib.rs
|
||||
---
|
||||
### Autobatching Enabled = true
|
||||
### Processing Tasks:
|
||||
[]
|
||||
----------------------------------------------------------------------
|
||||
### All Tasks:
|
||||
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
|
||||
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
|
||||
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
|
||||
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
|
||||
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
|
||||
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
|
||||
----------------------------------------------------------------------
|
||||
### Status:
|
||||
enqueued [3,4,5,]
|
||||
succeeded [0,2,]
|
||||
failed [1,]
|
||||
----------------------------------------------------------------------
|
||||
### Kind:
|
||||
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Tasks:
|
||||
doggos [0,1,2,3,4,5,]
|
||||
----------------------------------------------------------------------
|
||||
### Index Mapper:
|
||||
["doggos"]
|
||||
----------------------------------------------------------------------
|
||||
### Canceled By:
|
||||
|
||||
----------------------------------------------------------------------
|
||||
### Enqueued At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
[timestamp] [3,]
|
||||
[timestamp] [4,]
|
||||
[timestamp] [5,]
|
||||
----------------------------------------------------------------------
|
||||
### Started At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
----------------------------------------------------------------------
|
||||
### Finished At:
|
||||
[timestamp] [0,]
|
||||
[timestamp] [1,]
|
||||
[timestamp] [2,]
|
||||
----------------------------------------------------------------------
|
||||
### File Store:
|
||||
00000000-0000-0000-0000-000000000003
|
||||
00000000-0000-0000-0000-000000000004
|
||||
00000000-0000-0000-0000-000000000005
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
@@ -404,19 +404,15 @@ impl IndexScheduler {
|
||||
Details::DocumentAdditionOrUpdate { received_documents, indexed_documents } => {
|
||||
assert_eq!(kind.as_kind(), Kind::DocumentAdditionOrUpdate);
|
||||
match indexed_documents {
|
||||
Some(0) => assert_ne!(status, Status::Enqueued),
|
||||
Some(indexed_documents) => {
|
||||
assert!(matches!(
|
||||
status,
|
||||
Status::Succeeded | Status::Failed | Status::Canceled
|
||||
));
|
||||
match status {
|
||||
Status::Succeeded => assert!(indexed_documents <= received_documents),
|
||||
Status::Failed | Status::Canceled => assert_eq!(indexed_documents, 0),
|
||||
status => panic!("DocumentAddition can't have an indexed_document set if it's {}", status),
|
||||
}
|
||||
assert_eq!(status, Status::Succeeded);
|
||||
assert!(indexed_documents <= received_documents);
|
||||
}
|
||||
None => {
|
||||
assert!(matches!(status, Status::Enqueued | Status::Processing))
|
||||
assert_ne!(status, Status::Succeeded);
|
||||
assert_ne!(status, Status::Canceled);
|
||||
assert_ne!(status, Status::Failed);
|
||||
}
|
||||
}
|
||||
}
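
The hunk above tightens the consistency check for `DocumentAdditionOrUpdate` task details. A condensed sketch of the invariant the newer side encodes, pulled out as a hypothetical free function (names follow the diff; the counts and `Status` come from the surrounding code):

    // Hypothetical helper, not part of the diff: the invariant in one place.
    fn check_addition_details(status: Status, received_documents: u64, indexed_documents: Option<u64>) {
        match indexed_documents {
            // Once a count of 0 indexed documents is recorded, the task cannot still be enqueued.
            Some(0) => assert_ne!(status, Status::Enqueued),
            // Any recorded count means the task finished one way or another.
            Some(indexed) => match status {
                Status::Succeeded => assert!(indexed <= received_documents),
                Status::Failed | Status::Canceled => assert_eq!(indexed, 0),
                status => panic!("DocumentAddition can't have an indexed_document set if it's {}", status),
            },
            // No count yet: the task must still be waiting or running.
            None => assert!(matches!(status, Status::Enqueued | Status::Processing)),
        }
    }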

@@ -508,21 +504,10 @@ impl IndexScheduler {
if let KindWithContent::DocumentAdditionOrUpdate { content_file, .. } = kind {
    match status {
        Status::Enqueued | Status::Processing => {
            assert!(self
                .file_store
                .all_uuids()
                .unwrap()
                .any(|uuid| uuid.as_ref().unwrap() == &content_file),
                "Could not find uuid `{content_file}` in the file_store. Available uuids are {:?}.",
                self.file_store.all_uuids().unwrap().collect::<std::result::Result<Vec<_>, file_store::Error>>().unwrap(),
            );
            assert!(self.file_store.__all_uuids().contains(&content_file));
        }
        Status::Succeeded | Status::Failed | Status::Canceled => {
            assert!(self
                .file_store
                .all_uuids()
                .unwrap()
                .all(|uuid| uuid.as_ref().unwrap() != &content_file));
            assert!(!self.file_store.__all_uuids().contains(&content_file));
        }
    }
}
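
This hunk swaps a private `__all_uuids()` set for the fallible `all_uuids()` iterator. A hedged sketch of that iterator-based check as a standalone helper (hypothetical function; it assumes `all_uuids()` yields `Result<Uuid, _>` items, as the diff does):

    // `should_exist` is true while the task is enqueued/processing and false
    // once it finishes and its update file has been deleted.
    fn assert_content_file(file_store: &file_store::FileStore, content_file: uuid::Uuid, should_exist: bool) {
        let found = file_store
            .all_uuids()
            .unwrap()
            .any(|uuid| uuid.as_ref().unwrap() == &content_file);
        assert_eq!(found, should_exist, "unexpected presence state for `{content_file}`");
    }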

@@ -1,6 +1,6 @@
[package]
name = "meili-snap"
version = "1.0.1"
version = "1.0.0"
edition = "2021"

[dependencies]

@@ -1,6 +1,6 @@
[package]
name = "meilisearch-auth"
version = "1.0.1"
version = "1.0.0"
edition = "2021"

[dependencies]

@@ -1,7 +1,7 @@
use std::error::Error;

use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::internal_error;
use meilisearch_types::{internal_error, keys};

pub type Result<T> = std::result::Result<T, AuthControllerError>;

@@ -11,6 +11,8 @@ pub enum AuthControllerError {
    ApiKeyNotFound(String),
    #[error("`uid` field value `{0}` is already an existing API key.")]
    ApiKeyAlreadyExists(String),
    #[error(transparent)]
    ApiKey(#[from] keys::Error),
    #[error("Internal error: {0}")]
    Internal(Box<dyn Error + Send + Sync + 'static>),
}

@@ -25,6 +27,7 @@ internal_error!(
impl ErrorCode for AuthControllerError {
    fn error_code(&self) -> Code {
        match self {
            Self::ApiKey(e) => e.error_code(),
            Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
            Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists,
            Self::Internal(_) => Code::Internal,

@@ -3,14 +3,15 @@ pub mod error;
mod store;

use std::collections::{HashMap, HashSet};
use std::ops::Deref;
use std::path::Path;
use std::sync::Arc;

use error::{AuthControllerError, Result};
use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::keys::{Action, Key};
use meilisearch_types::star_or::StarOr;
use serde::{Deserialize, Serialize};
use serde_json::Value;
pub use store::open_auth_store_env;
use store::{generate_key_as_hexa, HeedAuthStore};
use time::OffsetDateTime;

@@ -33,29 +34,17 @@ impl AuthController {
        Ok(Self { store: Arc::new(store), master_key: master_key.clone() })
    }

    /// Return the size of the `AuthController` database in bytes.
    pub fn size(&self) -> Result<u64> {
        self.store.size()
    }

    pub fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
        match self.store.get_api_key(create_key.uid)? {
            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),
            None => self.store.put_api_key(create_key.to_key()),
    pub fn create_key(&self, value: Value) -> Result<Key> {
        let key = Key::create_from_value(value)?;
        match self.store.get_api_key(key.uid)? {
            Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(key.uid.to_string())),
            None => self.store.put_api_key(key),
        }
    }

    pub fn update_key(&self, uid: Uuid, patch: PatchApiKey) -> Result<Key> {
    pub fn update_key(&self, uid: Uuid, value: Value) -> Result<Key> {
        let mut key = self.get_key(uid)?;
        match patch.description {
            Setting::NotSet => (),
            description => key.description = description.set(),
        };
        match patch.name {
            Setting::NotSet => (),
            name => key.name = name.set(),
        };
        key.updated_at = OffsetDateTime::now_utc();
        key.update_from_value(value)?;
        self.store.put_api_key(key)
    }
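
The two signatures above differ in who validates the payload: one side takes typed `CreateApiKey`/`PatchApiKey` values, the other raw JSON checked by `Key::create_from_value`/`update_from_value`. A hypothetical call site for the JSON-based flavour (field names follow Meilisearch's API key schema; this is a sketch, not code from the diff):

    use serde_json::json;

    fn demo(auth: &AuthController) -> Result<Key> {
        // Create a search-only key valid on every index, with no expiry.
        let key = auth.create_key(json!({
            "actions": ["search"],
            "indexes": ["*"],
            "expiresAt": null
        }))?;
        // Rename the key afterwards through the JSON-based `update_key`.
        auth.update_key(key.uid, json!({ "name": "search key" }))
    }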

@@ -97,13 +86,15 @@ impl AuthController {
                key.indexes
                    .into_iter()
                    .filter_map(|index| {
                        search_rules.get_index_search_rules(&format!("{index}")).map(
                            |index_search_rules| (index.to_string(), Some(index_search_rules)),
                        search_rules.get_index_search_rules(index.deref()).map(
                            |index_search_rules| {
                                (String::from(index), Some(index_search_rules))
                            },
                        )
                    })
                    .collect(),
            ),
            None => SearchRules::Set(key.indexes.into_iter().map(|x| x.to_string()).collect()),
            None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()),
        };
    } else if let Some(search_rules) = search_rules {
        filters.search_rules = search_rules;

@@ -3,6 +3,7 @@ use std::cmp::Reverse;
use std::collections::HashSet;
use std::convert::{TryFrom, TryInto};
use std::fs::create_dir_all;
use std::ops::Deref;
use std::path::Path;
use std::str;
use std::sync::Arc;

@@ -60,11 +61,6 @@ impl HeedAuthStore {
        Ok(Self { env, keys, action_keyid_index_expiration, should_close_on_drop: true })
    }

    /// Return the size in bytes of database
    pub fn size(&self) -> Result<u64> {
        Ok(self.env.real_disk_size()?)
    }

    pub fn set_drop_on_close(&mut self, v: bool) {
        self.should_close_on_drop = v;
    }

@@ -139,7 +135,7 @@ impl HeedAuthStore {
        for index in key.indexes.iter() {
            db.put(
                &mut wtxn,
                &(&uid, &action, Some(index.to_string().as_bytes())),
                &(&uid, &action, Some(index.deref().as_bytes())),
                &key.expires_at,
            )?;
        }

@@ -1,25 +1,24 @@
[package]
name = "meilisearch-types"
version = "1.0.1"
version = "1.0.0"
authors = ["marin <postma.marin@protonmail.com>"]
edition = "2021"

[dependencies]
actix-web = { version = "4.2.1", default-features = false }
anyhow = "1.0.65"
convert_case = "0.6.0"
csv = "1.1.6"
deserr = "0.3.0"
either = { version = "1.6.1", features = ["serde"] }
enum-iterator = "1.1.3"
file-store = { path = "../file-store" }
flate2 = "1.0.24"
fst = "0.4.7"
memmap2 = "0.5.7"
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.41.2", default-features = false }
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.37.3", default-features = false }
proptest = { version = "1.0.0", optional = true }
proptest-derive = { version = "0.3.0", optional = true }
roaring = { version = "0.10.0", features = ["serde"] }
serde = { version = "1.0.145", features = ["derive"] }
serde-cs = "0.2.4"
serde_json = "1.0.85"
tar = "0.4.38"
tempfile = "3.3.0"

@@ -31,6 +30,8 @@ uuid = { version = "1.1.2", features = ["serde", "v4"] }
[dev-dependencies]
insta = "1.19.1"
meili-snap = { path = "../meili-snap" }
proptest = "1.0.0"
proptest-derive = "0.3.0"

[features]
# all specialized tokenizations

@@ -44,3 +45,4 @@ hebrew = ["milli/hebrew"]
japanese = ["milli/japanese"]
# thai specialized tokenization
thai = ["milli/thai"]
test-traits = ["proptest", "proptest-derive"]

@@ -1,328 +0,0 @@
/*!
This module implements the error messages of deserialization errors.

We try to:
1. Give a human-readable description of where the error originated.
2. Use the correct terms depending on the format of the request (json/query param)
3. Categorise the type of the error (e.g. missing field, wrong value type, unexpected error, etc.)
*/
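// Illustration (not from the diff): with the helpers below, a payload such as
// `{"filter": ["ab", 5]}` deserialized into a type expecting strings would
// satisfy goals 1-3 above with a message like:
//   "Invalid value type at `.filter[1]`: expected a string, but found a positive integer: `5`"
// assembled from `location_json_description` and `value_kinds_description_json`.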
use deserr::{ErrorKind, IntoValue, ValueKind, ValuePointerRef};

use super::{DeserrJsonError, DeserrQueryParamError};
use crate::error::{Code, ErrorCode};

/// Return a description of the given location in a Json, preceded by the given article.
/// e.g. `at .key1[8].key2`. If the location is the origin, the given article will not be
/// included in the description.
pub fn location_json_description(location: ValuePointerRef, article: &str) -> String {
    fn rec(location: ValuePointerRef) -> String {
        match location {
            ValuePointerRef::Origin => String::new(),
            ValuePointerRef::Key { key, prev } => rec(*prev) + "." + key,
            ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
        }
    }
    match location {
        ValuePointerRef::Origin => String::new(),
        _ => {
            format!("{article} `{}`", rec(location))
        }
    }
}
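// Worked example (consistent with the doc comment above): for the pointer
// chain Origin -> Key("key1") -> Index(8) -> Key("key2"), `rec` produces
// ".key1[8].key2", so `location_json_description(ptr, " at")` returns
// " at `.key1[8].key2`", while the Origin pointer alone returns "".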

/// Return a description of the list of value kinds for a Json payload.
fn value_kinds_description_json(kinds: &[ValueKind]) -> String {
    // Rank each value kind so that they can be sorted (and deduplicated)
    // Having a predictable order helps with pattern matching
    fn order(kind: &ValueKind) -> u8 {
        match kind {
            ValueKind::Null => 0,
            ValueKind::Boolean => 1,
            ValueKind::Integer => 2,
            ValueKind::NegativeInteger => 3,
            ValueKind::Float => 4,
            ValueKind::String => 5,
            ValueKind::Sequence => 6,
            ValueKind::Map => 7,
        }
    }
    // Return a description of a single value kind, preceded by an article
    fn single_description(kind: &ValueKind) -> &'static str {
        match kind {
            ValueKind::Null => "null",
            ValueKind::Boolean => "a boolean",
            ValueKind::Integer => "a positive integer",
            ValueKind::NegativeInteger => "a negative integer",
            ValueKind::Float => "a number",
            ValueKind::String => "a string",
            ValueKind::Sequence => "an array",
            ValueKind::Map => "an object",
        }
    }

    fn description_rec(kinds: &[ValueKind], count_items: &mut usize, message: &mut String) {
        let (msg_part, rest): (_, &[ValueKind]) = match kinds {
            [] => (String::new(), &[]),
            [ValueKind::Integer | ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
                ("a number".to_owned(), rest)
            }
            [ValueKind::Integer, ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
                ("a number".to_owned(), rest)
            }
            [ValueKind::Integer, ValueKind::NegativeInteger, rest @ ..] => {
                ("an integer".to_owned(), rest)
            }
            [a] => (single_description(a).to_owned(), &[]),
            [a, rest @ ..] => (single_description(a).to_owned(), rest),
        };

        if rest.is_empty() {
            if *count_items == 0 {
                message.push_str(&msg_part);
            } else if *count_items == 1 {
                message.push_str(&format!(" or {msg_part}"));
            } else {
                message.push_str(&format!(", or {msg_part}"));
            }
        } else {
            if *count_items == 0 {
                message.push_str(&msg_part);
            } else {
                message.push_str(&format!(", {msg_part}"));
            }

            *count_items += 1;
            description_rec(rest, count_items, message);
        }
    }

    let mut kinds = kinds.to_owned();
    kinds.sort_by_key(order);
    kinds.dedup();

    if kinds.is_empty() {
        // Should not happen ideally
        "a different value".to_owned()
    } else {
        let mut message = String::new();
        description_rec(kinds.as_slice(), &mut 0, &mut message);
        message
    }
}

/// Return the JSON string of the value preceded by a description of its kind
fn value_description_with_kind_json(v: &serde_json::Value) -> String {
    match v.kind() {
        ValueKind::Null => "null".to_owned(),
        kind => {
            format!(
                "{}: `{}`",
                value_kinds_description_json(&[kind]),
                serde_json::to_string(v).unwrap()
            )
        }
    }
}

impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrJsonError<C> {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: deserr::ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let mut message = String::new();

        message.push_str(&match error {
            ErrorKind::IncorrectValueKind { actual, accepted } => {
                let expected = value_kinds_description_json(accepted);
                let received = value_description_with_kind_json(&serde_json::Value::from(actual));

                let location = location_json_description(location, " at");

                format!("Invalid value type{location}: expected {expected}, but found {received}")
            }
            ErrorKind::MissingField { field } => {
                let location = location_json_description(location, " inside");
                format!("Missing field `{field}`{location}")
            }
            ErrorKind::UnknownKey { key, accepted } => {
                let location = location_json_description(location, " inside");
                format!(
                    "Unknown field `{}`{location}: expected one of {}",
                    key,
                    accepted
                        .iter()
                        .map(|accepted| format!("`{}`", accepted))
                        .collect::<Vec<String>>()
                        .join(", ")
                )
            }
            ErrorKind::UnknownValue { value, accepted } => {
                let location = location_json_description(location, " at");
                format!(
                    "Unknown value `{}`{location}: expected one of {}",
                    value,
                    accepted
                        .iter()
                        .map(|accepted| format!("`{}`", accepted))
                        .collect::<Vec<String>>()
                        .join(", "),
                )
            }
            ErrorKind::Unexpected { msg } => {
                let location = location_json_description(location, " at");
                format!("Invalid value{location}: {msg}")
            }
        });

        Err(DeserrJsonError::new(message, C::default().error_code()))
    }
}

pub fn immutable_field_error(field: &str, accepted: &[&str], code: Code) -> DeserrJsonError {
    let msg = format!(
        "Immutable field `{field}`: expected one of {}",
        accepted
            .iter()
            .map(|accepted| format!("`{}`", accepted))
            .collect::<Vec<String>>()
            .join(", ")
    );

    DeserrJsonError::new(msg, code)
}

/// Return a description of the given location in query parameters, preceded by the
/// given article. e.g. `at key5[2]`. If the location is the origin, the given article
/// will not be included in the description.
pub fn location_query_param_description(location: ValuePointerRef, article: &str) -> String {
    fn rec(location: ValuePointerRef) -> String {
        match location {
            ValuePointerRef::Origin => String::new(),
            ValuePointerRef::Key { key, prev } => {
                if matches!(prev, ValuePointerRef::Origin) {
                    key.to_owned()
                } else {
                    rec(*prev) + "." + key
                }
            }
            ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
        }
    }
    match location {
        ValuePointerRef::Origin => String::new(),
        _ => {
            format!("{article} `{}`", rec(location))
        }
    }
}

impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrQueryParamError<C> {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: deserr::ErrorKind<V>,
        location: ValuePointerRef,
    ) -> Result<Self, Self> {
        let mut message = String::new();

        message.push_str(&match error {
            ErrorKind::IncorrectValueKind { actual, accepted } => {
                let expected = value_kinds_description_query_param(accepted);
                let received = value_description_with_kind_query_param(actual);

                let location = location_query_param_description(location, " for parameter");

                format!("Invalid value type{location}: expected {expected}, but found {received}")
            }
            ErrorKind::MissingField { field } => {
                let location = location_query_param_description(location, " inside");
                format!("Missing parameter `{field}`{location}")
            }
            ErrorKind::UnknownKey { key, accepted } => {
                let location = location_query_param_description(location, " inside");
                format!(
                    "Unknown parameter `{}`{location}: expected one of {}",
                    key,
                    accepted
                        .iter()
                        .map(|accepted| format!("`{}`", accepted))
                        .collect::<Vec<String>>()
                        .join(", ")
                )
            }
            ErrorKind::UnknownValue { value, accepted } => {
                let location = location_query_param_description(location, " for parameter");
                format!(
                    "Unknown value `{}`{location}: expected one of {}",
                    value,
                    accepted
                        .iter()
                        .map(|accepted| format!("`{}`", accepted))
                        .collect::<Vec<String>>()
                        .join(", "),
                )
            }
            ErrorKind::Unexpected { msg } => {
                let location = location_query_param_description(location, " in parameter");
                format!("Invalid value{location}: {msg}")
            }
        });

        Err(DeserrQueryParamError::new(message, C::default().error_code()))
    }
}

/// Return a description of the list of value kinds for query parameters
/// Since query parameters are always treated as strings, we always return
/// "a string" for now.
fn value_kinds_description_query_param(_accepted: &[ValueKind]) -> String {
    "a string".to_owned()
}

fn value_description_with_kind_query_param<V: IntoValue>(actual: deserr::Value<V>) -> String {
    match actual {
        deserr::Value::Null => "null".to_owned(),
        deserr::Value::Boolean(x) => format!("a boolean: `{x}`"),
        deserr::Value::Integer(x) => format!("an integer: `{x}`"),
        deserr::Value::NegativeInteger(x) => {
            format!("an integer: `{x}`")
        }
        deserr::Value::Float(x) => {
            format!("a number: `{x}`")
        }
        deserr::Value::String(x) => {
            format!("a string: `{x}`")
        }
        deserr::Value::Sequence(_) => "multiple values".to_owned(),
        deserr::Value::Map(_) => "multiple parameters".to_owned(),
    }
}

#[cfg(test)]
mod tests {
    use deserr::ValueKind;

    use crate::deserr::error_messages::value_kinds_description_json;

    #[test]
    fn test_value_kinds_description_json() {
        insta::assert_display_snapshot!(value_kinds_description_json(&[]), @"a different value");

        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"a negative integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Map]), @"an object");

        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"a negative integer or an array");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
    }
}

@@ -1,134 +0,0 @@
use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;

use deserr::{DeserializeError, MergeWithError, ValuePointerRef};

use crate::error::deserr_codes::{self, *};
use crate::error::{
    unwrap_any, Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError,
    ParseOffsetDateTimeError,
};
use crate::index_uid::IndexUidFormatError;
use crate::tasks::{ParseTaskKindError, ParseTaskStatusError};

pub mod error_messages;
pub mod query_params;

/// Marker type for the Json format
pub struct DeserrJson;
/// Marker type for the Query Parameter format
pub struct DeserrQueryParam;

pub type DeserrJsonError<C = deserr_codes::BadRequest> = DeserrError<DeserrJson, C>;
pub type DeserrQueryParamError<C = deserr_codes::BadRequest> = DeserrError<DeserrQueryParam, C>;

/// A request deserialization error.
///
/// The first generic parameter is a marker type describing the format of the request: either json ([`DeserrJson`]) or query param ([`DeserrQueryParam`]).
/// The second generic parameter is the default error code for the deserialization error, in case it is not given.
pub struct DeserrError<Format, C: Default + ErrorCode> {
    pub msg: String,
    pub code: Code,
    _phantom: PhantomData<(Format, C)>,
}
impl<Format, C: Default + ErrorCode> DeserrError<Format, C> {
    pub fn new(msg: String, code: Code) -> Self {
        Self { msg, code, _phantom: PhantomData }
    }
}
impl<Format, C: Default + ErrorCode> std::fmt::Debug for DeserrError<Format, C> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish()
    }
}

impl<Format, C: Default + ErrorCode> std::fmt::Display for DeserrError<Format, C> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.msg)
    }
}

impl<Format, C: Default + ErrorCode> std::error::Error for DeserrError<Format, C> {}
impl<Format, C: Default + ErrorCode> ErrorCode for DeserrError<Format, C> {
    fn error_code(&self) -> Code {
        self.code
    }
}

// For now, we don't accumulate errors. Only one deserialisation error is ever returned at a time.
impl<Format, C1: Default + ErrorCode, C2: Default + ErrorCode>
    MergeWithError<DeserrError<Format, C2>> for DeserrError<Format, C1>
{
    fn merge(
        _self_: Option<Self>,
        other: DeserrError<Format, C2>,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
    }
}

impl<Format, C: Default + ErrorCode> MergeWithError<Infallible> for DeserrError<Format, C> {
    fn merge(
        _self_: Option<Self>,
        _other: Infallible,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        unreachable!()
    }
}

// Implement a convenience function to build a `missing_field` error
macro_rules! make_missing_field_convenience_builder {
    ($err_code:ident, $fn_name:ident) => {
        impl DeserrJsonError<$err_code> {
            pub fn $fn_name(field: &str, location: ValuePointerRef) -> Self {
                let x = unwrap_any(Self::error::<Infallible>(
                    None,
                    deserr::ErrorKind::MissingField { field },
                    location,
                ));
                Self { msg: x.msg, code: $err_code.error_code(), _phantom: PhantomData }
            }
        }
    };
}
make_missing_field_convenience_builder!(MissingIndexUid, missing_index_uid);
make_missing_field_convenience_builder!(MissingApiKeyActions, missing_api_key_actions);
make_missing_field_convenience_builder!(MissingApiKeyExpiresAt, missing_api_key_expires_at);
make_missing_field_convenience_builder!(MissingApiKeyIndexes, missing_api_key_indexes);
make_missing_field_convenience_builder!(MissingSwapIndexes, missing_swap_indexes);

// Integrate a sub-error into a [`DeserrError`] by taking its error message but using
// the default error code (C) from `Self`
macro_rules! merge_with_error_impl_take_error_message {
    ($err_type:ty) => {
        impl<Format, C: Default + ErrorCode> MergeWithError<$err_type> for DeserrError<Format, C>
        where
            DeserrError<Format, C>: deserr::DeserializeError,
        {
            fn merge(
                _self_: Option<Self>,
                other: $err_type,
                merge_location: ValuePointerRef,
            ) -> Result<Self, Self> {
                DeserrError::<Format, C>::error::<Infallible>(
                    None,
                    deserr::ErrorKind::Unexpected { msg: other.to_string() },
                    merge_location,
                )
            }
        }
    };
}

// All these errors can be merged into a `DeserrError`
merge_with_error_impl_take_error_message!(DeserrParseIntError);
merge_with_error_impl_take_error_message!(DeserrParseBoolError);
merge_with_error_impl_take_error_message!(uuid::Error);
merge_with_error_impl_take_error_message!(InvalidTaskDateError);
merge_with_error_impl_take_error_message!(ParseOffsetDateTimeError);
merge_with_error_impl_take_error_message!(ParseTaskKindError);
merge_with_error_impl_take_error_message!(ParseTaskStatusError);
merge_with_error_impl_take_error_message!(IndexUidFormatError);

@@ -1,115 +0,0 @@
/*!
This module provides helper traits, types, and functions to deserialize query parameters.

The source of the problem is that query parameters only give us a string to work with.
This means `deserr` is never given a sequence or numbers, and thus the default deserialization
code for common types such as `usize` or `Vec<T>` does not work. To work around it, we create a
wrapper type called `Param<T>`, which is deserialised using the `from_query_param` method of the trait
`FromQueryParameter`.

We also use other helper types such as `CS` (i.e. comma-separated) from `serde_cs` as well as
`StarOr`, `OptionStarOr`, and `OptionStarOrList`.
*/
|
||||
|
||||
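The problem in one line: `?limit=20` reaches the server as the string "20". A standalone sketch of the parsing step this module wraps (plain `FromStr`, nothing from this diff assumed):

let raw = "20"; // value extracted from `?limit=20`
let limit: usize = raw.parse().expect("a positive integer");
assert_eq!(limit, 20);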
use std::convert::Infallible;
use std::ops::Deref;
use std::str::FromStr;

use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};

use super::{DeserrParseBoolError, DeserrParseIntError};
use crate::error::unwrap_any;
use crate::index_uid::IndexUid;
use crate::tasks::{Kind, Status};

/// A wrapper type indicating that the inner value should be
/// deserialized from a query parameter string.
///
/// Note that if the field is optional, it is better to use
/// `Option<Param<T>>` instead of `Param<Option<T>>`.
#[derive(Default, Debug, Clone, Copy)]
pub struct Param<T>(pub T);

impl<T> Deref for Param<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T, E> DeserializeFromValue<E> for Param<T>
where
    E: DeserializeError + MergeWithError<T::Err>,
    T: FromQueryParameter,
{
    fn deserialize_from_value<V: deserr::IntoValue>(
        value: deserr::Value<V>,
        location: deserr::ValuePointerRef,
    ) -> Result<Self, E> {
        match value {
            deserr::Value::String(s) => match T::from_query_param(&s) {
                Ok(x) => Ok(Param(x)),
                Err(e) => Err(unwrap_any(E::merge(None, e, location))),
            },
            _ => Err(unwrap_any(E::error(
                None,
                deserr::ErrorKind::IncorrectValueKind {
                    actual: value,
                    accepted: &[ValueKind::String],
                },
                location,
            ))),
        }
    }
}

/// Parse a value from a query parameter string.
///
/// This trait is functionally equivalent to `FromStr`.
/// Having a separate trait allows us to return better
/// deserialization error messages.
pub trait FromQueryParameter: Sized {
    type Err;
    fn from_query_param(p: &str) -> Result<Self, Self::Err>;
}
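For types not covered by the macros below, the trait can be implemented by hand. A sketch with a hypothetical `SortOrder` enum (not part of this diff):

enum SortOrder {
    Asc,
    Desc,
}

impl FromQueryParameter for SortOrder {
    // A dedicated error struct would give nicer messages; `String` keeps the sketch short.
    type Err = String;
    fn from_query_param(p: &str) -> Result<Self, Self::Err> {
        match p {
            "asc" => Ok(SortOrder::Asc),
            "desc" => Ok(SortOrder::Desc),
            other => Err(format!("`{other}` is neither `asc` nor `desc`")),
        }
    }
}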
/// Implement `FromQueryParameter` for the given type using its `FromStr`
/// trait implementation.
macro_rules! impl_from_query_param_from_str {
    ($type:ty) => {
        impl FromQueryParameter for $type {
            type Err = <$type as FromStr>::Err;
            fn from_query_param(p: &str) -> Result<Self, Self::Err> {
                p.parse()
            }
        }
    };
}
impl_from_query_param_from_str!(Kind);
impl_from_query_param_from_str!(Status);
impl_from_query_param_from_str!(IndexUid);

/// Implement `FromQueryParameter` for the given type using its `FromStr`
/// trait implementation, replacing the returned error with a struct
/// that wraps the original query parameter.
macro_rules! impl_from_query_param_wrap_original_value_in_error {
    ($type:ty, $err_type:path) => {
        impl FromQueryParameter for $type {
            type Err = $err_type;
            fn from_query_param(p: &str) -> Result<Self, Self::Err> {
                p.parse().map_err(|_| $err_type(p.to_owned()))
            }
        }
    };
}
impl_from_query_param_wrap_original_value_in_error!(usize, DeserrParseIntError);
impl_from_query_param_wrap_original_value_in_error!(u32, DeserrParseIntError);
impl_from_query_param_wrap_original_value_in_error!(bool, DeserrParseBoolError);

impl FromQueryParameter for String {
    type Err = Infallible;
    fn from_query_param(p: &str) -> Result<Self, Infallible> {
        Ok(p.to_owned())
    }
}
@@ -4,6 +4,7 @@ use std::fs::File;
use std::io::{self, Seek, Write};
use std::marker::PhantomData;

use either::Either;
use memmap2::MmapOptions;
use milli::documents::{DocumentsBatchBuilder, Error};
use milli::Object;
@@ -119,6 +120,20 @@ pub fn read_csv(file: &File, writer: impl Write + Seek) -> Result<u64> {

/// Reads JSON from a temporary file and writes an obkv batch to the writer.
pub fn read_json(file: &File, writer: impl Write + Seek) -> Result<u64> {
    read_json_inner(file, writer, PayloadType::Json)
}

/// Reads NDJSON from a temporary file and writes an obkv batch to the writer.
pub fn read_ndjson(file: &File, writer: impl Write + Seek) -> Result<u64> {
    read_json_inner(file, writer, PayloadType::Ndjson)
}

/// Reads JSON or NDJSON from a temporary file and writes an obkv batch to the writer.
fn read_json_inner(
    file: &File,
    writer: impl Write + Seek,
    payload_type: PayloadType,
) -> Result<u64> {
    let mut builder = DocumentsBatchBuilder::new(writer);
    let mmap = unsafe { MmapOptions::new().map(file)? };
    let mut deserializer = serde_json::Deserializer::from_slice(&mmap);
@@ -128,20 +143,23 @@ pub fn read_json(file: &File, writer: impl Write + Seek) -> Result<u64> {
        // The data has been transferred to the writer during the deserialization process.
        Ok(Ok(_)) => (),
        Ok(Err(e)) => return Err(DocumentFormatError::Io(e)),
        Err(e) => {
            // Attempt to deserialize a single JSON string when the cause of the exception is not Category::Data.
            // Other types of deserialization exceptions are returned directly to the front-end.
            if e.classify() != serde_json::error::Category::Data {
                return Err(DocumentFormatError::MalformedPayload(
                    Error::Json(e),
                    PayloadType::Json,
                ));
        Err(_e) => {
            // If we cannot deserialize the content as an array of objects then we try
            // to deserialize it with the original method to keep correct error messages.
            #[derive(Deserialize, Debug)]
            #[serde(transparent)]
            struct ArrayOrSingleObject {
                #[serde(with = "either::serde_untagged")]
                inner: Either<Vec<Object>, Object>,
            }

            let content: Object = serde_json::from_slice(&mmap)
            let content: ArrayOrSingleObject = serde_json::from_reader(file)
                .map_err(Error::Json)
                .map_err(|e| (PayloadType::Json, e))?;
            builder.append_json_object(&content).map_err(DocumentFormatError::Io)?;
                .map_err(|e| (payload_type, e))?;

            for object in content.inner.map_right(|o| vec![o]).into_inner() {
                builder.append_json_object(&object).map_err(DocumentFormatError::Io)?;
            }
        }
    }

@@ -151,22 +169,6 @@ pub fn read_json(file: &File, writer: impl Write + Seek) -> Result<u64> {
    Ok(count as u64)
}

/// Reads NDJSON from a temporary file and writes an obkv batch to the writer.
pub fn read_ndjson(file: &File, writer: impl Write + Seek) -> Result<u64> {
    let mut builder = DocumentsBatchBuilder::new(writer);
    let mmap = unsafe { MmapOptions::new().map(file)? };

    for result in serde_json::Deserializer::from_slice(&mmap).into_iter() {
        let object = result.map_err(Error::Json).map_err(|e| (PayloadType::Ndjson, e))?;
        builder.append_json_object(&object).map_err(Into::into).map_err(DocumentFormatError::Io)?;
    }

    let count = builder.documents_count();
    let _ = builder.into_inner().map_err(Into::into).map_err(DocumentFormatError::Io)?;

    Ok(count as u64)
}

/// The actual handling of the deserialization process in serde
/// avoids storing the deserialized object in memory.
///
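The `ArrayOrSingleObject` fallback above leans on `either`'s untagged serde support: an array binds to the `Vec<Object>` side, a lone object to the `Object` side. A self-contained sketch of the same shape, using `serde_json::Value` in place of milli's `Object`:

use either::Either;
use serde::Deserialize;

#[derive(Deserialize, Debug)]
#[serde(transparent)]
struct OneOrMany {
    #[serde(with = "either::serde_untagged")]
    inner: Either<Vec<serde_json::Value>, serde_json::Value>,
}

fn main() {
    let many: OneOrMany = serde_json::from_str(r#"[{"id": 1}, {"id": 2}]"#).unwrap();
    let one: OneOrMany = serde_json::from_str(r#"{"id": 1}"#).unwrap();
    // `map_right(|o| vec![o])` then unifies both shapes into a Vec, as done above.
    println!("{many:?} {one:?}");
}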
@@ -3,14 +3,15 @@ use std::{fmt, io};
use actix_web::http::StatusCode;
use actix_web::{self as aweb, HttpResponseBuilder};
use aweb::rt::task::JoinError;
use convert_case::Casing;
use milli::heed::{Error as HeedError, MdbError};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
pub struct ResponseError {
    #[serde(skip)]
    #[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
    code: StatusCode,
    message: String,
    #[serde(rename = "code")]
@@ -29,7 +30,7 @@ impl ResponseError {
        Self {
            code: code.http(),
            message,
            error_code: code.name(),
            error_code: code.err_code().error_name.to_string(),
            error_type: code.type_(),
            error_link: code.url(),
        }
@@ -46,7 +47,7 @@ impl std::error::Error for ResponseError {}

impl<T> From<T> for ResponseError
where
    T: std::error::Error + ErrorCode,
    T: ErrorCode,
{
    fn from(other: T) -> Self {
        Self::from_msg(other.to_string(), other.error_code())
@@ -64,7 +65,7 @@ impl aweb::error::ResponseError for ResponseError {
    }
}

pub trait ErrorCode {
pub trait ErrorCode: std::error::Error {
    fn error_code(&self) -> Code;

    /// returns the HTTP status code associated with the error
@@ -90,10 +91,9 @@ pub trait ErrorCode {

#[allow(clippy::enum_variant_names)]
enum ErrorType {
    Internal,
    InvalidRequest,
    Auth,
    System,
    InternalError,
    InvalidRequestError,
    AuthenticationError,
}

impl fmt::Display for ErrorType {
@@ -101,191 +101,278 @@ impl fmt::Display for ErrorType {
        use ErrorType::*;

        match self {
            Internal => write!(f, "internal"),
            InvalidRequest => write!(f, "invalid_request"),
            Auth => write!(f, "auth"),
            System => write!(f, "system"),
            InternalError => write!(f, "internal"),
            InvalidRequestError => write!(f, "invalid_request"),
            AuthenticationError => write!(f, "auth"),
        }
    }
}
/// Implement all the error codes.
///
/// 1. Make an enum `Code` where each error code is a variant
/// 2. Implement the `http`, `name`, and `type_` methods on the enum
/// 3. Make a unit type for each error code in the module `deserr_codes`.
///
/// The unit type's purpose is to be used as a marker type parameter, e.g.
/// `DeserrJsonError<MyErrorCode>`. It implements `Default` and `ErrorCode`,
/// so we can get a value of the `Code` enum with the correct variant by calling
/// `MyErrorCode::default().error_code()`.
macro_rules! make_error_codes {
    ($($code_ident:ident, $err_type:ident, $status:ident);*) => {
        #[derive(Debug, Clone, Copy, PartialEq, Eq)]
        pub enum Code {
            $($code_ident),*
        }
        impl Code {
            /// return the HTTP status code associated with the `Code`
            fn http(&self) -> StatusCode {
                match self {
                    $(
                        Code::$code_ident => StatusCode::$status
                    ),*
                }
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
pub enum Code {
    // error related to your setup
    IoError,
    NoSpaceLeftOnDevice,
    TooManyOpenFiles,

    // index related error
    CreateIndex,
    IndexAlreadyExists,
    IndexNotFound,
    InvalidIndexUid,
    InvalidMinWordLengthForTypo,

    DuplicateIndexFound,

    // invalid state error
    InvalidState,
    MissingPrimaryKey,
    PrimaryKeyAlreadyPresent,

    MaxFieldsLimitExceeded,
    MissingDocumentId,
    InvalidDocumentId,

    Filter,
    Sort,

    BadParameter,
    BadRequest,
    DatabaseSizeLimitReached,
    DocumentNotFound,
    Internal,
    InvalidGeoField,
    InvalidRankingRule,
    InvalidStore,
    InvalidToken,
    MissingAuthorizationHeader,
    MissingMasterKey,
    DumpNotFound,
    InvalidTaskDateFilter,
    InvalidTaskStatusesFilter,
    InvalidTaskTypesFilter,
    InvalidTaskCanceledByFilter,
    InvalidTaskUidsFilter,
    TaskNotFound,
    TaskDeletionWithEmptyQuery,
    TaskCancelationWithEmptyQuery,
    PayloadTooLarge,
    RetrieveDocument,
    SearchDocuments,
    UnsupportedMediaType,

    DumpAlreadyInProgress,
    DumpProcessFailed,
    // Only used when importing a dump
    UnretrievableErrorCode,

    InvalidContentType,
    MissingContentType,
    MalformedPayload,
    MissingPayload,

    ApiKeyNotFound,
    MissingParameter,
    InvalidApiKeyActions,
    InvalidApiKeyIndexes,
    InvalidApiKeyExpiresAt,
    InvalidApiKeyDescription,
    InvalidApiKeyName,
    InvalidApiKeyUid,
    ImmutableField,
    ApiKeyAlreadyExists,
}
impl Code {
    /// associate a `Code` variant to the actual ErrCode
    fn err_code(&self) -> ErrCode {
        use Code::*;

        match self {
            // related to the setup
            IoError => ErrCode::invalid("io_error", StatusCode::UNPROCESSABLE_ENTITY),
            TooManyOpenFiles => {
                ErrCode::invalid("too_many_open_files", StatusCode::UNPROCESSABLE_ENTITY)
            }
            NoSpaceLeftOnDevice => {
                ErrCode::invalid("no_space_left_on_device", StatusCode::UNPROCESSABLE_ENTITY)
            }

            /// return error name, used as error code
            fn name(&self) -> String {
                match self {
                    $(
                        Code::$code_ident => stringify!($code_ident).to_case(convert_case::Case::Snake)
                    ),*
                }
            // index related errors
            // create index is thrown on internal error while creating an index.
            CreateIndex => {
                ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR)
            }
            IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT),
            // thrown when requesting a nonexistent index
            IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
            InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),

            // invalid state error
            InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
            // thrown when no primary key has been set
            MissingPrimaryKey => {
                ErrCode::invalid("primary_key_inference_failed", StatusCode::BAD_REQUEST)
            }
            // error thrown when trying to set an already existing primary key
            PrimaryKeyAlreadyPresent => {
                ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST)
            }
            // invalid ranking rule
            InvalidRankingRule => ErrCode::invalid("invalid_ranking_rule", StatusCode::BAD_REQUEST),

            // invalid database
            InvalidStore => {
                ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR)
            }

            /// return the error type
            fn type_(&self) -> String {
                match self {
                    $(
                        Code::$code_ident => ErrorType::$err_type.to_string()
                    ),*
                }
            // invalid document
            MaxFieldsLimitExceeded => {
                ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST)
            }
            MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),
            InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST),

            // error related to filters
            Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST),
            // error related to sorts
            Sort => ErrCode::invalid("invalid_sort", StatusCode::BAD_REQUEST),

            BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
            BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
            DatabaseSizeLimitReached => {
                ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
            }
            DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
            Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
            InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
            InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN),
            MissingAuthorizationHeader => {
                ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED)
            }
            MissingMasterKey => {
                ErrCode::authentication("missing_master_key", StatusCode::UNAUTHORIZED)
            }
            InvalidTaskDateFilter => {
                ErrCode::invalid("invalid_task_date_filter", StatusCode::BAD_REQUEST)
            }
            InvalidTaskUidsFilter => {
                ErrCode::invalid("invalid_task_uids_filter", StatusCode::BAD_REQUEST)
            }
            InvalidTaskStatusesFilter => {
                ErrCode::invalid("invalid_task_statuses_filter", StatusCode::BAD_REQUEST)
            }
            InvalidTaskTypesFilter => {
                ErrCode::invalid("invalid_task_types_filter", StatusCode::BAD_REQUEST)
            }
            InvalidTaskCanceledByFilter => {
                ErrCode::invalid("invalid_task_canceled_by_filter", StatusCode::BAD_REQUEST)
            }
            TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
            TaskDeletionWithEmptyQuery => {
                ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)
            }
            TaskCancelationWithEmptyQuery => {
                ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)
            }
            DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND),
            PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
            RetrieveDocument => {
                ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST)
            }
            SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST),
            UnsupportedMediaType => {
                ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
            }

            /// return the doc url associated with the error
            fn url(&self) -> String {
                format!("https://docs.meilisearch.com/errors#{}", self.name())
            // error related to dump
            DumpAlreadyInProgress => {
                ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT)
            }
            DumpProcessFailed => {
                ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)
            }
            MissingContentType => {
                ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
            }
            MalformedPayload => ErrCode::invalid("malformed_payload", StatusCode::BAD_REQUEST),
            InvalidContentType => {
                ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
            }
            MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST),
            // This one can only happen when importing a dump and encountering an unknown code in the task queue.
            UnretrievableErrorCode => {
                ErrCode::invalid("unretrievable_error_code", StatusCode::BAD_REQUEST)
            }

            // error related to keys
            ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND),
            MissingParameter => ErrCode::invalid("missing_parameter", StatusCode::BAD_REQUEST),
            InvalidApiKeyActions => {
                ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST)
            }
            InvalidApiKeyIndexes => {
                ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST)
            }
            InvalidApiKeyExpiresAt => {
                ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST)
            }
            InvalidApiKeyDescription => {
                ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST)
            }
            InvalidApiKeyName => ErrCode::invalid("invalid_api_key_name", StatusCode::BAD_REQUEST),
            InvalidApiKeyUid => ErrCode::invalid("invalid_api_key_uid", StatusCode::BAD_REQUEST),
            ApiKeyAlreadyExists => ErrCode::invalid("api_key_already_exists", StatusCode::CONFLICT),
            ImmutableField => ErrCode::invalid("immutable_field", StatusCode::BAD_REQUEST),
            InvalidMinWordLengthForTypo => {
                ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST)
            }
            DuplicateIndexFound => {
                ErrCode::invalid("duplicate_index_found", StatusCode::BAD_REQUEST)
            }
        }
        pub mod deserr_codes {
            use super::{Code, ErrorCode};
            $(
                #[derive(Default)]
                pub struct $code_ident;
                impl ErrorCode for $code_ident {
                    fn error_code(&self) -> Code {
                        Code::$code_ident
                    }
                }
            )*
        }
    }

    /// return the HTTP status code associated with the `Code`
    fn http(&self) -> StatusCode {
        self.err_code().status_code
    }

    /// return error name, used as error code
    fn name(&self) -> String {
        self.err_code().error_name.to_string()
    }

    /// return the error type
    fn type_(&self) -> String {
        self.err_code().error_type.to_string()
    }

    /// return the doc url associated with the error
    fn url(&self) -> String {
        format!("https://docs.meilisearch.com/errors#{}", self.name())
    }
}
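A sketch of what one entry of the macro produces (illustrative, following the macro body above): the entry `ApiKeyAlreadyExists, InvalidRequest, CONFLICT` yields a `Code::ApiKeyAlreadyExists` variant plus a `deserr_codes::ApiKeyAlreadyExists` unit type, so within the crate:

// Code::ApiKeyAlreadyExists.http()  == StatusCode::CONFLICT
// Code::ApiKeyAlreadyExists.name()  == "api_key_already_exists"  (snake_case of the ident)
// Code::ApiKeyAlreadyExists.type_() == "invalid_request"         (Display of ErrorType::InvalidRequest)
// deserr_codes::ApiKeyAlreadyExists.error_code() == Code::ApiKeyAlreadyExists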
// An exhaustive list of all the error codes used by meilisearch.
make_error_codes! {
    ApiKeyAlreadyExists                    , InvalidRequest , CONFLICT;
    ApiKeyNotFound                         , InvalidRequest , NOT_FOUND;
    BadParameter                           , InvalidRequest , BAD_REQUEST;
    BadRequest                             , InvalidRequest , BAD_REQUEST;
    DatabaseSizeLimitReached               , Internal       , INTERNAL_SERVER_ERROR;
    DocumentNotFound                       , InvalidRequest , NOT_FOUND;
    DumpAlreadyProcessing                  , InvalidRequest , CONFLICT;
    DumpNotFound                           , InvalidRequest , NOT_FOUND;
    DumpProcessFailed                      , Internal       , INTERNAL_SERVER_ERROR;
    DuplicateIndexFound                    , InvalidRequest , BAD_REQUEST;
    ImmutableApiKeyActions                 , InvalidRequest , BAD_REQUEST;
    ImmutableApiKeyCreatedAt               , InvalidRequest , BAD_REQUEST;
    ImmutableApiKeyExpiresAt               , InvalidRequest , BAD_REQUEST;
    ImmutableApiKeyIndexes                 , InvalidRequest , BAD_REQUEST;
    ImmutableApiKeyKey                     , InvalidRequest , BAD_REQUEST;
    ImmutableApiKeyUid                     , InvalidRequest , BAD_REQUEST;
    ImmutableApiKeyUpdatedAt               , InvalidRequest , BAD_REQUEST;
    ImmutableIndexCreatedAt                , InvalidRequest , BAD_REQUEST;
    ImmutableIndexUid                      , InvalidRequest , BAD_REQUEST;
    ImmutableIndexUpdatedAt                , InvalidRequest , BAD_REQUEST;
    IndexAlreadyExists                     , InvalidRequest , CONFLICT;
    IndexCreationFailed                    , Internal       , INTERNAL_SERVER_ERROR;
    IndexNotFound                          , InvalidRequest , NOT_FOUND;
    IndexPrimaryKeyAlreadyExists           , InvalidRequest , BAD_REQUEST;
    IndexPrimaryKeyMultipleCandidatesFound , InvalidRequest , BAD_REQUEST;
    IndexPrimaryKeyNoCandidateFound        , InvalidRequest , BAD_REQUEST;
    Internal                               , Internal       , INTERNAL_SERVER_ERROR;
    InvalidApiKey                          , Auth           , FORBIDDEN;
    InvalidApiKeyActions                   , InvalidRequest , BAD_REQUEST;
    InvalidApiKeyDescription               , InvalidRequest , BAD_REQUEST;
    InvalidApiKeyExpiresAt                 , InvalidRequest , BAD_REQUEST;
    InvalidApiKeyIndexes                   , InvalidRequest , BAD_REQUEST;
    InvalidApiKeyLimit                     , InvalidRequest , BAD_REQUEST;
    InvalidApiKeyName                      , InvalidRequest , BAD_REQUEST;
    InvalidApiKeyOffset                    , InvalidRequest , BAD_REQUEST;
    InvalidApiKeyUid                       , InvalidRequest , BAD_REQUEST;
    InvalidContentType                     , InvalidRequest , UNSUPPORTED_MEDIA_TYPE;
    InvalidDocumentFields                  , InvalidRequest , BAD_REQUEST;
    InvalidDocumentGeoField                , InvalidRequest , BAD_REQUEST;
    InvalidDocumentId                      , InvalidRequest , BAD_REQUEST;
    InvalidDocumentLimit                   , InvalidRequest , BAD_REQUEST;
    InvalidDocumentOffset                  , InvalidRequest , BAD_REQUEST;
    InvalidIndexLimit                      , InvalidRequest , BAD_REQUEST;
    InvalidIndexOffset                     , InvalidRequest , BAD_REQUEST;
    InvalidIndexPrimaryKey                 , InvalidRequest , BAD_REQUEST;
    InvalidIndexUid                        , InvalidRequest , BAD_REQUEST;
    InvalidSearchAttributesToCrop          , InvalidRequest , BAD_REQUEST;
    InvalidSearchAttributesToHighlight     , InvalidRequest , BAD_REQUEST;
    InvalidSearchAttributesToRetrieve      , InvalidRequest , BAD_REQUEST;
    InvalidSearchCropLength                , InvalidRequest , BAD_REQUEST;
    InvalidSearchCropMarker                , InvalidRequest , BAD_REQUEST;
    InvalidSearchFacets                    , InvalidRequest , BAD_REQUEST;
    InvalidSearchFilter                    , InvalidRequest , BAD_REQUEST;
    InvalidSearchHighlightPostTag          , InvalidRequest , BAD_REQUEST;
    InvalidSearchHighlightPreTag           , InvalidRequest , BAD_REQUEST;
    InvalidSearchHitsPerPage               , InvalidRequest , BAD_REQUEST;
    InvalidSearchLimit                     , InvalidRequest , BAD_REQUEST;
    InvalidSearchMatchingStrategy          , InvalidRequest , BAD_REQUEST;
    InvalidSearchOffset                    , InvalidRequest , BAD_REQUEST;
    InvalidSearchPage                      , InvalidRequest , BAD_REQUEST;
    InvalidSearchQ                         , InvalidRequest , BAD_REQUEST;
    InvalidSearchShowMatchesPosition       , InvalidRequest , BAD_REQUEST;
    InvalidSearchSort                      , InvalidRequest , BAD_REQUEST;
    InvalidSettingsDisplayedAttributes     , InvalidRequest , BAD_REQUEST;
    InvalidSettingsDistinctAttribute       , InvalidRequest , BAD_REQUEST;
    InvalidSettingsFaceting                , InvalidRequest , BAD_REQUEST;
    InvalidSettingsFilterableAttributes    , InvalidRequest , BAD_REQUEST;
    InvalidSettingsPagination              , InvalidRequest , BAD_REQUEST;
    InvalidSettingsRankingRules            , InvalidRequest , BAD_REQUEST;
    InvalidSettingsSearchableAttributes    , InvalidRequest , BAD_REQUEST;
    InvalidSettingsSortableAttributes      , InvalidRequest , BAD_REQUEST;
    InvalidSettingsStopWords               , InvalidRequest , BAD_REQUEST;
    InvalidSettingsSynonyms                , InvalidRequest , BAD_REQUEST;
    InvalidSettingsTypoTolerance           , InvalidRequest , BAD_REQUEST;
    InvalidState                           , Internal       , INTERNAL_SERVER_ERROR;
    InvalidStoreFile                       , Internal       , INTERNAL_SERVER_ERROR;
    InvalidSwapDuplicateIndexFound         , InvalidRequest , BAD_REQUEST;
    InvalidSwapIndexes                     , InvalidRequest , BAD_REQUEST;
    InvalidTaskAfterEnqueuedAt             , InvalidRequest , BAD_REQUEST;
    InvalidTaskAfterFinishedAt             , InvalidRequest , BAD_REQUEST;
    InvalidTaskAfterStartedAt              , InvalidRequest , BAD_REQUEST;
    InvalidTaskBeforeEnqueuedAt            , InvalidRequest , BAD_REQUEST;
    InvalidTaskBeforeFinishedAt            , InvalidRequest , BAD_REQUEST;
    InvalidTaskBeforeStartedAt             , InvalidRequest , BAD_REQUEST;
    InvalidTaskCanceledBy                  , InvalidRequest , BAD_REQUEST;
    InvalidTaskFrom                        , InvalidRequest , BAD_REQUEST;
    InvalidTaskLimit                       , InvalidRequest , BAD_REQUEST;
    InvalidTaskStatuses                    , InvalidRequest , BAD_REQUEST;
    InvalidTaskTypes                       , InvalidRequest , BAD_REQUEST;
    InvalidTaskUids                        , InvalidRequest , BAD_REQUEST;
    IoError                                , System         , UNPROCESSABLE_ENTITY;
    MalformedPayload                       , InvalidRequest , BAD_REQUEST;
    MaxFieldsLimitExceeded                 , InvalidRequest , BAD_REQUEST;
    MissingApiKeyActions                   , InvalidRequest , BAD_REQUEST;
    MissingApiKeyExpiresAt                 , InvalidRequest , BAD_REQUEST;
    MissingApiKeyIndexes                   , InvalidRequest , BAD_REQUEST;
    MissingAuthorizationHeader             , Auth           , UNAUTHORIZED;
    MissingContentType                     , InvalidRequest , UNSUPPORTED_MEDIA_TYPE;
    MissingDocumentId                      , InvalidRequest , BAD_REQUEST;
    MissingIndexUid                        , InvalidRequest , BAD_REQUEST;
    MissingMasterKey                       , Auth           , UNAUTHORIZED;
    MissingPayload                         , InvalidRequest , BAD_REQUEST;
    MissingSwapIndexes                     , InvalidRequest , BAD_REQUEST;
    MissingTaskFilters                     , InvalidRequest , BAD_REQUEST;
    NoSpaceLeftOnDevice                    , System         , UNPROCESSABLE_ENTITY;
    PayloadTooLarge                        , InvalidRequest , PAYLOAD_TOO_LARGE;
    TaskNotFound                           , InvalidRequest , NOT_FOUND;
    TooManyOpenFiles                       , System         , UNPROCESSABLE_ENTITY;
    UnretrievableDocument                  , Internal       , BAD_REQUEST;
    UnretrievableErrorCode                 , InvalidRequest , BAD_REQUEST;
    UnsupportedMediaType                   , InvalidRequest , UNSUPPORTED_MEDIA_TYPE
}
/// Internal structure providing a convenient way to create error codes
struct ErrCode {
    status_code: StatusCode,
    error_type: ErrorType,
    error_name: &'static str,
}

impl ErrCode {
    fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
        ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
    }

    fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
        ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
    }

    fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
        ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
    }
}

impl ErrorCode for JoinError {
@@ -309,28 +396,25 @@ impl ErrorCode for milli::Error {
                | UserError::DocumentLimitReached
                | UserError::AccessingSoftDeletedDocument { .. }
                | UserError::UnknownInternalDocumentId { .. } => Code::Internal,
                UserError::InvalidStoreFile => Code::InvalidStoreFile,
                UserError::InvalidStoreFile => Code::InvalidStore,
                UserError::NoSpaceLeftOnDevice => Code::NoSpaceLeftOnDevice,
                UserError::MaxDatabaseSizeReached => Code::DatabaseSizeLimitReached,
                UserError::AttributeLimitReached => Code::MaxFieldsLimitExceeded,
                UserError::InvalidFilter(_) => Code::InvalidSearchFilter,
                UserError::InvalidFilter(_) => Code::Filter,
                UserError::MissingDocumentId { .. } => Code::MissingDocumentId,
                UserError::InvalidDocumentId { .. } | UserError::TooManyDocumentIds { .. } => {
                    Code::InvalidDocumentId
                }
                UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,
                UserError::MultiplePrimaryKeyCandidatesFound { .. } => {
                    Code::IndexPrimaryKeyMultipleCandidatesFound
                }
                UserError::PrimaryKeyCannotBeChanged(_) => Code::IndexPrimaryKeyAlreadyExists,
                UserError::SortRankingRuleMissing => Code::InvalidSearchSort,
                UserError::InvalidFacetsDistribution { .. } => Code::InvalidSearchFacets,
                UserError::InvalidSortableAttribute { .. } => Code::InvalidSearchSort,
                UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
                UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
                UserError::SortError(_) => Code::InvalidSearchSort,
                UserError::MissingPrimaryKey => Code::MissingPrimaryKey,
                UserError::PrimaryKeyCannotBeChanged(_) => Code::PrimaryKeyAlreadyPresent,
                UserError::SortRankingRuleMissing => Code::Sort,
                UserError::InvalidFacetsDistribution { .. } => Code::BadRequest,
                UserError::InvalidSortableAttribute { .. } => Code::Sort,
                UserError::CriterionError(_) => Code::InvalidRankingRule,
                UserError::InvalidGeoField { .. } => Code::InvalidGeoField,
                UserError::SortError(_) => Code::Sort,
                UserError::InvalidMinTypoWordLenSetting(_, _) => {
                    Code::InvalidSettingsTypoTolerance
                    Code::InvalidMinWordLengthForTypo
                }
            }
        }
@@ -343,7 +427,6 @@ impl ErrorCode for file_store::Error {
        match self {
            Self::IoError(e) => e.error_code(),
            Self::PersistError(e) => e.error_code(),
            Self::CouldNotParseFileNameAsUtf8 | Self::UuidError(_) => Code::Internal,
        }
    }
}
@@ -358,7 +441,7 @@ impl ErrorCode for HeedError {
    fn error_code(&self) -> Code {
        match self {
            HeedError::Mdb(MdbError::MapFull) => Code::DatabaseSizeLimitReached,
            HeedError::Mdb(MdbError::Invalid) => Code::InvalidStoreFile,
            HeedError::Mdb(MdbError::Invalid) => Code::InvalidStore,
            HeedError::Io(e) => e.error_code(),
            HeedError::Mdb(_)
                | HeedError::Encoding
@@ -381,49 +464,14 @@ impl ErrorCode for io::Error {
    }
}
/// Unwrap a result, returning either its Ok or Err value.
pub fn unwrap_any<T>(any: Result<T, T>) -> T {
    match any {
        Ok(any) => any,
        Err(any) => any,
    }
}
#[cfg(feature = "test-traits")]
mod strategy {
    use proptest::strategy::Strategy;

/// Deserialization error when `deserr` cannot parse an API key date.
#[derive(Debug)]
pub struct ParseOffsetDateTimeError(pub String);
impl fmt::Display for ParseOffsetDateTimeError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        writeln!(f, "`{original}` is not a valid date. It should follow the RFC 3339 format to represent a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", original = self.0)
    }
}
    use super::*;

/// Deserialization error when `deserr` cannot parse a task date.
#[derive(Debug)]
pub struct InvalidTaskDateError(pub String);
impl std::fmt::Display for InvalidTaskDateError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "`{}` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", self.0)
    }
}

/// Deserialization error when `deserr` cannot parse a String
/// into a bool.
#[derive(Debug)]
pub struct DeserrParseBoolError(pub String);
impl fmt::Display for DeserrParseBoolError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "could not parse `{}` as a boolean, expected either `true` or `false`", self.0)
    }
}

/// Deserialization error when `deserr` cannot parse a String
/// into an integer.
#[derive(Debug)]
pub struct DeserrParseIntError(pub String);
impl fmt::Display for DeserrParseIntError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "could not parse `{}` as a positive integer", self.0)
    pub(super) fn status_code_strategy() -> impl Strategy<Value = StatusCode> {
        (100..999u16).prop_map(|i| StatusCode::from_u16(i).unwrap())
    }
}
@@ -2,15 +2,17 @@ use std::error::Error;
use std::fmt;
use std::str::FromStr;

use deserr::DeserializeFromValue;
use serde::{Deserialize, Serialize};

use crate::error::{Code, ErrorCode};

/// An index uid is composed only of ASCII alphanumeric characters, `-` and `_`, and is
/// between 1 and 400 bytes long.
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(from(String) = IndexUid::try_from -> IndexUidFormatError)]
pub struct IndexUid(String);
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
pub struct IndexUid(
    #[cfg_attr(feature = "test-traits", proptest(regex("[a-zA-Z0-9_-]{1,400}")))] String,
);
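A standalone sketch of the invariant stated in the doc comment (the helper is hypothetical, not part of the diff):

fn is_valid_index_uid(s: &str) -> bool {
    !s.is_empty()
        && s.len() <= 400
        && s.bytes().all(|b| b.is_ascii_alphanumeric() || b == b'-' || b == b'_')
}

fn main() {
    assert!(is_valid_index_uid("movies_2023"));
    assert!(!is_valid_index_uid("movies#2023")); // `#` is not an accepted character
}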
impl IndexUid {
    pub fn new_unchecked(s: impl AsRef<str>) -> Self {
@@ -27,12 +29,6 @@ impl IndexUid {
    }
}

impl fmt::Display for IndexUid {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}

impl std::ops::Deref for IndexUid {
    type Target = str;
@@ -1,87 +1,22 @@
use std::convert::Infallible;
use std::hash::Hash;
use std::str::FromStr;

use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
use enum_iterator::Sequence;
use milli::update::Setting;
use serde::{Deserialize, Serialize};
use serde_json::{from_value, Value};
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};
use uuid::Uuid;

use crate::deserr::error_messages::immutable_field_error;
use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::error::{unwrap_any, Code, ParseOffsetDateTimeError};
use crate::index_uid::IndexUid;
use crate::error::{Code, ErrorCode};
use crate::index_uid::{IndexUid, IndexUidFormatError};
use crate::star_or::StarOr;

type Result<T> = std::result::Result<T, Error>;

pub type KeyId = Uuid;

#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct CreateApiKey {
    #[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
    pub description: Option<String>,
    #[deserr(default, error = DeserrJsonError<InvalidApiKeyName>)]
    pub name: Option<String>,
    #[deserr(default = Uuid::new_v4(), error = DeserrJsonError<InvalidApiKeyUid>, from(&String) = Uuid::from_str -> uuid::Error)]
    pub uid: KeyId,
    #[deserr(error = DeserrJsonError<InvalidApiKeyActions>, missing_field_error = DeserrJsonError::missing_api_key_actions)]
    pub actions: Vec<Action>,
    #[deserr(error = DeserrJsonError<InvalidApiKeyIndexes>, missing_field_error = DeserrJsonError::missing_api_key_indexes)]
    pub indexes: Vec<StarOr<IndexUid>>,
    #[deserr(error = DeserrJsonError<InvalidApiKeyExpiresAt>, from(Option<String>) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)]
    pub expires_at: Option<OffsetDateTime>,
}
impl CreateApiKey {
    pub fn to_key(self) -> Key {
        let CreateApiKey { description, name, uid, actions, indexes, expires_at } = self;
        let now = OffsetDateTime::now_utc();
        Key {
            description,
            name,
            uid,
            actions,
            indexes,
            expires_at,
            created_at: now,
            updated_at: now,
        }
    }
}
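For reference, a JSON body that `CreateApiKey` accepts (a sketch; the values are illustrative and `expiresAt` must be in the future or null):

let body = serde_json::json!({
    "name": "Search-only key",
    "actions": ["search"],
    "indexes": ["movies"],
    "expiresAt": "2030-01-01T00:00:00Z"
});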
fn deny_immutable_fields_api_key(
    field: &str,
    accepted: &[&str],
    location: ValuePointerRef,
) -> DeserrJsonError {
    match field {
        "uid" => immutable_field_error(field, accepted, Code::ImmutableApiKeyUid),
        "actions" => immutable_field_error(field, accepted, Code::ImmutableApiKeyActions),
        "indexes" => immutable_field_error(field, accepted, Code::ImmutableApiKeyIndexes),
        "expiresAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyExpiresAt),
        "createdAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyCreatedAt),
        "updatedAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyUpdatedAt),
        _ => unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
            None,
            deserr::ErrorKind::UnknownKey { key: field, accepted },
            location,
        )),
    }
}

#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
pub struct PatchApiKey {
    #[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
    pub description: Setting<String>,
    #[deserr(default, error = DeserrJsonError<InvalidApiKeyName>)]
    pub name: Setting<String>,
}

#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub struct Key {
    #[serde(skip_serializing_if = "Option::is_none")]
@@ -100,6 +35,100 @@ pub struct Key {
}

impl Key {
    pub fn create_from_value(value: Value) -> Result<Self> {
        let name = match value.get("name") {
            None | Some(Value::Null) => None,
            Some(des) => from_value(des.clone())
                .map(Some)
                .map_err(|_| Error::InvalidApiKeyName(des.clone()))?,
        };

        let description = match value.get("description") {
            None | Some(Value::Null) => None,
            Some(des) => from_value(des.clone())
                .map(Some)
                .map_err(|_| Error::InvalidApiKeyDescription(des.clone()))?,
        };

        let uid = value.get("uid").map_or_else(
            || Ok(Uuid::new_v4()),
            |uid| from_value(uid.clone()).map_err(|_| Error::InvalidApiKeyUid(uid.clone())),
        )?;

        let actions = value
            .get("actions")
            .map(|act| {
                from_value(act.clone()).map_err(|_| Error::InvalidApiKeyActions(act.clone()))
            })
            .ok_or(Error::MissingParameter("actions"))??;

        let indexes = value
            .get("indexes")
            .map(|ind| {
                from_value::<Vec<String>>(ind.clone())
                    // If it's not a vec of strings, return an API key parsing error.
                    .map_err(|_| Error::InvalidApiKeyIndexes(ind.clone()))
                    .and_then(|ind| {
                        ind.into_iter()
                            // If it's not a valid index uid, return an index uid parsing error.
                            .map(|i| StarOr::<IndexUid>::from_str(&i).map_err(Error::from))
                            .collect()
                    })
            })
            .ok_or(Error::MissingParameter("indexes"))??;

        let expires_at = value
            .get("expiresAt")
            .map(parse_expiration_date)
            .ok_or(Error::MissingParameter("expiresAt"))??;

        let created_at = OffsetDateTime::now_utc();
        let updated_at = created_at;

        Ok(Self { name, description, uid, actions, indexes, expires_at, created_at, updated_at })
    }
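A usage sketch for the constructor above (dump import path; `"*"` parses as the star variant of `StarOr<IndexUid>`):

let key = Key::create_from_value(serde_json::json!({
    "actions": ["documents.add"],
    "indexes": ["*"],
    "expiresAt": null
}))
.expect("a valid key");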
    pub fn update_from_value(&mut self, value: Value) -> Result<()> {
        if let Some(des) = value.get("description") {
            let des =
                from_value(des.clone()).map_err(|_| Error::InvalidApiKeyDescription(des.clone()));
            self.description = des?;
        }

        if let Some(des) = value.get("name") {
            let des = from_value(des.clone()).map_err(|_| Error::InvalidApiKeyName(des.clone()));
            self.name = des?;
        }

        if value.get("uid").is_some() {
            return Err(Error::ImmutableField("uid".to_string()));
        }

        if value.get("actions").is_some() {
            return Err(Error::ImmutableField("actions".to_string()));
        }

        if value.get("indexes").is_some() {
            return Err(Error::ImmutableField("indexes".to_string()));
        }

        if value.get("expiresAt").is_some() {
            return Err(Error::ImmutableField("expiresAt".to_string()));
        }

        if value.get("createdAt").is_some() {
            return Err(Error::ImmutableField("createdAt".to_string()));
        }

        if value.get("updatedAt").is_some() {
            return Err(Error::ImmutableField("updatedAt".to_string()));
        }

        self.updated_at = OffsetDateTime::now_utc();

        Ok(())
    }

    pub fn default_admin() -> Self {
        let now = OffsetDateTime::now_utc();
        let uid = Uuid::new_v4();
@@ -131,137 +160,107 @@ impl Key {
    }
}

fn parse_expiration_date(
    string: Option<String>,
) -> std::result::Result<Option<OffsetDateTime>, ParseOffsetDateTimeError> {
    let Some(string) = string else {
        return Ok(None)
    };
    let datetime = if let Ok(datetime) = OffsetDateTime::parse(&string, &Rfc3339) {
        datetime
    } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
        &string,
        format_description!(
            "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
        ),
    ) {
        primitive_datetime.assume_utc()
    } else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
        &string,
        format_description!(
            "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
        ),
    ) {
        primitive_datetime.assume_utc()
    } else if let Ok(date) = Date::parse(
        &string,
        format_description!("[year repr:full base:calendar]-[month repr:numerical]-[day]"),
    ) {
        PrimitiveDateTime::new(date, time!(00:00)).assume_utc()
    } else {
        return Err(ParseOffsetDateTimeError(string));
    };
    if datetime > OffsetDateTime::now_utc() {
        Ok(Some(datetime))
    } else {
        Err(ParseOffsetDateTimeError(string))
fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
    match value {
        Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
            .or_else(|_| {
                PrimitiveDateTime::parse(
                    string,
                    format_description!(
                        "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
                    ),
                ).map(|datetime| datetime.assume_utc())
            })
            .or_else(|_| {
                PrimitiveDateTime::parse(
                    string,
                    format_description!(
                        "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
                    ),
                ).map(|datetime| datetime.assume_utc())
            })
            .or_else(|_| {
                Date::parse(string, format_description!(
                    "[year repr:full base:calendar]-[month repr:numerical]-[day]"
                )).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc())
            })
            .map_err(|_| Error::InvalidApiKeyExpiresAt(value.clone()))
            // check if the key is already expired.
            .and_then(|d| {
                if d > OffsetDateTime::now_utc() {
                    Ok(d)
                } else {
                    Err(Error::InvalidApiKeyExpiresAt(value.clone()))
                }
            })
            .map(Option::Some),
        Value::Null => Ok(None),
        _otherwise => Err(Error::InvalidApiKeyExpiresAt(value.clone())),
    }
}
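Both versions of `parse_expiration_date` accept the same four input shapes; only the error type differs. Illustrative inputs (all must denote a future instant to be accepted):

// "2030-01-01T00:00:00Z"   RFC 3339, with offset
// "2030-01-01T00:00:00"    naive datetime, assumed UTC
// "2030-01-01 00:00:00"    space-separated variant, assumed UTC
// "2030-01-01"             date only, taken as midnight UTC
// null                     no expiration (Ok(None))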
#[derive(
    Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, DeserializeFromValue,
)]
#[derive(Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence)]
#[repr(u8)]
pub enum Action {
    #[serde(rename = "*")]
    #[deserr(rename = "*")]
    All = 0,
    #[serde(rename = "search")]
    #[deserr(rename = "search")]
    Search,
    #[serde(rename = "documents.*")]
    #[deserr(rename = "documents.*")]
    DocumentsAll,
    #[serde(rename = "documents.add")]
    #[deserr(rename = "documents.add")]
    DocumentsAdd,
    #[serde(rename = "documents.get")]
    #[deserr(rename = "documents.get")]
    DocumentsGet,
    #[serde(rename = "documents.delete")]
    #[deserr(rename = "documents.delete")]
    DocumentsDelete,
    #[serde(rename = "indexes.*")]
    #[deserr(rename = "indexes.*")]
    IndexesAll,
    #[serde(rename = "indexes.create")]
    #[deserr(rename = "indexes.create")]
    IndexesAdd,
    #[serde(rename = "indexes.get")]
    #[deserr(rename = "indexes.get")]
    IndexesGet,
    #[serde(rename = "indexes.update")]
    #[deserr(rename = "indexes.update")]
    IndexesUpdate,
    #[serde(rename = "indexes.delete")]
    #[deserr(rename = "indexes.delete")]
    IndexesDelete,
    #[serde(rename = "indexes.swap")]
    #[deserr(rename = "indexes.swap")]
    IndexesSwap,
    #[serde(rename = "tasks.*")]
    #[deserr(rename = "tasks.*")]
    TasksAll,
    #[serde(rename = "tasks.cancel")]
    #[deserr(rename = "tasks.cancel")]
    TasksCancel,
    #[serde(rename = "tasks.delete")]
    #[deserr(rename = "tasks.delete")]
    TasksDelete,
    #[serde(rename = "tasks.get")]
    #[deserr(rename = "tasks.get")]
    TasksGet,
    #[serde(rename = "settings.*")]
    #[deserr(rename = "settings.*")]
    SettingsAll,
    #[serde(rename = "settings.get")]
    #[deserr(rename = "settings.get")]
    SettingsGet,
    #[serde(rename = "settings.update")]
    #[deserr(rename = "settings.update")]
    SettingsUpdate,
    #[serde(rename = "stats.*")]
    #[deserr(rename = "stats.*")]
    StatsAll,
    #[serde(rename = "stats.get")]
    #[deserr(rename = "stats.get")]
    StatsGet,
    #[serde(rename = "metrics.*")]
    #[deserr(rename = "metrics.*")]
    MetricsAll,
    #[serde(rename = "metrics.get")]
    #[deserr(rename = "metrics.get")]
    MetricsGet,
    #[serde(rename = "dumps.*")]
    #[deserr(rename = "dumps.*")]
    DumpsAll,
    #[serde(rename = "dumps.create")]
    #[deserr(rename = "dumps.create")]
    DumpsCreate,
    #[serde(rename = "version")]
    #[deserr(rename = "version")]
    Version,
    #[serde(rename = "keys.create")]
    #[deserr(rename = "keys.create")]
    KeysAdd,
    #[serde(rename = "keys.get")]
    #[deserr(rename = "keys.get")]
    KeysGet,
    #[serde(rename = "keys.update")]
    #[deserr(rename = "keys.update")]
    KeysUpdate,
    #[serde(rename = "keys.delete")]
    #[deserr(rename = "keys.delete")]
    KeysDelete,
}
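Both attribute families expose the same dotted names, so serde and deserr agree on the wire format. A quick sketch:

// Serializing a variant yields its dotted rename:
assert_eq!(
    serde_json::to_string(&Action::DocumentsAdd).unwrap(),
    r#""documents.add""#
);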
@@ -342,3 +341,50 @@ pub mod actions {
    pub const KEYS_UPDATE: u8 = KeysUpdate.repr();
    pub const KEYS_DELETE: u8 = KeysDelete.repr();
}

#[derive(Debug, thiserror::Error)]
pub enum Error {
    #[error("`{0}` field is mandatory.")]
    MissingParameter(&'static str),
    #[error("`actions` field value `{0}` is invalid. It should be an array of strings representing action names.")]
    InvalidApiKeyActions(Value),
    #[error("`indexes` field value `{0}` is invalid. It should be an array of strings representing index names.")]
    InvalidApiKeyIndexes(Value),
    #[error("{0}")]
    InvalidApiKeyIndexUid(IndexUidFormatError),
    #[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represent a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")]
    InvalidApiKeyExpiresAt(Value),
    #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
    InvalidApiKeyDescription(Value),
    #[error(
        "`name` field value `{0}` is invalid. It should be a string or specified as a null value."
    )]
    InvalidApiKeyName(Value),
    #[error("`uid` field value `{0}` is invalid. It should be a valid UUID v4 string or omitted.")]
    InvalidApiKeyUid(Value),
    #[error("The `{0}` field cannot be modified for the given resource.")]
    ImmutableField(String),
}

impl From<IndexUidFormatError> for Error {
    fn from(e: IndexUidFormatError) -> Self {
        Self::InvalidApiKeyIndexUid(e)
    }
}

impl ErrorCode for Error {
    fn error_code(&self) -> Code {
        match self {
            Self::MissingParameter(_) => Code::MissingParameter,
            Self::InvalidApiKeyActions(_) => Code::InvalidApiKeyActions,
            Self::InvalidApiKeyIndexes(_) | Self::InvalidApiKeyIndexUid(_) => {
                Code::InvalidApiKeyIndexes
            }
            Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
            Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
            Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName,
            Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid,
            Self::ImmutableField(_) => Code::ImmutableField,
        }
    }
}
@@ -1,5 +1,4 @@
pub mod compression;
pub mod deserr;
pub mod document_formats;
pub mod error;
pub mod index_uid;
@@ -8,10 +7,11 @@ pub mod settings;
pub mod star_or;
pub mod tasks;
pub mod versioning;

pub use milli;
pub use milli::{heed, Index};
use uuid::Uuid;
pub use versioning::VERSION_FILE_NAME;
pub use {milli, serde_cs};

pub type Document = serde_json::Map<String, serde_json::Value>;
pub type InstanceUid = Uuid;

@@ -1,20 +1,12 @@
use std::collections::{BTreeMap, BTreeSet};
use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;
use std::num::NonZeroUsize;
use std::str::FromStr;

use deserr::{DeserializeError, DeserializeFromValue, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
use milli::update::Setting;
use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
use milli::{Index, DEFAULT_VALUES_PER_FACET};
use serde::{Deserialize, Serialize, Serializer};

use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::error::unwrap_any;

/// The maximum number of results that the engine
/// will be able to return in one search call.
pub const DEFAULT_PAGINATION_MAX_TOTAL_HITS: usize = 1000;
@@ -41,109 +33,73 @@ pub struct Checked;
#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct Unchecked;

impl<E> DeserializeFromValue<E> for Unchecked
where
    E: DeserializeError,
{
    fn deserialize_from_value<V: deserr::IntoValue>(
        _value: deserr::Value<V>,
        _location: deserr::ValuePointerRef,
    ) -> Result<Self, E> {
        unreachable!()
    }
}

fn validate_min_word_size_for_typo_setting<E: DeserializeError>(
    s: MinWordSizeTyposSetting,
    location: ValuePointerRef,
) -> Result<MinWordSizeTyposSetting, E> {
    if let (Setting::Set(one), Setting::Set(two)) = (s.one_typo, s.two_typos) {
        if one > two {
            return Err(unwrap_any(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater than or equal to `oneTypo`, but found `oneTypo: {one}` and `twoTypos: {two}`.") }, location)));
        }
    }
    Ok(s)
}
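The rule enforced above, in isolation: `oneTypo` must not exceed `twoTypos`. A minimal sketch of the check:

fn min_word_sizes_ok(one_typo: u8, two_typos: u8) -> bool {
    one_typo <= two_typos
}

fn main() {
    assert!(min_word_sizes_ok(5, 9));  // accepted
    assert!(!min_word_sizes_ok(9, 5)); // rejected with InvalidSettingsTypoTolerance
}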
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError<InvalidSettingsTypoTolerance>)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct MinWordSizeTyposSetting {
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    pub one_typo: Setting<u8>,
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    pub two_typos: Setting<u8>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrJsonError<InvalidSettingsTypoTolerance>>)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct TypoSettings {
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    pub enabled: Setting<bool>,
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
    pub min_word_size_for_typos: Setting<MinWordSizeTyposSetting>,
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    pub disable_on_words: Setting<BTreeSet<String>>,
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    pub disable_on_attributes: Setting<BTreeSet<String>>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct FacetingSettings {
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    pub max_values_per_facet: Setting<usize>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct PaginationSettings {
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    pub max_total_hits: Setting<usize>,
}

impl MergeWithError<milli::CriterionError> for DeserrJsonError<InvalidSettingsRankingRules> {
    fn merge(
        _self_: Option<Self>,
        other: milli::CriterionError,
        merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Self::error::<Infallible>(
            None,
            ErrorKind::Unexpected { msg: other.to_string() },
            merge_location,
        )
    }
}

/// Holds all the settings for an index. `T` can either be `Checked` if they represent settings
/// whose validity is guaranteed, or `Unchecked` if they still need to be validated. In the latter
/// case, a call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(
    deny_unknown_fields,
    rename_all = "camelCase",
    bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>")
)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[serde(bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>"))]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
pub struct Settings<T> {
    #[serde(
        default,
        serialize_with = "serialize_with_wildcard",
        skip_serializing_if = "Setting::is_not_set"
    )]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsDisplayedAttributes>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub displayed_attributes: Setting<Vec<String>>,

    #[serde(
@@ -151,39 +107,38 @@ pub struct Settings<T> {
        serialize_with = "serialize_with_wildcard",
        skip_serializing_if = "Setting::is_not_set"
    )]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsSearchableAttributes>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub searchable_attributes: Setting<Vec<String>>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsFilterableAttributes>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub filterable_attributes: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsSortableAttributes>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub sortable_attributes: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsRankingRules>)]
    pub ranking_rules: Setting<Vec<RankingRuleView>>,
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub ranking_rules: Setting<Vec<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub stop_words: Setting<BTreeSet<String>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsDistinctAttribute>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub distinct_attribute: Setting<String>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub typo_tolerance: Setting<TypoSettings>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsFaceting>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub faceting: Setting<FacetingSettings>,
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default, error = DeserrJsonError<InvalidSettingsPagination>)]
    #[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
    pub pagination: Setting<PaginationSettings>,

    #[serde(skip)]
    #[deserr(skip)]
    pub _kind: PhantomData<T>,
}

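For illustration, a toy sketch of the `Checked`/`Unchecked` typestate that the doc comment above describes; the real `check` also validates and normalizes field values, which this sketch elides, and all names are reduced stand-ins:

use std::marker::PhantomData;

struct Checked;
struct Unchecked;

struct Settings<T> {
    displayed_attributes: Option<Vec<String>>,
    _kind: PhantomData<T>,
}

impl Settings<Unchecked> {
    // consuming `self` means an unchecked value cannot circulate afterwards
    fn check(self) -> Settings<Checked> {
        // validation/normalization would happen here in the real crate
        Settings { displayed_attributes: self.displayed_attributes, _kind: PhantomData }
    }
}

fn apply(_settings: Settings<Checked>) {} // only validated settings are accepted here

fn main() {
    let raw = Settings::<Unchecked> { displayed_attributes: None, _kind: PhantomData };
    apply(raw.check()); // `apply(raw)` would not type-check
}
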
@@ -318,9 +273,7 @@ pub fn apply_settings_to_builder(
    }

    match settings.ranking_rules {
        Setting::Set(ref criteria) => {
            builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
        }
        Setting::Set(ref criteria) => builder.set_criteria(criteria.clone()),
        Setting::Reset => builder.reset_criteria(),
        Setting::NotSet => (),
    }
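Every settings field goes through the same three-way dispatch as the `ranking_rules` match above. A self-contained restatement, with a local stand-in for milli's `Setting` and illustrative values:

enum Setting<T> {
    Set(T),  // overwrite the stored value with the provided one
    Reset,   // restore the engine default
    NotSet,  // leave the stored value untouched
}

fn main() {
    let default = 1000usize; // e.g. DEFAULT_PAGINATION_MAX_TOTAL_HITS
    let stored = 250usize;
    let incoming = Setting::Set(500usize);
    let new_value = match incoming {
        Setting::Set(v) => v,
        Setting::Reset => default,
        Setting::NotSet => stored,
    };
    assert_eq!(new_value, 500);
}
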
@@ -434,7 +387,7 @@ pub fn settings(

    let sortable_attributes = index.sortable_fields(rtxn)?.into_iter().collect();

    let criteria = index.criteria(rtxn)?;
    let criteria = index.criteria(rtxn)?.into_iter().map(|c| c.to_string()).collect();

    let stop_words = index
        .stop_words(rtxn)?
@@ -495,7 +448,7 @@ pub fn settings(
        },
        filterable_attributes: Setting::Set(filterable_attributes),
        sortable_attributes: Setting::Set(sortable_attributes),
        ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
        ranking_rules: Setting::Set(criteria),
        stop_words: Setting::Set(stop_words),
        distinct_attribute: match distinct_field {
            Some(field) => Setting::Set(field),
@@ -509,106 +462,16 @@ pub fn settings(
    })
}

#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(from(&String) = FromStr::from_str -> CriterionError)]
pub enum RankingRuleView {
    /// Sorted by decreasing number of matched query terms.
    /// Query words at the front of an attribute are considered better than words at the back.
    Words,
    /// Sorted by increasing number of typos.
    Typo,
    /// Sorted by increasing distance between matched query terms.
    Proximity,
    /// Documents with query words contained in more important
    /// attributes are considered better.
    Attribute,
    /// Dynamically sort the documents at query time. None, one, or multiple Asc/Desc sortable
    /// attributes can be used in place of this criterion at query time.
    Sort,
    /// Sorted by the similarity of the matched words with the query words.
    Exactness,
    /// Sorted by the increasing value of the specified field.
    Asc(String),
    /// Sorted by the decreasing value of the specified field.
    Desc(String),
}
impl Serialize for RankingRuleView {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&format!("{}", Criterion::from(self.clone())))
    }
}
impl<'de> Deserialize<'de> for RankingRuleView {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        struct Visitor;
        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = RankingRuleView;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                write!(formatter, "the name of a valid ranking rule (string)")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                let criterion = Criterion::from_str(v).map_err(|_| {
                    E::invalid_value(serde::de::Unexpected::Str(v), &"a valid ranking rule")
                })?;
                Ok(RankingRuleView::from(criterion))
            }
        }
        deserializer.deserialize_str(Visitor)
    }
}
impl FromStr for RankingRuleView {
    type Err = <Criterion as FromStr>::Err;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(RankingRuleView::from(Criterion::from_str(s)?))
    }
}
impl fmt::Display for RankingRuleView {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        fmt::Display::fmt(&Criterion::from(self.clone()), f)
    }
}
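With these impls, `RankingRuleView` round-trips through the textual rule names milli's `Criterion` parser understands: `"words"`, `"typo"`, `"proximity"`, `"attribute"`, `"sort"`, `"exactness"`, plus `"asc(<field>)"` and `"desc(<field>)"`. A hedged usage sketch, assuming the impls behave as shown and the crate is in scope:

use std::str::FromStr;

// assumes: use meilisearch_types::settings::RankingRuleView;
fn demo() -> Result<(), Box<dyn std::error::Error>> {
    let rule = RankingRuleView::from_str("desc(release_date)")?;
    assert_eq!(rule, RankingRuleView::Desc("release_date".to_string()));
    assert_eq!(rule.to_string(), "desc(release_date)");

    // serde goes through the same strings, so JSON arrays of rule names work:
    let rules: Vec<RankingRuleView> = serde_json::from_str(r#"["words", "typo", "sort"]"#)?;
    assert_eq!(rules.len(), 3);
    Ok(())
}
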
impl From<Criterion> for RankingRuleView {
    fn from(value: Criterion) -> Self {
        match value {
            Criterion::Words => RankingRuleView::Words,
            Criterion::Typo => RankingRuleView::Typo,
            Criterion::Proximity => RankingRuleView::Proximity,
            Criterion::Attribute => RankingRuleView::Attribute,
            Criterion::Sort => RankingRuleView::Sort,
            Criterion::Exactness => RankingRuleView::Exactness,
            Criterion::Asc(x) => RankingRuleView::Asc(x),
            Criterion::Desc(x) => RankingRuleView::Desc(x),
        }
    }
}
impl From<RankingRuleView> for Criterion {
    fn from(value: RankingRuleView) -> Self {
        match value {
            RankingRuleView::Words => Criterion::Words,
            RankingRuleView::Typo => Criterion::Typo,
            RankingRuleView::Proximity => Criterion::Proximity,
            RankingRuleView::Attribute => Criterion::Attribute,
            RankingRuleView::Sort => Criterion::Sort,
            RankingRuleView::Exactness => Criterion::Exactness,
            RankingRuleView::Asc(x) => Criterion::Asc(x),
            RankingRuleView::Desc(x) => Criterion::Desc(x),
        }
    }
}

#[cfg(test)]
pub(crate) mod test {
    use proptest::prelude::*;

    use super::*;

    pub(super) fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
        prop_oneof![Just(Setting::NotSet), Just(Setting::Reset), any::<T>().prop_map(Setting::Set)]
    }

    #[test]
    fn test_setting_check() {
        // test no changes

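The `prop_oneof!` in `setting_strategy` above samples each `Setting` state with equal weight. A reduced, self-contained sketch of the same strategy shape, assuming proptest 1.x and a local stand-in for `Setting`:

use proptest::prelude::*;

#[derive(Debug, Clone)]
enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}

fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
    prop_oneof![
        Just(Setting::NotSet),
        Just(Setting::Reset),
        any::<T>().prop_map(Setting::Set),
    ]
}

proptest! {
    #[test]
    fn strategy_covers_all_states(s in setting_strategy::<u8>()) {
        // a real test would run the checked/unchecked conversion on `s`
        match s {
            Setting::Set(_) | Setting::Reset | Setting::NotSet => {}
        }
    }
}
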
@@ -1,14 +1,11 @@
use std::fmt;
use std::fmt::{Display, Formatter};
use std::marker::PhantomData;
use std::ops::Deref;
use std::str::FromStr;

use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
use serde::de::Visitor;
use serde::{Deserialize, Deserializer, Serialize, Serializer};

use crate::deserr::query_params::FromQueryParameter;
use crate::error::unwrap_any;

/// A type that tries to match either a star (*) or
/// anything else that implements `FromStr`.
#[derive(Debug, Clone)]
@@ -28,11 +25,23 @@ impl<T: FromStr> FromStr for StarOr<T> {
        }
    }
}
impl<T: fmt::Display> fmt::Display for StarOr<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {

impl<T: Deref<Target = str>> Deref for StarOr<T> {
    type Target = str;

    fn deref(&self) -> &Self::Target {
        match self {
            StarOr::Star => write!(f, "*"),
            StarOr::Other(x) => fmt::Display::fmt(x, f),
            Self::Star => "*",
            Self::Other(t) => t.deref(),
        }
    }
}

impl<T: Into<String>> From<StarOr<T>> for String {
    fn from(s: StarOr<T>) -> Self {
        match s {
            StarOr::Star => "*".to_string(),
            StarOr::Other(t) => t.into(),
        }
    }
}
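A hedged, self-contained sketch of the `StarOr` contract implied above, with a local re-implementation so the snippet stands alone (the real type lives in meilisearch_types::star_or; whitespace handling is simplified here):

use std::str::FromStr;

#[derive(Debug, PartialEq)]
enum StarOr<T> {
    Star,
    Other(T),
}

impl<T: FromStr> FromStr for StarOr<T> {
    type Err = T::Err;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // a literal "*" short-circuits; anything else is delegated to T's parser
        if s == "*" {
            Ok(StarOr::Star)
        } else {
            T::from_str(s).map(StarOr::Other)
        }
    }
}

fn main() {
    assert_eq!("*".parse::<StarOr<u32>>().unwrap(), StarOr::Star);
    assert_eq!("42".parse::<StarOr<u32>>().unwrap(), StarOr::Other(42));
    assert!("not-a-number".parse::<StarOr<u32>>().is_err());
}
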
@@ -52,7 +61,7 @@ impl<T: PartialEq + Eq> Eq for StarOr<T> {}
impl<'de, T, E> Deserialize<'de> for StarOr<T>
where
    T: FromStr<Err = E>,
    E: fmt::Display,
    E: Display,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
@@ -68,11 +77,11 @@ where
impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
where
    T: FromStr<Err = FE>,
    FE: fmt::Display,
    FE: Display,
{
    type Value = StarOr<T>;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> std::fmt::Result {
    fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
        formatter.write_str("a string")
    }

@@ -98,7 +107,7 @@ where

impl<T> Serialize for StarOr<T>
where
    T: ToString,
    T: Deref<Target = str>,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
@@ -106,222 +115,7 @@ where
    {
        match self {
            StarOr::Star => serializer.serialize_str("*"),
            StarOr::Other(other) => serializer.serialize_str(&other.to_string()),
        }
    }
}

impl<T, E> DeserializeFromValue<E> for StarOr<T>
where
    T: FromStr,
    E: DeserializeError + MergeWithError<T::Err>,
{
    fn deserialize_from_value<V: deserr::IntoValue>(
        value: deserr::Value<V>,
        location: deserr::ValuePointerRef,
    ) -> Result<Self, E> {
        match value {
            deserr::Value::String(v) => {
                if v == "*" {
                    Ok(StarOr::Star)
                } else {
                    match T::from_str(&v) {
                        Ok(parsed) => Ok(StarOr::Other(parsed)),
                        Err(e) => Err(unwrap_any(E::merge(None, e, location))),
                    }
                }
            }
            _ => Err(unwrap_any(E::error::<V>(
                None,
                deserr::ErrorKind::IncorrectValueKind {
                    actual: value,
                    accepted: &[ValueKind::String],
                },
                location,
            ))),
        }
    }
}

/// A type representing the content of a query parameter that can either not exist,
/// be equal to a star (*), or hold another value.
///
/// It is a convenient alternative to `Option<StarOr<T>>`.
#[derive(Debug, Default, Clone, Copy)]
pub enum OptionStarOr<T> {
    #[default]
    None,
    Star,
    Other(T),
}

impl<T> OptionStarOr<T> {
    pub fn is_some(&self) -> bool {
        match self {
            Self::None => false,
            Self::Star => false,
            Self::Other(_) => true,
        }
    }
    pub fn merge_star_and_none(self) -> Option<T> {
        match self {
            Self::None | Self::Star => None,
            Self::Other(x) => Some(x),
        }
    }
    pub fn try_map<U, E, F: Fn(T) -> Result<U, E>>(self, map_f: F) -> Result<OptionStarOr<U>, E> {
        match self {
            OptionStarOr::None => Ok(OptionStarOr::None),
            OptionStarOr::Star => Ok(OptionStarOr::Star),
            OptionStarOr::Other(x) => map_f(x).map(OptionStarOr::Other),
        }
    }
}
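A self-contained sketch of the `OptionStarOr` semantics above: from the caller's perspective, both an absent parameter and `*` mean "no filter", which is exactly what `merge_star_and_none` encodes. The type here is a toy re-implementation:

#[derive(Debug, PartialEq)]
enum OptionStarOr<T> {
    None,
    Star,
    Other(T),
}

impl<T> OptionStarOr<T> {
    fn merge_star_and_none(self) -> Option<T> {
        match self {
            Self::None | Self::Star => None, // absent and "*" both mean "match everything"
            Self::Other(x) => Some(x),
        }
    }
}

fn main() {
    assert_eq!(OptionStarOr::<String>::None.merge_star_and_none(), None);
    assert_eq!(OptionStarOr::<String>::Star.merge_star_and_none(), None);
    assert_eq!(
        OptionStarOr::Other("movies".to_string()).merge_star_and_none(),
        Some("movies".to_string())
    );
}
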

impl<T> FromQueryParameter for OptionStarOr<T>
where
    T: FromQueryParameter,
{
    type Err = T::Err;
    fn from_query_param(p: &str) -> Result<Self, Self::Err> {
        match p {
            "*" => Ok(OptionStarOr::Star),
            s => T::from_query_param(s).map(OptionStarOr::Other),
        }
    }
}
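`from_query_param` above is the plain-string entry point for the same routing. A sketch reusing the toy `OptionStarOr` from the previous snippet, folding the "absent parameter" case into an `Option` for illustration (parameter names are hypothetical):

fn from_query_param(p: Option<&str>) -> OptionStarOr<String> {
    match p {
        None => OptionStarOr::None,                    // parameter absent from the query string
        Some("*") => OptionStarOr::Star,               // e.g. ?indexUids=*
        Some(s) => OptionStarOr::Other(s.to_string()), // e.g. ?indexUids=movies
    }
}

fn main() {
    assert_eq!(from_query_param(None).merge_star_and_none(), None);
    assert_eq!(from_query_param(Some("*")).merge_star_and_none(), None);
    assert_eq!(
        from_query_param(Some("movies")).merge_star_and_none(),
        Some("movies".to_string())
    );
}
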

impl<T, E> DeserializeFromValue<E> for OptionStarOr<T>
where
    E: DeserializeError + MergeWithError<T::Err>,
    T: FromQueryParameter,
{
    fn deserialize_from_value<V: deserr::IntoValue>(
        value: deserr::Value<V>,
        location: deserr::ValuePointerRef,
    ) -> Result<Self, E> {
        match value {
            deserr::Value::String(s) => match s.as_str() {
                "*" => Ok(OptionStarOr::Star),
                s => match T::from_query_param(s) {
                    Ok(x) => Ok(OptionStarOr::Other(x)),
                    Err(e) => Err(unwrap_any(E::merge(None, e, location))),
                },
            },
            _ => Err(unwrap_any(E::error::<V>(
                None,
                deserr::ErrorKind::IncorrectValueKind {
                    actual: value,
                    accepted: &[ValueKind::String],
                },
                location,
            ))),
        }
    }
}

/// A type representing the content of a query parameter that can either not exist, be equal to a star (*), or represent a list of other values
#[derive(Debug, Default, Clone)]
pub enum OptionStarOrList<T> {
    #[default]
    None,
    Star,
    List(Vec<T>),
}

impl<T> OptionStarOrList<T> {
    pub fn is_some(&self) -> bool {
        match self {
            Self::None => false,
            Self::Star => false,
            Self::List(_) => true,
        }
    }
    pub fn map<U, F: Fn(T) -> U>(self, map_f: F) -> OptionStarOrList<U> {
        match self {
            Self::None => OptionStarOrList::None,
            Self::Star => OptionStarOrList::Star,
            Self::List(xs) => OptionStarOrList::List(xs.into_iter().map(map_f).collect()),
        }
    }
    pub fn try_map<U, E, F: Fn(T) -> Result<U, E>>(
        self,
        map_f: F,
    ) -> Result<OptionStarOrList<U>, E> {
        match self {
            Self::None => Ok(OptionStarOrList::None),
            Self::Star => Ok(OptionStarOrList::Star),
            Self::List(xs) => {
                xs.into_iter().map(map_f).collect::<Result<Vec<_>, _>>().map(OptionStarOrList::List)
            }
        }
    }
    pub fn merge_star_and_none(self) -> Option<Vec<T>> {
        match self {
            Self::None | Self::Star => None,
            Self::List(xs) => Some(xs),
        }
    }
    pub fn push(&mut self, el: T) {
        match self {
            Self::None => *self = Self::List(vec![el]),
            Self::Star => (),
            Self::List(xs) => xs.push(el),
        }
    }
}
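The deserializer below this point splits a comma-separated query string and treats any `*` element as a wildcard for the whole list. A self-contained sketch of that rule (the real code goes through serde_cs and deserr; whitespace handling is simplified here):

#[derive(Debug, PartialEq)]
enum OptionStarOrList<T> {
    None,
    Star,
    List(Vec<T>),
}

fn parse_list(raw: &str) -> OptionStarOrList<String> {
    let mut is_star = false;
    let mut els = Vec::new();
    for el in raw.split(',').map(str::trim) {
        if el == "*" {
            is_star = true; // one star anywhere wildcards the whole parameter
        } else {
            els.push(el.to_string());
        }
    }
    if is_star {
        OptionStarOrList::Star
    } else {
        OptionStarOrList::List(els)
    }
}

fn main() {
    assert_eq!(
        parse_list("movies,books"),
        OptionStarOrList::List(vec!["movies".into(), "books".into()])
    );
    assert_eq!(parse_list("movies,*"), OptionStarOrList::Star);
}
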

impl<T, E> DeserializeFromValue<E> for OptionStarOrList<T>
where
    E: DeserializeError + MergeWithError<T::Err>,
    T: FromQueryParameter,
{
    fn deserialize_from_value<V: deserr::IntoValue>(
        value: deserr::Value<V>,
        location: deserr::ValuePointerRef,
    ) -> Result<Self, E> {
        match value {
            deserr::Value::String(s) => {
                let mut error = None;
                let mut is_star = false;
                // CS::<String>::from_str is infallible
                let cs = serde_cs::vec::CS::<String>::from_str(&s).unwrap();
                let len_cs = cs.0.len();
                let mut els = vec![];
                for (i, el_str) in cs.into_iter().enumerate() {
                    if el_str == "*" {
                        is_star = true;
                    } else {
                        match T::from_query_param(&el_str) {
                            Ok(el) => {
                                els.push(el);
                            }
                            Err(e) => {
                                let location =
                                    if len_cs > 1 { location.push_index(i) } else { location };
                                error = Some(E::merge(error, e, location)?);
                            }
                        }
                    }
                }
                if let Some(error) = error {
                    return Err(error);
                }

                if is_star {
                    Ok(OptionStarOrList::Star)
                } else {
                    Ok(OptionStarOrList::List(els))
                }
            }
            _ => Err(unwrap_any(E::error::<V>(
                None,
                deserr::ErrorKind::IncorrectValueKind {
                    actual: value,
                    accepted: &[ValueKind::String],
                },
                location,
            ))),
            StarOr::Other(other) => serializer.serialize_str(other.deref()),
        }
    }
}

@@ -1,4 +1,3 @@
use core::fmt;
use std::collections::HashSet;
use std::fmt::{Display, Write};
use std::str::FromStr;
@@ -10,7 +9,7 @@ use serde::{Deserialize, Serialize, Serializer};
use time::{Duration, OffsetDateTime};
use uuid::Uuid;

use crate::error::ResponseError;
use crate::error::{Code, ResponseError};
use crate::keys::Key;
use crate::settings::{Settings, Unchecked};
use crate::InstanceUid;
@@ -333,7 +332,7 @@ impl Display for Status {
}

impl FromStr for Status {
    type Err = ParseTaskStatusError;
    type Err = ResponseError;

    fn from_str(status: &str) -> Result<Self, Self::Err> {
        if status.eq_ignore_ascii_case("enqueued") {
@@ -347,28 +346,21 @@ impl FromStr for Status {
        } else if status.eq_ignore_ascii_case("canceled") {
            Ok(Status::Canceled)
        } else {
            Err(ParseTaskStatusError(status.to_owned()))
            Err(ResponseError::from_msg(
                format!(
                    "`{}` is not a status. Available statuses are {}.",
                    status,
                    enum_iterator::all::<Status>()
                        .map(|s| format!("`{s}`"))
                        .collect::<Vec<String>>()
                        .join(", ")
                ),
                Code::BadRequest,
            ))
        }
    }
}

#[derive(Debug)]
pub struct ParseTaskStatusError(pub String);
impl fmt::Display for ParseTaskStatusError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "`{}` is not a valid task status. Available statuses are {}.",
            self.0,
            enum_iterator::all::<Status>()
                .map(|s| format!("`{s}`"))
                .collect::<Vec<String>>()
                .join(", ")
        )
    }
}
impl std::error::Error for ParseTaskStatusError {}

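Both error paths above derive the list of valid names from the enum itself via enum_iterator, so the message can never drift out of sync with the variants. A reduced sketch of that pattern, assuming enum_iterator 1.x with the `Sequence` derive used in this file:

use enum_iterator::Sequence; // enum_iterator = "1"

#[derive(Debug, Clone, Copy, Sequence)]
enum Status {
    Enqueued,
    Processing,
    Succeeded,
    Failed,
    Canceled,
}

impl std::fmt::Display for Status {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", format!("{self:?}").to_lowercase())
    }
}

fn main() {
    // the list in the error message is derived from the enum, never hand-written
    let all = enum_iterator::all::<Status>()
        .map(|s| format!("`{s}`"))
        .collect::<Vec<_>>()
        .join(", ");
    println!("`finished` is not a status. Available statuses are {all}.");
}
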
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Sequence)]
#[serde(rename_all = "camelCase")]
pub enum Kind {
@@ -420,7 +412,7 @@ impl Display for Kind {
    }
}
impl FromStr for Kind {
    type Err = ParseTaskKindError;
    type Err = ResponseError;

    fn from_str(kind: &str) -> Result<Self, Self::Err> {
        if kind.eq_ignore_ascii_case("indexCreation") {
@@ -446,32 +438,25 @@ impl FromStr for Kind {
        } else if kind.eq_ignore_ascii_case("snapshotCreation") {
            Ok(Kind::SnapshotCreation)
        } else {
            Err(ParseTaskKindError(kind.to_owned()))
            Err(ResponseError::from_msg(
                format!(
                    "`{}` is not a type. Available types are {}.",
                    kind,
                    enum_iterator::all::<Kind>()
                        .map(|k| format!(
                            "`{}`",
                            // by default serde inserts `"` around the value.
                            serde_json::to_string(&k).unwrap().trim_matches('"')
                        ))
                        .collect::<Vec<String>>()
                        .join(", ")
                ),
                Code::BadRequest,
            ))
        }
    }
}

#[derive(Debug)]
pub struct ParseTaskKindError(pub String);
impl fmt::Display for ParseTaskKindError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "`{}` is not a valid task type. Available types are {}.",
            self.0,
            enum_iterator::all::<Kind>()
                .map(|k| format!(
                    "`{}`",
                    // by default serde inserts `"` around the value.
                    serde_json::to_string(&k).unwrap().trim_matches('"')
                ))
                .collect::<Vec<String>>()
                .join(", ")
        )
    }
}
impl std::error::Error for ParseTaskKindError {}

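The `serde_json::to_string(...).trim_matches('"')` trick above reuses the `#[serde(rename_all = "camelCase")]` attribute so the error message shows exactly the names the API accepts. A minimal sketch with a reduced enum (serde and serde_json assumed as dependencies):

use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
enum Kind {
    IndexCreation,
    DocumentAdditionOrUpdate,
}

fn main() {
    // serde serializes unit variants as JSON strings, quotes included ...
    assert_eq!(serde_json::to_string(&Kind::IndexCreation).unwrap(), "\"indexCreation\"");
    // ... so trimming the quotes yields the camelCase name for free
    let name = serde_json::to_string(&Kind::DocumentAdditionOrUpdate).unwrap();
    assert_eq!(name.trim_matches('"'), "documentAdditionOrUpdate");
}
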
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Details {
    DocumentAdditionOrUpdate { received_documents: u64, indexed_documents: Option<u64> },

@@ -4,10 +4,11 @@ description = "Meilisearch HTTP server"
edition = "2021"
license = "MIT"
name = "meilisearch"
version = "1.0.1"
version = "0.30.1"

[dependencies]
actix-cors = "0.6.3"
actix-governor = "0.3.2"
actix-http = { version = "3.2.2", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] }
actix-web = { version = "4.2.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
@@ -19,7 +20,6 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", "
bytes = "1.2.1"
clap = { version = "4.0.9", features = ["derive", "env"] }
crossbeam-channel = "0.5.6"
deserr = "0.3.0"
dump = { path = "../dump" }
either = "1.8.0"
env_logger = "0.9.1"
@@ -55,6 +55,7 @@ rustls = "0.20.6"
rustls-pemfile = "1.0.1"
segment = { version = "0.2.1", optional = true }
serde = { version = "1.0.145", features = ["derive"] }
serde-cs = "0.2.4"
serde_json = { version = "1.0.85", features = ["preserve_order"] }
sha2 = "0.10.6"
siphasher = "0.3.10"
@@ -71,16 +72,11 @@ toml = "0.5.9"
uuid = { version = "1.1.2", features = ["serde", "v4"] }
walkdir = "2.3.2"
yaup = "0.2.0"
serde_urlencoded = "0.7.1"
actix-utils = "3.0.1"
atty = "0.2.14"
termcolor = "1.1.3"

[dev-dependencies]
actix-rt = "2.7.0"
assert-json-diff = "2.0.2"
brotli = "3.3.4"
insta = "1.19.1"
manifest-dir-macros = "0.1.16"
maplit = "1.0.2"
meili-snap = { path = "../meili-snap" }
@@ -110,5 +106,5 @@ japanese = ["meilisearch-types/japanese"]
thai = ["meilisearch-types/thai"]

[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.5/build.zip"
sha1 = "6fe959b78511b32e9ff857fd9fd31740633b9fce"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.4/build.zip"
sha1 = "b53c2edb51d4ce1984d5586333b91c4ad3a1b4e4"
Some files were not shown because too many files have changed in this diff.