Compare commits

..

3 Commits

Author         SHA1        Message                                       Date
Louis Dureuil  6678491212  Use actix-governor to perform rate-limiting   2023-01-03 16:25:30 +01:00
Louis Dureuil  a82f8aacde  Add actix-governor                            2023-01-03 16:25:30 +01:00
Louis Dureuil  5cf71c6014  Add rate-limiting options                     2023-01-03 16:25:30 +01:00
98 changed files with 4178 additions and 6714 deletions
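The headline change is the new actix-governor middleware for rate limiting. The diffs below do not show the wiring itself, so here is a minimal, hypothetical sketch of how actix-governor is typically attached to an actix-web app; the route, bind address, and quota values are placeholders for illustration, not taken from these commits.

```rust
use actix_governor::{Governor, GovernorConfigBuilder};
use actix_web::{web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Allow bursts of 5 requests, replenishing one permit every 2 seconds,
    // keyed by peer IP address (the crate's default key extractor).
    let governor_conf = GovernorConfigBuilder::default()
        .per_second(2)
        .burst_size(5)
        .finish()
        .unwrap();

    HttpServer::new(move || {
        App::new()
            // Rate-limit every route registered after this middleware.
            .wrap(Governor::new(&governor_conf))
            .route("/search", web::get().to(|| async { HttpResponse::Ok().body("ok") }))
    })
    .bind(("127.0.0.1", 7700))?
    .run()
    .await
}
```

With its default configuration, actix-governor keys requests by peer IP, which lines up with the per-IP rate-limiting options added to the example config file further down.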

View File

@@ -3,7 +3,7 @@ name: Update latest git tag
on:
workflow_dispatch:
release:
types: [released]
types: [published]
jobs:
check-version:
@@ -17,7 +17,6 @@ jobs:
update-latest-tag:
runs-on: ubuntu-latest
needs: check-version
steps:
- uses: actions/checkout@v3
- uses: rickstaa/action-create-tag@v1

View File

@@ -2,7 +2,7 @@ name: Publish to APT repository & Homebrew
on:
release:
types: [released]
types: [published]
jobs:
check-version:

View File

@@ -44,5 +44,5 @@ jobs:
--title "Update version for the next release ($NEW_VERSION) in Cargo.toml files" \
--body '⚠️ This PR is automatically generated. Check the new version is the expected one before merging.' \
--label 'skip changelog' \
--milestone $NEW_VERSION \
--milestone $NEW_VERSION
--base $GITHUB_REF_NAME

Cargo.lock (generated, 1329 lines changed)

File diff suppressed because it is too large.

View File

@@ -28,9 +28,17 @@ http_payload_size_limit = "100 MB"
log_level = "INFO"
# Defines how much detail should be present in Meilisearch's logs.
# Meilisearch currently supports six log levels, listed in order of increasing verbosity: `OFF`, `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`
# Meilisearch currently supports five log levels, listed in order of increasing verbosity: `ERROR`, `WARN`, `INFO`, `DEBUG`, `TRACE`
# https://docs.meilisearch.com/learn/configuration/instance_options.html#log-level
max_index_size = "100 GiB"
# Sets the maximum size of the index.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#max-index-size
max_task_db_size = "100 GiB"
# Sets the maximum size of the task database.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#max-task-db-size
# max_indexing_memory = "2 GiB"
# Sets the maximum amount of RAM Meilisearch can use when indexing.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#max-indexing-memory
@@ -39,6 +47,11 @@ log_level = "INFO"
# Sets the maximum number of threads Meilisearch can use during indexing.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#max-indexing-threads
disable_auto_batching = false
# Deactivates auto-batching when provided.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#disable-auto-batching
#############
### DUMPS ###
#############
@@ -60,20 +73,85 @@ ignore_dump_if_db_exists = false
# https://docs.meilisearch.com/learn/configuration/instance_options.html#ignore-dump-if-db-exists
#####################
### RATE LIMITING ###
#####################
rate_limiting_disable_all = false
# Prevents a Meilisearch instance from performing any rate limiting.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-all
rate_limiting_disable_global = false
# Prevents a Meilisearch instance from performing rate limiting global to all queries.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-global
rate_limiting_global_pool = 100000
# The maximum pool of search requests that can be performed before they are rejected.
#
# The pool starts full at the provided value, then each search request diminishes the pool by 1.
# When the pool is empty the search request is rejected.
# The pool is replenished by 1 depending on the cooldown period.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-global-pool
rate_limiting_global_cooldown_ns = 50000
# The amount of time, in nanoseconds, before the pool of available search requests is replenished by 1 again.
#
# The maximum number of available search requests is given by `rate_limiting_global_pool`.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-global-cooldown-ns
rate_limiting_disable_ip = false
# Prevents a Meilisearch instance from performing rate limiting per IP address.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-ip
rate_limiting_ip_pool = 200
# The maximum pool of search requests that can be performed from a specific IP before they are rejected.
#
# The pool starts full at the provided value, then each search request from the same IP address diminishes the pool by 1.
# When the pool is empty the search request is rejected.
# The pool is replenished by 1 depending on the cooldown period.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-ip-pool
rate_limiting_ip_cooldown_ns = 50000000
# The amount of time, in nanoseconds, before the pool of available search requests for a specific IP address is replenished by 1 again.
#
# The maximum number of available search requests for a specific IP address is given by `rate_limiting_ip_pool`.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-ip-cooldown-ns
rate_limiting_disable_api_key = false
# Prevents a Meilisearch instance from performing rate limiting per API key.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-disable-api-key
rate_limiting_api_key_pool = 10000
# The maximum pool of search requests that can be performed using a specific API key before they are rejected.
#
# The pool starts full at the provided value, then each search request using the same API key diminishes the pool by 1.
# When the pool is empty the search request is rejected.
# The pool is replenished by 1 depending on the cooldown period.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-api-key-pool
rate_limiting_api_key_cooldown_ns = 500000
# The amount of time, in nanoseconds, before the pool of available search requests using a specific API key is replenished by 1 again.
#
# The maximum number of available search requests using a specific API key is given by `rate_limiting_api_key_pool`.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#rate-limiting-api-key-cooldown-ns
#################
### SNAPSHOTS ###
#################
schedule_snapshot = false
# Enables scheduled snapshots when true, disable when false (the default).
# If the value is given as an integer, then enables the scheduled snapshot with the passed value as the interval
# between each snapshot, in seconds.
# Activates scheduled snapshots when provided.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#schedule-snapshot-creation
snapshot_dir = "snapshots/"
# Sets the directory where Meilisearch will store snapshots.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#snapshot-destination
snapshot_interval_sec = 86400
# Defines the interval between each snapshot. Value must be given in seconds.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#snapshot-interval
# import_snapshot = "./path/to/my/snapshot"
# Launches Meilisearch after importing a previously-generated snapshot at the given filepath.
# https://docs.meilisearch.com/learn/configuration/instance_options.html#import-snapshot
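The comments above describe each rate-limiting option as a pool that starts full, loses one slot per search request, and regains one slot per cooldown period. The sketch below is only an illustrative model of that description, not the code actix-governor uses internally; the struct and its fields are invented for the example, with values borrowed from the per-IP settings above.

```rust
use std::time::{Duration, Instant};

/// Toy model of a `*_pool` / `*_cooldown_ns` pair: the pool starts full,
/// each request consumes one slot, and one slot comes back per cooldown.
struct RateLimitPool {
    capacity: u64,
    available: f64,
    cooldown: Duration,
    last_refill: Instant,
}

impl RateLimitPool {
    fn new(pool: u64, cooldown_ns: u64) -> Self {
        Self {
            capacity: pool,
            available: pool as f64,
            cooldown: Duration::from_nanos(cooldown_ns),
            last_refill: Instant::now(),
        }
    }

    /// Returns `true` when a search request may proceed, `false` when it
    /// would be rejected because the pool is empty.
    fn try_acquire(&mut self) -> bool {
        // Replenish one slot per elapsed cooldown period, capped at capacity.
        let refilled =
            self.last_refill.elapsed().as_nanos() as f64 / self.cooldown.as_nanos() as f64;
        self.available = (self.available + refilled).min(self.capacity as f64);
        self.last_refill = Instant::now();

        if self.available >= 1.0 {
            self.available -= 1.0;
            true
        } else {
            false
        }
    }
}

fn main() {
    // Mirrors `rate_limiting_ip_pool = 200` and
    // `rate_limiting_ip_cooldown_ns = 50000000` from the example config above.
    let mut per_ip = RateLimitPool::new(200, 50_000_000);
    assert!(per_ip.try_acquire());
}
```

Under this model, `rate_limiting_ip_pool = 200` with a 50 ms cooldown lets a single IP burst up to 200 searches, then settles to roughly 20 requests per second once the pool is drained.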

View File

@@ -0,0 +1,23 @@
---
source: dump/src/reader/compat/v1_to_v2.rs
expression: spells.settings().unwrap()
---
{
"displayedAttributes": [
"*"
],
"searchableAttributes": [
"*"
],
"filterableAttributes": [],
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"exactness"
],
"stopWords": [],
"synonyms": {},
"distinctAttribute": null
}

View File

@@ -0,0 +1,27 @@
---
source: dump/src/reader/compat/v1_to_v2.rs
expression: movies.settings().unwrap()
---
{
"displayedAttributes": [
"*"
],
"searchableAttributes": [
"*"
],
"filterableAttributes": [
"genres",
"id"
],
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"exactness",
"asc(release_date)"
],
"stopWords": [],
"synonyms": {},
"distinctAttribute": null
}

View File

@@ -1,5 +1,3 @@
use std::str::FromStr;
use super::v4_to_v5::{CompatIndexV4ToV5, CompatV4ToV5};
use crate::reader::{v5, v6, Document, UpdateFile};
use crate::Result;
@@ -256,50 +254,51 @@ impl<T> From<v5::Setting<T>> for v6::Setting<T> {
impl From<v5::ResponseError> for v6::ResponseError {
fn from(error: v5::ResponseError) -> Self {
let code = match error.error_code.as_ref() {
"index_creation_failed" => v6::Code::IndexCreationFailed,
"index_creation_failed" => v6::Code::CreateIndex,
"index_already_exists" => v6::Code::IndexAlreadyExists,
"index_not_found" => v6::Code::IndexNotFound,
"invalid_index_uid" => v6::Code::InvalidIndexUid,
"invalid_min_word_length_for_typo" => v6::Code::InvalidSettingsTypoTolerance,
"invalid_min_word_length_for_typo" => v6::Code::InvalidMinWordLengthForTypo,
"invalid_state" => v6::Code::InvalidState,
"primary_key_inference_failed" => v6::Code::IndexPrimaryKeyNoCandidateFound,
"index_primary_key_already_exists" => v6::Code::IndexPrimaryKeyAlreadyExists,
"primary_key_inference_failed" => v6::Code::MissingPrimaryKey,
"index_primary_key_already_exists" => v6::Code::PrimaryKeyAlreadyPresent,
"max_fields_limit_exceeded" => v6::Code::MaxFieldsLimitExceeded,
"missing_document_id" => v6::Code::MissingDocumentId,
"invalid_document_id" => v6::Code::InvalidDocumentId,
"invalid_filter" => v6::Code::InvalidSettingsFilterableAttributes,
"invalid_sort" => v6::Code::InvalidSettingsSortableAttributes,
"invalid_filter" => v6::Code::Filter,
"invalid_sort" => v6::Code::Sort,
"bad_parameter" => v6::Code::BadParameter,
"bad_request" => v6::Code::BadRequest,
"database_size_limit_reached" => v6::Code::DatabaseSizeLimitReached,
"document_not_found" => v6::Code::DocumentNotFound,
"internal" => v6::Code::Internal,
"invalid_geo_field" => v6::Code::InvalidDocumentGeoField,
"invalid_ranking_rule" => v6::Code::InvalidSettingsRankingRules,
"invalid_store_file" => v6::Code::InvalidStoreFile,
"invalid_api_key" => v6::Code::InvalidApiKey,
"invalid_geo_field" => v6::Code::InvalidGeoField,
"invalid_ranking_rule" => v6::Code::InvalidRankingRule,
"invalid_store_file" => v6::Code::InvalidStore,
"invalid_api_key" => v6::Code::InvalidToken,
"missing_authorization_header" => v6::Code::MissingAuthorizationHeader,
"no_space_left_on_device" => v6::Code::NoSpaceLeftOnDevice,
"dump_not_found" => v6::Code::DumpNotFound,
"task_not_found" => v6::Code::TaskNotFound,
"payload_too_large" => v6::Code::PayloadTooLarge,
"unretrievable_document" => v6::Code::UnretrievableDocument,
"unretrievable_document" => v6::Code::RetrieveDocument,
"search_error" => v6::Code::SearchDocuments,
"unsupported_media_type" => v6::Code::UnsupportedMediaType,
"dump_already_processing" => v6::Code::DumpAlreadyProcessing,
"dump_already_processing" => v6::Code::DumpAlreadyInProgress,
"dump_process_failed" => v6::Code::DumpProcessFailed,
"invalid_content_type" => v6::Code::InvalidContentType,
"missing_content_type" => v6::Code::MissingContentType,
"malformed_payload" => v6::Code::MalformedPayload,
"missing_payload" => v6::Code::MissingPayload,
"api_key_not_found" => v6::Code::ApiKeyNotFound,
"missing_parameter" => v6::Code::BadRequest,
"missing_parameter" => v6::Code::MissingParameter,
"invalid_api_key_actions" => v6::Code::InvalidApiKeyActions,
"invalid_api_key_indexes" => v6::Code::InvalidApiKeyIndexes,
"invalid_api_key_expires_at" => v6::Code::InvalidApiKeyExpiresAt,
"invalid_api_key_description" => v6::Code::InvalidApiKeyDescription,
"invalid_api_key_name" => v6::Code::InvalidApiKeyName,
"invalid_api_key_uid" => v6::Code::InvalidApiKeyUid,
"immutable_field" => v6::Code::BadRequest,
"immutable_field" => v6::Code::ImmutableField,
"api_key_already_exists" => v6::Code::ApiKeyAlreadyExists,
other => {
log::warn!("Unknown error code {}", other);
@@ -317,26 +316,7 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
searchable_attributes: settings.searchable_attributes.into(),
filterable_attributes: settings.filterable_attributes.into(),
sortable_attributes: settings.sortable_attributes.into(),
ranking_rules: {
match settings.ranking_rules {
v5::settings::Setting::Set(ranking_rules) => {
let mut new_ranking_rules = vec![];
for rule in ranking_rules {
match v6::RankingRuleView::from_str(&rule) {
Ok(new_rule) => {
new_ranking_rules.push(new_rule);
}
Err(_) => {
log::warn!("Error while importing settings. The ranking rule `{rule}` does not exist anymore.")
}
}
}
v6::Setting::Set(new_ranking_rules)
}
v5::settings::Setting::Reset => v6::Setting::Reset,
v5::settings::Setting::NotSet => v6::Setting::NotSet,
}
},
ranking_rules: settings.ranking_rules.into(),
stop_words: settings.stop_words.into(),
synonyms: settings.synonyms.into(),
distinct_attribute: settings.distinct_attribute.into(),
@@ -439,7 +419,7 @@ pub(crate) mod test {
// tasks
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"10c673c97f053830aa659876d7aa0b53");
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5");
assert_eq!(update_files.len(), 22);
assert!(update_files[0].is_none()); // the dump creation
assert!(update_files[1].is_some()); // the enqueued document addition

View File

@@ -201,7 +201,7 @@ pub(crate) mod test {
// tasks
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"10c673c97f053830aa659876d7aa0b53");
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6519f7064c45d2196dd59b71350a9bf5");
assert_eq!(update_files.len(), 22);
assert!(update_files[0].is_none()); // the dump creation
assert!(update_files[1].is_some()); // the enqueued document addition
@@ -222,12 +222,12 @@ pub(crate) mod test {
assert!(indexes.is_empty());
// products
insta::assert_json_snapshot!(products.metadata(), @r###"
insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
{
"uid": "products",
"primaryKey": "sku",
"createdAt": "2022-10-04T15:51:35.939396731Z",
"updatedAt": "2022-10-04T15:55:01.897325373Z"
"createdAt": "[now]",
"updatedAt": "[now]"
}
"###);
@@ -237,12 +237,12 @@ pub(crate) mod test {
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
// movies
insta::assert_json_snapshot!(movies.metadata(), @r###"
insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
{
"uid": "movies",
"primaryKey": "id",
"createdAt": "2022-10-04T15:51:35.291992167Z",
"updatedAt": "2022-10-04T15:55:10.33561842Z"
"createdAt": "[now]",
"updatedAt": "[now]"
}
"###);
@@ -252,12 +252,12 @@ pub(crate) mod test {
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
// spells
insta::assert_json_snapshot!(spells.metadata(), @r###"
insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
{
"uid": "dnd_spells",
"primaryKey": "index",
"createdAt": "2022-10-04T15:51:37.381094632Z",
"updatedAt": "2022-10-04T15:55:02.394503431Z"
"createdAt": "[now]",
"updatedAt": "[now]"
}
"###);
@@ -279,7 +279,7 @@ pub(crate) mod test {
// tasks
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"12eca43d5d1e1f334200eb4df653b0c9");
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"491e244a80a19fe2a900b809d310c24a");
assert_eq!(update_files.len(), 10);
assert!(update_files[0].is_some()); // the enqueued document addition
assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -356,7 +356,7 @@ pub(crate) mod test {
// tasks
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2f51c6345fabccf47b18c82bad618ffe");
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"7cacce2e21702be696b866808c726946");
assert_eq!(update_files.len(), 10);
assert!(update_files[0].is_some()); // the enqueued document addition
assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -449,7 +449,7 @@ pub(crate) mod test {
// tasks
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"b27292d0bb86d4b4dd1b375a46b33890");
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"6cabec4e252b74c8f3a2c8517622e85f");
assert_eq!(update_files.len(), 9);
assert!(update_files[0].is_some()); // the enqueued document addition
assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -542,7 +542,7 @@ pub(crate) mod test {
// tasks
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"9725ccfceea3f8d5846c44006c9e1e7b");
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"b3e3652bfc10a76670be157d2507d761");
assert_eq!(update_files.len(), 9);
assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dump v1

View File

@@ -0,0 +1,27 @@
---
source: dump/src/reader/mod.rs
expression: movies.settings().unwrap()
---
{
"displayedAttributes": [
"*"
],
"searchableAttributes": [
"*"
],
"filterableAttributes": [
"genres",
"id"
],
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"exactness",
"release_date:asc"
],
"stopWords": [],
"synonyms": {},
"distinctAttribute": null
}

View File

@@ -0,0 +1,23 @@
---
source: dump/src/reader/mod.rs
expression: spells.settings().unwrap()
---
{
"displayedAttributes": [
"*"
],
"searchableAttributes": [
"*"
],
"filterableAttributes": [],
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"exactness"
],
"stopWords": [],
"synonyms": {},
"distinctAttribute": null
}

View File

@@ -0,0 +1,37 @@
---
source: dump/src/reader/mod.rs
expression: products.settings().unwrap()
---
{
"displayedAttributes": [
"*"
],
"searchableAttributes": [
"*"
],
"filterableAttributes": [],
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"exactness"
],
"stopWords": [],
"synonyms": {
"android": [
"phone",
"smartphone"
],
"iphone": [
"phone",
"smartphone"
],
"phone": [
"android",
"iphone",
"smartphone"
]
},
"distinctAttribute": null
}

View File

@@ -0,0 +1,24 @@
---
source: dump/src/reader/v1/mod.rs
expression: dnd_spells.settings().unwrap()
---
{
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness"
],
"distinctAttribute": null,
"searchableAttributes": [
"*"
],
"displayedAttributes": [
"*"
],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": []
}

View File

@@ -0,0 +1,28 @@
---
source: dump/src/reader/v1/mod.rs
expression: movies.settings().unwrap()
---
{
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"asc(release_date)"
],
"distinctAttribute": null,
"searchableAttributes": [
"*"
],
"displayedAttributes": [
"*"
],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": [
"id",
"genres"
]
}

View File

@@ -0,0 +1,28 @@
---
source: dump/src/reader/v1/mod.rs
expression: movies.settings().unwrap()
---
{
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness",
"asc(release_date)"
],
"distinctAttribute": null,
"searchableAttributes": [
"*"
],
"displayedAttributes": [
"*"
],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": [
"id",
"genres"
]
}

View File

@@ -0,0 +1,24 @@
---
source: dump/src/reader/v1/mod.rs
expression: dnd_spells.settings().unwrap()
---
{
"rankingRules": [
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness"
],
"distinctAttribute": null,
"searchableAttributes": [
"*"
],
"displayedAttributes": [
"*"
],
"stopWords": [],
"synonyms": {},
"attributesForFaceting": []
}

View File

@@ -5,8 +5,10 @@ use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
pub struct ResponseError {
#[serde(skip)]
#[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
pub code: StatusCode,
pub message: String,
#[serde(rename = "code")]

View File

@@ -5,6 +5,7 @@ use serde::Deserialize;
#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
#[cfg_attr(test, derive(serde::Serialize))]
pub struct ResponseError {
#[serde(skip)]

View File

@@ -141,10 +141,6 @@ impl V5Reader {
V5IndexReader::new(
index.uid.clone(),
&self.dump.path().join("indexes").join(index.index_meta.uuid.to_string()),
&index.index_meta,
BufReader::new(
File::open(self.dump.path().join("updates").join("data.jsonl")).unwrap(),
),
)
}))
}
@@ -193,39 +189,16 @@ pub struct V5IndexReader {
}
impl V5IndexReader {
pub fn new(
name: String,
path: &Path,
index_metadata: &meta::IndexMeta,
tasks: BufReader<File>,
) -> Result<Self> {
pub fn new(name: String, path: &Path) -> Result<Self> {
let meta = File::open(path.join("meta.json"))?;
let meta: meta::DumpMeta = serde_json::from_reader(meta)?;
let mut created_at = None;
let mut updated_at = None;
for line in tasks.lines() {
let task: Task = serde_json::from_str(&line?)?;
if *task.index_uid().unwrap_or_default().to_string() == name {
if updated_at.is_none() {
updated_at = task.processed_at()
}
if task.id as usize == index_metadata.creation_task_id {
created_at = task.created_at();
break;
}
}
}
let metadata = IndexMetadata {
uid: name,
primary_key: meta.primary_key,
created_at: created_at.unwrap_or_else(OffsetDateTime::now_utc),
updated_at: updated_at.unwrap_or_else(OffsetDateTime::now_utc),
// FIXME: Iterate over the whole task queue to find the creation and last update date.
created_at: OffsetDateTime::now_utc(),
updated_at: OffsetDateTime::now_utc(),
};
let ret = V5IndexReader {
@@ -329,12 +302,12 @@ pub(crate) mod test {
assert!(indexes.is_empty());
// products
insta::assert_json_snapshot!(products.metadata(), @r###"
insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
{
"uid": "products",
"primaryKey": "sku",
"createdAt": "2022-10-04T15:51:35.939396731Z",
"updatedAt": "2022-10-04T15:55:01.897325373Z"
"createdAt": "[now]",
"updatedAt": "[now]"
}
"###);
@@ -344,12 +317,12 @@ pub(crate) mod test {
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"b01c8371aea4c7171af0d4d846a2bdca");
// movies
insta::assert_json_snapshot!(movies.metadata(), @r###"
insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
{
"uid": "movies",
"primaryKey": "id",
"createdAt": "2022-10-04T15:51:35.291992167Z",
"updatedAt": "2022-10-04T15:55:10.33561842Z"
"createdAt": "[now]",
"updatedAt": "[now]"
}
"###);
@@ -359,12 +332,12 @@ pub(crate) mod test {
meili_snap::snapshot_hash!(format!("{:#?}", documents), @"e962baafd2fbae4cdd14e876053b0c5a");
// spells
insta::assert_json_snapshot!(spells.metadata(), @r###"
insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
{
"uid": "dnd_spells",
"primaryKey": "index",
"createdAt": "2022-10-04T15:51:37.381094632Z",
"updatedAt": "2022-10-04T15:55:02.394503431Z"
"createdAt": "[now]",
"updatedAt": "[now]"
}
"###);

View File

@@ -140,45 +140,6 @@ impl Task {
TaskContent::Dump { .. } => None,
}
}
pub fn processed_at(&self) -> Option<OffsetDateTime> {
match self.events.last() {
Some(TaskEvent::Succeeded { result: _, timestamp }) => Some(*timestamp),
_ => None,
}
}
pub fn created_at(&self) -> Option<OffsetDateTime> {
match &self.content {
TaskContent::IndexCreation { index_uid: _, primary_key: _ } => {
match self.events.first() {
Some(TaskEvent::Created(ts)) => Some(*ts),
_ => None,
}
}
TaskContent::DocumentAddition {
index_uid: _,
content_uuid: _,
merge_strategy: _,
primary_key: _,
documents_count: _,
allow_index_creation: _,
} => match self.events.first() {
Some(TaskEvent::Created(ts)) => Some(*ts),
_ => None,
},
TaskContent::SettingsUpdate {
index_uid: _,
settings: _,
is_deletion: _,
allow_index_creation: _,
} => match self.events.first() {
Some(TaskEvent::Created(ts)) => Some(*ts),
_ => None,
},
_ => None,
}
}
}
impl IndexUid {

View File

@@ -40,7 +40,6 @@ pub type IndexUid = meilisearch_types::index_uid::IndexUid;
// everything related to the errors
pub type ResponseError = meilisearch_types::error::ResponseError;
pub type Code = meilisearch_types::error::Code;
pub type RankingRuleView = meilisearch_types::settings::RankingRuleView;
pub struct V6Reader {
dump: TempDir,

View File

@@ -882,11 +882,11 @@ impl IndexScheduler {
}
if !not_found_indexes.is_empty() {
if not_found_indexes.len() == 1 {
return Err(Error::SwapIndexNotFound(
return Err(Error::IndexNotFound(
not_found_indexes.into_iter().next().unwrap().clone(),
));
} else {
return Err(Error::SwapIndexesNotFound(
return Err(Error::IndexesNotFound(
not_found_indexes.into_iter().cloned().collect(),
));
}

View File

@@ -1,5 +1,3 @@
use std::fmt::Display;
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::tasks::{Kind, Status};
use meilisearch_types::{heed, milli};
@@ -7,47 +5,16 @@ use thiserror::Error;
use crate::TaskId;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum DateField {
BeforeEnqueuedAt,
AfterEnqueuedAt,
BeforeStartedAt,
AfterStartedAt,
BeforeFinishedAt,
AfterFinishedAt,
}
impl Display for DateField {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DateField::BeforeEnqueuedAt => write!(f, "beforeEnqueuedAt"),
DateField::AfterEnqueuedAt => write!(f, "afterEnqueuedAt"),
DateField::BeforeStartedAt => write!(f, "beforeStartedAt"),
DateField::AfterStartedAt => write!(f, "afterStartedAt"),
DateField::BeforeFinishedAt => write!(f, "beforeFinishedAt"),
DateField::AfterFinishedAt => write!(f, "afterFinishedAt"),
}
}
}
impl From<DateField> for Code {
fn from(date: DateField) -> Self {
match date {
DateField::BeforeEnqueuedAt => Code::InvalidTaskBeforeEnqueuedAt,
DateField::AfterEnqueuedAt => Code::InvalidTaskAfterEnqueuedAt,
DateField::BeforeStartedAt => Code::InvalidTaskBeforeStartedAt,
DateField::AfterStartedAt => Code::InvalidTaskAfterStartedAt,
DateField::BeforeFinishedAt => Code::InvalidTaskBeforeFinishedAt,
DateField::AfterFinishedAt => Code::InvalidTaskAfterFinishedAt,
}
}
}
#[allow(clippy::large_enum_variant)]
#[derive(Error, Debug)]
pub enum Error {
#[error("Index `{0}` not found.")]
IndexNotFound(String),
#[error(
"Indexes {} not found.",
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
)]
IndexesNotFound(Vec<String>),
#[error("Index `{0}` already exists.")]
IndexAlreadyExists(String),
#[error(
@@ -59,19 +26,12 @@ pub enum Error {
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
)]
SwapDuplicateIndexesFound(Vec<String>),
#[error("Index `{0}` not found.")]
SwapIndexNotFound(String),
#[error(
"Indexes {} not found.",
.0.iter().map(|s| format!("`{}`", s)).collect::<Vec<_>>().join(", ")
)]
SwapIndexesNotFound(Vec<String>),
#[error("Corrupted dump.")]
CorruptedDump,
#[error(
"Task `{field}` `{date}` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format."
)]
InvalidTaskDate { field: DateField, date: String },
InvalidTaskDate { field: String, date: String },
#[error("Task uid `{task_uid}` is invalid. It should only contain numeric characters.")]
InvalidTaskUids { task_uid: String },
#[error(
@@ -138,20 +98,19 @@ impl ErrorCode for Error {
fn error_code(&self) -> Code {
match self {
Error::IndexNotFound(_) => Code::IndexNotFound,
Error::IndexesNotFound(_) => Code::IndexNotFound,
Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
Error::SwapDuplicateIndexesFound(_) => Code::InvalidSwapDuplicateIndexFound,
Error::SwapDuplicateIndexFound(_) => Code::InvalidSwapDuplicateIndexFound,
Error::SwapIndexNotFound(_) => Code::IndexNotFound,
Error::SwapIndexesNotFound(_) => Code::IndexNotFound,
Error::InvalidTaskDate { field, .. } => (*field).into(),
Error::InvalidTaskUids { .. } => Code::InvalidTaskUids,
Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatuses,
Error::InvalidTaskTypes { .. } => Code::InvalidTaskTypes,
Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledBy,
Error::SwapDuplicateIndexesFound(_) => Code::DuplicateIndexFound,
Error::SwapDuplicateIndexFound(_) => Code::DuplicateIndexFound,
Error::InvalidTaskDate { .. } => Code::InvalidTaskDateFilter,
Error::InvalidTaskUids { .. } => Code::InvalidTaskUidsFilter,
Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatusesFilter,
Error::InvalidTaskTypes { .. } => Code::InvalidTaskTypesFilter,
Error::InvalidTaskCanceledBy { .. } => Code::InvalidTaskCanceledByFilter,
Error::InvalidIndexUid { .. } => Code::InvalidIndexUid,
Error::TaskNotFound(_) => Code::TaskNotFound,
Error::TaskDeletionWithEmptyQuery => Code::MissingTaskFilters,
Error::TaskCancelationWithEmptyQuery => Code::MissingTaskFilters,
Error::TaskDeletionWithEmptyQuery => Code::TaskDeletionWithEmptyQuery,
Error::TaskCancelationWithEmptyQuery => Code::TaskCancelationWithEmptyQuery,
Error::Dump(e) => e.error_code(),
Error::Milli(e) => e.error_code(),
Error::ProcessBatchPanicked => Code::Internal,
@@ -160,7 +119,6 @@ impl ErrorCode for Error {
Error::FileStore(e) => e.error_code(),
Error::IoError(e) => e.error_code(),
Error::Persist(e) => e.error_code(),
// Irrecoverable errors
Error::Anyhow(_) => Code::Internal,
Error::CorruptedTaskQueue => Code::Internal,

View File

@@ -227,9 +227,9 @@ pub struct IndexSchedulerOptions {
pub snapshots_path: PathBuf,
/// The path to the folder containing the dumps.
pub dumps_path: PathBuf,
/// The maximum size, in bytes, of the task index.
pub task_db_size: usize,
/// The maximum size, in bytes, of each meilisearch index.
pub task_db_size: usize,
/// The maximum size, in bytes, of the tasks index.
pub index_size: usize,
/// Configuration used during indexing for each meilisearch index.
pub indexer_config: IndexerConfig,
@@ -502,22 +502,13 @@ impl IndexScheduler {
}
if let Some(canceled_by) = &query.canceled_by {
let mut all_canceled_tasks = RoaringBitmap::new();
for cancel_task_uid in canceled_by {
if let Some(canceled_by_uid) =
self.canceled_by.get(rtxn, &BEU32::new(*cancel_task_uid))?
{
all_canceled_tasks |= canceled_by_uid;
tasks &= canceled_by_uid;
}
}
// if the canceled_by has been specified but no task
// matches then we prefer matching zero than all tasks.
if all_canceled_tasks.is_empty() {
return Ok(RoaringBitmap::new());
} else {
tasks &= all_canceled_tasks;
}
}
if let Some(kind) = &query.types {

View File

@@ -10,7 +10,7 @@ source: index-scheduler/src/lib.rs
1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
2 {uid: 2, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }}
3 {uid: 3, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }}
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }}
----------------------------------------------------------------------
### Status:
enqueued []

View File

@@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
----------------------------------------------------------------------
### Status:
enqueued []

View File

@@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
----------------------------------------------------------------------
### Status:
enqueued []

View File

@@ -6,11 +6,11 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}

View File

@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}

View File

@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}

View File

@@ -1,7 +1,7 @@
use std::error::Error;
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::internal_error;
use meilisearch_types::{internal_error, keys};
pub type Result<T> = std::result::Result<T, AuthControllerError>;
@@ -11,6 +11,8 @@ pub enum AuthControllerError {
ApiKeyNotFound(String),
#[error("`uid` field value `{0}` is already an existing API key.")]
ApiKeyAlreadyExists(String),
#[error(transparent)]
ApiKey(#[from] keys::Error),
#[error("Internal error: {0}")]
Internal(Box<dyn Error + Send + Sync + 'static>),
}
@@ -25,6 +27,7 @@ internal_error!(
impl ErrorCode for AuthControllerError {
fn error_code(&self) -> Code {
match self {
Self::ApiKey(e) => e.error_code(),
Self::ApiKeyNotFound(_) => Code::ApiKeyNotFound,
Self::ApiKeyAlreadyExists(_) => Code::ApiKeyAlreadyExists,
Self::Internal(_) => Code::Internal,

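For reference, the `ApiKey(#[from] keys::Error)` variant added here lets key-validation failures bubble up with `?` while `error_code` is delegated to the wrapped error. A minimal standalone sketch of that delegation pattern, assuming the `thiserror` crate (already used by this file); `Code`, `KeysError`, and `ControllerError` are simplified stand-ins, not the real Meilisearch types:

use std::error::Error as StdError;

use thiserror::Error;

// Simplified stand-in for meilisearch_types::error::Code.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Code {
    InvalidApiKeyName,
    Internal,
}

trait ErrorCode: StdError {
    fn error_code(&self) -> Code;
}

// Simplified stand-in for meilisearch_types::keys::Error.
#[derive(Debug, Error)]
enum KeysError {
    #[error("`name` field value `{0}` is invalid.")]
    InvalidApiKeyName(String),
}

impl ErrorCode for KeysError {
    fn error_code(&self) -> Code {
        Code::InvalidApiKeyName
    }
}

#[derive(Debug, Error)]
enum ControllerError {
    // `transparent` reuses the inner Display/source; `from` enables `?` conversion.
    #[error(transparent)]
    ApiKey(#[from] KeysError),
    #[error("Internal error: {0}")]
    Internal(Box<dyn StdError + Send + Sync + 'static>),
}

impl ErrorCode for ControllerError {
    fn error_code(&self) -> Code {
        match self {
            // Delegate to the wrapped error instead of picking a code here.
            Self::ApiKey(e) => e.error_code(),
            Self::Internal(_) => Code::Internal,
        }
    }
}

fn create_key() -> Result<(), ControllerError> {
    // Pretend validation failed: `?` converts KeysError into
    // ControllerError::ApiKey through the generated From impl.
    let validated: Result<(), KeysError> = Err(KeysError::InvalidApiKeyName("42".into()));
    validated?;
    Ok(())
}

fn main() {
    let err = create_key().unwrap_err();
    println!("{err} -> {:?}", err.error_code());
}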
View File

@@ -3,13 +3,15 @@ pub mod error;
mod store;
use std::collections::{HashMap, HashSet};
use std::ops::Deref;
use std::path::Path;
use std::sync::Arc;
use error::{AuthControllerError, Result};
use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
use meilisearch_types::keys::{Action, Key};
use meilisearch_types::star_or::StarOr;
use serde::{Deserialize, Serialize};
use serde_json::Value;
pub use store::open_auth_store_env;
use store::{generate_key_as_hexa, HeedAuthStore};
use time::OffsetDateTime;
@@ -32,18 +34,17 @@ impl AuthController {
Ok(Self { store: Arc::new(store), master_key: master_key.clone() })
}
pub fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
match self.store.get_api_key(create_key.uid)? {
Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),
None => self.store.put_api_key(create_key.to_key()),
pub fn create_key(&self, value: Value) -> Result<Key> {
let key = Key::create_from_value(value)?;
match self.store.get_api_key(key.uid)? {
Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(key.uid.to_string())),
None => self.store.put_api_key(key),
}
}
pub fn update_key(&self, uid: Uuid, patch: PatchApiKey) -> Result<Key> {
pub fn update_key(&self, uid: Uuid, value: Value) -> Result<Key> {
let mut key = self.get_key(uid)?;
key.description = patch.description;
key.name = patch.name;
key.updated_at = OffsetDateTime::now_utc();
key.update_from_value(value)?;
self.store.put_api_key(key)
}
@@ -85,13 +86,15 @@ impl AuthController {
key.indexes
.into_iter()
.filter_map(|index| {
search_rules.get_index_search_rules(&format!("{index}")).map(
|index_search_rules| (index.to_string(), Some(index_search_rules)),
search_rules.get_index_search_rules(index.deref()).map(
|index_search_rules| {
(String::from(index), Some(index_search_rules))
},
)
})
.collect(),
),
None => SearchRules::Set(key.indexes.into_iter().map(|x| x.to_string()).collect()),
None => SearchRules::Set(key.indexes.into_iter().map(String::from).collect()),
};
} else if let Some(search_rules) = search_rules {
filters.search_rules = search_rules;

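On this side of the diff, `create_key` and `update_key` accept a raw `serde_json::Value` and push validation into `Key::create_from_value` / `Key::update_from_value` (see the `keys.rs` hunk near the end of this listing). A standalone sketch of that field-by-field validation pattern, assuming only the `serde_json` crate; `Key` and `KeyError` are simplified stand-ins:

use serde_json::{from_value, json, Value};

#[derive(Debug)]
enum KeyError {
    InvalidName(Value),
    InvalidDescription(Value),
}

#[derive(Debug)]
struct Key {
    name: Option<String>,
    description: Option<String>,
}

impl Key {
    // Mirrors the create_from_value pattern: pull each field out of the raw
    // JSON value and turn failures into field-specific errors.
    fn create_from_value(value: Value) -> Result<Self, KeyError> {
        let name = match value.get("name") {
            None | Some(Value::Null) => None,
            Some(v) => from_value(v.clone())
                .map(Some)
                .map_err(|_| KeyError::InvalidName(v.clone()))?,
        };
        let description = match value.get("description") {
            None | Some(Value::Null) => None,
            Some(v) => from_value(v.clone())
                .map(Some)
                .map_err(|_| KeyError::InvalidDescription(v.clone()))?,
        };
        Ok(Key { name, description })
    }
}

fn main() {
    // Both fields deserialize to Option<String>; `null` counts as absent.
    let key = Key::create_from_value(json!({ "name": "admin", "description": null })).unwrap();
    println!("{key:?}");

    // `name` is a number here, so we get a field-specific error back.
    let err = Key::create_from_value(json!({ "name": 42 })).unwrap_err();
    println!("{err:?}");
}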
View File

@@ -3,6 +3,7 @@ use std::cmp::Reverse;
use std::collections::HashSet;
use std::convert::{TryFrom, TryInto};
use std::fs::create_dir_all;
use std::ops::Deref;
use std::path::Path;
use std::str;
use std::sync::Arc;
@@ -134,7 +135,7 @@ impl HeedAuthStore {
for index in key.indexes.iter() {
db.put(
&mut wtxn,
&(&uid, &action, Some(index.to_string().as_bytes())),
&(&uid, &action, Some(index.deref().as_bytes())),
&key.expires_at,
)?;
}

View File

@@ -7,19 +7,18 @@ edition = "2021"
[dependencies]
actix-web = { version = "4.2.1", default-features = false }
anyhow = "1.0.65"
convert_case = "0.6.0"
csv = "1.1.6"
deserr = "0.1.5"
either = { version = "1.6.1", features = ["serde"] }
enum-iterator = "1.1.3"
file-store = { path = "../file-store" }
flate2 = "1.0.24"
fst = "0.4.7"
memmap2 = "0.5.7"
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.39.1", default-features = false }
milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.37.3", default-features = false }
proptest = { version = "1.0.0", optional = true }
proptest-derive = { version = "0.3.0", optional = true }
roaring = { version = "0.10.0", features = ["serde"] }
serde = { version = "1.0.145", features = ["derive"] }
serde-cs = "0.2.4"
serde_json = "1.0.85"
tar = "0.4.38"
tempfile = "3.3.0"
@@ -31,6 +30,8 @@ uuid = { version = "1.1.2", features = ["serde", "v4"] }
[dev-dependencies]
insta = "1.19.1"
meili-snap = { path = "../meili-snap" }
proptest = "1.0.0"
proptest-derive = "0.3.0"
[features]
# all specialized tokenizations
@@ -44,3 +45,4 @@ hebrew = ["milli/hebrew"]
japanese = ["milli/japanese"]
# thai specialized tokenization
thai = ["milli/thai"]
test-traits = ["proptest", "proptest-derive"]

View File

@@ -1,315 +0,0 @@
/*!
This module implements the error messages of deserialization errors.
We try to:
1. Give a human-readable description of where the error originated.
2. Use the correct terms depending on the format of the request (json/query param)
3. Categorise the type of the error (e.g. missing field, wrong value type, unexpected error, etc.)
*/
use deserr::{ErrorKind, IntoValue, ValueKind, ValuePointerRef};
use super::{DeserrJsonError, DeserrQueryParamError};
use crate::error::ErrorCode;
/// Return a description of the given location in a Json, preceded by the given article.
/// e.g. `at .key1[8].key2`. If the location is the origin, the given article will not be
/// included in the description.
pub fn location_json_description(location: ValuePointerRef, article: &str) -> String {
fn rec(location: ValuePointerRef) -> String {
match location {
ValuePointerRef::Origin => String::new(),
ValuePointerRef::Key { key, prev } => rec(*prev) + "." + key,
ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
}
}
match location {
ValuePointerRef::Origin => String::new(),
_ => {
format!("{article} `{}`", rec(location))
}
}
}
/// Return a description of the list of value kinds for a Json payload.
fn value_kinds_description_json(kinds: &[ValueKind]) -> String {
// Rank each value kind so that they can be sorted (and deduplicated)
// Having a predictable order helps with pattern matching
fn order(kind: &ValueKind) -> u8 {
match kind {
ValueKind::Null => 0,
ValueKind::Boolean => 1,
ValueKind::Integer => 2,
ValueKind::NegativeInteger => 3,
ValueKind::Float => 4,
ValueKind::String => 5,
ValueKind::Sequence => 6,
ValueKind::Map => 7,
}
}
// Return a description of a single value kind, preceded by an article
fn single_description(kind: &ValueKind) -> &'static str {
match kind {
ValueKind::Null => "null",
ValueKind::Boolean => "a boolean",
ValueKind::Integer => "a positive integer",
ValueKind::NegativeInteger => "an integer",
ValueKind::Float => "a number",
ValueKind::String => "a string",
ValueKind::Sequence => "an array",
ValueKind::Map => "an object",
}
}
fn description_rec(kinds: &[ValueKind], count_items: &mut usize, message: &mut String) {
let (msg_part, rest): (_, &[ValueKind]) = match kinds {
[] => (String::new(), &[]),
[ValueKind::Integer | ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
("a number".to_owned(), rest)
}
[ValueKind::Integer, ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
("a number".to_owned(), rest)
}
[ValueKind::Integer, ValueKind::NegativeInteger, rest @ ..] => {
("an integer".to_owned(), rest)
}
[a] => (single_description(a).to_owned(), &[]),
[a, rest @ ..] => (single_description(a).to_owned(), rest),
};
if rest.is_empty() {
if *count_items == 0 {
message.push_str(&msg_part);
} else if *count_items == 1 {
message.push_str(&format!(" or {msg_part}"));
} else {
message.push_str(&format!(", or {msg_part}"));
}
} else {
if *count_items == 0 {
message.push_str(&msg_part);
} else {
message.push_str(&format!(", {msg_part}"));
}
*count_items += 1;
description_rec(rest, count_items, message);
}
}
let mut kinds = kinds.to_owned();
kinds.sort_by_key(order);
kinds.dedup();
if kinds.is_empty() {
// Should not happen ideally
"a different value".to_owned()
} else {
let mut message = String::new();
description_rec(kinds.as_slice(), &mut 0, &mut message);
message
}
}
/// Return the JSON string of the value preceded by a description of its kind
fn value_description_with_kind_json(v: &serde_json::Value) -> String {
match v.kind() {
ValueKind::Null => "null".to_owned(),
kind => {
format!(
"{}: `{}`",
value_kinds_description_json(&[kind]),
serde_json::to_string(v).unwrap()
)
}
}
}
impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrJsonError<C> {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let mut message = String::new();
message.push_str(&match error {
ErrorKind::IncorrectValueKind { actual, accepted } => {
let expected = value_kinds_description_json(accepted);
let received = value_description_with_kind_json(&serde_json::Value::from(actual));
let location = location_json_description(location, " at");
format!("Invalid value type{location}: expected {expected}, but found {received}")
}
ErrorKind::MissingField { field } => {
let location = location_json_description(location, " inside");
format!("Missing field `{field}`{location}")
}
ErrorKind::UnknownKey { key, accepted } => {
let location = location_json_description(location, " inside");
format!(
"Unknown field `{}`{location}: expected one of {}",
key,
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", ")
)
}
ErrorKind::UnknownValue { value, accepted } => {
let location = location_json_description(location, " at");
format!(
"Unknown value `{}`{location}: expected one of {}",
value,
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", "),
)
}
ErrorKind::Unexpected { msg } => {
let location = location_json_description(location, " at");
format!("Invalid value{location}: {msg}")
}
});
Err(DeserrJsonError::new(message, C::default().error_code()))
}
}
/// Return a description of the given location in query parameters, preceded by the
/// given article. e.g. `at key5[2]`. If the location is the origin, the given article
/// will not be included in the description.
pub fn location_query_param_description(location: ValuePointerRef, article: &str) -> String {
fn rec(location: ValuePointerRef) -> String {
match location {
ValuePointerRef::Origin => String::new(),
ValuePointerRef::Key { key, prev } => {
if matches!(prev, ValuePointerRef::Origin) {
key.to_owned()
} else {
rec(*prev) + "." + key
}
}
ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
}
}
match location {
ValuePointerRef::Origin => String::new(),
_ => {
format!("{article} `{}`", rec(location))
}
}
}
impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrQueryParamError<C> {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let mut message = String::new();
message.push_str(&match error {
ErrorKind::IncorrectValueKind { actual, accepted } => {
let expected = value_kinds_description_query_param(accepted);
let received = value_description_with_kind_query_param(actual);
let location = location_query_param_description(location, " for parameter");
format!("Invalid value type{location}: expected {expected}, but found {received}")
}
ErrorKind::MissingField { field } => {
let location = location_query_param_description(location, " inside");
format!("Missing parameter `{field}`{location}")
}
ErrorKind::UnknownKey { key, accepted } => {
let location = location_query_param_description(location, " inside");
format!(
"Unknown parameter `{}`{location}: expected one of {}",
key,
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", ")
)
}
ErrorKind::UnknownValue { value, accepted } => {
let location = location_query_param_description(location, " for parameter");
format!(
"Unknown value `{}`{location}: expected one of {}",
value,
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", "),
)
}
ErrorKind::Unexpected { msg } => {
let location = location_query_param_description(location, " in parameter");
format!("Invalid value{location}: {msg}")
}
});
Err(DeserrQueryParamError::new(message, C::default().error_code()))
}
}
/// Return a description of the list of value kinds for query parameters
/// Since query parameters are always treated as strings, we always return
/// "a string" for now.
fn value_kinds_description_query_param(_accepted: &[ValueKind]) -> String {
"a string".to_owned()
}
fn value_description_with_kind_query_param<V: IntoValue>(actual: deserr::Value<V>) -> String {
match actual {
deserr::Value::Null => "null".to_owned(),
deserr::Value::Boolean(x) => format!("a boolean: `{x}`"),
deserr::Value::Integer(x) => format!("an integer: `{x}`"),
deserr::Value::NegativeInteger(x) => {
format!("an integer: `{x}`")
}
deserr::Value::Float(x) => {
format!("a number: `{x}`")
}
deserr::Value::String(x) => {
format!("a string: `{x}`")
}
deserr::Value::Sequence(_) => "multiple values".to_owned(),
deserr::Value::Map(_) => "multiple parameters".to_owned(),
}
}
#[cfg(test)]
mod tests {
use deserr::ValueKind;
use crate::deserr::error_messages::value_kinds_description_json;
#[test]
fn test_value_kinds_description_json() {
insta::assert_display_snapshot!(value_kinds_description_json(&[]), @"a different value");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"an integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Map]), @"an object");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"an integer or an array");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
}
}
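The removed module above renders locations such as `at .key1[8].key2` by walking the value pointer back to the origin. A standalone sketch of that recursion, using a simplified owned `Loc` type in place of deserr's `ValuePointerRef`:

enum Loc {
    Origin,
    Key { key: String, prev: Box<Loc> },
    Index { index: usize, prev: Box<Loc> },
}

// Same shape as location_json_description: the origin renders as nothing,
// every other location is prefixed with the given article.
fn location_description(location: &Loc, article: &str) -> String {
    fn rec(location: &Loc) -> String {
        match location {
            Loc::Origin => String::new(),
            Loc::Key { key, prev } => format!("{}.{}", rec(prev), key),
            Loc::Index { index, prev } => format!("{}[{}]", rec(prev), index),
        }
    }
    match location {
        Loc::Origin => String::new(),
        _ => format!("{article} `{}`", rec(location)),
    }
}

fn main() {
    // Pointer to `.key1[8].key2`, built leaf-first.
    let loc = Loc::Key {
        key: "key2".into(),
        prev: Box::new(Loc::Index {
            index: 8,
            prev: Box::new(Loc::Key { key: "key1".into(), prev: Box::new(Loc::Origin) }),
        }),
    };
    assert_eq!(location_description(&loc, " at"), " at `.key1[8].key2`");
    assert_eq!(location_description(&Loc::Origin, " at"), "");
    println!("ok");
}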

View File

@@ -1,134 +0,0 @@
use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;
use deserr::{DeserializeError, MergeWithError, ValuePointerRef};
use crate::error::deserr_codes::{self, *};
use crate::error::{
unwrap_any, Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError,
ParseOffsetDateTimeError,
};
use crate::index_uid::IndexUidFormatError;
use crate::tasks::{ParseTaskKindError, ParseTaskStatusError};
pub mod error_messages;
pub mod query_params;
/// Marker type for the Json format
pub struct DeserrJson;
/// Marker type for the Query Parameter format
pub struct DeserrQueryParam;
pub type DeserrJsonError<C = deserr_codes::BadRequest> = DeserrError<DeserrJson, C>;
pub type DeserrQueryParamError<C = deserr_codes::BadRequest> = DeserrError<DeserrQueryParam, C>;
/// A request deserialization error.
///
/// The first generic parameter is a marker type describing the format of the request: either JSON ([`DeserrJson`]) or query parameter ([`DeserrQueryParam`]).
/// The second generic parameter is the default error code for the deserialization error, in case it is not given.
pub struct DeserrError<Format, C: Default + ErrorCode> {
pub msg: String,
pub code: Code,
_phantom: PhantomData<(Format, C)>,
}
impl<Format, C: Default + ErrorCode> DeserrError<Format, C> {
pub fn new(msg: String, code: Code) -> Self {
Self { msg, code, _phantom: PhantomData }
}
}
impl<Format, C: Default + ErrorCode> std::fmt::Debug for DeserrError<Format, C> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish()
}
}
impl<Format, C: Default + ErrorCode> std::fmt::Display for DeserrError<Format, C> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.msg)
}
}
impl<Format, C: Default + ErrorCode> std::error::Error for DeserrError<Format, C> {}
impl<Format, C: Default + ErrorCode> ErrorCode for DeserrError<Format, C> {
fn error_code(&self) -> Code {
self.code
}
}
// For now, we don't accumulate errors. Only one deserialisation error is ever returned at a time.
impl<Format, C1: Default + ErrorCode, C2: Default + ErrorCode>
MergeWithError<DeserrError<Format, C2>> for DeserrError<Format, C1>
{
fn merge(
_self_: Option<Self>,
other: DeserrError<Format, C2>,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
}
}
impl<Format, C: Default + ErrorCode> MergeWithError<Infallible> for DeserrError<Format, C> {
fn merge(
_self_: Option<Self>,
_other: Infallible,
_merge_location: ValuePointerRef,
) -> Result<Self, Self> {
unreachable!()
}
}
// Implement a convenience function to build a `missing_field` error
macro_rules! make_missing_field_convenience_builder {
($err_code:ident, $fn_name:ident) => {
impl DeserrJsonError<$err_code> {
pub fn $fn_name(field: &str, location: ValuePointerRef) -> Self {
let x = unwrap_any(Self::error::<Infallible>(
None,
deserr::ErrorKind::MissingField { field },
location,
));
Self { msg: x.msg, code: $err_code.error_code(), _phantom: PhantomData }
}
}
};
}
make_missing_field_convenience_builder!(MissingIndexUid, missing_index_uid);
make_missing_field_convenience_builder!(MissingApiKeyActions, missing_api_key_actions);
make_missing_field_convenience_builder!(MissingApiKeyExpiresAt, missing_api_key_expires_at);
make_missing_field_convenience_builder!(MissingApiKeyIndexes, missing_api_key_indexes);
make_missing_field_convenience_builder!(MissingSwapIndexes, missing_swap_indexes);
// Integrate a sub-error into a [`DeserrError`] by taking its error message but using
// the default error code (C) from `Self`
macro_rules! merge_with_error_impl_take_error_message {
($err_type:ty) => {
impl<Format, C: Default + ErrorCode> MergeWithError<$err_type> for DeserrError<Format, C>
where
DeserrError<Format, C>: deserr::DeserializeError,
{
fn merge(
_self_: Option<Self>,
other: $err_type,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
DeserrError::<Format, C>::error::<Infallible>(
None,
deserr::ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
};
}
// All these errors can be merged into a `DeserrError`
merge_with_error_impl_take_error_message!(DeserrParseIntError);
merge_with_error_impl_take_error_message!(DeserrParseBoolError);
merge_with_error_impl_take_error_message!(uuid::Error);
merge_with_error_impl_take_error_message!(InvalidTaskDateError);
merge_with_error_impl_take_error_message!(ParseOffsetDateTimeError);
merge_with_error_impl_take_error_message!(ParseTaskKindError);
merge_with_error_impl_take_error_message!(ParseTaskStatusError);
merge_with_error_impl_take_error_message!(IndexUidFormatError);
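The removed `DeserrError<Format, C>` carries its request format and default error code purely at the type level, through `PhantomData` and marker types. A simplified standalone sketch of that marker-type pattern (std only; the names are illustrative, not deserr's):

use std::fmt;
use std::marker::PhantomData;

#[derive(Debug, Clone, Copy)]
enum Code {
    BadRequest,
    InvalidIndexUid,
}

// The default error code is chosen by a marker type, not a runtime value.
trait DefaultCode {
    fn code() -> Code;
}

// Format markers: never constructed, they only live inside PhantomData.
struct Json;
struct QueryParam;

// Code markers.
struct BadRequestCode;
struct InvalidIndexUidCode;

impl DefaultCode for BadRequestCode {
    fn code() -> Code {
        Code::BadRequest
    }
}
impl DefaultCode for InvalidIndexUidCode {
    fn code() -> Code {
        Code::InvalidIndexUid
    }
}

// Message and code at runtime; format and default code only at the type level.
struct TypedError<Format, C: DefaultCode> {
    msg: String,
    code: Code,
    _phantom: PhantomData<(Format, C)>,
}

impl<Format, C: DefaultCode> TypedError<Format, C> {
    fn new(msg: String) -> Self {
        Self { msg, code: C::code(), _phantom: PhantomData }
    }
}

impl<Format, C: DefaultCode> fmt::Display for TypedError<Format, C> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{} ({:?})", self.msg, self.code)
    }
}

// Aliases in the spirit of DeserrJsonError<C> and DeserrQueryParamError<C>.
type JsonError<C = BadRequestCode> = TypedError<Json, C>;
type QueryParamError<C = BadRequestCode> = TypedError<QueryParam, C>;

fn main() {
    let a: JsonError = TypedError::new("unknown field `foo`".into());
    let b: QueryParamError<InvalidIndexUidCode> =
        TypedError::new("`%!` is not a valid index uid".into());
    println!("{a}\n{b}");
}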

View File

@@ -1,115 +0,0 @@
/*!
This module provides helper traits, types, and functions to deserialize query parameters.
The source of the problem is that query parameters only give us a string to work with.
This means `deserr` is never given sequences or numbers, and thus the default deserialization
code for common types such as `usize` or `Vec<T>` does not work. To work around it, we create a
wrapper type called `Param<T>`, which is deserialised using the `from_query_param` method of the trait
`FromQueryParameter`.
We also use other helper types such as `CS` (i.e. comma-separated) from `serde_cs` as well as
`StarOr`, `OptionStarOr`, and `OptionStarOrList`.
*/
use std::convert::Infallible;
use std::ops::Deref;
use std::str::FromStr;
use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
use super::{DeserrParseBoolError, DeserrParseIntError};
use crate::error::unwrap_any;
use crate::index_uid::IndexUid;
use crate::tasks::{Kind, Status};
/// A wrapper type indicating that the inner value should be
/// deserialised from a query parameter string.
///
/// Note that if the field is optional, it is better to use
/// `Option<Param<T>>` instead of `Param<Option<T>>`.
#[derive(Default, Debug, Clone, Copy)]
pub struct Param<T>(pub T);
impl<T> Deref for Param<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T, E> DeserializeFromValue<E> for Param<T>
where
E: DeserializeError + MergeWithError<T::Err>,
T: FromQueryParameter,
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
) -> Result<Self, E> {
match value {
deserr::Value::String(s) => match T::from_query_param(&s) {
Ok(x) => Ok(Param(x)),
Err(e) => Err(unwrap_any(E::merge(None, e, location))),
},
_ => Err(unwrap_any(E::error(
None,
deserr::ErrorKind::IncorrectValueKind {
actual: value,
accepted: &[ValueKind::String],
},
location,
))),
}
}
}
/// Parse a value from a query parameter string.
///
/// This trait is functionally equivalent to `FromStr`.
/// Having a separate trait allows us to return better
/// deserialization error messages.
pub trait FromQueryParameter: Sized {
type Err;
fn from_query_param(p: &str) -> Result<Self, Self::Err>;
}
/// Implement `FromQueryParameter` for the given type using its `FromStr`
/// trait implementation.
macro_rules! impl_from_query_param_from_str {
($type:ty) => {
impl FromQueryParameter for $type {
type Err = <$type as FromStr>::Err;
fn from_query_param(p: &str) -> Result<Self, Self::Err> {
p.parse()
}
}
};
}
impl_from_query_param_from_str!(Kind);
impl_from_query_param_from_str!(Status);
impl_from_query_param_from_str!(IndexUid);
/// Implement `FromQueryParameter` for the given type using its `FromStr`
/// trait implementation, replacing the returned error with a struct
/// that wraps the original query parameter.
macro_rules! impl_from_query_param_wrap_original_value_in_error {
($type:ty, $err_type:path) => {
impl FromQueryParameter for $type {
type Err = $err_type;
fn from_query_param(p: &str) -> Result<Self, Self::Err> {
p.parse().map_err(|_| $err_type(p.to_owned()))
}
}
};
}
impl_from_query_param_wrap_original_value_in_error!(usize, DeserrParseIntError);
impl_from_query_param_wrap_original_value_in_error!(u32, DeserrParseIntError);
impl_from_query_param_wrap_original_value_in_error!(bool, DeserrParseBoolError);
impl FromQueryParameter for String {
type Err = Infallible;
fn from_query_param(p: &str) -> Result<Self, Infallible> {
Ok(p.to_owned())
}
}
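The removed module above parses query-parameter strings through `FromQueryParameter`, whose error types keep the original text so messages can quote it, and wraps values in `Param<T>`. A standalone sketch of that pattern using only the standard library; the `parse_param` driver stands in for the deserr integration:

use std::fmt;
use std::ops::Deref;

/// Error that remembers the raw query-parameter text that failed to parse.
#[derive(Debug)]
struct ParseIntError(String);

impl fmt::Display for ParseIntError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "could not parse `{}` as a positive integer", self.0)
    }
}

/// Functionally equivalent to FromStr, but lets us pick better error types.
trait FromQueryParameter: Sized {
    type Err;
    fn from_query_param(p: &str) -> Result<Self, Self::Err>;
}

// Wrap FromStr and replace its error with one carrying the original text.
macro_rules! impl_from_query_param_wrap_original_value_in_error {
    ($type:ty, $err_type:path) => {
        impl FromQueryParameter for $type {
            type Err = $err_type;
            fn from_query_param(p: &str) -> Result<Self, Self::Err> {
                p.parse().map_err(|_| $err_type(p.to_owned()))
            }
        }
    };
}

impl_from_query_param_wrap_original_value_in_error!(usize, ParseIntError);
impl_from_query_param_wrap_original_value_in_error!(u32, ParseIntError);

/// Marker wrapper: "this value came from a query-parameter string".
#[derive(Debug, Clone, Copy)]
struct Param<T>(pub T);

impl<T> Deref for Param<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

fn parse_param<T: FromQueryParameter>(raw: &str) -> Result<Param<T>, T::Err> {
    T::from_query_param(raw).map(Param)
}

fn main() {
    let limit: Param<usize> = parse_param("20").unwrap();
    assert_eq!(*limit, 20);

    let err = parse_param::<u32>("twenty").unwrap_err();
    println!("{err}"); // could not parse `twenty` as a positive integer
}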

View File

@@ -4,6 +4,7 @@ use std::fs::File;
use std::io::{self, Seek, Write};
use std::marker::PhantomData;
use either::Either;
use memmap2::MmapOptions;
use milli::documents::{DocumentsBatchBuilder, Error};
use milli::Object;
@@ -119,6 +120,20 @@ pub fn read_csv(file: &File, writer: impl Write + Seek) -> Result<u64> {
/// Reads JSON from a temporary file and writes an obkv batch to the writer.
pub fn read_json(file: &File, writer: impl Write + Seek) -> Result<u64> {
read_json_inner(file, writer, PayloadType::Json)
}
/// Reads NDJSON from a temporary file and writes an obkv batch to the writer.
pub fn read_ndjson(file: &File, writer: impl Write + Seek) -> Result<u64> {
read_json_inner(file, writer, PayloadType::Ndjson)
}
/// Reads JSON or NDJSON from a temporary file and writes an obkv batch to the writer.
fn read_json_inner(
file: &File,
writer: impl Write + Seek,
payload_type: PayloadType,
) -> Result<u64> {
let mut builder = DocumentsBatchBuilder::new(writer);
let mmap = unsafe { MmapOptions::new().map(file)? };
let mut deserializer = serde_json::Deserializer::from_slice(&mmap);
@@ -128,20 +143,23 @@ pub fn read_json(file: &File, writer: impl Write + Seek) -> Result<u64> {
// The data has been transferred to the writer during the deserialization process.
Ok(Ok(_)) => (),
Ok(Err(e)) => return Err(DocumentFormatError::Io(e)),
Err(e) => {
// Attempt to deserialize a single json string when the cause of the exception is not Category.data
// Other types of deserialisation exceptions are returned directly to the front-end
if e.classify() != serde_json::error::Category::Data {
return Err(DocumentFormatError::MalformedPayload(
Error::Json(e),
PayloadType::Json,
));
Err(_e) => {
// If we cannot deserialize the content as an array of objects, then we try
// to deserialize it with the original method to keep correct error messages.
#[derive(Deserialize, Debug)]
#[serde(transparent)]
struct ArrayOrSingleObject {
#[serde(with = "either::serde_untagged")]
inner: Either<Vec<Object>, Object>,
}
let content: Object = serde_json::from_slice(&mmap)
let content: ArrayOrSingleObject = serde_json::from_reader(file)
.map_err(Error::Json)
.map_err(|e| (PayloadType::Json, e))?;
builder.append_json_object(&content).map_err(DocumentFormatError::Io)?;
.map_err(|e| (payload_type, e))?;
for object in content.inner.map_right(|o| vec![o]).into_inner() {
builder.append_json_object(&object).map_err(DocumentFormatError::Io)?;
}
}
}
@@ -151,22 +169,6 @@ pub fn read_json(file: &File, writer: impl Write + Seek) -> Result<u64> {
Ok(count as u64)
}
/// Reads JSON from temporary file and write an obkv batch to writer.
pub fn read_ndjson(file: &File, writer: impl Write + Seek) -> Result<u64> {
let mut builder = DocumentsBatchBuilder::new(writer);
let mmap = unsafe { MmapOptions::new().map(file)? };
for result in serde_json::Deserializer::from_slice(&mmap).into_iter() {
let object = result.map_err(Error::Json).map_err(|e| (PayloadType::Ndjson, e))?;
builder.append_json_object(&object).map_err(Into::into).map_err(DocumentFormatError::Io)?;
}
let count = builder.documents_count();
let _ = builder.into_inner().map_err(Into::into).map_err(DocumentFormatError::Io)?;
Ok(count as u64)
}
/// The actual handling of the deserialization process in serde
/// avoids storing the deserialized object in memory.
///

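With this change, a payload that fails the streaming pass is re-read as either a JSON array of objects or a single object, through `either::serde_untagged` and a `#[serde(transparent)]` wrapper. A standalone sketch of that fallback shape, assuming the `serde` (derive), `serde_json`, and `either` (with its `serde` feature) crates, as listed in this compare's dependencies:

use either::Either;
use serde::Deserialize;
use serde_json::{Map, Value};

type Object = Map<String, Value>;

// Accepts `[{...}, {...}]` as well as a bare `{...}` in the same field.
#[derive(Debug, Deserialize)]
#[serde(transparent)]
struct ArrayOrSingleObject {
    #[serde(with = "either::serde_untagged")]
    inner: Either<Vec<Object>, Object>,
}

fn objects_from_payload(payload: &str) -> Result<Vec<Object>, serde_json::Error> {
    let content: ArrayOrSingleObject = serde_json::from_str(payload)?;
    // map_right turns the single-object case into a one-element vector, so both
    // cases can be appended to the batch the same way (as in read_json_inner above).
    Ok(content.inner.map_right(|o| vec![o]).into_inner())
}

fn main() {
    let many = objects_from_payload(r#"[{"id": 1}, {"id": 2}]"#).unwrap();
    let one = objects_from_payload(r#"{"id": 3}"#).unwrap();
    assert_eq!((many.len(), one.len()), (2, 1));
    println!("{} + {} documents", many.len(), one.len());
}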
View File

@@ -3,14 +3,15 @@ use std::{fmt, io};
use actix_web::http::StatusCode;
use actix_web::{self as aweb, HttpResponseBuilder};
use aweb::rt::task::JoinError;
use convert_case::Casing;
use milli::heed::{Error as HeedError, MdbError};
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
pub struct ResponseError {
#[serde(skip)]
#[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
code: StatusCode,
message: String,
#[serde(rename = "code")]
@@ -29,7 +30,7 @@ impl ResponseError {
Self {
code: code.http(),
message,
error_code: code.name(),
error_code: code.err_code().error_name.to_string(),
error_type: code.type_(),
error_link: code.url(),
}
@@ -46,7 +47,7 @@ impl std::error::Error for ResponseError {}
impl<T> From<T> for ResponseError
where
T: std::error::Error + ErrorCode,
T: ErrorCode,
{
fn from(other: T) -> Self {
Self::from_msg(other.to_string(), other.error_code())
@@ -64,7 +65,7 @@ impl aweb::error::ResponseError for ResponseError {
}
}
pub trait ErrorCode {
pub trait ErrorCode: std::error::Error {
fn error_code(&self) -> Code;
/// returns the HTTP status code associated with the error
@@ -90,10 +91,9 @@ pub trait ErrorCode {
#[allow(clippy::enum_variant_names)]
enum ErrorType {
Internal,
InvalidRequest,
Auth,
System,
InternalError,
InvalidRequestError,
AuthenticationError,
}
impl fmt::Display for ErrorType {
@@ -101,194 +101,278 @@ impl fmt::Display for ErrorType {
use ErrorType::*;
match self {
Internal => write!(f, "internal"),
InvalidRequest => write!(f, "invalid_request"),
Auth => write!(f, "auth"),
System => write!(f, "system"),
InternalError => write!(f, "internal"),
InvalidRequestError => write!(f, "invalid_request"),
AuthenticationError => write!(f, "auth"),
}
}
}
/// Implement all the error codes.
///
/// 1. Make an enum `Code` where each error code is a variant
/// 2. Implement the `http`, `name`, and `type_` methods on the enum
/// 3. Make a unit type for each error code in the module `deserr_codes`.
///
/// The unit type's purpose is to be used as a marker type parameter, e.g.
/// `DeserrJsonError<MyErrorCode>`. It implements `Default` and `ErrorCode`,
/// so we can get a value of the `Code` enum with the correct variant by calling
/// `MyErrorCode::default().error_code()`.
macro_rules! make_error_codes {
($($code_ident:ident, $err_type:ident, $status:ident);*) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Code {
$($code_ident),*
}
impl Code {
/// return the HTTP status code associated with the `Code`
fn http(&self) -> StatusCode {
match self {
$(
Code::$code_ident => StatusCode::$status
),*
}
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq)]
pub enum Code {
// error related to your setup
IoError,
NoSpaceLeftOnDevice,
TooManyOpenFiles,
// index related error
CreateIndex,
IndexAlreadyExists,
IndexNotFound,
InvalidIndexUid,
InvalidMinWordLengthForTypo,
DuplicateIndexFound,
// invalid state error
InvalidState,
MissingPrimaryKey,
PrimaryKeyAlreadyPresent,
MaxFieldsLimitExceeded,
MissingDocumentId,
InvalidDocumentId,
Filter,
Sort,
BadParameter,
BadRequest,
DatabaseSizeLimitReached,
DocumentNotFound,
Internal,
InvalidGeoField,
InvalidRankingRule,
InvalidStore,
InvalidToken,
MissingAuthorizationHeader,
MissingMasterKey,
DumpNotFound,
InvalidTaskDateFilter,
InvalidTaskStatusesFilter,
InvalidTaskTypesFilter,
InvalidTaskCanceledByFilter,
InvalidTaskUidsFilter,
TaskNotFound,
TaskDeletionWithEmptyQuery,
TaskCancelationWithEmptyQuery,
PayloadTooLarge,
RetrieveDocument,
SearchDocuments,
UnsupportedMediaType,
DumpAlreadyInProgress,
DumpProcessFailed,
// Only used when importing a dump
UnretrievableErrorCode,
InvalidContentType,
MissingContentType,
MalformedPayload,
MissingPayload,
ApiKeyNotFound,
MissingParameter,
InvalidApiKeyActions,
InvalidApiKeyIndexes,
InvalidApiKeyExpiresAt,
InvalidApiKeyDescription,
InvalidApiKeyName,
InvalidApiKeyUid,
ImmutableField,
ApiKeyAlreadyExists,
}
impl Code {
/// associate a `Code` variant to the actual ErrCode
fn err_code(&self) -> ErrCode {
use Code::*;
match self {
// related to the setup
IoError => ErrCode::invalid("io_error", StatusCode::UNPROCESSABLE_ENTITY),
TooManyOpenFiles => {
ErrCode::invalid("too_many_open_files", StatusCode::UNPROCESSABLE_ENTITY)
}
NoSpaceLeftOnDevice => {
ErrCode::invalid("no_space_left_on_device", StatusCode::UNPROCESSABLE_ENTITY)
}
/// return error name, used as error code
fn name(&self) -> String {
match self {
$(
Code::$code_ident => stringify!($code_ident).to_case(convert_case::Case::Snake)
),*
}
// index related errors
// create index is thrown on internal error while creating an index.
CreateIndex => {
ErrCode::internal("index_creation_failed", StatusCode::INTERNAL_SERVER_ERROR)
}
IndexAlreadyExists => ErrCode::invalid("index_already_exists", StatusCode::CONFLICT),
// thrown when requesting a nonexistent index
IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
// invalid state error
InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
// thrown when no primary key has been set
MissingPrimaryKey => {
ErrCode::invalid("primary_key_inference_failed", StatusCode::BAD_REQUEST)
}
// error thrown when trying to set an already existing primary key
PrimaryKeyAlreadyPresent => {
ErrCode::invalid("index_primary_key_already_exists", StatusCode::BAD_REQUEST)
}
// invalid ranking rule
InvalidRankingRule => ErrCode::invalid("invalid_ranking_rule", StatusCode::BAD_REQUEST),
// invalid database
InvalidStore => {
ErrCode::internal("invalid_store_file", StatusCode::INTERNAL_SERVER_ERROR)
}
/// return the error type
fn type_(&self) -> String {
match self {
$(
Code::$code_ident => ErrorType::$err_type.to_string()
),*
}
// invalid document
MaxFieldsLimitExceeded => {
ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST)
}
MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),
InvalidDocumentId => ErrCode::invalid("invalid_document_id", StatusCode::BAD_REQUEST),
// error related to filters
Filter => ErrCode::invalid("invalid_filter", StatusCode::BAD_REQUEST),
// error related to sorts
Sort => ErrCode::invalid("invalid_sort", StatusCode::BAD_REQUEST),
BadParameter => ErrCode::invalid("bad_parameter", StatusCode::BAD_REQUEST),
BadRequest => ErrCode::invalid("bad_request", StatusCode::BAD_REQUEST),
DatabaseSizeLimitReached => {
ErrCode::internal("database_size_limit_reached", StatusCode::INTERNAL_SERVER_ERROR)
}
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
InvalidGeoField => ErrCode::invalid("invalid_geo_field", StatusCode::BAD_REQUEST),
InvalidToken => ErrCode::authentication("invalid_api_key", StatusCode::FORBIDDEN),
MissingAuthorizationHeader => {
ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED)
}
MissingMasterKey => {
ErrCode::authentication("missing_master_key", StatusCode::UNAUTHORIZED)
}
InvalidTaskDateFilter => {
ErrCode::invalid("invalid_task_date_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskUidsFilter => {
ErrCode::invalid("invalid_task_uids_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskStatusesFilter => {
ErrCode::invalid("invalid_task_statuses_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskTypesFilter => {
ErrCode::invalid("invalid_task_types_filter", StatusCode::BAD_REQUEST)
}
InvalidTaskCanceledByFilter => {
ErrCode::invalid("invalid_task_canceled_by_filter", StatusCode::BAD_REQUEST)
}
TaskNotFound => ErrCode::invalid("task_not_found", StatusCode::NOT_FOUND),
TaskDeletionWithEmptyQuery => {
ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)
}
TaskCancelationWithEmptyQuery => {
ErrCode::invalid("missing_task_filters", StatusCode::BAD_REQUEST)
}
DumpNotFound => ErrCode::invalid("dump_not_found", StatusCode::NOT_FOUND),
PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
RetrieveDocument => {
ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST)
}
SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST),
UnsupportedMediaType => {
ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}
/// return the doc url associated with the error
fn url(&self) -> String {
format!(
"https://docs.meilisearch.com/errors#{}",
self.name().to_case(convert_case::Case::Kebab)
)
// error related to dump
DumpAlreadyInProgress => {
ErrCode::invalid("dump_already_processing", StatusCode::CONFLICT)
}
DumpProcessFailed => {
ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)
}
MissingContentType => {
ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}
MalformedPayload => ErrCode::invalid("malformed_payload", StatusCode::BAD_REQUEST),
InvalidContentType => {
ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}
MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST),
// This one can only happen when importing a dump and encountering an unknown code in the task queue.
UnretrievableErrorCode => {
ErrCode::invalid("unretrievable_error_code", StatusCode::BAD_REQUEST)
}
// error related to keys
ApiKeyNotFound => ErrCode::invalid("api_key_not_found", StatusCode::NOT_FOUND),
MissingParameter => ErrCode::invalid("missing_parameter", StatusCode::BAD_REQUEST),
InvalidApiKeyActions => {
ErrCode::invalid("invalid_api_key_actions", StatusCode::BAD_REQUEST)
}
InvalidApiKeyIndexes => {
ErrCode::invalid("invalid_api_key_indexes", StatusCode::BAD_REQUEST)
}
InvalidApiKeyExpiresAt => {
ErrCode::invalid("invalid_api_key_expires_at", StatusCode::BAD_REQUEST)
}
InvalidApiKeyDescription => {
ErrCode::invalid("invalid_api_key_description", StatusCode::BAD_REQUEST)
}
InvalidApiKeyName => ErrCode::invalid("invalid_api_key_name", StatusCode::BAD_REQUEST),
InvalidApiKeyUid => ErrCode::invalid("invalid_api_key_uid", StatusCode::BAD_REQUEST),
ApiKeyAlreadyExists => ErrCode::invalid("api_key_already_exists", StatusCode::CONFLICT),
ImmutableField => ErrCode::invalid("immutable_field", StatusCode::BAD_REQUEST),
InvalidMinWordLengthForTypo => {
ErrCode::invalid("invalid_min_word_length_for_typo", StatusCode::BAD_REQUEST)
}
DuplicateIndexFound => {
ErrCode::invalid("duplicate_index_found", StatusCode::BAD_REQUEST)
}
}
pub mod deserr_codes {
use super::{Code, ErrorCode};
$(
#[derive(Default)]
pub struct $code_ident;
impl ErrorCode for $code_ident {
fn error_code(&self) -> Code {
Code::$code_ident
}
}
)*
}
}
/// return the HTTP status code associated with the `Code`
fn http(&self) -> StatusCode {
self.err_code().status_code
}
/// return error name, used as error code
fn name(&self) -> String {
self.err_code().error_name.to_string()
}
/// return the error type
fn type_(&self) -> String {
self.err_code().error_type.to_string()
}
/// return the doc url associated with the error
fn url(&self) -> String {
format!("https://docs.meilisearch.com/errors#{}", self.name())
}
}
// An exhaustive list of all the error codes used by meilisearch.
make_error_codes! {
ApiKeyAlreadyExists , InvalidRequest , CONFLICT ;
ApiKeyNotFound , InvalidRequest , NOT_FOUND ;
BadParameter , InvalidRequest , BAD_REQUEST;
BadRequest , InvalidRequest , BAD_REQUEST;
DatabaseSizeLimitReached , Internal , INTERNAL_SERVER_ERROR;
DocumentNotFound , InvalidRequest , NOT_FOUND;
DumpAlreadyProcessing , InvalidRequest , CONFLICT;
DumpNotFound , InvalidRequest , NOT_FOUND;
DumpProcessFailed , Internal , INTERNAL_SERVER_ERROR;
DuplicateIndexFound , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyActions , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyCreatedAt , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyExpiresAt , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyIndexes , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyKey , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyUid , InvalidRequest , BAD_REQUEST;
ImmutableApiKeyUpdatedAt , InvalidRequest , BAD_REQUEST;
ImmutableIndexCreatedAt , InvalidRequest , BAD_REQUEST;
ImmutableIndexUid , InvalidRequest , BAD_REQUEST;
ImmutableIndexUpdatedAt , InvalidRequest , BAD_REQUEST;
IndexAlreadyExists , InvalidRequest , CONFLICT ;
IndexCreationFailed , Internal , INTERNAL_SERVER_ERROR;
IndexNotFound , InvalidRequest , NOT_FOUND;
IndexPrimaryKeyAlreadyExists , InvalidRequest , BAD_REQUEST ;
IndexPrimaryKeyMultipleCandidatesFound, InvalidRequest , BAD_REQUEST;
IndexPrimaryKeyNoCandidateFound , InvalidRequest , BAD_REQUEST ;
Internal , Internal , INTERNAL_SERVER_ERROR ;
InvalidApiKey , Auth , FORBIDDEN ;
InvalidApiKeyActions , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyDescription , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyIndexes , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyLimit , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyName , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyOffset , InvalidRequest , BAD_REQUEST ;
InvalidApiKeyUid , InvalidRequest , BAD_REQUEST ;
InvalidContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ;
InvalidDocumentFields , InvalidRequest , BAD_REQUEST ;
InvalidDocumentGeoField , InvalidRequest , BAD_REQUEST ;
InvalidDocumentId , InvalidRequest , BAD_REQUEST ;
InvalidDocumentLimit , InvalidRequest , BAD_REQUEST ;
InvalidDocumentOffset , InvalidRequest , BAD_REQUEST ;
InvalidIndexLimit , InvalidRequest , BAD_REQUEST ;
InvalidIndexOffset , InvalidRequest , BAD_REQUEST ;
InvalidIndexPrimaryKey , InvalidRequest , BAD_REQUEST ;
InvalidIndexUid , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToCrop , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToHighlight , InvalidRequest , BAD_REQUEST ;
InvalidSearchAttributesToRetrieve , InvalidRequest , BAD_REQUEST ;
InvalidSearchCropLength , InvalidRequest , BAD_REQUEST ;
InvalidSearchCropMarker , InvalidRequest , BAD_REQUEST ;
InvalidSearchFacets , InvalidRequest , BAD_REQUEST ;
InvalidSearchFilter , InvalidRequest , BAD_REQUEST ;
InvalidSearchHighlightPostTag , InvalidRequest , BAD_REQUEST ;
InvalidSearchHighlightPreTag , InvalidRequest , BAD_REQUEST ;
InvalidSearchHitsPerPage , InvalidRequest , BAD_REQUEST ;
InvalidSearchLimit , InvalidRequest , BAD_REQUEST ;
InvalidSearchMatchingStrategy , InvalidRequest , BAD_REQUEST ;
InvalidSearchOffset , InvalidRequest , BAD_REQUEST ;
InvalidSearchPage , InvalidRequest , BAD_REQUEST ;
InvalidSearchQ , InvalidRequest , BAD_REQUEST ;
InvalidSearchShowMatchesPosition , InvalidRequest , BAD_REQUEST ;
InvalidSearchSort , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDisplayedAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsDistinctAttribute , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFaceting , InvalidRequest , BAD_REQUEST ;
InvalidSettingsFilterableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsPagination , InvalidRequest , BAD_REQUEST ;
InvalidSettingsRankingRules , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSearchableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSortableAttributes , InvalidRequest , BAD_REQUEST ;
InvalidSettingsStopWords , InvalidRequest , BAD_REQUEST ;
InvalidSettingsSynonyms , InvalidRequest , BAD_REQUEST ;
InvalidSettingsTypoTolerance , InvalidRequest , BAD_REQUEST ;
InvalidState , Internal , INTERNAL_SERVER_ERROR ;
InvalidStoreFile , Internal , INTERNAL_SERVER_ERROR ;
InvalidSwapDuplicateIndexFound , InvalidRequest , BAD_REQUEST ;
InvalidSwapIndexes , InvalidRequest , BAD_REQUEST ;
InvalidTaskAfterEnqueuedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskAfterFinishedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskAfterStartedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskBeforeEnqueuedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskBeforeFinishedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskBeforeStartedAt , InvalidRequest , BAD_REQUEST ;
InvalidTaskCanceledBy , InvalidRequest , BAD_REQUEST ;
InvalidTaskFrom , InvalidRequest , BAD_REQUEST ;
InvalidTaskLimit , InvalidRequest , BAD_REQUEST ;
InvalidTaskStatuses , InvalidRequest , BAD_REQUEST ;
InvalidTaskTypes , InvalidRequest , BAD_REQUEST ;
InvalidTaskUids , InvalidRequest , BAD_REQUEST ;
IoError , System , UNPROCESSABLE_ENTITY;
MalformedPayload , InvalidRequest , BAD_REQUEST ;
MaxFieldsLimitExceeded , InvalidRequest , BAD_REQUEST ;
MissingApiKeyActions , InvalidRequest , BAD_REQUEST ;
MissingApiKeyExpiresAt , InvalidRequest , BAD_REQUEST ;
MissingApiKeyIndexes , InvalidRequest , BAD_REQUEST ;
MissingAuthorizationHeader , Auth , UNAUTHORIZED ;
MissingContentType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE ;
MissingDocumentId , InvalidRequest , BAD_REQUEST ;
MissingIndexUid , InvalidRequest , BAD_REQUEST ;
MissingMasterKey , Auth , UNAUTHORIZED ;
MissingPayload , InvalidRequest , BAD_REQUEST ;
MissingSwapIndexes , InvalidRequest , BAD_REQUEST ;
MissingTaskFilters , InvalidRequest , BAD_REQUEST ;
NoSpaceLeftOnDevice , System , UNPROCESSABLE_ENTITY;
PayloadTooLarge , InvalidRequest , PAYLOAD_TOO_LARGE ;
TaskNotFound , InvalidRequest , NOT_FOUND ;
TooManyOpenFiles , System , UNPROCESSABLE_ENTITY ;
UnretrievableDocument , Internal , BAD_REQUEST ;
UnretrievableErrorCode , InvalidRequest , BAD_REQUEST ;
UnsupportedMediaType , InvalidRequest , UNSUPPORTED_MEDIA_TYPE
/// Internal structure providing a convenient way to create error codes
struct ErrCode {
status_code: StatusCode,
error_type: ErrorType,
error_name: &'static str,
}
impl ErrCode {
fn authentication(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode { status_code, error_name, error_type: ErrorType::AuthenticationError }
}
fn internal(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode { status_code, error_name, error_type: ErrorType::InternalError }
}
fn invalid(error_name: &'static str, status_code: StatusCode) -> ErrCode {
ErrCode { status_code, error_name, error_type: ErrorType::InvalidRequestError }
}
}
impl ErrorCode for JoinError {
@@ -312,28 +396,25 @@ impl ErrorCode for milli::Error {
| UserError::DocumentLimitReached
| UserError::AccessingSoftDeletedDocument { .. }
| UserError::UnknownInternalDocumentId { .. } => Code::Internal,
UserError::InvalidStoreFile => Code::InvalidStoreFile,
UserError::InvalidStoreFile => Code::InvalidStore,
UserError::NoSpaceLeftOnDevice => Code::NoSpaceLeftOnDevice,
UserError::MaxDatabaseSizeReached => Code::DatabaseSizeLimitReached,
UserError::AttributeLimitReached => Code::MaxFieldsLimitExceeded,
UserError::InvalidFilter(_) => Code::InvalidSearchFilter,
UserError::InvalidFilter(_) => Code::Filter,
UserError::MissingDocumentId { .. } => Code::MissingDocumentId,
UserError::InvalidDocumentId { .. } | UserError::TooManyDocumentIds { .. } => {
Code::InvalidDocumentId
}
UserError::NoPrimaryKeyCandidateFound => Code::IndexPrimaryKeyNoCandidateFound,
UserError::MultiplePrimaryKeyCandidatesFound { .. } => {
Code::IndexPrimaryKeyMultipleCandidatesFound
}
UserError::PrimaryKeyCannotBeChanged(_) => Code::IndexPrimaryKeyAlreadyExists,
UserError::SortRankingRuleMissing => Code::InvalidSearchSort,
UserError::MissingPrimaryKey => Code::MissingPrimaryKey,
UserError::PrimaryKeyCannotBeChanged(_) => Code::PrimaryKeyAlreadyPresent,
UserError::SortRankingRuleMissing => Code::Sort,
UserError::InvalidFacetsDistribution { .. } => Code::BadRequest,
UserError::InvalidSortableAttribute { .. } => Code::InvalidSearchSort,
UserError::CriterionError(_) => Code::InvalidSettingsRankingRules,
UserError::InvalidGeoField { .. } => Code::InvalidDocumentGeoField,
UserError::SortError(_) => Code::InvalidSearchSort,
UserError::InvalidSortableAttribute { .. } => Code::Sort,
UserError::CriterionError(_) => Code::InvalidRankingRule,
UserError::InvalidGeoField { .. } => Code::InvalidGeoField,
UserError::SortError(_) => Code::Sort,
UserError::InvalidMinTypoWordLenSetting(_, _) => {
Code::InvalidSettingsTypoTolerance
Code::InvalidMinWordLengthForTypo
}
}
}
@@ -360,7 +441,7 @@ impl ErrorCode for HeedError {
fn error_code(&self) -> Code {
match self {
HeedError::Mdb(MdbError::MapFull) => Code::DatabaseSizeLimitReached,
HeedError::Mdb(MdbError::Invalid) => Code::InvalidStoreFile,
HeedError::Mdb(MdbError::Invalid) => Code::InvalidStore,
HeedError::Io(e) => e.error_code(),
HeedError::Mdb(_)
| HeedError::Encoding
@@ -383,49 +464,14 @@ impl ErrorCode for io::Error {
}
}
/// Unwrap a result, either its Ok or Err value.
pub fn unwrap_any<T>(any: Result<T, T>) -> T {
match any {
Ok(any) => any,
Err(any) => any,
}
}
#[cfg(feature = "test-traits")]
mod strategy {
use proptest::strategy::Strategy;
/// Deserialization when `deserr` cannot parse an API key date.
#[derive(Debug)]
pub struct ParseOffsetDateTimeError(pub String);
impl fmt::Display for ParseOffsetDateTimeError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
writeln!(f, "`{original}` is not a valid date. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", original = self.0)
}
}
use super::*;
/// Deserialization when `deserr` cannot parse a task date.
#[derive(Debug)]
pub struct InvalidTaskDateError(pub String);
impl std::fmt::Display for InvalidTaskDateError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "`{}` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.", self.0)
}
}
/// Deserialization error when `deserr` cannot parse a String
/// into a bool.
#[derive(Debug)]
pub struct DeserrParseBoolError(pub String);
impl fmt::Display for DeserrParseBoolError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "could not parse `{}` as a boolean, expected either `true` or `false`", self.0)
}
}
/// Deserialization error when `deserr` cannot parse a String
/// into an integer.
#[derive(Debug)]
pub struct DeserrParseIntError(pub String);
impl fmt::Display for DeserrParseIntError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "could not parse `{}` as a positive integer", self.0)
pub(super) fn status_code_strategy() -> impl Strategy<Value = StatusCode> {
(100..999u16).prop_map(|i| StatusCode::from_u16(i).unwrap())
}
}
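On the removed side of this file, the `make_error_codes!` macro expands one table into the `Code` enum, its `http`/`name`/`type_`/`url` methods, and one unit marker type per code, so that `MyErrorCode::default().error_code()` yields the right variant. A trimmed standalone sketch of that pattern with a handful of codes, assuming the `convert_case` crate for the case conversions and a bare `u16` in place of actix's `StatusCode`:

use convert_case::{Case, Casing};

#[derive(Debug, Clone, Copy)]
enum ErrorType {
    Internal,
    InvalidRequest,
    Auth,
}

trait ErrorCode {
    fn error_code(&self) -> Code;
}

// One table row per error: variant name, error type, HTTP status. The macro
// expands it into the Code enum, its methods, and one unit marker type per
// code (usable elsewhere as a default type parameter).
macro_rules! make_error_codes {
    ($($code_ident:ident, $err_type:ident, $status:expr);*) => {
        #[derive(Debug, Clone, Copy, PartialEq, Eq)]
        pub enum Code {
            $($code_ident),*
        }

        impl Code {
            fn name(&self) -> String {
                match self {
                    $(Code::$code_ident => stringify!($code_ident).to_case(Case::Snake)),*
                }
            }
            fn type_(&self) -> ErrorType {
                match self {
                    $(Code::$code_ident => ErrorType::$err_type),*
                }
            }
            fn http(&self) -> u16 {
                match self {
                    $(Code::$code_ident => $status),*
                }
            }
            fn url(&self) -> String {
                // Kebab-cased anchor, hence the `#index-not-found` style links
                // in the older snapshots above.
                format!(
                    "https://docs.meilisearch.com/errors#{}",
                    self.name().to_case(Case::Kebab)
                )
            }
        }

        pub mod deserr_codes {
            use super::{Code, ErrorCode};
            $(
                #[derive(Default)]
                pub struct $code_ident;
                impl ErrorCode for $code_ident {
                    fn error_code(&self) -> Code {
                        Code::$code_ident
                    }
                }
            )*
        }
    };
}

make_error_codes! {
    BadRequest       , InvalidRequest , 400;
    IndexNotFound    , InvalidRequest , 404;
    Internal         , Internal       , 500;
    MissingMasterKey , Auth           , 401
}

fn main() {
    // The marker type picks the Code variant through its Default impl.
    let code = deserr_codes::IndexNotFound::default().error_code();
    assert_eq!(code, Code::IndexNotFound);
    assert_eq!(code.http(), 404);
    println!("{} {:?} {}", code.name(), code.type_(), code.url());
}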

View File

@@ -2,15 +2,17 @@ use std::error::Error;
use std::fmt;
use std::str::FromStr;
use deserr::DeserializeFromValue;
use serde::{Deserialize, Serialize};
use crate::error::{Code, ErrorCode};
/// An index uid is composed of only ASCII alphanumeric characters, `-` and `_`, and is between 1
/// and 400 bytes long.
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(from(String) = IndexUid::try_from -> IndexUidFormatError)]
pub struct IndexUid(String);
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
pub struct IndexUid(
#[cfg_attr(feature = "test-traits", proptest(regex("[a-zA-Z0-9_-]{1,400}")))] String,
);
impl IndexUid {
pub fn new_unchecked(s: impl AsRef<str>) -> Self {
@@ -27,12 +29,6 @@ impl IndexUid {
}
}
impl fmt::Display for IndexUid {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.0, f)
}
}
impl std::ops::Deref for IndexUid {
type Target = str;

View File
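Both sides of this diff enforce the rule stated in the doc comment above: an index uid is 1 to 400 bytes of ASCII alphanumerics, `-`, and `_`. A standalone sketch of such a validating `TryFrom<String>` (std only; the error message wording is illustrative, not Meilisearch's):

use std::fmt;

#[derive(Debug, Clone, PartialEq, Eq)]
struct IndexUid(String);

#[derive(Debug)]
struct IndexUidFormatError {
    invalid_uid: String,
}

impl fmt::Display for IndexUidFormatError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "`{}` is not a valid index uid (expected 1 to 400 bytes of ASCII \
             alphanumeric characters, `-` or `_`)",
            self.invalid_uid
        )
    }
}

impl TryFrom<String> for IndexUid {
    type Error = IndexUidFormatError;

    fn try_from(uid: String) -> Result<Self, Self::Error> {
        let len_ok = !uid.is_empty() && uid.len() <= 400;
        let chars_ok = uid.chars().all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_');
        if len_ok && chars_ok {
            Ok(IndexUid(uid))
        } else {
            Err(IndexUidFormatError { invalid_uid: uid })
        }
    }
}

fn main() {
    assert!(IndexUid::try_from("movies_2023".to_string()).is_ok());
    let err = IndexUid::try_from("mov/ies".to_string()).unwrap_err();
    println!("{err}");
}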

@@ -1,88 +1,22 @@
use std::convert::Infallible;
use std::hash::Hash;
use std::str::FromStr;
use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
use enum_iterator::Sequence;
use serde::{Deserialize, Serialize};
use serde_json::{from_value, Value};
use time::format_description::well_known::Rfc3339;
use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};
use uuid::Uuid;
use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::error::{unwrap_any, Code, ParseOffsetDateTimeError};
use crate::index_uid::IndexUid;
use crate::error::{Code, ErrorCode};
use crate::index_uid::{IndexUid, IndexUidFormatError};
use crate::star_or::StarOr;
type Result<T> = std::result::Result<T, Error>;
pub type KeyId = Uuid;
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct CreateApiKey {
#[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
pub description: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidApiKeyName>)]
pub name: Option<String>,
#[deserr(default = Uuid::new_v4(), error = DeserrJsonError<InvalidApiKeyUid>, from(&String) = Uuid::from_str -> uuid::Error)]
pub uid: KeyId,
#[deserr(error = DeserrJsonError<InvalidApiKeyActions>, missing_field_error = DeserrJsonError::missing_api_key_actions)]
pub actions: Vec<Action>,
#[deserr(error = DeserrJsonError<InvalidApiKeyIndexes>, missing_field_error = DeserrJsonError::missing_api_key_indexes)]
pub indexes: Vec<StarOr<IndexUid>>,
#[deserr(error = DeserrJsonError<InvalidApiKeyExpiresAt>, from(Option<String>) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)]
pub expires_at: Option<OffsetDateTime>,
}
impl CreateApiKey {
pub fn to_key(self) -> Key {
let CreateApiKey { description, name, uid, actions, indexes, expires_at } = self;
let now = OffsetDateTime::now_utc();
Key {
description,
name,
uid,
actions,
indexes,
expires_at,
created_at: now,
updated_at: now,
}
}
}
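For context (not part of the patch), a sketch of the camelCase payload `CreateApiKey` is derived to accept, fed through deserr the same way the JSON extractor later in this diff does it. The index name and expiry date are illustrative values, and using `DeserrJsonError` as the top-level error type is an assumption based on the attribute above.
fn example_create_key() -> std::result::Result<Key, DeserrJsonError> {
    let payload = serde_json::json!({
        "name": "products-search-key",
        "actions": ["search"],
        "indexes": ["products"],
        "expiresAt": "2042-01-01T00:00:00Z"
    });
    // Same deserialization entry point as the ValidatedJson extractor uses.
    let create: CreateApiKey = deserr::deserialize::<_, _, DeserrJsonError>(payload)?;
    Ok(create.to_key())
}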
fn deny_immutable_fields_api_key(
field: &str,
accepted: &[&str],
location: ValuePointerRef,
) -> DeserrJsonError {
let mut error = unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
None,
deserr::ErrorKind::UnknownKey { key: field, accepted },
location,
));
error.code = match field {
"uid" => Code::ImmutableApiKeyUid,
"actions" => Code::ImmutableApiKeyActions,
"indexes" => Code::ImmutableApiKeyIndexes,
"expiresAt" => Code::ImmutableApiKeyExpiresAt,
"createdAt" => Code::ImmutableApiKeyCreatedAt,
"updatedAt" => Code::ImmutableApiKeyUpdatedAt,
_ => Code::BadRequest,
};
error
}
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
pub struct PatchApiKey {
#[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
pub description: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidApiKeyName>)]
pub name: Option<String>,
}
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub struct Key {
#[serde(skip_serializing_if = "Option::is_none")]
@@ -101,6 +35,100 @@ pub struct Key {
}
impl Key {
pub fn create_from_value(value: Value) -> Result<Self> {
let name = match value.get("name") {
None | Some(Value::Null) => None,
Some(des) => from_value(des.clone())
.map(Some)
.map_err(|_| Error::InvalidApiKeyName(des.clone()))?,
};
let description = match value.get("description") {
None | Some(Value::Null) => None,
Some(des) => from_value(des.clone())
.map(Some)
.map_err(|_| Error::InvalidApiKeyDescription(des.clone()))?,
};
let uid = value.get("uid").map_or_else(
|| Ok(Uuid::new_v4()),
|uid| from_value(uid.clone()).map_err(|_| Error::InvalidApiKeyUid(uid.clone())),
)?;
let actions = value
.get("actions")
.map(|act| {
from_value(act.clone()).map_err(|_| Error::InvalidApiKeyActions(act.clone()))
})
.ok_or(Error::MissingParameter("actions"))??;
let indexes = value
.get("indexes")
.map(|ind| {
from_value::<Vec<String>>(ind.clone())
// If it's not a vec of strings, return an API key parsing error.
.map_err(|_| Error::InvalidApiKeyIndexes(ind.clone()))
.and_then(|ind| {
ind.into_iter()
// If it's not a valid Index uid, return an Index Uid parsing error.
.map(|i| StarOr::<IndexUid>::from_str(&i).map_err(Error::from))
.collect()
})
})
.ok_or(Error::MissingParameter("indexes"))??;
let expires_at = value
.get("expiresAt")
.map(parse_expiration_date)
.ok_or(Error::MissingParameter("expiresAt"))??;
let created_at = OffsetDateTime::now_utc();
let updated_at = created_at;
Ok(Self { name, description, uid, actions, indexes, expires_at, created_at, updated_at })
}
pub fn update_from_value(&mut self, value: Value) -> Result<()> {
if let Some(des) = value.get("description") {
let des =
from_value(des.clone()).map_err(|_| Error::InvalidApiKeyDescription(des.clone()));
self.description = des?;
}
if let Some(des) = value.get("name") {
let des = from_value(des.clone()).map_err(|_| Error::InvalidApiKeyName(des.clone()));
self.name = des?;
}
if value.get("uid").is_some() {
return Err(Error::ImmutableField("uid".to_string()));
}
if value.get("actions").is_some() {
return Err(Error::ImmutableField("actions".to_string()));
}
if value.get("indexes").is_some() {
return Err(Error::ImmutableField("indexes".to_string()));
}
if value.get("expiresAt").is_some() {
return Err(Error::ImmutableField("expiresAt".to_string()));
}
if value.get("createdAt").is_some() {
return Err(Error::ImmutableField("createdAt".to_string()));
}
if value.get("updatedAt").is_some() {
return Err(Error::ImmutableField("updatedAt".to_string()));
}
self.updated_at = OffsetDateTime::now_utc();
Ok(())
}
pub fn default_admin() -> Self {
let now = OffsetDateTime::now_utc();
let uid = Uuid::new_v4();
@@ -132,137 +160,107 @@ impl Key {
}
}
fn parse_expiration_date(
string: Option<String>,
) -> std::result::Result<Option<OffsetDateTime>, ParseOffsetDateTimeError> {
let Some(string) = string else {
return Ok(None)
};
let datetime = if let Ok(datetime) = OffsetDateTime::parse(&string, &Rfc3339) {
datetime
} else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
&string,
format_description!(
"[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
),
) {
primitive_datetime.assume_utc()
} else if let Ok(primitive_datetime) = PrimitiveDateTime::parse(
&string,
format_description!(
"[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
),
) {
primitive_datetime.assume_utc()
} else if let Ok(date) = Date::parse(
&string,
format_description!("[year repr:full base:calendar]-[month repr:numerical]-[day]"),
) {
PrimitiveDateTime::new(date, time!(00:00)).assume_utc()
} else {
return Err(ParseOffsetDateTimeError(string));
};
if datetime > OffsetDateTime::now_utc() {
Ok(Some(datetime))
} else {
Err(ParseOffsetDateTimeError(string))
fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> {
match value {
Value::String(string) => OffsetDateTime::parse(string, &Rfc3339)
.or_else(|_| {
PrimitiveDateTime::parse(
string,
format_description!(
"[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]"
),
).map(|datetime| datetime.assume_utc())
})
.or_else(|_| {
PrimitiveDateTime::parse(
string,
format_description!(
"[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]"
),
).map(|datetime| datetime.assume_utc())
})
.or_else(|_| {
Date::parse(string, format_description!(
"[year repr:full base:calendar]-[month repr:numerical]-[day]"
)).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc())
})
.map_err(|_| Error::InvalidApiKeyExpiresAt(value.clone()))
// check if the key is already expired.
.and_then(|d| {
if d > OffsetDateTime::now_utc() {
Ok(d)
} else {
Err(Error::InvalidApiKeyExpiresAt(value.clone()))
}
})
.map(Option::Some),
Value::Null => Ok(None),
_otherwise => Err(Error::InvalidApiKeyExpiresAt(value.clone())),
}
}
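A sketch of the shapes the `&Value`-based parser above accepts and rejects (the dates are illustrative; since the function is private, this would live as an in-module test):
#[test]
fn expiration_date_shapes() {
    use serde_json::{json, Value};
    // Accepted: RFC 3339, the "T"- and space-separated datetime layouts, a bare date, null.
    assert!(parse_expiration_date(&json!("2042-04-02T00:42:42Z")).is_ok());
    assert!(parse_expiration_date(&json!("2042-04-02 00:42:42")).is_ok());
    assert!(parse_expiration_date(&json!("2042-04-02")).is_ok());
    assert!(parse_expiration_date(&Value::Null).unwrap().is_none());
    // Rejected: dates already in the past and anything that is neither a string nor null.
    assert!(parse_expiration_date(&json!("2000-01-01")).is_err());
    assert!(parse_expiration_date(&json!(42)).is_err());
}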
#[derive(
Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, DeserializeFromValue,
)]
#[derive(Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence)]
#[repr(u8)]
pub enum Action {
#[serde(rename = "*")]
#[deserr(rename = "*")]
All = 0,
#[serde(rename = "search")]
#[deserr(rename = "search")]
Search,
#[serde(rename = "documents.*")]
#[deserr(rename = "documents.*")]
DocumentsAll,
#[serde(rename = "documents.add")]
#[deserr(rename = "documents.add")]
DocumentsAdd,
#[serde(rename = "documents.get")]
#[deserr(rename = "documents.get")]
DocumentsGet,
#[serde(rename = "documents.delete")]
#[deserr(rename = "documents.delete")]
DocumentsDelete,
#[serde(rename = "indexes.*")]
#[deserr(rename = "indexes.*")]
IndexesAll,
#[serde(rename = "indexes.create")]
#[deserr(rename = "indexes.create")]
IndexesAdd,
#[serde(rename = "indexes.get")]
#[deserr(rename = "indexes.get")]
IndexesGet,
#[serde(rename = "indexes.update")]
#[deserr(rename = "indexes.update")]
IndexesUpdate,
#[serde(rename = "indexes.delete")]
#[deserr(rename = "indexes.delete")]
IndexesDelete,
#[serde(rename = "indexes.swap")]
#[deserr(rename = "indexes.swap")]
IndexesSwap,
#[serde(rename = "tasks.*")]
#[deserr(rename = "tasks.*")]
TasksAll,
#[serde(rename = "tasks.cancel")]
#[deserr(rename = "tasks.cancel")]
TasksCancel,
#[serde(rename = "tasks.delete")]
#[deserr(rename = "tasks.delete")]
TasksDelete,
#[serde(rename = "tasks.get")]
#[deserr(rename = "tasks.get")]
TasksGet,
#[serde(rename = "settings.*")]
#[deserr(rename = "settings.*")]
SettingsAll,
#[serde(rename = "settings.get")]
#[deserr(rename = "settings.get")]
SettingsGet,
#[serde(rename = "settings.update")]
#[deserr(rename = "settings.update")]
SettingsUpdate,
#[serde(rename = "stats.*")]
#[deserr(rename = "stats.*")]
StatsAll,
#[serde(rename = "stats.get")]
#[deserr(rename = "stats.get")]
StatsGet,
#[serde(rename = "metrics.*")]
#[deserr(rename = "metrics.*")]
MetricsAll,
#[serde(rename = "metrics.get")]
#[deserr(rename = "metrics.get")]
MetricsGet,
#[serde(rename = "dumps.*")]
#[deserr(rename = "dumps.*")]
DumpsAll,
#[serde(rename = "dumps.create")]
#[deserr(rename = "dumps.create")]
DumpsCreate,
#[serde(rename = "version")]
#[deserr(rename = "version")]
Version,
#[serde(rename = "keys.create")]
#[deserr(rename = "keys.create")]
KeysAdd,
#[serde(rename = "keys.get")]
#[deserr(rename = "keys.get")]
KeysGet,
#[serde(rename = "keys.update")]
#[deserr(rename = "keys.update")]
KeysUpdate,
#[serde(rename = "keys.delete")]
#[deserr(rename = "keys.delete")]
KeysDelete,
}
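A small illustration (not part of the patch) of what the `serde(rename)` attributes above buy: every variant maps to its dotted action name on the wire.
#[test]
fn action_names_round_trip() {
    assert_eq!(serde_json::to_string(&Action::All).unwrap(), r#""*""#);
    assert_eq!(serde_json::to_string(&Action::DocumentsAdd).unwrap(), r#""documents.add""#);
    let parsed: Action = serde_json::from_str(r#""tasks.cancel""#).unwrap();
    assert_eq!(parsed, Action::TasksCancel);
}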
@@ -343,3 +341,50 @@ pub mod actions {
pub const KEYS_UPDATE: u8 = KeysUpdate.repr();
pub const KEYS_DELETE: u8 = KeysDelete.repr();
}
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("`{0}` field is mandatory.")]
MissingParameter(&'static str),
#[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")]
InvalidApiKeyActions(Value),
#[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")]
InvalidApiKeyIndexes(Value),
#[error("{0}")]
InvalidApiKeyIndexUid(IndexUidFormatError),
#[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")]
InvalidApiKeyExpiresAt(Value),
#[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")]
InvalidApiKeyDescription(Value),
#[error(
"`name` field value `{0}` is invalid. It should be a string or specified as a null value."
)]
InvalidApiKeyName(Value),
#[error("`uid` field value `{0}` is invalid. It should be a valid UUID v4 string or omitted.")]
InvalidApiKeyUid(Value),
#[error("The `{0}` field cannot be modified for the given resource.")]
ImmutableField(String),
}
impl From<IndexUidFormatError> for Error {
fn from(e: IndexUidFormatError) -> Self {
Self::InvalidApiKeyIndexUid(e)
}
}
impl ErrorCode for Error {
fn error_code(&self) -> Code {
match self {
Self::MissingParameter(_) => Code::MissingParameter,
Self::InvalidApiKeyActions(_) => Code::InvalidApiKeyActions,
Self::InvalidApiKeyIndexes(_) | Self::InvalidApiKeyIndexUid(_) => {
Code::InvalidApiKeyIndexes
}
Self::InvalidApiKeyExpiresAt(_) => Code::InvalidApiKeyExpiresAt,
Self::InvalidApiKeyDescription(_) => Code::InvalidApiKeyDescription,
Self::InvalidApiKeyName(_) => Code::InvalidApiKeyName,
Self::InvalidApiKeyUid(_) => Code::InvalidApiKeyUid,
Self::ImmutableField(_) => Code::ImmutableField,
}
}
}

View File

@@ -1,5 +1,4 @@
pub mod compression;
pub mod deserr;
pub mod document_formats;
pub mod error;
pub mod index_uid;
@@ -8,10 +7,11 @@ pub mod settings;
pub mod star_or;
pub mod tasks;
pub mod versioning;
pub use milli;
pub use milli::{heed, Index};
use uuid::Uuid;
pub use versioning::VERSION_FILE_NAME;
pub use {milli, serde_cs};
pub type Document = serde_json::Map<String, serde_json::Value>;
pub type InstanceUid = Uuid;

View File

@@ -1,20 +1,12 @@
use std::collections::{BTreeMap, BTreeSet};
use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;
use std::num::NonZeroUsize;
use std::str::FromStr;
use deserr::{DeserializeError, DeserializeFromValue, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
use milli::update::Setting;
use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
use milli::{Index, DEFAULT_VALUES_PER_FACET};
use serde::{Deserialize, Serialize, Serializer};
use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::error::unwrap_any;
/// The maximum number of results that the engine
/// will be able to return in one search call.
pub const DEFAULT_PAGINATION_MAX_TOTAL_HITS: usize = 1000;
@@ -41,109 +33,73 @@ pub struct Checked;
#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct Unchecked;
impl<E> DeserializeFromValue<E> for Unchecked
where
E: DeserializeError,
{
fn deserialize_from_value<V: deserr::IntoValue>(
_value: deserr::Value<V>,
_location: deserr::ValuePointerRef,
) -> Result<Self, E> {
unreachable!()
}
}
fn validate_min_word_size_for_typo_setting<E: DeserializeError>(
s: MinWordSizeTyposSetting,
location: ValuePointerRef,
) -> Result<MinWordSizeTyposSetting, E> {
if let (Setting::Set(one), Setting::Set(two)) = (s.one_typo, s.two_typos) {
if one > two {
return Err(unwrap_any(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater than or equal to `oneTypo`, but found `oneTypo: {one}` and `twoTypos: {two}`.") }, location)));
}
}
Ok(s)
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError<InvalidSettingsTypoTolerance>)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct MinWordSizeTyposSetting {
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub one_typo: Setting<u8>,
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub two_typos: Setting<u8>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrJsonError<InvalidSettingsTypoTolerance>>)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct TypoSettings {
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub enabled: Setting<bool>,
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
pub min_word_size_for_typos: Setting<MinWordSizeTyposSetting>,
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub disable_on_words: Setting<BTreeSet<String>>,
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub disable_on_attributes: Setting<BTreeSet<String>>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct FacetingSettings {
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub max_values_per_facet: Setting<usize>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
pub struct PaginationSettings {
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
pub max_total_hits: Setting<usize>,
}
impl MergeWithError<milli::CriterionError> for DeserrJsonError<InvalidSettingsRankingRules> {
fn merge(
_self_: Option<Self>,
other: milli::CriterionError,
merge_location: ValuePointerRef,
) -> Result<Self, Self> {
Self::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
/// Holds all the settings for an index. `T` can either be `Checked` if they represent settings
/// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the latter case, a
/// call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(
deny_unknown_fields,
rename_all = "camelCase",
bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>")
)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]
#[serde(bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>"))]
#[cfg_attr(test, derive(proptest_derive::Arbitrary))]
pub struct Settings<T> {
#[serde(
default,
serialize_with = "serialize_with_wildcard",
skip_serializing_if = "Setting::is_not_set"
)]
#[deserr(default, error = DeserrJsonError<InvalidSettingsDisplayedAttributes>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub displayed_attributes: Setting<Vec<String>>,
#[serde(
@@ -151,39 +107,38 @@ pub struct Settings<T> {
serialize_with = "serialize_with_wildcard",
skip_serializing_if = "Setting::is_not_set"
)]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSearchableAttributes>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub searchable_attributes: Setting<Vec<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsFilterableAttributes>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub filterable_attributes: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSortableAttributes>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub sortable_attributes: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsRankingRules>)]
pub ranking_rules: Setting<Vec<RankingRuleView>>,
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub ranking_rules: Setting<Vec<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub stop_words: Setting<BTreeSet<String>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsDistinctAttribute>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub distinct_attribute: Setting<String>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub typo_tolerance: Setting<TypoSettings>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsFaceting>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub faceting: Setting<FacetingSettings>,
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsPagination>)]
#[cfg_attr(test, proptest(strategy = "test::setting_strategy()"))]
pub pagination: Setting<PaginationSettings>,
#[serde(skip)]
#[deserr(skip)]
pub _kind: PhantomData<T>,
}
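For reference, a sketch of the camelCase JSON shape this struct maps to. The values are illustrative, and the ranking-rule spellings follow the `Criterion`/`RankingRuleView` names used further down in this file.
fn example_settings_payload() -> serde_json::Value {
    serde_json::json!({
        "displayedAttributes": ["*"],
        "searchableAttributes": ["title", "overview"],
        "filterableAttributes": ["genres"],
        "sortableAttributes": ["release_date"],
        "rankingRules": ["words", "typo", "proximity", "attribute", "sort", "exactness"],
        "stopWords": ["the", "a"],
        "typoTolerance": { "minWordSizeForTypos": { "oneTypo": 5, "twoTypos": 9 } },
        "faceting": { "maxValuesPerFacet": 100 },
        "pagination": { "maxTotalHits": 1000 }
    })
}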
@@ -318,9 +273,7 @@ pub fn apply_settings_to_builder(
}
match settings.ranking_rules {
Setting::Set(ref criteria) => {
builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
}
Setting::Set(ref criteria) => builder.set_criteria(criteria.clone()),
Setting::Reset => builder.reset_criteria(),
Setting::NotSet => (),
}
@@ -434,7 +387,7 @@ pub fn settings(
let sortable_attributes = index.sortable_fields(rtxn)?.into_iter().collect();
let criteria = index.criteria(rtxn)?;
let criteria = index.criteria(rtxn)?.into_iter().map(|c| c.to_string()).collect();
let stop_words = index
.stop_words(rtxn)?
@@ -495,7 +448,7 @@ pub fn settings(
},
filterable_attributes: Setting::Set(filterable_attributes),
sortable_attributes: Setting::Set(sortable_attributes),
ranking_rules: Setting::Set(criteria.iter().map(|c| c.clone().into()).collect()),
ranking_rules: Setting::Set(criteria),
stop_words: Setting::Set(stop_words),
distinct_attribute: match distinct_field {
Some(field) => Setting::Set(field),
@@ -509,106 +462,16 @@ pub fn settings(
})
}
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(from(&String) = FromStr::from_str -> CriterionError)]
pub enum RankingRuleView {
/// Sorted by decreasing number of matched query terms.
/// Query words at the front of an attribute are considered better than words at the back.
Words,
/// Sorted by increasing number of typos.
Typo,
/// Sorted by increasing distance between matched query terms.
Proximity,
/// Documents with query words contained in more important
/// attributes are considered better.
Attribute,
/// Dynamically sort the documents at query time. None, one or multiple Asc/Desc sortable
/// attributes can be used in place of this criterion at query time.
Sort,
/// Sorted by the similarity of the matched words with the query words.
Exactness,
/// Sorted by the increasing value of the field specified.
Asc(String),
/// Sorted by the decreasing value of the field specified.
Desc(String),
}
impl Serialize for RankingRuleView {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&format!("{}", Criterion::from(self.clone())))
}
}
impl<'de> Deserialize<'de> for RankingRuleView {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = RankingRuleView;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter, "the name of a valid ranking rule (string)")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
let criterion = Criterion::from_str(v).map_err(|_| {
E::invalid_value(serde::de::Unexpected::Str(v), &"a valid ranking rule")
})?;
Ok(RankingRuleView::from(criterion))
}
}
deserializer.deserialize_str(Visitor)
}
}
impl FromStr for RankingRuleView {
type Err = <Criterion as FromStr>::Err;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(RankingRuleView::from(Criterion::from_str(s)?))
}
}
impl fmt::Display for RankingRuleView {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
fmt::Display::fmt(&Criterion::from(self.clone()), f)
}
}
impl From<Criterion> for RankingRuleView {
fn from(value: Criterion) -> Self {
match value {
Criterion::Words => RankingRuleView::Words,
Criterion::Typo => RankingRuleView::Typo,
Criterion::Proximity => RankingRuleView::Proximity,
Criterion::Attribute => RankingRuleView::Attribute,
Criterion::Sort => RankingRuleView::Sort,
Criterion::Exactness => RankingRuleView::Exactness,
Criterion::Asc(x) => RankingRuleView::Asc(x),
Criterion::Desc(x) => RankingRuleView::Desc(x),
}
}
}
impl From<RankingRuleView> for Criterion {
fn from(value: RankingRuleView) -> Self {
match value {
RankingRuleView::Words => Criterion::Words,
RankingRuleView::Typo => Criterion::Typo,
RankingRuleView::Proximity => Criterion::Proximity,
RankingRuleView::Attribute => Criterion::Attribute,
RankingRuleView::Sort => Criterion::Sort,
RankingRuleView::Exactness => Criterion::Exactness,
RankingRuleView::Asc(x) => Criterion::Asc(x),
RankingRuleView::Desc(x) => Criterion::Desc(x),
}
}
}
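A sketch of how these conversions compose, assuming milli's `Criterion` keeps the usual `field:asc` spelling for its `FromStr`/`Display` pair:
#[test]
fn ranking_rule_round_trip() {
    use std::str::FromStr;
    assert_eq!(RankingRuleView::from_str("typo").unwrap(), RankingRuleView::Typo);
    let rule = RankingRuleView::from_str("release_date:asc").unwrap();
    assert_eq!(rule, RankingRuleView::Asc("release_date".to_string()));
    // Display goes back through Criterion, so the textual form is preserved.
    assert_eq!(rule.to_string(), "release_date:asc");
}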
#[cfg(test)]
pub(crate) mod test {
use proptest::prelude::*;
use super::*;
pub(super) fn setting_strategy<T: Arbitrary + Clone>() -> impl Strategy<Value = Setting<T>> {
prop_oneof![Just(Setting::NotSet), Just(Setting::Reset), any::<T>().prop_map(Setting::Set)]
}
#[test]
fn test_setting_check() {
// test no changes

View File

@@ -1,14 +1,11 @@
use std::fmt;
use std::fmt::{Display, Formatter};
use std::marker::PhantomData;
use std::ops::Deref;
use std::str::FromStr;
use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
use serde::de::Visitor;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use crate::deserr::query_params::FromQueryParameter;
use crate::error::unwrap_any;
/// A type that tries to match either a star (*) or
/// any other thing that implements `FromStr`.
#[derive(Debug, Clone)]
@@ -28,11 +25,23 @@ impl<T: FromStr> FromStr for StarOr<T> {
}
}
}
impl<T: fmt::Display> fmt::Display for StarOr<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
impl<T: Deref<Target = str>> Deref for StarOr<T> {
type Target = str;
fn deref(&self) -> &Self::Target {
match self {
StarOr::Star => write!(f, "*"),
StarOr::Other(x) => fmt::Display::fmt(x, f),
Self::Star => "*",
Self::Other(t) => t.deref(),
}
}
}
impl<T: Into<String>> From<StarOr<T>> for String {
fn from(s: StarOr<T>) -> Self {
match s {
StarOr::Star => "*".to_string(),
StarOr::Other(t) => t.into(),
}
}
}
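An illustration of the intended behaviour (the `FromStr` body is elided in the hunk above, but the doc comment and the deserr implementation further down spell out the `*` special case):
#[test]
fn star_or_parsing() {
    use crate::index_uid::IndexUid;
    use std::str::FromStr;
    let star: StarOr<IndexUid> = StarOr::from_str("*").unwrap();
    assert!(matches!(star, StarOr::Star));
    let uid: StarOr<IndexUid> = StarOr::from_str("movies").unwrap();
    // Deref exposes the underlying &str of the wrapped IndexUid.
    assert_eq!(&*uid, "movies");
}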
@@ -52,7 +61,7 @@ impl<T: PartialEq + Eq> Eq for StarOr<T> {}
impl<'de, T, E> Deserialize<'de> for StarOr<T>
where
T: FromStr<Err = E>,
E: fmt::Display,
E: Display,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
@@ -68,11 +77,11 @@ where
impl<'de, T, FE> Visitor<'de> for StarOrVisitor<T>
where
T: FromStr<Err = FE>,
FE: fmt::Display,
FE: Display,
{
type Value = StarOr<T>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> std::fmt::Result {
fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result {
formatter.write_str("a string")
}
@@ -98,7 +107,7 @@ where
impl<T> Serialize for StarOr<T>
where
T: ToString,
T: Deref<Target = str>,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
@@ -106,222 +115,7 @@ where
{
match self {
StarOr::Star => serializer.serialize_str("*"),
StarOr::Other(other) => serializer.serialize_str(&other.to_string()),
}
}
}
impl<T, E> DeserializeFromValue<E> for StarOr<T>
where
T: FromStr,
E: DeserializeError + MergeWithError<T::Err>,
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
) -> Result<Self, E> {
match value {
deserr::Value::String(v) => {
if v == "*" {
Ok(StarOr::Star)
} else {
match T::from_str(&v) {
Ok(parsed) => Ok(StarOr::Other(parsed)),
Err(e) => Err(unwrap_any(E::merge(None, e, location))),
}
}
}
_ => Err(unwrap_any(E::error::<V>(
None,
deserr::ErrorKind::IncorrectValueKind {
actual: value,
accepted: &[ValueKind::String],
},
location,
))),
}
}
}
/// A type representing the content of a query parameter that can either not exist,
/// be equal to a star (*), or be any other value
///
/// It is a convenient alternative to `Option<StarOr<T>>`.
#[derive(Debug, Default, Clone, Copy)]
pub enum OptionStarOr<T> {
#[default]
None,
Star,
Other(T),
}
impl<T> OptionStarOr<T> {
pub fn is_some(&self) -> bool {
match self {
Self::None => false,
Self::Star => false,
Self::Other(_) => true,
}
}
pub fn merge_star_and_none(self) -> Option<T> {
match self {
Self::None | Self::Star => None,
Self::Other(x) => Some(x),
}
}
pub fn try_map<U, E, F: Fn(T) -> Result<U, E>>(self, map_f: F) -> Result<OptionStarOr<U>, E> {
match self {
OptionStarOr::None => Ok(OptionStarOr::None),
OptionStarOr::Star => Ok(OptionStarOr::Star),
OptionStarOr::Other(x) => map_f(x).map(OptionStarOr::Other),
}
}
}
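A quick sketch of the semantics: only `Other` values survive `merge_star_and_none`; both the missing and the `*` cases collapse to `None`.
#[test]
fn option_star_or_merging() {
    let missing: OptionStarOr<u32> = OptionStarOr::None;
    let star: OptionStarOr<u32> = OptionStarOr::Star;
    let value: OptionStarOr<u32> = OptionStarOr::Other(7);
    assert_eq!(missing.merge_star_and_none(), None);
    assert_eq!(star.merge_star_and_none(), None);
    assert_eq!(value.merge_star_and_none(), Some(7));
}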
impl<T> FromQueryParameter for OptionStarOr<T>
where
T: FromQueryParameter,
{
type Err = T::Err;
fn from_query_param(p: &str) -> Result<Self, Self::Err> {
match p {
"*" => Ok(OptionStarOr::Star),
s => T::from_query_param(s).map(OptionStarOr::Other),
}
}
}
impl<T, E> DeserializeFromValue<E> for OptionStarOr<T>
where
E: DeserializeError + MergeWithError<T::Err>,
T: FromQueryParameter,
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
) -> Result<Self, E> {
match value {
deserr::Value::String(s) => match s.as_str() {
"*" => Ok(OptionStarOr::Star),
s => match T::from_query_param(s) {
Ok(x) => Ok(OptionStarOr::Other(x)),
Err(e) => Err(unwrap_any(E::merge(None, e, location))),
},
},
_ => Err(unwrap_any(E::error::<V>(
None,
deserr::ErrorKind::IncorrectValueKind {
actual: value,
accepted: &[ValueKind::String],
},
location,
))),
}
}
}
/// A type representing the content of a query parameter that can either not exist, be equal to a star (*), or represent a list of other values
#[derive(Debug, Default, Clone)]
pub enum OptionStarOrList<T> {
#[default]
None,
Star,
List(Vec<T>),
}
impl<T> OptionStarOrList<T> {
pub fn is_some(&self) -> bool {
match self {
Self::None => false,
Self::Star => false,
Self::List(_) => true,
}
}
pub fn map<U, F: Fn(T) -> U>(self, map_f: F) -> OptionStarOrList<U> {
match self {
Self::None => OptionStarOrList::None,
Self::Star => OptionStarOrList::Star,
Self::List(xs) => OptionStarOrList::List(xs.into_iter().map(map_f).collect()),
}
}
pub fn try_map<U, E, F: Fn(T) -> Result<U, E>>(
self,
map_f: F,
) -> Result<OptionStarOrList<U>, E> {
match self {
Self::None => Ok(OptionStarOrList::None),
Self::Star => Ok(OptionStarOrList::Star),
Self::List(xs) => {
xs.into_iter().map(map_f).collect::<Result<Vec<_>, _>>().map(OptionStarOrList::List)
}
}
}
pub fn merge_star_and_none(self) -> Option<Vec<T>> {
match self {
Self::None | Self::Star => None,
Self::List(xs) => Some(xs),
}
}
pub fn push(&mut self, el: T) {
match self {
Self::None => *self = Self::List(vec![el]),
Self::Star => (),
Self::List(xs) => xs.push(el),
}
}
}
impl<T, E> DeserializeFromValue<E> for OptionStarOrList<T>
where
E: DeserializeError + MergeWithError<T::Err>,
T: FromQueryParameter,
{
fn deserialize_from_value<V: deserr::IntoValue>(
value: deserr::Value<V>,
location: deserr::ValuePointerRef,
) -> Result<Self, E> {
match value {
deserr::Value::String(s) => {
let mut error = None;
let mut is_star = false;
// CS::<String>::from_str is infallible
let cs = serde_cs::vec::CS::<String>::from_str(&s).unwrap();
let len_cs = cs.0.len();
let mut els = vec![];
for (i, el_str) in cs.into_iter().enumerate() {
if el_str == "*" {
is_star = true;
} else {
match T::from_query_param(&el_str) {
Ok(el) => {
els.push(el);
}
Err(e) => {
let location =
if len_cs > 1 { location.push_index(i) } else { location };
error = Some(E::merge(error, e, location)?);
}
}
}
}
if let Some(error) = error {
return Err(error);
}
if is_star {
Ok(OptionStarOrList::Star)
} else {
Ok(OptionStarOrList::List(els))
}
}
_ => Err(unwrap_any(E::error::<V>(
None,
deserr::ErrorKind::IncorrectValueKind {
actual: value,
accepted: &[ValueKind::String],
},
location,
))),
StarOr::Other(other) => serializer.serialize_str(other.deref()),
}
}
}

View File

@@ -1,4 +1,3 @@
use core::fmt;
use std::collections::HashSet;
use std::fmt::{Display, Write};
use std::str::FromStr;
@@ -10,7 +9,7 @@ use serde::{Deserialize, Serialize, Serializer};
use time::{Duration, OffsetDateTime};
use uuid::Uuid;
use crate::error::ResponseError;
use crate::error::{Code, ResponseError};
use crate::keys::Key;
use crate::settings::{Settings, Unchecked};
use crate::InstanceUid;
@@ -333,7 +332,7 @@ impl Display for Status {
}
impl FromStr for Status {
type Err = ParseTaskStatusError;
type Err = ResponseError;
fn from_str(status: &str) -> Result<Self, Self::Err> {
if status.eq_ignore_ascii_case("enqueued") {
@@ -347,28 +346,21 @@ impl FromStr for Status {
} else if status.eq_ignore_ascii_case("canceled") {
Ok(Status::Canceled)
} else {
Err(ParseTaskStatusError(status.to_owned()))
Err(ResponseError::from_msg(
format!(
"`{}` is not a status. Available status are {}.",
status,
enum_iterator::all::<Status>()
.map(|s| format!("`{s}`"))
.collect::<Vec<String>>()
.join(", ")
),
Code::BadRequest,
))
}
}
}
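A sketch of the resulting behaviour: parsing stays case-insensitive, and unknown values now surface the `ResponseError` built above (the `Enqueued` variant is assumed from the rest of the enum, which the hunk elides).
#[test]
fn status_parsing() {
    use std::str::FromStr;
    assert!(matches!(Status::from_str("Enqueued"), Ok(Status::Enqueued)));
    match Status::from_str("finished") {
        Err(e) => assert!(e.to_string().contains("is not a status")),
        Ok(_) => panic!("`finished` should not parse"),
    }
}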
#[derive(Debug)]
pub struct ParseTaskStatusError(pub String);
impl fmt::Display for ParseTaskStatusError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"`{}` is not a valid task status. Available statuses are {}.",
self.0,
enum_iterator::all::<Status>()
.map(|s| format!("`{s}`"))
.collect::<Vec<String>>()
.join(", ")
)
}
}
impl std::error::Error for ParseTaskStatusError {}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize, Sequence)]
#[serde(rename_all = "camelCase")]
pub enum Kind {
@@ -420,7 +412,7 @@ impl Display for Kind {
}
}
impl FromStr for Kind {
type Err = ParseTaskKindError;
type Err = ResponseError;
fn from_str(kind: &str) -> Result<Self, Self::Err> {
if kind.eq_ignore_ascii_case("indexCreation") {
@@ -446,32 +438,25 @@ impl FromStr for Kind {
} else if kind.eq_ignore_ascii_case("snapshotCreation") {
Ok(Kind::SnapshotCreation)
} else {
Err(ParseTaskKindError(kind.to_owned()))
Err(ResponseError::from_msg(
format!(
"`{}` is not a type. Available types are {}.",
kind,
enum_iterator::all::<Kind>()
.map(|k| format!(
"`{}`",
// by default serde is going to insert `"` around the value.
serde_json::to_string(&k).unwrap().trim_matches('"')
))
.collect::<Vec<String>>()
.join(", ")
),
Code::BadRequest,
))
}
}
}
#[derive(Debug)]
pub struct ParseTaskKindError(pub String);
impl fmt::Display for ParseTaskKindError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"`{}` is not a valid task type. Available types are {}.",
self.0,
enum_iterator::all::<Kind>()
.map(|k| format!(
"`{}`",
// by default serde is going to insert `"` around the value.
serde_json::to_string(&k).unwrap().trim_matches('"')
))
.collect::<Vec<String>>()
.join(", ")
)
}
}
impl std::error::Error for ParseTaskKindError {}
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum Details {
DocumentAdditionOrUpdate { received_documents: u64, indexed_documents: Option<u64> },

View File

@@ -4,10 +4,11 @@ description = "Meilisearch HTTP server"
edition = "2021"
license = "MIT"
name = "meilisearch"
version = "1.0.0"
version = "0.30.1"
[dependencies]
actix-cors = "0.6.3"
actix-governor = "0.3.2"
actix-http = { version = "3.2.2", default-features = false, features = ["compress-brotli", "compress-gzip", "rustls"] }
actix-web = { version = "4.2.1", default-features = false, features = ["macros", "compress-brotli", "compress-gzip", "cookies", "rustls"] }
actix-web-static-files = { git = "https://github.com/kilork/actix-web-static-files.git", rev = "2d3b6160", optional = true }
@@ -19,7 +20,6 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", "
bytes = "1.2.1"
clap = { version = "4.0.9", features = ["derive", "env"] }
crossbeam-channel = "0.5.6"
deserr = "0.1.5"
dump = { path = "../dump" }
either = "1.8.0"
env_logger = "0.9.1"
@@ -55,6 +55,7 @@ rustls = "0.20.6"
rustls-pemfile = "1.0.1"
segment = { version = "0.2.1", optional = true }
serde = { version = "1.0.145", features = ["derive"] }
serde-cs = "0.2.4"
serde_json = { version = "1.0.85", features = ["preserve_order"] }
sha2 = "0.10.6"
siphasher = "0.3.10"
@@ -71,14 +72,11 @@ toml = "0.5.9"
uuid = { version = "1.1.2", features = ["serde", "v4"] }
walkdir = "2.3.2"
yaup = "0.2.0"
serde_urlencoded = "0.7.1"
actix-utils = "3.0.1"
[dev-dependencies]
actix-rt = "2.7.0"
assert-json-diff = "2.0.2"
brotli = "3.3.4"
insta = "1.19.1"
manifest-dir-macros = "0.1.16"
maplit = "1.0.2"
meili-snap = {path = "../meili-snap"}
@@ -108,5 +106,5 @@ japanese = ["meilisearch-types/japanese"]
thai = ["meilisearch-types/thai"]
[package.metadata.mini-dashboard]
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.5/build.zip"
sha1 = "6fe959b78511b32e9ff857fd9fd31740633b9fce"
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.4/build.zip"
sha1 = "b53c2edb51d4ce1984d5586333b91c4ad3a1b4e4"

View File

@@ -7,7 +7,7 @@ use serde_json::Value;
use super::{find_user_id, Analytics, DocumentDeletionKind};
use crate::routes::indexes::documents::UpdateDocumentsQuery;
use crate::routes::tasks::TasksFilterQuery;
use crate::routes::tasks::TasksFilterQueryRaw;
use crate::Opt;
pub struct MockAnalytics {
@@ -58,6 +58,6 @@ impl Analytics for MockAnalytics {
_request: &HttpRequest,
) {
}
fn get_tasks(&self, _query: &TasksFilterQuery, _request: &HttpRequest) {}
fn get_tasks(&self, _query: &TasksFilterQueryRaw, _request: &HttpRequest) {}
fn health_seen(&self, _request: &HttpRequest) {}
}

View File

@@ -15,7 +15,7 @@ use platform_dirs::AppDirs;
use serde_json::Value;
use crate::routes::indexes::documents::UpdateDocumentsQuery;
use crate::routes::tasks::TasksFilterQuery;
use crate::routes::tasks::TasksFilterQueryRaw;
// if we are in debug mode OR the analytics feature is disabled
// the `SegmentAnalytics` points to the mock instead of the real analytics
@@ -94,7 +94,7 @@ pub trait Analytics: Sync + Send {
);
// this method should be called to aggregate the get tasks requests.
fn get_tasks(&self, query: &TasksFilterQuery, request: &HttpRequest);
fn get_tasks(&self, query: &TasksFilterQueryRaw, request: &HttpRequest);
// this method should be called to aggregate a health check request
fn health_seen(&self, request: &HttpRequest);

View File

@@ -25,9 +25,11 @@ use uuid::Uuid;
use super::{config_user_id_path, DocumentDeletionKind, MEILISEARCH_CONFIG_PATH};
use crate::analytics::Analytics;
use crate::option::{default_http_addr, IndexerOpts, MaxMemory, MaxThreads, ScheduleSnapshot};
use crate::option::{
default_http_addr, IndexerOpts, MaxMemory, MaxThreads, RateLimiterConfig, SchedulerConfig,
};
use crate::routes::indexes::documents::UpdateDocumentsQuery;
use crate::routes::tasks::TasksFilterQuery;
use crate::routes::tasks::TasksFilterQueryRaw;
use crate::routes::{create_all_stats, Stats};
use crate::search::{
SearchQuery, SearchResult, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
@@ -195,7 +197,7 @@ impl super::Analytics for SegmentAnalytics {
let _ = self.sender.try_send(AnalyticsMsg::AggregateUpdateDocuments(aggregate));
}
fn get_tasks(&self, query: &TasksFilterQuery, request: &HttpRequest) {
fn get_tasks(&self, query: &TasksFilterQueryRaw, request: &HttpRequest) {
let aggregate = TasksAggregator::from_query(query, request);
let _ = self.sender.try_send(AnalyticsMsg::AggregateTasks(aggregate));
}
@@ -220,12 +222,16 @@ struct Infos {
ignore_missing_dump: bool,
ignore_dump_if_db_exists: bool,
import_snapshot: bool,
schedule_snapshot: Option<u64>,
schedule_snapshot: bool,
snapshot_dir: bool,
snapshot_interval_sec: u64,
ignore_missing_snapshot: bool,
ignore_snapshot_if_db_exists: bool,
http_addr: bool,
max_index_size: Byte,
max_task_db_size: Byte,
http_payload_size_limit: Byte,
disable_auto_batching: bool,
log_level: String,
max_indexing_memory: MaxMemory,
max_indexing_threads: MaxThreads,
@@ -237,6 +243,16 @@ struct Infos {
ssl_require_auth: bool,
ssl_resumption: bool,
ssl_tickets: bool,
rate_limiting_disable_all: bool,
rate_limiting_disable_global: bool,
rate_limiting_global_pool: u32,
rate_limiting_global_cooldown_ns: u64,
rate_limiting_disable_ip: bool,
rate_limiting_ip_pool: u32,
rate_limiting_ip_cooldown_ns: u64,
rate_limiting_disable_api_key: bool,
rate_limiting_api_key_pool: u32,
rate_limiting_api_key_cooldown_ns: u64,
}
impl From<Opt> for Infos {
@@ -249,8 +265,8 @@ impl From<Opt> for Infos {
http_addr,
master_key: _,
env,
max_index_size: _,
max_task_db_size: _,
max_index_size,
max_task_db_size,
http_payload_size_limit,
ssl_cert_path,
ssl_key_path,
@@ -264,23 +280,40 @@ impl From<Opt> for Infos {
ignore_snapshot_if_db_exists,
snapshot_dir,
schedule_snapshot,
snapshot_interval_sec,
import_dump,
ignore_missing_dump,
ignore_dump_if_db_exists,
dump_dir,
log_level,
indexer_options,
scheduler_options,
config_file_path,
generate_master_key: _,
rate_limiter_options,
#[cfg(all(not(debug_assertions), feature = "analytics"))]
no_analytics: _,
} = options;
let schedule_snapshot = match schedule_snapshot {
ScheduleSnapshot::Disabled => None,
ScheduleSnapshot::Enabled(interval) => Some(interval),
};
let IndexerOpts { max_indexing_memory, max_indexing_threads } = indexer_options;
let SchedulerConfig { disable_auto_batching } = scheduler_options;
let IndexerOpts {
log_every_n: _,
max_nb_chunks: _,
max_indexing_memory,
max_indexing_threads,
} = indexer_options;
let RateLimiterConfig {
rate_limiting_disable_all,
rate_limiting_disable_global,
rate_limiting_global_pool,
rate_limiting_global_cooldown_ns,
rate_limiting_disable_ip,
rate_limiting_ip_pool,
rate_limiting_ip_cooldown_ns,
rate_limiting_disable_api_key,
rate_limiting_api_key_pool,
rate_limiting_api_key_cooldown_ns,
} = rate_limiter_options;
// We're going to override every piece of sensitive information.
// We consider information sensitive if it contains a path, an address, or a key.
@@ -294,11 +327,15 @@ impl From<Opt> for Infos {
import_snapshot: import_snapshot.is_some(),
schedule_snapshot,
snapshot_dir: snapshot_dir != PathBuf::from("snapshots/"),
snapshot_interval_sec,
ignore_missing_snapshot,
ignore_snapshot_if_db_exists,
http_addr: http_addr != default_http_addr(),
max_index_size,
max_task_db_size,
http_payload_size_limit,
log_level: log_level.to_string(),
disable_auto_batching,
log_level,
max_indexing_memory,
max_indexing_threads,
with_configuration_file: config_file_path.is_some(),
@@ -309,6 +346,16 @@ impl From<Opt> for Infos {
ssl_require_auth,
ssl_resumption,
ssl_tickets,
rate_limiting_disable_all,
rate_limiting_disable_global,
rate_limiting_global_pool,
rate_limiting_global_cooldown_ns,
rate_limiting_disable_ip,
rate_limiting_ip_pool,
rate_limiting_ip_cooldown_ns,
rate_limiting_disable_api_key,
rate_limiting_api_key_pool,
rate_limiting_api_key_cooldown_ns,
}
}
}
@@ -868,21 +915,21 @@ pub struct TasksAggregator {
}
impl TasksAggregator {
pub fn from_query(query: &TasksFilterQuery, request: &HttpRequest) -> Self {
pub fn from_query(query: &TasksFilterQueryRaw, request: &HttpRequest) -> Self {
Self {
timestamp: Some(OffsetDateTime::now_utc()),
user_agents: extract_user_agents(request).into_iter().collect(),
filtered_by_uid: query.uids.is_some(),
filtered_by_index_uid: query.index_uids.is_some(),
filtered_by_type: query.types.is_some(),
filtered_by_status: query.statuses.is_some(),
filtered_by_canceled_by: query.canceled_by.is_some(),
filtered_by_before_enqueued_at: query.before_enqueued_at.is_some(),
filtered_by_after_enqueued_at: query.after_enqueued_at.is_some(),
filtered_by_before_started_at: query.before_started_at.is_some(),
filtered_by_after_started_at: query.after_started_at.is_some(),
filtered_by_before_finished_at: query.before_finished_at.is_some(),
filtered_by_after_finished_at: query.after_finished_at.is_some(),
filtered_by_uid: query.common.uids.is_some(),
filtered_by_index_uid: query.common.index_uids.is_some(),
filtered_by_type: query.common.types.is_some(),
filtered_by_status: query.common.statuses.is_some(),
filtered_by_canceled_by: query.common.canceled_by.is_some(),
filtered_by_before_enqueued_at: query.dates.before_enqueued_at.is_some(),
filtered_by_after_enqueued_at: query.dates.after_enqueued_at.is_some(),
filtered_by_before_started_at: query.dates.before_started_at.is_some(),
filtered_by_after_started_at: query.dates.after_started_at.is_some(),
filtered_by_before_finished_at: query.dates.before_finished_at.is_some(),
filtered_by_after_finished_at: query.dates.after_finished_at.is_some(),
total_received: 1,
}
}

View File

@@ -2,7 +2,7 @@ use actix_web as aweb;
use aweb::error::{JsonPayloadError, QueryPayloadError};
use meilisearch_types::document_formats::{DocumentFormatError, PayloadType};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError};
use meilisearch_types::index_uid::IndexUidFormatError;
use serde_json::Value;
use tokio::task::JoinError;
@@ -24,10 +24,10 @@ pub enum MeilisearchHttpError {
MissingPayload(PayloadType),
#[error("The provided payload reached the size limit.")]
PayloadTooLarge,
#[error("Two indexes must be given for each swap. The list `[{}]` contains {} indexes.",
.0.iter().map(|uid| format!("\"{uid}\"")).collect::<Vec<_>>().join(", "), .0.len()
#[error("Two indexes must be given for each swap. The list `{:?}` contains {} indexes.",
.0, .0.len()
)]
SwapIndexPayloadWrongLength(Vec<IndexUid>),
SwapIndexPayloadWrongLength(Vec<String>),
#[error(transparent)]
IndexUid(#[from] IndexUidFormatError),
#[error(transparent)]
@@ -55,9 +55,9 @@ impl ErrorCode for MeilisearchHttpError {
MeilisearchHttpError::MissingPayload(_) => Code::MissingPayload,
MeilisearchHttpError::InvalidContentType(_, _) => Code::InvalidContentType,
MeilisearchHttpError::DocumentNotFound(_) => Code::DocumentNotFound,
MeilisearchHttpError::InvalidExpression(_, _) => Code::InvalidSearchFilter,
MeilisearchHttpError::InvalidExpression(_, _) => Code::Filter,
MeilisearchHttpError::PayloadTooLarge => Code::PayloadTooLarge,
MeilisearchHttpError::SwapIndexPayloadWrongLength(_) => Code::InvalidSwapIndexes,
MeilisearchHttpError::SwapIndexPayloadWrongLength(_) => Code::BadRequest,
MeilisearchHttpError::IndexUid(e) => e.error_code(),
MeilisearchHttpError::SerdeJson(_) => Code::Internal,
MeilisearchHttpError::HeedError(_) => Code::Internal,

View File

@@ -17,7 +17,7 @@ impl ErrorCode for AuthenticationError {
fn error_code(&self) -> Code {
match self {
AuthenticationError::MissingAuthorizationHeader => Code::MissingAuthorizationHeader,
AuthenticationError::InvalidToken => Code::InvalidApiKey,
AuthenticationError::InvalidToken => Code::InvalidToken,
AuthenticationError::IrretrievableState => Code::Internal,
AuthenticationError::MissingMasterKey => Code::MissingMasterKey,
}

View File

@@ -1,78 +0,0 @@
use std::fmt::Debug;
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};
use actix_web::dev::Payload;
use actix_web::web::Json;
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use futures::ready;
use meilisearch_types::error::{ErrorCode, ResponseError};
/// Extractor for typed data from Json request payloads
/// deserialised by deserr.
///
/// # Extractor
/// To extract typed data from a request body, the inner type `T` must implement the
/// [`deserr::DeserializeFromValue<E>`] trait. The inner type `E` must implement the
/// [`ErrorCode`](meilisearch_error::ErrorCode) trait.
#[derive(Debug)]
pub struct ValidatedJson<T, E>(pub T, PhantomData<*const E>);
impl<T, E> ValidatedJson<T, E> {
pub fn new(data: T) -> Self {
ValidatedJson(data, PhantomData)
}
pub fn into_inner(self) -> T {
self.0
}
}
impl<T, E> FromRequest for ValidatedJson<T, E>
where
E: DeserializeError + ErrorCode + std::error::Error + 'static,
T: DeserializeFromValue<E>,
{
type Error = actix_web::Error;
type Future = ValidatedJsonExtractFut<T, E>;
#[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
ValidatedJsonExtractFut {
fut: Json::<serde_json::Value>::from_request(req, payload),
_phantom: PhantomData,
}
}
}
pub struct ValidatedJsonExtractFut<T, E> {
fut: <Json<serde_json::Value> as FromRequest>::Future,
_phantom: PhantomData<*const (T, E)>,
}
impl<T, E> Future for ValidatedJsonExtractFut<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
type Output = Result<ValidatedJson<T, E>, actix_web::Error>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let ValidatedJsonExtractFut { fut, .. } = self.get_mut();
let fut = Pin::new(fut);
let res = ready!(fut.poll(cx));
let res = match res {
Err(err) => Err(err),
Ok(data) => match deserr::deserialize::<_, _, E>(data.into_inner()) {
Ok(data) => Ok(ValidatedJson::new(data)),
Err(e) => Err(ResponseError::from(e).into()),
},
};
Poll::Ready(res)
}
}
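For context (not part of the patch), a sketch of how a route handler might consume this extractor. The handler name and response body are illustrative, and it assumes `CreateApiKey`/`DeserrJsonError` from `meilisearch_types` as the typed payload and error.
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::keys::CreateApiKey;
pub async fn create_api_key_handler(
    body: ValidatedJson<CreateApiKey, DeserrJsonError>,
) -> Result<actix_web::HttpResponse, ResponseError> {
    // Deserialization and error mapping already happened inside the extractor.
    let create = body.into_inner();
    Ok(actix_web::HttpResponse::Created().json(serde_json::json!({ "name": create.name })))
}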

View File

@@ -1,6 +1,4 @@
pub mod payload;
#[macro_use]
pub mod authentication;
pub mod json;
pub mod query_parameters;
pub mod sequential_extractor;

View File

@@ -1,70 +0,0 @@
//! A module to parse query parameters with deserr
use std::marker::PhantomData;
use std::{fmt, ops};
use actix_http::Payload;
use actix_utils::future::{err, ok, Ready};
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct QueryParameter<T, E>(pub T, PhantomData<*const E>);
impl<T, E> QueryParameter<T, E> {
/// Unwrap into inner `T` value.
pub fn into_inner(self) -> T {
self.0
}
}
impl<T, E> QueryParameter<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
pub fn from_query(query_str: &str) -> Result<Self, actix_web::Error> {
let value = serde_urlencoded::from_str::<serde_json::Value>(query_str)
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
match deserr::deserialize::<_, _, E>(value) {
Ok(data) => Ok(QueryParameter(data, PhantomData)),
Err(e) => Err(ResponseError::from(e).into()),
}
}
}
impl<T, E> ops::Deref for QueryParameter<T, E> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
impl<T, E> ops::DerefMut for QueryParameter<T, E> {
fn deref_mut(&mut self) -> &mut T {
&mut self.0
}
}
impl<T: fmt::Display, E> fmt::Display for QueryParameter<T, E> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl<T, E> FromRequest for QueryParameter<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
type Error = actix_web::Error;
type Future = Ready<Result<Self, actix_web::Error>>;
#[inline]
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
QueryParameter::from_query(req.query_string()).map(ok).unwrap_or_else(err)
}
}

View File

@@ -16,14 +16,19 @@ pub mod route_metrics;
use std::fs::File;
use std::io::{BufReader, BufWriter};
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use actix_cors::Cors;
use actix_governor::{
GlobalKeyExtractor, Governor, GovernorConfigBuilder, KeyExtractor, PeerIpKeyExtractor,
};
use actix_http::body::MessageBody;
use actix_web::dev::{ServiceFactory, ServiceResponse};
use actix_web::error::JsonPayloadError;
use actix_web::middleware::Condition;
use actix_web::web::Data;
use actix_web::{middleware, web, HttpRequest};
use analytics::Analytics;
@@ -41,10 +46,12 @@ use meilisearch_types::tasks::KindWithContent;
use meilisearch_types::versioning::{check_version_file, create_version_file};
use meilisearch_types::{compression, milli, VERSION_FILE_NAME};
pub use option::Opt;
use option::ScheduleSnapshot;
use option::RateLimiterConfig;
use crate::error::MeilisearchHttpError;
pub static AUTOBATCHING_ENABLED: AtomicBool = AtomicBool::new(false);
/// Check if a db is empty. It does not provide any information on the
/// validity of the data in it.
/// We consider a database as non-empty when it's a non-empty directory.
@@ -76,6 +83,7 @@ pub fn create_app(
InitError = (),
>,
> {
let rate_limiters = configure_rate_limiters(&opt.rate_limiter_options);
let app = actix_web::App::new()
.configure(|s| {
configure_data(
@@ -86,7 +94,7 @@ pub fn create_app(
analytics.clone(),
)
})
.configure(routes::configure)
.configure(|cfg| routes::configure(cfg, rate_limiters))
.configure(|s| dashboard(s, enable_dashboard));
#[cfg(feature = "metrics")]
let app = app.configure(|s| configure_metrics_route(s, opt.enable_metrics_route));
@@ -170,8 +178,8 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc<IndexScheduler>, Auth
// We create a loop in a thread that registers snapshotCreation tasks
let index_scheduler = Arc::new(index_scheduler);
if let ScheduleSnapshot::Enabled(snapshot_delay) = opt.schedule_snapshot {
let snapshot_delay = Duration::from_secs(snapshot_delay);
if opt.schedule_snapshot {
let snapshot_delay = Duration::from_secs(opt.snapshot_interval_sec);
let index_scheduler = index_scheduler.clone();
thread::Builder::new()
.name(String::from("register-snapshot-tasks"))
@@ -207,7 +215,7 @@ fn open_or_create_database_unchecked(
task_db_size: opt.max_task_db_size.get_bytes() as usize,
index_size: opt.max_index_size.get_bytes() as usize,
indexer_config: (&opt.indexer_options).try_into()?,
autobatching_enabled: true,
autobatching_enabled: !opt.scheduler_options.disable_auto_batching,
})?)
};
@@ -384,6 +392,123 @@ pub fn configure_data(
);
}
/// Helper struct to implement rate-limiting depending on the API key.
#[derive(Clone, Copy)]
pub struct ApiKeyExtractor;
impl KeyExtractor for ApiKeyExtractor {
/// `Some(api_key)` for requests containing an API key, `None` otherwise
type Key = Option<String>;
/// Error indicating that the request header could not be converted to a `String` representation.
type KeyExtractionError = actix_http::header::ToStrError;
/// Extracts an API key from a request header, if one is present.
///
/// Returns Ok(None) if there is no authorization header.
///
/// # Errors
///
/// - `Self::KeyExtractionError`: if an authorization header is present, but not representable as a `String` (e.g. non-UTF8)
fn extract(
&self,
req: &actix_web::dev::ServiceRequest,
) -> Result<Self::Key, Self::KeyExtractionError> {
let key = req.headers().get("Authorization").map(|token| token.to_str()).transpose()?;
Ok(key.and_then(|token| token.strip_prefix("Bearer ")).map(|key| key.trim().to_owned()))
}
}
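A sketch of the extraction semantics using actix-web's test helpers (the header values are illustrative): a `Bearer` token is stripped down to the bare key, and a missing header yields `Ok(None)`.
#[test]
fn api_key_extraction() {
    use actix_web::test::TestRequest;
    let with_key = TestRequest::default()
        .insert_header(("Authorization", "Bearer my-api-key"))
        .to_srv_request();
    assert_eq!(ApiKeyExtractor.extract(&with_key).unwrap(), Some("my-api-key".to_string()));
    let anonymous = TestRequest::default().to_srv_request();
    assert_eq!(ApiKeyExtractor.extract(&anonymous).unwrap(), None);
}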
/// Encapsulates a conditionally enabled rate-limiter.
///
/// This struct can be turned into an Actix middleware using [`Self::into_middleware`],
/// allowing it to be added to selected routes.
pub struct RateLimiter<K: KeyExtractor> {
enabled: bool,
governor: Governor<K>,
}
/// The available rate limiters.
pub struct RateLimiters {
/// Limits globally regardless of the origin of the query.
pub global: RateLimiter<GlobalKeyExtractor>,
/// Limits depending on the IP address of origin.
pub ip: RateLimiter<PeerIpKeyExtractor>,
/// Limits depending on the API Key in the Authorization header.
pub api_key: RateLimiter<ApiKeyExtractor>,
}
impl<K: KeyExtractor> RateLimiter<K> {
fn disabled(key_extractor: K) -> Self {
let governor = Governor::new(
&GovernorConfigBuilder::default()
.methods(vec![])
.key_extractor(key_extractor)
.finish()
.unwrap(),
);
Self { enabled: false, governor }
}
fn enabled(key_extractor: K, pool_size: u32, cooldown_ns: u64) -> Self {
let governor = Governor::new(
&GovernorConfigBuilder::default()
.key_extractor(key_extractor)
.burst_size(pool_size)
.per_nanosecond(cooldown_ns)
.use_headers()
.finish()
.unwrap(),
);
Self { enabled: true, governor }
}
/// Turns this into a middleware that applies rate limiting only if the rate limiter is enabled.
pub fn into_middleware(self) -> Condition<Governor<K>> {
Condition::new(self.enabled, self.governor)
}
}
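A usage sketch, mirroring the search route wiring that appears further down in this diff; `cfg`, `rate_limiters`, and `search_handler` are assumed to be in scope:

cfg.service(
    web::resource("/search")
        .wrap(rate_limiters.global.into_middleware())
        .wrap(rate_limiters.ip.into_middleware())
        .wrap(rate_limiters.api_key.into_middleware())
        .route(web::get().to(search_handler)),
);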
fn configure_rate_limiters(rate_limiter_options: &RateLimiterConfig) -> RateLimiters {
if rate_limiter_options.rate_limiting_disable_all {
return RateLimiters {
global: RateLimiter::disabled(GlobalKeyExtractor),
ip: RateLimiter::disabled(PeerIpKeyExtractor),
api_key: RateLimiter::disabled(ApiKeyExtractor),
};
}
let global = if rate_limiter_options.rate_limiting_disable_global {
RateLimiter::disabled(GlobalKeyExtractor)
} else {
RateLimiter::enabled(
GlobalKeyExtractor,
rate_limiter_options.rate_limiting_global_pool,
rate_limiter_options.rate_limiting_global_cooldown_ns,
)
};
let ip = if rate_limiter_options.rate_limiting_disable_ip {
RateLimiter::disabled(PeerIpKeyExtractor)
} else {
RateLimiter::enabled(
PeerIpKeyExtractor,
rate_limiter_options.rate_limiting_ip_pool,
rate_limiter_options.rate_limiting_ip_cooldown_ns,
)
};
let api_key = if rate_limiter_options.rate_limiting_disable_api_key {
RateLimiter::disabled(ApiKeyExtractor)
} else {
RateLimiter::enabled(
ApiKeyExtractor,
rate_limiter_options.rate_limiting_api_key_pool,
rate_limiter_options.rate_limiting_api_key_cooldown_ns,
)
};
RateLimiters { global, ip, api_key }
}
#[cfg(feature = "mini-dashboard")]
pub fn dashboard(config: &mut web::ServiceConfig, enable_frontend: bool) {
use actix_web::HttpResponse;

View File

@@ -16,7 +16,11 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
/// does all the setup before meilisearch is launched
fn setup(opt: &Opt) -> anyhow::Result<()> {
let mut log_builder = env_logger::Builder::new();
log_builder.parse_filters(&opt.log_level.to_string());
log_builder.parse_filters(&opt.log_level);
if opt.log_level == "info" {
// when the log level is info, only allow warn-level logs from the milli crate
log_builder.filter_module("milli", log::LevelFilter::Warn);
}
log_builder.init();
@@ -29,12 +33,17 @@ async fn main() -> anyhow::Result<()> {
setup(&opt)?;
if opt.generate_master_key {
println!("{}", generate_master_key());
return Ok(());
}
match (opt.env.as_ref(), &opt.master_key) {
("production", Some(master_key)) if master_key.len() < MASTER_KEY_MIN_SIZE => {
anyhow::bail!(
"In production mode, the master key must be of at least {MASTER_KEY_MIN_SIZE} bytes, but the provided key is only {} bytes long
"In production mode, the master key must be of at least {MASTER_KEY_MIN_SIZE} characters, but the provided key is only {} characters long
We generated a secure master key for you (you can safely copy this token):
We generated a secure Master Key for you (you can safely copy this token):
>> export MEILI_MASTER_KEY={} <<",
master_key.len(),
@@ -45,7 +54,7 @@ We generated a secure master key for you (you can safely copy this token):
anyhow::bail!(
"In production mode, you must provide a master key to secure your instance. It can be specified via the MEILI_MASTER_KEY environment variable or the --master-key launch option.
We generated a secure master key for you (you can safely copy this token):
We generated a secure Master Key for you (you can safely copy this token):
>> export MEILI_MASTER_KEY={} <<
",
@@ -164,21 +173,24 @@ Anonymous telemetry:\t\"Enabled\""
match (opt.env.as_ref(), &opt.master_key) {
("production", Some(_)) => {
eprintln!("A master key has been set. Requests to Meilisearch won't be authorized unless you provide an authentication key.");
eprintln!("A Master Key has been set. Requests to Meilisearch won't be authorized unless you provide an authentication key.");
}
("development", Some(master_key)) => {
eprintln!("A master key has been set. Requests to Meilisearch won't be authorized unless you provide an authentication key.");
eprintln!("A Master Key has been set. Requests to Meilisearch won't be authorized unless you provide an authentication key.");
if master_key.len() < MASTER_KEY_MIN_SIZE {
eprintln!();
log::warn!("The provided master key is too short (< {MASTER_KEY_MIN_SIZE} bytes)");
eprintln!("A master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to the production environment.");
log::warn!(
"The provided Master Key is too short (< {MASTER_KEY_MIN_SIZE} characters)"
);
eprintln!("A Master Key of at least {MASTER_KEY_MIN_SIZE} characters will be required when switching to the production environment.");
eprintln!("Restart Meilisearch with the `--generate-master-key` flag to generate a secure Master Key you can use");
}
}
("development", None) => {
log::warn!("No master key found; The server will accept unidentified requests");
eprintln!("If you need some protection in development mode, please export a key:\n\nexport MEILI_MASTER_KEY={}", generate_master_key());
eprintln!("\nA master key of at least {MASTER_KEY_MIN_SIZE} bytes will be required when switching to the production environment.");
eprintln!("\nA Master Key of at least {MASTER_KEY_MIN_SIZE} characters will be required when switching to the production environment.");
}
// unreachable because Opt::try_build above would have failed already if any other value had been produced
_ => unreachable!(),

View File

@@ -1,7 +1,6 @@
use std::convert::TryFrom;
use std::env::VarError;
use std::ffi::OsStr;
use std::fmt::Display;
use std::io::{BufReader, Read};
use std::num::ParseIntError;
use std::ops::Deref;
@@ -29,6 +28,8 @@ const MEILI_MASTER_KEY: &str = "MEILI_MASTER_KEY";
const MEILI_ENV: &str = "MEILI_ENV";
#[cfg(all(not(debug_assertions), feature = "analytics"))]
const MEILI_NO_ANALYTICS: &str = "MEILI_NO_ANALYTICS";
const MEILI_MAX_INDEX_SIZE: &str = "MEILI_MAX_INDEX_SIZE";
const MEILI_MAX_TASK_DB_SIZE: &str = "MEILI_MAX_TASK_DB_SIZE";
const MEILI_HTTP_PAYLOAD_SIZE_LIMIT: &str = "MEILI_HTTP_PAYLOAD_SIZE_LIMIT";
const MEILI_SSL_CERT_PATH: &str = "MEILI_SSL_CERT_PATH";
const MEILI_SSL_KEY_PATH: &str = "MEILI_SSL_KEY_PATH";
@@ -42,11 +43,29 @@ const MEILI_IGNORE_MISSING_SNAPSHOT: &str = "MEILI_IGNORE_MISSING_SNAPSHOT";
const MEILI_IGNORE_SNAPSHOT_IF_DB_EXISTS: &str = "MEILI_IGNORE_SNAPSHOT_IF_DB_EXISTS";
const MEILI_SNAPSHOT_DIR: &str = "MEILI_SNAPSHOT_DIR";
const MEILI_SCHEDULE_SNAPSHOT: &str = "MEILI_SCHEDULE_SNAPSHOT";
const MEILI_SNAPSHOT_INTERVAL_SEC: &str = "MEILI_SNAPSHOT_INTERVAL_SEC";
const MEILI_IMPORT_DUMP: &str = "MEILI_IMPORT_DUMP";
const MEILI_IGNORE_MISSING_DUMP: &str = "MEILI_IGNORE_MISSING_DUMP";
const MEILI_IGNORE_DUMP_IF_DB_EXISTS: &str = "MEILI_IGNORE_DUMP_IF_DB_EXISTS";
const MEILI_DUMP_DIR: &str = "MEILI_DUMP_DIR";
const MEILI_LOG_LEVEL: &str = "MEILI_LOG_LEVEL";
const MEILI_GENERATE_MASTER_KEY: &str = "MEILI_GENERATE_MASTER_KEY";
// rate limiting
const MEILI_RATE_LIMITING_DISABLE_ALL: &str = "MEILI_RATE_LIMITING_DISABLE_ALL";
const MEILI_RATE_LIMITING_DISABLE_GLOBAL: &str = "MEILI_RATE_LIMITING_DISABLE_GLOBAL";
const MEILI_RATE_LIMITING_DISABLE_IP: &str = "MEILI_RATE_LIMITING_DISABLE_IP";
const MEILI_RATE_LIMITING_DISABLE_API_KEY: &str = "MEILI_RATE_LIMITING_DISABLE_API_KEY";
const MEILI_RATE_LIMITING_GLOBAL_POOL: &str = "MEILI_RATE_LIMITING_GLOBAL_POOL";
const MEILI_RATE_LIMITING_IP_POOL: &str = "MEILI_RATE_LIMITING_IP_POOL";
const MEILI_RATE_LIMITING_API_KEY_POOL: &str = "MEILI_RATE_LIMITING_API_KEY_POOL";
const MEILI_RATE_LIMITING_GLOBAL_COOLDOWN_NS: &str = "MEILI_RATE_LIMITING_GLOBAL_COOLDOWN_NS";
const MEILI_RATE_LIMITING_IP_COOLDOWN_NS: &str = "MEILI_RATE_LIMITING_IP_COOLDOWN_NS";
const MEILI_RATE_LIMITING_API_KEY_COOLDOWN_NS: &str = "MEILI_RATE_LIMITING_API_KEY_COOLDOWN_NS";
#[cfg(feature = "metrics")]
const MEILI_ENABLE_METRICS_ROUTE: &str = "MEILI_ENABLE_METRICS_ROUTE";
@@ -54,80 +73,27 @@ const DEFAULT_CONFIG_FILE_PATH: &str = "./config.toml";
const DEFAULT_DB_PATH: &str = "./data.ms";
const DEFAULT_HTTP_ADDR: &str = "localhost:7700";
const DEFAULT_ENV: &str = "development";
const DEFAULT_MAX_INDEX_SIZE: &str = "100 GiB";
const DEFAULT_MAX_TASK_DB_SIZE: &str = "100 GiB";
const DEFAULT_HTTP_PAYLOAD_SIZE_LIMIT: &str = "100 MB";
const DEFAULT_SNAPSHOT_DIR: &str = "snapshots/";
const DEFAULT_SNAPSHOT_INTERVAL_SEC: u64 = 86400;
const DEFAULT_SNAPSHOT_INTERVAL_SEC_STR: &str = "86400";
const DEFAULT_DUMP_DIR: &str = "dumps/";
const DEFAULT_LOG_LEVEL: &str = "INFO";
const MEILI_MAX_INDEXING_MEMORY: &str = "MEILI_MAX_INDEXING_MEMORY";
const MEILI_MAX_INDEXING_THREADS: &str = "MEILI_MAX_INDEXING_THREADS";
const DEFAULT_LOG_EVERY_N: usize = 100_000;
const DISABLE_AUTO_BATCHING: &str = "DISABLE_AUTO_BATCHING";
const DEFAULT_LOG_EVERY_N: usize = 100000;
// Each environment (index and task-db) is taking space in the virtual address space.
//
// The size of the virtual address space is limited by the OS. About 100TB for Linux and about 10TB for Windows.
// This means that the number of indexes is limited to about 200 for Linux and about 20 for Windows.
pub const INDEX_SIZE: u64 = 536_870_912_000; // 500 GiB
pub const TASK_DB_SIZE: u64 = 10_737_418_240; // 10 GiB
const DEFAULT_GLOBAL_RATE_LIMITING_POOL: u32 = 100_000;
const DEFAULT_GLOBAL_RATE_LIMITING_COOLDOWN_NS: u64 = 50_000; // pool replenishes in 5s
#[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum LogLevel {
Off,
Error,
Warn,
#[default]
Info,
Debug,
Trace,
}
const DEFAULT_IP_RATE_LIMITING_POOL: u32 = 200;
const DEFAULT_IP_RATE_LIMITING_COOLDOWN_NS: u64 = 50_000_000; // pool replenishes in 10s
#[derive(Debug)]
pub struct LogLevelError {
pub given_log_level: String,
}
impl Display for LogLevelError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(
f,
"Log level '{}' is invalid. Accepted values are 'OFF', 'ERROR', 'WARN', 'INFO', 'DEBUG', and 'TRACE'.",
self.given_log_level
)
}
}
impl Display for LogLevel {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
LogLevel::Off => Display::fmt("OFF", f),
LogLevel::Error => Display::fmt("ERROR", f),
LogLevel::Warn => Display::fmt("WARN", f),
LogLevel::Info => Display::fmt("INFO", f),
LogLevel::Debug => Display::fmt("DEBUG", f),
LogLevel::Trace => Display::fmt("TRACE", f),
}
}
}
impl std::error::Error for LogLevelError {}
impl FromStr for LogLevel {
type Err = LogLevelError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.trim().to_lowercase().as_str() {
"off" => Ok(LogLevel::Off),
"error" => Ok(LogLevel::Error),
"warn" => Ok(LogLevel::Warn),
"info" => Ok(LogLevel::Info),
"debug" => Ok(LogLevel::Debug),
"trace" => Ok(LogLevel::Trace),
_ => Err(LogLevelError { given_log_level: s.to_owned() }),
}
}
}
const DEFAULT_API_KEY_RATE_LIMITING_POOL: u32 = 10_000;
const DEFAULT_API_KEY_RATE_LIMITING_COOLDOWN_NS: u64 = 500_000; // pool replenishes in 5s
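As a quick sanity check on these rate-limiting defaults, an empty pool refills completely after pool × cooldown nanoseconds; the helper below is a small illustrative sketch, not part of the changeset:

/// Time, in seconds, for an empty pool to refill completely.
fn full_replenish_secs(pool: u32, cooldown_ns: u64) -> f64 {
    (pool as u64 * cooldown_ns) as f64 / 1_000_000_000.0
}

// full_replenish_secs(100_000, 50_000)  == 5.0  (global pool)
// full_replenish_secs(200, 50_000_000)  == 10.0 (per-IP pool)
// full_replenish_secs(10_000, 500_000)  == 5.0  (per-API-key pool)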
#[derive(Debug, Clone, Parser, Deserialize)]
#[clap(version, next_display_order = None)]
@@ -163,14 +129,14 @@ pub struct Opt {
pub no_analytics: bool,
/// Sets the maximum size of the index. Value must be given in bytes or explicitly stating a base unit (for instance: 107374182400, '107.7Gb', or '107374 Mb').
#[clap(skip = default_max_index_size())]
#[serde(skip, default = "default_max_index_size")]
#[clap(long, env = MEILI_MAX_INDEX_SIZE, default_value_t = default_max_index_size())]
#[serde(default = "default_max_index_size")]
pub max_index_size: Byte,
/// Sets the maximum size of the task database. Value must be given in bytes or explicitly stating a
/// base unit (for instance: 107374182400, '107.7Gb', or '107374 Mb').
#[clap(skip = default_max_task_db_size())]
#[serde(skip, default = "default_max_task_db_size")]
#[clap(long, env = MEILI_MAX_TASK_DB_SIZE, default_value_t = default_max_task_db_size())]
#[serde(default = "default_max_task_db_size")]
pub max_task_db_size: Byte,
/// Sets the maximum size of accepted payloads. Value must be given in bytes or explicitly stating a
@@ -247,11 +213,14 @@ pub struct Opt {
pub snapshot_dir: PathBuf,
/// Activates scheduled snapshots when provided. Snapshots are disabled by default.
///
/// When provided with a value, defines the interval between each snapshot, in seconds.
#[clap(long,env = MEILI_SCHEDULE_SNAPSHOT, num_args(0..=1), value_parser=parse_schedule_snapshot, default_value_t, default_missing_value=default_snapshot_interval_sec(), value_name = "SNAPSHOT_INTERVAL_SEC")]
#[serde(default, deserialize_with = "schedule_snapshot_deserialize")]
pub schedule_snapshot: ScheduleSnapshot,
#[clap(long, env = MEILI_SCHEDULE_SNAPSHOT)]
#[serde(default)]
pub schedule_snapshot: bool,
/// Defines the interval between each snapshot. Value must be given in seconds.
#[clap(long, env = MEILI_SNAPSHOT_INTERVAL_SEC, default_value_t = default_snapshot_interval_sec())]
#[serde(default = "default_snapshot_interval_sec")]
pub snapshot_interval_sec: u64,
/// Imports the dump file located at the specified path. Path must point to a `.dump` file.
/// If a database already exists, Meilisearch will throw an error and abort launch.
@@ -282,10 +251,17 @@ pub struct Opt {
/// Defines how much detail should be present in Meilisearch's logs.
///
/// Meilisearch currently supports six log levels, listed in order of increasing verbosity: OFF, ERROR, WARN, INFO, DEBUG, TRACE.
#[clap(long, env = MEILI_LOG_LEVEL, default_value_t)]
/// Meilisearch currently supports five log levels, listed in order of increasing verbosity: ERROR, WARN, INFO, DEBUG, TRACE.
#[clap(long, env = MEILI_LOG_LEVEL, default_value_t = default_log_level())]
#[serde(default = "default_log_level")]
pub log_level: String,
/// Generates a string of characters that can be used as a master key and exits.
///
/// Pass the generated master key using the `--master-key` argument or the `MEILI_MASTER_KEY` environment variable in a subsequent Meilisearch invocation.
#[clap(long, env = MEILI_GENERATE_MASTER_KEY)]
#[serde(default)]
pub log_level: LogLevel,
pub generate_master_key: bool,
/// Enables Prometheus metrics and /metrics route.
#[cfg(feature = "metrics")]
@@ -297,6 +273,14 @@ pub struct Opt {
#[clap(flatten)]
pub indexer_options: IndexerOpts,
#[serde(flatten)]
#[clap(flatten)]
pub scheduler_options: SchedulerConfig,
#[serde(flatten)]
#[clap(flatten)]
pub rate_limiter_options: RateLimiterConfig,
/// Set the path to a configuration file that should be used to setup the engine.
/// Format must be TOML.
#[clap(long)]
@@ -360,8 +344,8 @@ impl Opt {
http_addr,
master_key,
env,
max_index_size: _,
max_task_db_size: _,
max_index_size,
max_task_db_size,
http_payload_size_limit,
ssl_cert_path,
ssl_key_path,
@@ -372,16 +356,20 @@ impl Opt {
ssl_tickets,
snapshot_dir,
schedule_snapshot,
snapshot_interval_sec,
dump_dir,
log_level,
indexer_options,
scheduler_options,
import_snapshot: _,
ignore_missing_snapshot: _,
ignore_snapshot_if_db_exists: _,
import_dump: _,
generate_master_key: _,
ignore_missing_dump: _,
ignore_dump_if_db_exists: _,
config_file_path: _,
rate_limiter_options,
#[cfg(all(not(debug_assertions), feature = "analytics"))]
no_analytics,
#[cfg(feature = "metrics")]
@@ -397,6 +385,8 @@ impl Opt {
{
export_to_env_if_not_present(MEILI_NO_ANALYTICS, no_analytics.to_string());
}
export_to_env_if_not_present(MEILI_MAX_INDEX_SIZE, max_index_size.to_string());
export_to_env_if_not_present(MEILI_MAX_TASK_DB_SIZE, max_task_db_size.to_string());
export_to_env_if_not_present(
MEILI_HTTP_PAYLOAD_SIZE_LIMIT,
http_payload_size_limit.to_string(),
@@ -417,12 +407,13 @@ impl Opt {
export_to_env_if_not_present(MEILI_SSL_RESUMPTION, ssl_resumption.to_string());
export_to_env_if_not_present(MEILI_SSL_TICKETS, ssl_tickets.to_string());
export_to_env_if_not_present(MEILI_SNAPSHOT_DIR, snapshot_dir);
if let Some(snapshot_interval) = schedule_snapshot_to_env(schedule_snapshot) {
export_to_env_if_not_present(MEILI_SCHEDULE_SNAPSHOT, snapshot_interval)
}
export_to_env_if_not_present(MEILI_SCHEDULE_SNAPSHOT, schedule_snapshot.to_string());
export_to_env_if_not_present(
MEILI_SNAPSHOT_INTERVAL_SEC,
snapshot_interval_sec.to_string(),
);
export_to_env_if_not_present(MEILI_DUMP_DIR, dump_dir);
export_to_env_if_not_present(MEILI_LOG_LEVEL, log_level.to_string());
export_to_env_if_not_present(MEILI_LOG_LEVEL, log_level);
#[cfg(feature = "metrics")]
{
export_to_env_if_not_present(
@@ -431,6 +422,8 @@ impl Opt {
);
}
indexer_options.export_to_env();
scheduler_options.export_to_env();
rate_limiter_options.export_to_env();
}
pub fn get_ssl_config(&self) -> anyhow::Result<Option<rustls::ServerConfig>> {
@@ -480,8 +473,18 @@ impl Opt {
}
}
#[derive(Debug, Default, Clone, Parser, Deserialize)]
#[derive(Debug, Clone, Parser, Deserialize)]
pub struct IndexerOpts {
/// Sets the number of documents to process before printing
/// a log message about indexing progress.
#[serde(default = "default_log_every_n")]
#[clap(long, default_value_t = default_log_every_n(), hide = true)] // 100k
pub log_every_n: usize,
/// Maximum number of grenad chunks.
#[clap(long, hide = true)]
pub max_nb_chunks: Option<usize>,
/// Sets the maximum amount of RAM Meilisearch can use when indexing. By default, Meilisearch
/// uses no more than two thirds of available memory.
#[clap(long, env = MEILI_MAX_INDEXING_MEMORY, default_value_t)]
@@ -499,7 +502,12 @@ pub struct IndexerOpts {
impl IndexerOpts {
/// Exports the values to their corresponding env vars if they are not set.
pub fn export_to_env(self) {
let IndexerOpts { max_indexing_memory, max_indexing_threads } = self;
let IndexerOpts {
max_indexing_memory,
max_indexing_threads,
log_every_n: _,
max_nb_chunks: _,
} = self;
if let Some(max_indexing_memory) = max_indexing_memory.0 {
export_to_env_if_not_present(
MEILI_MAX_INDEXING_MEMORY,
@@ -513,6 +521,22 @@ impl IndexerOpts {
}
}
#[derive(Debug, Clone, Parser, Default, Deserialize)]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct SchedulerConfig {
/// Deactivates auto-batching when provided.
#[clap(long, env = DISABLE_AUTO_BATCHING)]
#[serde(default)]
pub disable_auto_batching: bool,
}
impl SchedulerConfig {
pub fn export_to_env(self) {
let SchedulerConfig { disable_auto_batching } = self;
export_to_env_if_not_present(DISABLE_AUTO_BATCHING, disable_auto_batching.to_string());
}
}
impl TryFrom<&IndexerOpts> for IndexerConfig {
type Error = anyhow::Error;
@@ -523,7 +547,8 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
.build()?;
Ok(Self {
log_every_n: Some(DEFAULT_LOG_EVERY_N),
log_every_n: Some(other.log_every_n),
max_nb_chunks: other.max_nb_chunks,
max_memory: other.max_indexing_memory.map(|b| b.get_bytes() as usize),
thread_pool: Some(thread_pool),
max_positions_per_attributes: None,
@@ -532,6 +557,153 @@ impl TryFrom<&IndexerOpts> for IndexerConfig {
}
}
impl Default for IndexerOpts {
fn default() -> Self {
Self {
log_every_n: 100_000,
max_nb_chunks: None,
max_indexing_memory: MaxMemory::default(),
max_indexing_threads: MaxThreads::default(),
}
}
}
/// Options related to the configuration of the rate limiters.
#[derive(Debug, Clone, Parser, Default, Deserialize)]
#[serde(rename_all = "snake_case", deny_unknown_fields)]
pub struct RateLimiterConfig {
/// When provided, completely disables all rate limiting.
#[clap(long, env = MEILI_RATE_LIMITING_DISABLE_ALL)]
#[serde(default)]
pub rate_limiting_disable_all: bool,
/// When provided, disables the global rate limiting that applies to all search requests.
///
/// Disabling the global rate limiting does not disable IP-based or API-key-based rate limiting.
/// To disable all rate limiting regardless of origin, use `--rate-limiting-disable-all`.
#[clap(long, env = MEILI_RATE_LIMITING_DISABLE_GLOBAL)]
#[serde(default)]
pub rate_limiting_disable_global: bool,
/// The maximum pool of search requests that can be performed before they are rejected.
///
/// The pool starts full at the provided value, then each search request diminishes the pool by 1.
/// When the pool is empty the search request is rejected.
/// The pool is replenished by 1 after each cooldown period.
#[clap(long, env = MEILI_RATE_LIMITING_GLOBAL_POOL, default_value_t = default_rate_limiting_global_pool())]
#[serde(default = "default_rate_limiting_global_pool")]
pub rate_limiting_global_pool: u32,
/// The amount of time, in nanoseconds, before the pool of available search requests is replenished by 1 again.
///
/// The maximum number of available search requests is given by `--rate-limiting-global-pool`.
#[clap(long, env = MEILI_RATE_LIMITING_GLOBAL_COOLDOWN_NS, default_value_t = default_rate_limiting_global_cooldown_ns())]
#[serde(default = "default_rate_limiting_global_cooldown_ns")]
pub rate_limiting_global_cooldown_ns: u64,
/// When provided, disables the rate limiting that applies to all search requests originating from a specific IP address.
///
/// Disabling the IP-based rate limiting does not disable the rate limiting that applies to all requests ("global"), nor the API-key-based rate limiting.
/// To disable all rate limiting regardless of origin, use `--rate-limiting-disable-all`.
#[clap(long, env = MEILI_RATE_LIMITING_DISABLE_IP)]
#[serde(default)]
pub rate_limiting_disable_ip: bool,
/// The maximum pool of search requests that can be performed from a specific IP before they are rejected.
///
/// The pool starts full at the provided value, then each search request from the same IP address diminishes the pool by 1.
/// When the pool is empty the search request is rejected.
/// The pool is replenished by 1 after each cooldown period.
#[clap(long, env = MEILI_RATE_LIMITING_IP_POOL, default_value_t = default_rate_limiting_ip_pool())]
#[serde(default = "default_rate_limiting_ip_pool")]
pub rate_limiting_ip_pool: u32,
/// The amount of time, in nanoseconds, before the pool of available search requests for a specific IP address is replenished by 1 again.
///
/// The maximum number of available search requests for a specific IP address is given by `--rate-limiting-ip-pool`.
#[clap(long, env = MEILI_RATE_LIMITING_IP_COOLDOWN_NS, default_value_t = default_rate_limiting_ip_cooldown_ns())]
#[serde(default = "default_rate_limiting_ip_cooldown_ns")]
pub rate_limiting_ip_cooldown_ns: u64,
/// When provided, disables the rate limiting that applies to all search requests made with a specific API key.
///
/// Disabling the API-key-based rate limiting does not disable the rate limiting that applies to all requests ("global"), nor the IP-based rate limiting.
/// To disable all rate limiting regardless of origin, use `--rate-limiting-disable-all`.
#[clap(long, env = MEILI_RATE_LIMITING_DISABLE_API_KEY)]
#[serde(default)]
pub rate_limiting_disable_api_key: bool,
/// The maximum pool of search requests that can be performed using a specific API key before they are rejected.
///
/// The pool starts full at the provided value, then each search request using the same API key diminishes the pool by 1.
/// When the pool is empty the search request is rejected.
/// The pool is replenished by 1 after each cooldown period.
#[clap(long, env = MEILI_RATE_LIMITING_API_KEY_POOL, default_value_t = default_rate_limiting_api_key_pool())]
#[serde(default = "default_rate_limiting_api_key_pool")]
pub rate_limiting_api_key_pool: u32,
/// The amount of time, in nanoseconds, before the pool of available search requests using a specific API key is replenished by 1 again.
///
/// The maximum number of available search requests using a specific API key is given by `--rate-limiting-api-key-pool`.
#[clap(long, env = MEILI_RATE_LIMITING_API_KEY_COOLDOWN_NS, default_value_t = default_rate_limiting_api_key_cooldown_ns())]
#[serde(default = "default_rate_limiting_api_key_cooldown_ns")]
pub rate_limiting_api_key_cooldown_ns: u64,
}
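Because this struct derives clap's `Parser`, each field maps to a `--rate-limiting-*` flag and to the matching `MEILI_RATE_LIMITING_*` environment variable. A hedged sketch of parsing the flags in isolation, with arbitrary example values:

use clap::Parser;

fn parse_rate_limiter_flags_sketch() -> RateLimiterConfig {
    // Options that are not passed keep the defaults declared above.
    RateLimiterConfig::parse_from([
        "meilisearch",
        "--rate-limiting-ip-pool", "500",
        "--rate-limiting-disable-global",
    ])
}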
impl RateLimiterConfig {
/// Exports the values to their corresponding env vars if they are not set.
pub fn export_to_env(self) {
let RateLimiterConfig {
rate_limiting_disable_all: disable_rate_limiting,
rate_limiting_disable_global: disable_global_rate_limiting,
rate_limiting_global_pool: global_rate_limiting_pool,
rate_limiting_global_cooldown_ns: global_rate_limiting_cooldown_ns,
rate_limiting_disable_ip: disable_ip_rate_limiting,
rate_limiting_ip_pool: ip_rate_limiting_pool,
rate_limiting_ip_cooldown_ns: ip_rate_limiting_cooldown_ns,
rate_limiting_disable_api_key: disable_api_key_rate_limiting,
rate_limiting_api_key_pool: api_key_rate_limiting_pool,
rate_limiting_api_key_cooldown_ns: api_key_rate_limiting_cooldown_ns,
} = self;
export_to_env_if_not_present(
MEILI_RATE_LIMITING_DISABLE_ALL,
disable_rate_limiting.to_string(),
);
export_to_env_if_not_present(
MEILI_RATE_LIMITING_DISABLE_GLOBAL,
disable_global_rate_limiting.to_string(),
);
export_to_env_if_not_present(
MEILI_RATE_LIMITING_DISABLE_IP,
disable_ip_rate_limiting.to_string(),
);
export_to_env_if_not_present(
MEILI_RATE_LIMITING_DISABLE_API_KEY,
disable_api_key_rate_limiting.to_string(),
);
export_to_env_if_not_present(
MEILI_RATE_LIMITING_GLOBAL_POOL,
global_rate_limiting_pool.to_string(),
);
export_to_env_if_not_present(
MEILI_RATE_LIMITING_IP_POOL,
ip_rate_limiting_pool.to_string(),
);
export_to_env_if_not_present(
MEILI_RATE_LIMITING_API_KEY_POOL,
api_key_rate_limiting_pool.to_string(),
);
export_to_env_if_not_present(
MEILI_RATE_LIMITING_GLOBAL_COOLDOWN_NS,
global_rate_limiting_cooldown_ns.to_string(),
);
export_to_env_if_not_present(
MEILI_RATE_LIMITING_IP_COOLDOWN_NS,
ip_rate_limiting_cooldown_ns.to_string(),
);
export_to_env_if_not_present(
MEILI_RATE_LIMITING_API_KEY_COOLDOWN_NS,
api_key_rate_limiting_cooldown_ns.to_string(),
);
}
}
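`export_to_env_if_not_present` is defined elsewhere in this file and does not appear in this diff; the assumption is that it behaves roughly like the sketch below, setting a variable only when the process environment does not already define it:

fn export_to_env_if_not_present_sketch(key: &str, value: impl AsRef<std::ffi::OsStr>) {
    // Respect any value the user already exported.
    if std::env::var_os(key).is_none() {
        std::env::set_var(key, value);
    }
}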
/// A type used to detect the max memory available and use 2/3 of it.
#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct MaxMemory(Option<Byte>);
@@ -693,11 +865,11 @@ fn default_env() -> String {
}
fn default_max_index_size() -> Byte {
Byte::from_bytes(INDEX_SIZE)
Byte::from_str(DEFAULT_MAX_INDEX_SIZE).unwrap()
}
fn default_max_task_db_size() -> Byte {
Byte::from_bytes(TASK_DB_SIZE)
Byte::from_str(DEFAULT_MAX_TASK_DB_SIZE).unwrap()
}
fn default_http_payload_size_limit() -> Byte {
@@ -708,108 +880,44 @@ fn default_snapshot_dir() -> PathBuf {
PathBuf::from(DEFAULT_SNAPSHOT_DIR)
}
fn default_snapshot_interval_sec() -> &'static str {
DEFAULT_SNAPSHOT_INTERVAL_SEC_STR
fn default_snapshot_interval_sec() -> u64 {
DEFAULT_SNAPSHOT_INTERVAL_SEC
}
fn default_dump_dir() -> PathBuf {
PathBuf::from(DEFAULT_DUMP_DIR)
}
/// Indicates whether snapshots are scheduled and, if so, at which interval.
#[derive(Debug, Default, Copy, Clone, Deserialize, Serialize)]
pub enum ScheduleSnapshot {
/// Scheduled snapshots are disabled.
#[default]
Disabled,
/// Snapshots are scheduled at the specified interval, in seconds.
Enabled(u64),
fn default_log_level() -> String {
DEFAULT_LOG_LEVEL.to_string()
}
impl Display for ScheduleSnapshot {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ScheduleSnapshot::Disabled => write!(f, ""),
ScheduleSnapshot::Enabled(value) => write!(f, "{}", value),
}
}
fn default_log_every_n() -> usize {
DEFAULT_LOG_EVERY_N
}
impl FromStr for ScheduleSnapshot {
type Err = ParseIntError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(match s {
"" => ScheduleSnapshot::Disabled,
s => ScheduleSnapshot::Enabled(s.parse()?),
})
}
fn default_rate_limiting_global_pool() -> u32 {
DEFAULT_GLOBAL_RATE_LIMITING_POOL
}
fn parse_schedule_snapshot(s: &str) -> Result<ScheduleSnapshot, ParseIntError> {
Ok(if s.is_empty() { ScheduleSnapshot::Disabled } else { ScheduleSnapshot::from_str(s)? })
fn default_rate_limiting_ip_pool() -> u32 {
DEFAULT_IP_RATE_LIMITING_POOL
}
fn schedule_snapshot_to_env(schedule_snapshot: ScheduleSnapshot) -> Option<String> {
match schedule_snapshot {
ScheduleSnapshot::Enabled(snapshot_delay) => Some(snapshot_delay.to_string()),
_ => None,
}
fn default_rate_limiting_api_key_pool() -> u32 {
DEFAULT_API_KEY_RATE_LIMITING_POOL
}
fn schedule_snapshot_deserialize<'de, D>(deserializer: D) -> Result<ScheduleSnapshot, D::Error>
where
D: serde::Deserializer<'de>,
{
struct BoolOrInt;
fn default_rate_limiting_global_cooldown_ns() -> u64 {
DEFAULT_GLOBAL_RATE_LIMITING_COOLDOWN_NS
}
impl<'de> serde::de::Visitor<'de> for BoolOrInt {
type Value = ScheduleSnapshot;
fn default_rate_limiting_ip_cooldown_ns() -> u64 {
DEFAULT_IP_RATE_LIMITING_COOLDOWN_NS
}
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("integer or boolean")
}
fn visit_bool<E>(self, value: bool) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(if value {
ScheduleSnapshot::Enabled(DEFAULT_SNAPSHOT_INTERVAL_SEC)
} else {
ScheduleSnapshot::Disabled
})
}
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ScheduleSnapshot::Enabled(v as u64))
}
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ScheduleSnapshot::Enabled(v))
}
fn visit_none<E>(self) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ScheduleSnapshot::Disabled)
}
fn visit_unit<E>(self) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(ScheduleSnapshot::Disabled)
}
}
deserializer.deserialize_any(BoolOrInt)
fn default_rate_limiting_api_key_cooldown_ns() -> u64 {
DEFAULT_API_KEY_RATE_LIMITING_COOLDOWN_NS
}
#[cfg(test)]

View File

@@ -1,23 +1,17 @@
use std::str;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::DeserializeFromValue;
use meilisearch_auth::error::AuthControllerError;
use meilisearch_auth::AuthController;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
use meilisearch_types::keys::{Action, Key};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use time::OffsetDateTime;
use uuid::Uuid;
use super::PAGINATION_DEFAULT_LIMIT;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::Pagination;
@@ -37,7 +31,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
pub async fn create_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
body: ValidatedJson<CreateApiKey, DeserrJsonError>,
body: web::Json<Value>,
_req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
let v = body.into_inner();
@@ -51,25 +45,10 @@ pub async fn create_api_key(
Ok(HttpResponse::Created().json(res))
}
#[derive(DeserializeFromValue, Debug, Clone, Copy)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct ListApiKeys {
#[deserr(default, error = DeserrQueryParamError<InvalidApiKeyOffset>)]
pub offset: Param<usize>,
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidApiKeyLimit>)]
pub limit: Param<usize>,
}
impl ListApiKeys {
fn as_pagination(self) -> Pagination {
Pagination { offset: self.offset.0, limit: self.limit.0 }
}
}
pub async fn list_api_keys(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
list_api_keys: QueryParameter<ListApiKeys, DeserrQueryParamError>,
paginate: web::Query<Pagination>,
) -> Result<HttpResponse, ResponseError> {
let paginate = list_api_keys.into_inner().as_pagination();
let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let keys = auth_controller.list_keys()?;
let page_view = paginate
@@ -104,15 +83,15 @@ pub async fn get_api_key(
pub async fn patch_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
body: ValidatedJson<PatchApiKey, DeserrJsonError>,
body: web::Json<Value>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let key = path.into_inner().key;
let patch_api_key = body.into_inner();
let body = body.into_inner();
let res = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
let uid =
Uuid::parse_str(&key).or_else(|_| auth_controller.get_uid_from_encoded_key(&key))?;
let key = auth_controller.update_key(uid, patch_api_key)?;
let key = auth_controller.update_key(uid, body)?;
Ok(KeyView::from_key(key, &auth_controller))
})
@@ -170,7 +149,7 @@ impl KeyView {
key: generated_key,
uid: key.uid,
actions: key.actions,
indexes: key.indexes.into_iter().map(|x| x.to_string()).collect(),
indexes: key.indexes.into_iter().map(String::from).collect(),
expires_at: key.expires_at,
created_at: key.created_at,
updated_at: key.updated_at,

View File

@@ -4,24 +4,21 @@ use actix_web::http::header::CONTENT_TYPE;
use actix_web::web::Data;
use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
use bstr::ByteSlice;
use deserr::DeserializeFromValue;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
use meilisearch_types::document_formats::{read_csv, read_json, read_ndjson, PayloadType};
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::ResponseError;
use meilisearch_types::heed::RoTxn;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::update::IndexDocumentsMethod;
use meilisearch_types::star_or::OptionStarOrList;
use meilisearch_types::star_or::StarOr;
use meilisearch_types::tasks::KindWithContent;
use meilisearch_types::{milli, Document, Index};
use mime::Mime;
use once_cell::sync::Lazy;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::Value;
use tempfile::tempfile;
use tokio::fs::File;
@@ -33,9 +30,8 @@ use crate::error::PayloadError::ReceivePayload;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::payload::Payload;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use crate::routes::{fold_star_or, PaginationView, SummarizedTaskView};
static ACCEPTED_CONTENT_TYPE: Lazy<Vec<String>> = Lazy::new(|| {
vec!["application/json".to_string(), "application/x-ndjson".to_string(), "text/csv".to_string()]
@@ -80,26 +76,22 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct GetDocument {
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
fields: OptionStarOrList<String>,
fields: Option<CS<StarOr<String>>>,
}
pub async fn get_document(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
document_param: web::Path<DocumentParam>,
params: QueryParameter<GetDocument, DeserrQueryParamError>,
path: web::Path<DocumentParam>,
params: web::Query<GetDocument>,
) -> Result<HttpResponse, ResponseError> {
let DocumentParam { index_uid, document_id } = document_param.into_inner();
let index_uid = IndexUid::try_from(index_uid)?;
let GetDocument { fields } = params.into_inner();
let attributes_to_retrieve = fields.merge_star_and_none();
let attributes_to_retrieve = fields.and_then(fold_star_or);
let index = index_scheduler.index(&index_uid)?;
let document = retrieve_document(&index, &document_id, attributes_to_retrieve)?;
let index = index_scheduler.index(&path.index_uid)?;
let document = retrieve_document(&index, &path.document_id, attributes_to_retrieve)?;
debug!("returns: {:?}", document);
Ok(HttpResponse::Ok().json(document))
}
@@ -110,68 +102,58 @@ pub async fn delete_document(
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let DocumentParam { index_uid, document_id } = path.into_inner();
let index_uid = IndexUid::try_from(index_uid)?;
analytics.delete_documents(DocumentDeletionKind::PerDocumentId, &req);
let task = KindWithContent::DocumentDeletion {
index_uid: index_uid.to_string(),
documents_ids: vec![document_id],
};
let DocumentParam { document_id, index_uid } = path.into_inner();
let task = KindWithContent::DocumentDeletion { index_uid, documents_ids: vec![document_id] };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
debug!("returns: {:?}", task);
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct BrowseQuery {
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentOffset>)]
offset: Param<usize>,
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidDocumentLimit>)]
limit: Param<usize>,
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
fields: OptionStarOrList<String>,
#[serde(default)]
offset: usize,
#[serde(default = "crate::routes::PAGINATION_DEFAULT_LIMIT")]
limit: usize,
fields: Option<CS<StarOr<String>>>,
}
pub async fn get_all_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: QueryParameter<BrowseQuery, DeserrQueryParamError>,
params: web::Query<BrowseQuery>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
debug!("called with params: {:?}", params);
let BrowseQuery { limit, offset, fields } = params.into_inner();
let attributes_to_retrieve = fields.merge_star_and_none();
let attributes_to_retrieve = fields.and_then(fold_star_or);
let index = index_scheduler.index(&index_uid)?;
let (total, documents) = retrieve_documents(&index, offset.0, limit.0, attributes_to_retrieve)?;
let (total, documents) = retrieve_documents(&index, offset, limit, attributes_to_retrieve)?;
let ret = PaginationView::new(offset.0, limit.0, total as usize, documents);
let ret = PaginationView::new(offset, limit, total as usize, documents);
debug!("returns: {:?}", ret);
Ok(HttpResponse::Ok().json(ret))
}
#[derive(Deserialize, Debug, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct UpdateDocumentsQuery {
#[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
pub primary_key: Option<String>,
}
pub async fn add_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: QueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
params: web::Query<UpdateDocumentsQuery>,
body: Payload,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
debug!("called with params: {:?}", params);
let params = params.into_inner();
@@ -181,7 +163,7 @@ pub async fn add_documents(
let task = document_addition(
extract_mime_type(&req)?,
index_scheduler,
index_uid,
index_uid.into_inner(),
params.primary_key,
body,
IndexDocumentsMethod::ReplaceDocuments,
@@ -194,15 +176,14 @@ pub async fn add_documents(
pub async fn update_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: QueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
path: web::Path<String>,
params: web::Query<UpdateDocumentsQuery>,
body: Payload,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
debug!("called with params: {:?}", params);
let index_uid = path.into_inner();
analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
@@ -224,7 +205,7 @@ pub async fn update_documents(
async fn document_addition(
mime_type: Option<Mime>,
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
index_uid: IndexUid,
index_uid: String,
primary_key: Option<String>,
mut body: Payload,
method: IndexDocumentsMethod,
@@ -245,6 +226,9 @@ async fn document_addition(
}
};
// is your indexUid valid?
let index_uid = IndexUid::try_from(index_uid)?.into_inner();
let (uuid, mut update_file) = index_scheduler.create_update_file()?;
let temp_file = match tempfile() {
@@ -320,7 +304,7 @@ async fn document_addition(
documents_count,
primary_key,
allow_index_creation,
index_uid: index_uid.to_string(),
index_uid,
};
let scheduler = index_scheduler.clone();
@@ -338,13 +322,12 @@ async fn document_addition(
pub async fn delete_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
path: web::Path<String>,
body: web::Json<Vec<Value>>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", body);
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
analytics.delete_documents(DocumentDeletionKind::PerBatch, &req);
@@ -354,7 +337,7 @@ pub async fn delete_documents(
.collect();
let task =
KindWithContent::DocumentDeletion { index_uid: index_uid.to_string(), documents_ids: ids };
KindWithContent::DocumentDeletion { index_uid: path.into_inner(), documents_ids: ids };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
@@ -364,14 +347,13 @@ pub async fn delete_documents(
pub async fn clear_all_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
path: web::Path<String>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
analytics.delete_documents(DocumentDeletionKind::ClearAll, &req);
let task = KindWithContent::DocumentClear { index_uid: index_uid.to_string() };
let task = KindWithContent::DocumentClear { index_uid: path.into_inner() };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();

View File

@@ -1,34 +1,27 @@
use std::convert::Infallible;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{unwrap_any, Code, ResponseError};
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::{self, FieldDistribution, Index};
use meilisearch_types::tasks::KindWithContent;
use serde::Serialize;
use serde::{Deserialize, Serialize};
use serde_json::json;
use time::OffsetDateTime;
use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use super::{Pagination, SummarizedTaskView};
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::RateLimiters;
pub mod documents;
pub mod search;
pub mod settings;
pub fn configure(cfg: &mut web::ServiceConfig) {
pub fn configure(cfg: &mut web::ServiceConfig, rate_limiters: RateLimiters) {
cfg.service(
web::resource("")
.route(web::get().to(list_indexes))
@@ -44,12 +37,12 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
)
.service(web::resource("/stats").route(web::get().to(SeqHandler(get_index_stats))))
.service(web::scope("/documents").configure(documents::configure))
.service(web::scope("/search").configure(search::configure))
.service(web::scope("/search").configure(|cfg| search::configure(cfg, rate_limiters)))
.service(web::scope("/settings").configure(settings::configure)),
);
}
#[derive(Debug, Serialize, Clone)]
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct IndexView {
pub uid: String,
@@ -72,23 +65,9 @@ impl IndexView {
}
}
#[derive(DeserializeFromValue, Debug, Clone, Copy)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct ListIndexes {
#[deserr(default, error = DeserrQueryParamError<InvalidIndexOffset>)]
pub offset: Param<usize>,
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidIndexLimit>)]
pub limit: Param<usize>,
}
impl ListIndexes {
fn as_pagination(self) -> Pagination {
Pagination { offset: self.offset.0, limit: self.limit.0 }
}
}
pub async fn list_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
paginate: QueryParameter<ListIndexes, DeserrQueryParamError>,
paginate: web::Query<Pagination>,
) -> Result<HttpResponse, ResponseError> {
let search_rules = &index_scheduler.filters().search_rules;
let indexes: Vec<_> = index_scheduler.indexes()?;
@@ -98,28 +77,27 @@ pub async fn list_indexes(
.map(|(name, index)| IndexView::new(name, &index))
.collect::<Result<Vec<_>, _>>()?;
let ret = paginate.as_pagination().auto_paginate_sized(indexes.into_iter());
let ret = paginate.auto_paginate_sized(indexes.into_iter());
debug!("returns: {:?}", ret);
Ok(HttpResponse::Ok().json(ret))
}
#[derive(DeserializeFromValue, Debug)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct IndexCreateRequest {
#[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)]
uid: IndexUid,
#[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
uid: String,
primary_key: Option<String>,
}
pub async fn create_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
body: ValidatedJson<IndexCreateRequest, DeserrJsonError>,
body: web::Json<IndexCreateRequest>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let IndexCreateRequest { primary_key, uid } = body.into_inner();
let uid = IndexUid::try_from(uid)?.into_inner();
let allow_index_creation = index_scheduler.filters().search_rules.is_index_authorized(&uid);
if allow_index_creation {
@@ -129,7 +107,7 @@ pub async fn create_index(
Some(&req),
);
let task = KindWithContent::IndexCreation { index_uid: uid.to_string(), primary_key };
let task = KindWithContent::IndexCreation { index_uid: uid, primary_key };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
@@ -139,29 +117,11 @@ pub async fn create_index(
}
}
fn deny_immutable_fields_index(
field: &str,
accepted: &[&str],
location: ValuePointerRef,
) -> DeserrJsonError {
let mut error = unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
None,
deserr::ErrorKind::UnknownKey { key: field, accepted },
location,
));
error.code = match field {
"uid" => Code::ImmutableIndexUid,
"createdAt" => Code::ImmutableIndexCreatedAt,
"updatedAt" => Code::ImmutableIndexUpdatedAt,
_ => Code::BadRequest,
};
error
}
#[derive(DeserializeFromValue, Debug)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)]
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
#[allow(dead_code)]
pub struct UpdateIndexRequest {
#[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
uid: Option<String>,
primary_key: Option<String>,
}
@@ -169,8 +129,6 @@ pub async fn get_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let index = index_scheduler.index(&index_uid)?;
let index_view = IndexView::new(index_uid.into_inner(), &index)?;
@@ -181,22 +139,21 @@ pub async fn get_index(
pub async fn update_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
body: ValidatedJson<UpdateIndexRequest, DeserrJsonError>,
path: web::Path<String>,
body: web::Json<UpdateIndexRequest>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", body);
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let body = body.into_inner();
analytics.publish(
"Index Updated".to_string(),
json!({ "primary_key": body.primary_key }),
json!({ "primary_key": body.primary_key}),
Some(&req),
);
let task = KindWithContent::IndexUpdate {
index_uid: index_uid.into_inner(),
index_uid: path.into_inner(),
primary_key: body.primary_key,
};
@@ -211,7 +168,6 @@ pub async fn delete_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_DELETE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let task = KindWithContent::IndexDeletion { index_uid: index_uid.into_inner() };
let task: SummarizedTaskView =
tokio::task::spawn_blocking(move || index_scheduler.register(task)).await??.into();
@@ -225,7 +181,6 @@ pub async fn get_index_stats(
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": true }), Some(&req));
let stats = IndexStats::new((*index_scheduler).clone(), index_uid.into_inner())?;

View File

@@ -3,70 +3,60 @@ use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_auth::IndexSearchRules;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::serde_cs::vec::CS;
use serde::Deserialize;
use serde_cs::vec::CS;
use serde_json::Value;
use crate::analytics::{Analytics, SearchAggregator};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::search::{
perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH, DEFAULT_CROP_MARKER,
DEFAULT_HIGHLIGHT_POST_TAG, DEFAULT_HIGHLIGHT_PRE_TAG, DEFAULT_SEARCH_LIMIT,
DEFAULT_SEARCH_OFFSET,
};
use crate::RateLimiters;
pub fn configure(cfg: &mut web::ServiceConfig) {
pub fn configure(cfg: &mut web::ServiceConfig, rate_limiters: RateLimiters) {
cfg.service(
web::resource("")
.wrap(rate_limiters.global.into_middleware())
.wrap(rate_limiters.ip.into_middleware())
.wrap(rate_limiters.api_key.into_middleware())
.route(web::get().to(SeqHandler(search_with_url_query)))
.route(web::post().to(SeqHandler(search_with_post))),
);
}
#[derive(Debug, deserr::DeserializeFromValue)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct SearchQueryGet {
#[deserr(default, error = DeserrQueryParamError<InvalidSearchQ>)]
q: Option<String>,
#[deserr(default = Param(DEFAULT_SEARCH_OFFSET()), error = DeserrQueryParamError<InvalidSearchOffset>)]
offset: Param<usize>,
#[deserr(default = Param(DEFAULT_SEARCH_LIMIT()), error = DeserrQueryParamError<InvalidSearchLimit>)]
limit: Param<usize>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchPage>)]
page: Option<Param<usize>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchHitsPerPage>)]
hits_per_page: Option<Param<usize>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToRetrieve>)]
#[serde(default = "DEFAULT_SEARCH_OFFSET")]
offset: usize,
#[serde(default = "DEFAULT_SEARCH_LIMIT")]
limit: usize,
page: Option<usize>,
hits_per_page: Option<usize>,
attributes_to_retrieve: Option<CS<String>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToCrop>)]
attributes_to_crop: Option<CS<String>>,
#[deserr(default = Param(DEFAULT_CROP_LENGTH()), error = DeserrQueryParamError<InvalidSearchCropLength>)]
crop_length: Param<usize>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchAttributesToHighlight>)]
#[serde(default = "DEFAULT_CROP_LENGTH")]
crop_length: usize,
attributes_to_highlight: Option<CS<String>>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchFilter>)]
filter: Option<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchSort>)]
sort: Option<String>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchShowMatchesPosition>)]
show_matches_position: Param<bool>,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchFacets>)]
#[serde(default = "Default::default")]
show_matches_position: bool,
facets: Option<CS<String>>,
#[deserr( default = DEFAULT_HIGHLIGHT_PRE_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPreTag>)]
#[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
highlight_pre_tag: String,
#[deserr( default = DEFAULT_HIGHLIGHT_POST_TAG(), error = DeserrQueryParamError<InvalidSearchHighlightPostTag>)]
#[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
highlight_post_tag: String,
#[deserr(default = DEFAULT_CROP_MARKER(), error = DeserrQueryParamError<InvalidSearchCropMarker>)]
#[serde(default = "DEFAULT_CROP_MARKER")]
crop_marker: String,
#[deserr(default, error = DeserrQueryParamError<InvalidSearchMatchingStrategy>)]
#[serde(default)]
matching_strategy: MatchingStrategy,
}
@@ -82,17 +72,17 @@ impl From<SearchQueryGet> for SearchQuery {
Self {
q: other.q,
offset: other.offset.0,
limit: other.limit.0,
page: other.page.as_deref().copied(),
hits_per_page: other.hits_per_page.as_deref().copied(),
offset: other.offset,
limit: other.limit,
page: other.page,
hits_per_page: other.hits_per_page,
attributes_to_retrieve: other.attributes_to_retrieve.map(|o| o.into_iter().collect()),
attributes_to_crop: other.attributes_to_crop.map(|o| o.into_iter().collect()),
crop_length: other.crop_length.0,
crop_length: other.crop_length,
attributes_to_highlight: other.attributes_to_highlight.map(|o| o.into_iter().collect()),
filter,
sort: other.sort.map(|attr| fix_sort_query_parameters(&attr)),
show_matches_position: other.show_matches_position.0,
show_matches_position: other.show_matches_position,
facets: other.facets.map(|o| o.into_iter().collect()),
highlight_pre_tag: other.highlight_pre_tag,
highlight_post_tag: other.highlight_post_tag,
@@ -150,13 +140,11 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
pub async fn search_with_url_query(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: QueryParameter<SearchQueryGet, DeserrQueryParamError>,
params: web::Query<SearchQueryGet>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
debug!("called with params: {:?}", params);
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let mut query: SearchQuery = params.into_inner().into();
// Tenant token search_rules.
@@ -184,12 +172,10 @@ pub async fn search_with_url_query(
pub async fn search_with_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: ValidatedJson<SearchQuery, DeserrJsonError>,
params: web::Json<SearchQuery>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let mut query = params.into_inner();
debug!("search called with params: {:?}", query);

View File

@@ -2,22 +2,20 @@ use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::settings::{settings, RankingRuleView, Settings, Unchecked};
use meilisearch_types::settings::{settings, Settings, Unchecked};
use meilisearch_types::tasks::KindWithContent;
use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::routes::SummarizedTaskView;
#[macro_export]
macro_rules! make_setting_route {
($route:literal, $update_verb:ident, $type:ty, $err_ty:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
($route:literal, $update_verb:ident, $type:ty, $attr:ident, $camelcase_attr:literal, $analytics_var:ident, $analytics:expr) => {
pub mod $attr {
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse, Resource};
@@ -41,14 +39,12 @@ macro_rules! make_setting_route {
>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let new_settings = Settings { $attr: Setting::Reset.into(), ..Default::default() };
let new_settings = Settings { $attr: Setting::Reset, ..Default::default() };
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
let task = KindWithContent::SettingsUpdate {
index_uid: index_uid.to_string(),
index_uid,
new_settings: Box::new(new_settings),
is_deletion: true,
allow_index_creation,
@@ -68,28 +64,26 @@ macro_rules! make_setting_route {
Data<IndexScheduler>,
>,
index_uid: actix_web::web::Path<String>,
body: $crate::routes::indexes::ValidatedJson<Option<$type>, $err_ty>,
body: actix_web::web::Json<Option<$type>>,
req: HttpRequest,
$analytics_var: web::Data<dyn Analytics>,
) -> std::result::Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let body = body.into_inner();
$analytics(&body, &req);
let new_settings = Settings {
$attr: match body {
Some(inner_body) => Setting::Set(inner_body).into(),
None => Setting::Reset.into(),
Some(inner_body) => Setting::Set(inner_body),
None => Setting::Reset,
},
..Default::default()
};
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
let task = KindWithContent::SettingsUpdate {
index_uid: index_uid.to_string(),
index_uid,
new_settings: Box::new(new_settings),
is_deletion: false,
allow_index_creation,
@@ -110,8 +104,6 @@ macro_rules! make_setting_route {
>,
index_uid: actix_web::web::Path<String>,
) -> std::result::Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let index = index_scheduler.index(&index_uid)?;
let rtxn = index.read_txn()?;
let settings = settings(&index, &rtxn)?;
@@ -137,9 +129,6 @@ make_setting_route!(
"/filterable-attributes",
put,
std::collections::BTreeSet<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsFilterableAttributes,
>,
filterable_attributes,
"filterableAttributes",
analytics,
@@ -163,9 +152,6 @@ make_setting_route!(
"/sortable-attributes",
put,
std::collections::BTreeSet<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsSortableAttributes,
>,
sortable_attributes,
"sortableAttributes",
analytics,
@@ -189,9 +175,6 @@ make_setting_route!(
"/displayed-attributes",
put,
Vec<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsDisplayedAttributes,
>,
displayed_attributes,
"displayedAttributes",
analytics,
@@ -215,9 +198,6 @@ make_setting_route!(
"/typo-tolerance",
patch,
meilisearch_types::settings::TypoSettings,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsTypoTolerance,
>,
typo_tolerance,
"typoTolerance",
analytics,
@@ -260,9 +240,6 @@ make_setting_route!(
"/searchable-attributes",
put,
Vec<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsSearchableAttributes,
>,
searchable_attributes,
"searchableAttributes",
analytics,
@@ -286,9 +263,6 @@ make_setting_route!(
"/stop-words",
put,
std::collections::BTreeSet<String>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsStopWords,
>,
stop_words,
"stopWords",
analytics,
@@ -311,9 +285,6 @@ make_setting_route!(
"/synonyms",
put,
std::collections::BTreeMap<String, Vec<String>>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsSynonyms,
>,
synonyms,
"synonyms",
analytics,
@@ -336,9 +307,6 @@ make_setting_route!(
"/distinct-attribute",
put,
String,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsDistinctAttribute,
>,
distinct_attribute,
"distinctAttribute",
analytics,
@@ -359,27 +327,24 @@ make_setting_route!(
make_setting_route!(
"/ranking-rules",
put,
Vec<meilisearch_types::settings::RankingRuleView>,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsRankingRules,
>,
Vec<String>,
ranking_rules,
"rankingRules",
analytics,
|setting: &Option<Vec<meilisearch_types::settings::RankingRuleView>>, req: &HttpRequest| {
|setting: &Option<Vec<String>>, req: &HttpRequest| {
use serde_json::json;
analytics.publish(
"RankingRules Updated".to_string(),
json!({
"ranking_rules": {
"words_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Words))),
"typo_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Typo))),
"proximity_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Proximity))),
"attribute_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Attribute))),
"sort_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Sort))),
"exactness_position": setting.as_ref().map(|rr| rr.iter().position(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Exactness))),
"values": setting.as_ref().map(|rr| rr.iter().filter(|s| matches!(s, meilisearch_types::settings::RankingRuleView::Asc(_) | meilisearch_types::settings::RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
"words_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "words")),
"typo_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "typo")),
"proximity_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "proximity")),
"attribute_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "attribute")),
"sort_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "sort")),
"exactness_position": setting.as_ref().map(|rr| rr.iter().position(|s| s == "exactness")),
"values": setting.as_ref().map(|rr| rr.iter().filter(|s| !s.contains(':')).cloned().collect::<Vec<_>>().join(", ")),
}
}),
Some(req),
@@ -391,9 +356,6 @@ make_setting_route!(
"/faceting",
patch,
meilisearch_types::settings::FacetingSettings,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsFaceting,
>,
faceting,
"faceting",
analytics,
@@ -416,9 +378,6 @@ make_setting_route!(
"/pagination",
patch,
meilisearch_types::settings::PaginationSettings,
meilisearch_types::deserr::DeserrJsonError<
meilisearch_types::error::deserr_codes::InvalidSettingsPagination,
>,
pagination,
"pagination",
analytics,
@@ -468,25 +427,23 @@ generate_configure!(
pub async fn update_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
body: ValidatedJson<Settings<Unchecked>, DeserrJsonError>,
body: web::Json<Settings<Unchecked>>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let new_settings = body.into_inner();
analytics.publish(
"Settings Updated".to_string(),
json!({
"ranking_rules": {
"words_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Words))),
"typo_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Typo))),
"proximity_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Proximity))),
"attribute_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Attribute))),
"sort_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Sort))),
"exactness_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| matches!(s, RankingRuleView::Exactness))),
"values": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().filter(|s| !matches!(s, RankingRuleView::Asc(_) | RankingRuleView::Desc(_)) ).map(|x| x.to_string()).collect::<Vec<_>>().join(", ")),
"words_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "words")),
"typo_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "typo")),
"proximity_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "proximity")),
"attribute_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "attribute")),
"sort_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "sort")),
"exactness_position": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().position(|s| s == "exactness")),
"values": new_settings.ranking_rules.as_ref().set().map(|rr| rr.iter().filter(|s| !s.contains(':')).cloned().collect::<Vec<_>>().join(", ")),
},
"searchable_attributes": {
"total": new_settings.searchable_attributes.as_ref().set().map(|searchable| searchable.len()),
@@ -579,8 +536,6 @@ pub async fn get_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let index = index_scheduler.index(&index_uid)?;
let rtxn = index.read_txn()?;
let new_settings = settings(&index, &rtxn)?;
@@ -592,8 +547,6 @@ pub async fn delete_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
let new_settings = Settings::cleared().into_unchecked();
let allow_index_creation = index_scheduler.filters().allow_index_creation;

View File

@@ -6,6 +6,7 @@ use index_scheduler::{IndexScheduler, Query};
use log::debug;
use meilisearch_types::error::ResponseError;
use meilisearch_types::settings::{Settings, Unchecked};
use meilisearch_types::star_or::StarOr;
use meilisearch_types::tasks::{Kind, Status, Task, TaskId};
use serde::{Deserialize, Serialize};
use serde_json::json;
@@ -15,6 +16,7 @@ use self::indexes::IndexStats;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::RateLimiters;
mod api_key;
mod dump;
@@ -22,18 +24,33 @@ pub mod indexes;
mod swap_indexes;
pub mod tasks;
pub fn configure(cfg: &mut web::ServiceConfig) {
pub fn configure(cfg: &mut web::ServiceConfig, rate_limiters: RateLimiters) {
cfg.service(web::scope("/tasks").configure(tasks::configure))
.service(web::resource("/health").route(web::get().to(get_health)))
.service(web::scope("/keys").configure(api_key::configure))
.service(web::scope("/dumps").configure(dump::configure))
.service(web::resource("/stats").route(web::get().to(get_stats)))
.service(web::resource("/version").route(web::get().to(get_version)))
.service(web::scope("/indexes").configure(indexes::configure))
.service(web::scope("/indexes").configure(|cfg| indexes::configure(cfg, rate_limiters)))
.service(web::scope("/swap-indexes").configure(swap_indexes::configure));
}
const PAGINATION_DEFAULT_LIMIT: usize = 20;
/// Extracts the raw values from the `StarOr` types and
/// returns `None` if a `StarOr::Star` is encountered.
pub fn fold_star_or<T, O>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<O>
where
O: FromIterator<T>,
{
content
.into_iter()
.map(|value| match value {
StarOr::Star => None,
StarOr::Other(val) => Some(val),
})
.collect()
}
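
A minimal standalone sketch of how `fold_star_or` behaves, using a local stand-in for `meilisearch_types::star_or::StarOr` so the snippet compiles on its own (the index names are invented):

```rust
// Local stand-in for `meilisearch_types::star_or::StarOr`, only for this sketch.
enum StarOr<T> {
    Star,
    Other(T),
}

// Same shape as the helper above: collapse to `None` as soon as a `*` shows up.
fn fold_star_or<T, O>(content: impl IntoIterator<Item = StarOr<T>>) -> Option<O>
where
    O: FromIterator<T>,
{
    content
        .into_iter()
        .map(|value| match value {
            StarOr::Star => None,
            StarOr::Other(val) => Some(val),
        })
        .collect()
}

fn main() {
    // Only concrete values: they are gathered into the requested container.
    let uids = vec![StarOr::Other("movies"), StarOr::Other("books")];
    assert_eq!(fold_star_or::<_, Vec<_>>(uids), Some(vec!["movies", "books"]));

    // A single `*` anywhere turns the whole result into `None`,
    // which callers read as "no restriction on this parameter".
    let uids = vec![StarOr::Other("movies"), StarOr::Star];
    assert_eq!(fold_star_or::<_, Vec<_>>(uids), None);
}
```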
const PAGINATION_DEFAULT_LIMIT: fn() -> usize = || 20;
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
@@ -58,8 +75,13 @@ impl From<Task> for SummarizedTaskView {
}
}
}
#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Pagination {
#[serde(default)]
pub offset: usize,
#[serde(default = "PAGINATION_DEFAULT_LIMIT")]
pub limit: usize,
}
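
The new `configure(cfg, rate_limiters)` signature threads the `RateLimiters` handle down into the `/indexes` scope. For context, here is a rough sketch of the actix-governor pattern this builds on, wrapping a scope in a `Governor` middleware. The limits, route, and handler below are placeholders, not this changeset's actual wiring:

```rust
use actix_governor::{Governor, GovernorConfigBuilder};
use actix_web::{web, App, HttpResponse, HttpServer};

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    // Placeholder limits: refill 2 permits per second, allow bursts of 10.
    let limiter_conf = GovernorConfigBuilder::default()
        .per_second(2)
        .burst_size(10)
        .finish()
        .expect("valid governor configuration");

    HttpServer::new(move || {
        App::new().service(
            // Only the scope wrapped by the middleware is rate limited;
            // requests over budget are answered with 429 Too Many Requests.
            web::scope("/indexes")
                .wrap(Governor::new(&limiter_conf))
                .route(
                    "/{index_uid}/search",
                    web::get().to(|| async { HttpResponse::Ok().finish() }),
                ),
        )
    })
    .bind(("127.0.0.1", 7700))?
    .run()
    .await
}
```

In the changeset itself the middleware configuration is built elsewhere and shared through `RateLimiters` rather than constructed inline like this.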

View File

@@ -1,12 +1,9 @@
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::DeserializeFromValue;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::InvalidSwapIndexes;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::tasks::{IndexSwap, KindWithContent};
use serde::Deserialize;
use serde_json::json;
use super::SummarizedTaskView;
@@ -14,27 +11,23 @@ use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::json::ValidatedJson;
use crate::extractors::sequential_extractor::SeqHandler;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(swap_indexes))));
}
#[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct SwapIndexesPayload {
#[deserr(error = DeserrJsonError<InvalidSwapIndexes>, missing_field_error = DeserrJsonError::missing_swap_indexes)]
indexes: Vec<IndexUid>,
indexes: Vec<String>,
}
pub async fn swap_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
params: ValidatedJson<Vec<SwapIndexesPayload>, DeserrJsonError>,
params: web::Json<Vec<SwapIndexesPayload>>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let params = params.into_inner();
analytics.publish(
"Indexes Swapped".to_string(),
json!({
@@ -45,8 +38,7 @@ pub async fn swap_indexes(
let search_rules = &index_scheduler.filters().search_rules;
let mut swaps = vec![];
for SwapIndexesPayload { indexes } in params.into_iter() {
// TODO: switch to deserr
for SwapIndexesPayload { indexes } in params.into_inner().into_iter() {
let (lhs, rhs) = match indexes.as_slice() {
[lhs, rhs] => (lhs, rhs),
_ => {
@@ -56,7 +48,7 @@ pub async fn swap_indexes(
if !search_rules.is_index_authorized(lhs) || !search_rules.is_index_authorized(rhs) {
return Err(AuthenticationError::InvalidToken.into());
}
swaps.push(IndexSwap { indexes: (lhs.to_string(), rhs.to_string()) });
swaps.push(IndexSwap { indexes: (lhs.clone(), rhs.clone()) });
}
let task = KindWithContent::IndexSwap { swaps };
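
As a usage sketch, the route registered above takes a POST to `/swap-indexes` whose body is an array of swap entries, each naming exactly two indexes (anything else falls into the error arm above). The index names here are invented:

```rust
use serde_json::json;

fn main() {
    // Each entry swaps exactly two indexes; entries with any other arity are
    // rejected, as are indexes the key's search rules do not authorize.
    let body = json!([
        { "indexes": ["products", "products_new"] },
        { "indexes": ["reviews", "reviews_new"] }
    ]);

    // All pairs end up in a single `IndexSwap` task, so the swaps are
    // enqueued together rather than one task per pair.
    println!("{body}");
}
```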

File diff suppressed because it is too large


View File

@@ -3,10 +3,7 @@ use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::str::FromStr;
use std::time::Instant;
use deserr::DeserializeFromValue;
use either::Either;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
use meilisearch_types::{milli, Document};
use milli::tokenizer::TokenizerBuilder;
@@ -15,7 +12,7 @@ use milli::{
SortError, TermsMatchingStrategy, DEFAULT_VALUES_PER_FACET,
};
use regex::Regex;
use serde::Serialize;
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
use crate::error::MeilisearchHttpError;
@@ -29,42 +26,34 @@ pub const DEFAULT_CROP_MARKER: fn() -> String = || "…".to_string();
pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();
#[derive(Debug, Clone, Default, PartialEq, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct SearchQuery {
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
pub q: Option<String>,
#[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
#[serde(default = "DEFAULT_SEARCH_OFFSET")]
pub offset: usize,
#[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrJsonError<InvalidSearchLimit>)]
#[serde(default = "DEFAULT_SEARCH_LIMIT")]
pub limit: usize,
#[deserr(default, error = DeserrJsonError<InvalidSearchPage>)]
pub page: Option<usize>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHitsPerPage>)]
pub hits_per_page: Option<usize>,
#[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToRetrieve>)]
pub attributes_to_retrieve: Option<BTreeSet<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToCrop>)]
pub attributes_to_crop: Option<Vec<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchCropLength>, default = DEFAULT_CROP_LENGTH())]
#[serde(default = "DEFAULT_CROP_LENGTH")]
pub crop_length: usize,
#[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToHighlight>)]
pub attributes_to_highlight: Option<HashSet<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchShowMatchesPosition>, default)]
// Default to false
#[serde(default = "Default::default")]
pub show_matches_position: bool,
#[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
pub filter: Option<Value>,
#[deserr(default, error = DeserrJsonError<InvalidSearchSort>)]
pub sort: Option<Vec<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchFacets>)]
pub facets: Option<Vec<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPreTag>, default = DEFAULT_HIGHLIGHT_PRE_TAG())]
#[serde(default = "DEFAULT_HIGHLIGHT_PRE_TAG")]
pub highlight_pre_tag: String,
#[deserr(default, error = DeserrJsonError<InvalidSearchHighlightPostTag>, default = DEFAULT_HIGHLIGHT_POST_TAG())]
#[serde(default = "DEFAULT_HIGHLIGHT_POST_TAG")]
pub highlight_post_tag: String,
#[deserr(default, error = DeserrJsonError<InvalidSearchCropMarker>, default = DEFAULT_CROP_MARKER())]
#[serde(default = "DEFAULT_CROP_MARKER")]
pub crop_marker: String,
#[deserr(default, error = DeserrJsonError<InvalidSearchMatchingStrategy>, default)]
#[serde(default)]
pub matching_strategy: MatchingStrategy,
}
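
Since every field other than `q` carries a default, a POST search body can stay minimal. A small sketch of two requests, where the query string is a placeholder and the second merely spells out the defaults that are visible in this file (the `<em>`/`</em>` tags and the `…` crop marker):

```rust
use serde_json::json;

fn main() {
    // Minimal body: omitted fields fall back to the defaults declared on the struct.
    let minimal = json!({ "q": "shifumi" });

    // The same query with the defaults defined above written out explicitly.
    let explicit = json!({
        "q": "shifumi",
        "highlightPreTag": "<em>",
        "highlightPostTag": "</em>",
        "cropMarker": "…"
    });

    println!("{minimal}\n{explicit}");
}
```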
@@ -74,8 +63,8 @@ impl SearchQuery {
}
}
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(rename_all = camelCase)]
#[derive(Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub enum MatchingStrategy {
/// Remove query words from last to first
Last,
@@ -609,7 +598,7 @@ fn parse_filter(facets: &Value) -> Result<Option<Filter>, MeilisearchHttpError>
Ok(condition)
}
Value::Array(arr) => parse_filter_array(arr),
v => Err(MeilisearchHttpError::InvalidExpression(&["String", "Array"], v.clone())),
v => Err(MeilisearchHttpError::InvalidExpression(&["Array"], v.clone())),
}
}

View File

@@ -1,77 +0,0 @@
{"id":0,"isActive":false,"balance":"$2,668.55","picture":"http://placehold.it/32x32","age":36,"color":"Green","name":"Lucas Hess","gender":"male","email":"lucashess@chorizon.com","phone":"+1 (998) 478-2597","address":"412 Losee Terrace, Blairstown, Georgia, 2825","about":"Mollit ad in exercitation quis. Anim est ut consequat fugiat duis magna aliquip velit nisi. Commodo eiusmod est consequat proident consectetur aliqua enim fugiat. Aliqua adipisicing laboris elit proident enim veniam laboris mollit. Incididunt fugiat minim ad nostrud deserunt tempor in. Id irure officia labore qui est labore nulla nisi. Magna sit quis tempor esse consectetur amet labore duis aliqua consequat.\r\n","registered":"2016-06-21T09:30:25 -02:00","latitude":-44.174957,"longitude":-145.725388,"tags":["bug","bug"]}
{"id":1,"isActive":true,"balance":"$1,706.13","picture":"http://placehold.it/32x32","age":27,"color":"Green","name":"Cherry Orr","gender":"female","email":"cherryorr@chorizon.com","phone":"+1 (995) 479-3174","address":"442 Beverly Road, Ventress, New Mexico, 3361","about":"Exercitation officia mollit proident nostrud ea. Pariatur voluptate labore nostrud magna duis non elit et incididunt Lorem velit duis amet commodo. Irure in velit laboris pariatur. Do tempor ex deserunt duis minim amet.\r\n","registered":"2020-03-18T11:12:21 -01:00","latitude":-24.356932,"longitude":27.184808,"tags":["new issue","bug"]}
{"id":2,"isActive":true,"balance":"$2,467.47","picture":"http://placehold.it/32x32","age":34,"color":"blue","name":"Patricia Goff","gender":"female","email":"patriciagoff@chorizon.com","phone":"+1 (864) 463-2277","address":"866 Hornell Loop, Cresaptown, Ohio, 1700","about":"Non culpa duis dolore Lorem aliqua. Labore veniam laborum cupidatat nostrud ea exercitation. Esse nostrud sit veniam laborum minim ullamco nulla aliqua est cillum magna. Duis non esse excepteur veniam voluptate sunt cupidatat nostrud consequat sint adipisicing ut excepteur. Incididunt sit aliquip non id magna amet deserunt esse quis dolor.\r\n","registered":"2014-10-28T12:59:30 -01:00","latitude":-64.008555,"longitude":11.867098,"tags":["good first issue"]}
{"id":3,"isActive":true,"balance":"$3,344.40","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Adeline Flynn","gender":"female","email":"adelineflynn@chorizon.com","phone":"+1 (994) 600-2840","address":"428 Paerdegat Avenue, Hollymead, Pennsylvania, 948","about":"Ex velit magna minim labore dolor id laborum incididunt. Proident dolor fugiat exercitation ad adipisicing amet dolore. Veniam nisi pariatur aute eu amet sint elit duis exercitation. Eu fugiat Lorem nostrud consequat aute sunt. Minim excepteur cillum laboris enim tempor adipisicing nulla reprehenderit ea velit Lorem qui in incididunt. Esse ipsum mollit deserunt ea exercitation ex aliqua anim magna cupidatat culpa.\r\n","registered":"2014-03-27T06:24:45 -01:00","latitude":-74.485173,"longitude":-11.059859,"tags":["bug","good first issue","wontfix","new issue"]}
{"id":4,"isActive":false,"balance":"$2,575.78","picture":"http://placehold.it/32x32","age":39,"color":"Green","name":"Mariana Pacheco","gender":"female","email":"marianapacheco@chorizon.com","phone":"+1 (820) 414-2223","address":"664 Rapelye Street, Faywood, California, 7320","about":"Sint cillum enim eu Lorem dolore. Est excepteur cillum consequat incididunt. Ut consectetur et do culpa eiusmod ex ut id proident aliqua. Sunt dolor anim minim labore incididunt deserunt enim velit sunt ut in velit. Nulla ipsum cillum qui est minim officia in occaecat exercitation Lorem sunt. Aliqua minim excepteur tempor incididunt dolore. Quis amet ullamco et proident aliqua magna consequat.\r\n","registered":"2015-09-02T03:23:35 -02:00","latitude":75.763501,"longitude":-78.777124,"tags":["new issue"]}
{"id":5,"isActive":true,"balance":"$3,793.09","picture":"http://placehold.it/32x32","age":20,"color":"Green","name":"Warren Watson","gender":"male","email":"warrenwatson@chorizon.com","phone":"+1 (807) 583-2427","address":"671 Prince Street, Faxon, Connecticut, 4275","about":"Cillum incididunt mollit labore ipsum elit ea. Lorem labore consectetur nulla ea fugiat sint esse cillum ea commodo id qui. Sint cillum mollit dolore enim quis esse. Nisi labore duis dolor tempor laborum laboris ad minim pariatur in excepteur sit. Aliqua anim amet sunt ullamco labore amet culpa irure esse eiusmod deserunt consequat Lorem nostrud.\r\n","registered":"2017-06-04T06:02:17 -02:00","latitude":29.979223,"longitude":25.358943,"tags":["wontfix","wontfix","wontfix"]}
{"id":6,"isActive":true,"balance":"$2,919.70","picture":"http://placehold.it/32x32","age":20,"color":"blue","name":"Shelia Berry","gender":"female","email":"sheliaberry@chorizon.com","phone":"+1 (853) 511-2651","address":"437 Forrest Street, Coventry, Illinois, 2056","about":"Id occaecat qui voluptate proident culpa cillum nisi reprehenderit. Pariatur nostrud proident adipisicing reprehenderit eiusmod qui minim proident aliqua id cupidatat laboris deserunt. Proident sint laboris sit mollit dolor qui incididunt quis veniam cillum cupidatat ad nostrud ut. Aliquip consequat eiusmod eiusmod irure tempor do incididunt id culpa laboris eiusmod.\r\n","registered":"2018-07-11T02:45:01 -02:00","latitude":54.815991,"longitude":-118.690609,"tags":["good first issue","bug","wontfix","new issue"]}
{"id":7,"isActive":true,"balance":"$1,349.50","picture":"http://placehold.it/32x32","age":28,"color":"Green","name":"Chrystal Boyd","gender":"female","email":"chrystalboyd@chorizon.com","phone":"+1 (936) 563-2802","address":"670 Croton Loop, Sussex, Florida, 4692","about":"Consequat ex voluptate consectetur laborum nulla. Qui voluptate Lorem amet labore est esse sunt. Nulla cupidatat consequat quis incididunt exercitation aliquip reprehenderit ea ea adipisicing reprehenderit id consectetur quis. Exercitation est incididunt ullamco non proident consequat. Nisi veniam aliquip fugiat voluptate ex id aute duis ullamco magna ipsum ad laborum ipsum. Cupidatat velit dolore esse nisi.\r\n","registered":"2016-11-01T07:36:04 -01:00","latitude":-24.711933,"longitude":147.246705,"tags":[]}
{"id":8,"isActive":false,"balance":"$3,999.56","picture":"http://placehold.it/32x32","age":30,"color":"brown","name":"Martin Porter","gender":"male","email":"martinporter@chorizon.com","phone":"+1 (895) 580-2304","address":"577 Regent Place, Aguila, Guam, 6554","about":"Nostrud nulla labore ex excepteur labore enim cillum pariatur in do Lorem eiusmod ullamco est. Labore aliquip id ut nisi commodo pariatur ea esse laboris. Incididunt eu dolor esse excepteur nulla minim proident non cillum nisi dolore incididunt ipsum tempor.\r\n","registered":"2014-09-20T02:08:30 -02:00","latitude":-88.344273,"longitude":37.964466,"tags":[]}
{"id":9,"isActive":true,"balance":"$3,729.71","picture":"http://placehold.it/32x32","age":26,"color":"blue","name":"Kelli Mendez","gender":"female","email":"kellimendez@chorizon.com","phone":"+1 (936) 401-2236","address":"242 Caton Place, Grazierville, Alabama, 3968","about":"Consectetur occaecat dolore esse eiusmod enim ea aliqua eiusmod amet velit laborum. Velit quis consequat consectetur velit fugiat labore commodo amet do. Magna minim est ad commodo consequat fugiat. Laboris duis Lorem ipsum irure sit ipsum consequat tempor sit. Est ad nulla duis quis velit anim id nulla. Cupidatat ea esse laboris eu veniam cupidatat proident veniam quis.\r\n","registered":"2018-05-04T10:35:30 -02:00","latitude":49.37551,"longitude":41.872323,"tags":["new issue","new issue"]}
{"id":10,"isActive":false,"balance":"$1,127.47","picture":"http://placehold.it/32x32","age":27,"color":"blue","name":"Maddox Johns","gender":"male","email":"maddoxjohns@chorizon.com","phone":"+1 (892) 470-2357","address":"756 Beard Street, Avalon, Louisiana, 114","about":"Voluptate et dolor magna do do. Id do enim ut nulla esse culpa fugiat excepteur quis. Nostrud ad aliquip aliqua qui esse ut consequat proident deserunt esse cupidatat do elit fugiat. Sint cillum aliquip cillum laboris laborum laboris ad aliquip enim reprehenderit cillum eu sint. Sint ut ad duis do culpa non eiusmod amet non ipsum commodo. Pariatur aliquip sit deserunt non. Ut consequat pariatur deserunt veniam est sit eiusmod officia aliquip commodo sunt in eu duis.\r\n","registered":"2016-04-22T06:41:25 -02:00","latitude":66.640229,"longitude":-17.222666,"tags":["new issue","good first issue","good first issue","new issue"]}
{"id":11,"isActive":true,"balance":"$1,351.43","picture":"http://placehold.it/32x32","age":28,"color":"Green","name":"Evans Wagner","gender":"male","email":"evanswagner@chorizon.com","phone":"+1 (889) 496-2332","address":"118 Monaco Place, Lutsen, Delaware, 6209","about":"Sunt consectetur enim ipsum consectetur occaecat reprehenderit nulla pariatur. Cupidatat do exercitation tempor voluptate duis nostrud dolor consectetur. Excepteur aliquip Lorem voluptate cillum est. Nisi velit nulla nostrud ea id officia laboris et.\r\n","registered":"2016-10-27T01:26:31 -02:00","latitude":-77.673222,"longitude":-142.657214,"tags":["good first issue","good first issue"]}
{"id":12,"isActive":false,"balance":"$3,394.96","picture":"http://placehold.it/32x32","age":25,"color":"blue","name":"Aida Kirby","gender":"female","email":"aidakirby@chorizon.com","phone":"+1 (942) 532-2325","address":"797 Engert Avenue, Wilsonia, Idaho, 6532","about":"Mollit aute esse Lorem do laboris anim reprehenderit excepteur. Ipsum culpa esse voluptate officia cupidatat minim. Velit officia proident nostrud sunt irure labore. Culpa ex commodo amet dolor amet voluptate Lorem ex esse commodo fugiat quis non. Ex est adipisicing veniam sunt dolore ut aliqua nisi ex sit. Esse voluptate esse anim id adipisicing enim aute ea exercitation tempor cillum.\r\n","registered":"2018-06-18T04:39:57 -02:00","latitude":-58.062041,"longitude":34.999254,"tags":["new issue","wontfix","bug","new issue"]}
{"id":13,"isActive":true,"balance":"$2,812.62","picture":"http://placehold.it/32x32","age":40,"color":"blue","name":"Nelda Burris","gender":"female","email":"neldaburris@chorizon.com","phone":"+1 (813) 600-2576","address":"160 Opal Court, Fowlerville, Tennessee, 2170","about":"Ipsum aliquip adipisicing elit magna. Veniam irure quis laborum laborum sint velit amet. Irure non eiusmod laborum fugiat qui quis Lorem culpa veniam commodo. Fugiat cupidatat dolore et consequat pariatur enim ex velit consequat deserunt quis. Deserunt et quis laborum cupidatat cillum minim cupidatat nisi do commodo commodo labore cupidatat ea. In excepteur sit nostrud nulla nostrud dolor sint. Et anim culpa aliquip laborum Lorem elit.\r\n","registered":"2015-08-15T12:39:53 -02:00","latitude":66.6871,"longitude":179.549488,"tags":["wontfix"]}
{"id":14,"isActive":true,"balance":"$1,718.33","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Jennifer Hart","gender":"female","email":"jenniferhart@chorizon.com","phone":"+1 (850) 537-2513","address":"124 Veranda Place, Nash, Utah, 985","about":"Amet amet voluptate in occaecat pariatur. Nulla ipsum esse quis qui in quis qui. Non est non nisi qui tempor commodo consequat fugiat. Sint eu ipsum aute anim anim. Ea nostrud excepteur exercitation consectetur Lorem.\r\n","registered":"2016-09-04T11:46:59 -02:00","latitude":-66.827751,"longitude":99.220079,"tags":["wontfix","bug","new issue","new issue"]}
{"id":15,"isActive":false,"balance":"$2,698.16","picture":"http://placehold.it/32x32","age":28,"color":"blue","name":"Aurelia Contreras","gender":"female","email":"aureliacontreras@chorizon.com","phone":"+1 (932) 442-3103","address":"655 Dwight Street, Grapeview, Palau, 8356","about":"Qui adipisicing consectetur aute veniam culpa ipsum. Occaecat occaecat ut mollit enim enim elit Lorem nostrud Lorem. Consequat laborum mollit nulla aute cillum sunt mollit commodo velit culpa. Pariatur pariatur velit nostrud tempor. In minim enim cillum exercitation in laboris labore ea sunt in incididunt fugiat.\r\n","registered":"2014-09-11T10:43:15 -02:00","latitude":-71.328973,"longitude":133.404895,"tags":["wontfix","bug","good first issue"]}
{"id":16,"isActive":true,"balance":"$3,303.25","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Estella Bass","gender":"female","email":"estellabass@chorizon.com","phone":"+1 (825) 436-2909","address":"435 Rockwell Place, Garberville, Wisconsin, 2230","about":"Sit eiusmod mollit velit non. Qui ea in exercitation elit reprehenderit occaecat tempor minim officia. Culpa amet voluptate sit eiusmod pariatur.\r\n","registered":"2017-11-23T09:32:09 -01:00","latitude":81.17014,"longitude":-145.262693,"tags":["new issue"]}
{"id":17,"isActive":false,"balance":"$3,579.20","picture":"http://placehold.it/32x32","age":25,"color":"brown","name":"Ortega Brennan","gender":"male","email":"ortegabrennan@chorizon.com","phone":"+1 (906) 526-2287","address":"440 Berry Street, Rivera, Maine, 1849","about":"Veniam velit non laboris consectetur sit aliquip enim proident velit in ipsum reprehenderit reprehenderit. Dolor qui nulla adipisicing ad magna dolore do ut duis et aute est. Qui est elit cupidatat nostrud. Laboris voluptate reprehenderit minim sint exercitation cupidatat ipsum sint consectetur velit sunt et officia incididunt. Ut amet Lorem minim deserunt officia officia irure qui et Lorem deserunt culpa sit.\r\n","registered":"2016-03-31T02:17:13 -02:00","latitude":-68.407524,"longitude":-113.642067,"tags":["new issue","wontfix"]}
{"id":18,"isActive":false,"balance":"$1,484.92","picture":"http://placehold.it/32x32","age":39,"color":"blue","name":"Leonard Tillman","gender":"male","email":"leonardtillman@chorizon.com","phone":"+1 (864) 541-3456","address":"985 Provost Street, Charco, New Hampshire, 8632","about":"Consectetur ut magna sit id officia nostrud ipsum. Lorem cupidatat laborum nostrud aliquip magna qui est cupidatat exercitation et. Officia qui magna commodo id cillum magna ut ad veniam sunt sint ex. Id minim do in do exercitation aliquip incididunt ex esse. Nisi aliqua quis excepteur qui aute excepteur dolore eu pariatur irure id eu cupidatat eiusmod. Aliqua amet et dolore enim et eiusmod qui irure pariatur qui officia adipisicing nulla duis.\r\n","registered":"2018-05-06T08:21:27 -02:00","latitude":-8.581801,"longitude":-61.910062,"tags":["wontfix","new issue","bug","bug"]}
{"id":19,"isActive":true,"balance":"$3,572.55","picture":"http://placehold.it/32x32","age":33,"color":"brown","name":"Dale Payne","gender":"male","email":"dalepayne@chorizon.com","phone":"+1 (814) 469-3499","address":"536 Dare Court, Ironton, Arkansas, 8605","about":"Et velit cupidatat velit incididunt mollit. Occaecat do labore aliqua dolore excepteur occaecat ut veniam ad ullamco tempor. Ut anim laboris deserunt culpa esse. Pariatur Lorem nulla cillum cupidatat nostrud Lorem commodo reprehenderit ut est. In dolor cillum reprehenderit laboris incididunt ad reprehenderit aute ipsum officia id in consequat. Culpa exercitation voluptate fugiat est Lorem ipsum in dolore dolor consequat Lorem et.\r\n","registered":"2019-10-11T01:01:33 -02:00","latitude":-18.280968,"longitude":-126.091797,"tags":["bug","wontfix","wontfix","wontfix"]}
{"id":20,"isActive":true,"balance":"$1,986.48","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Florence Long","gender":"female","email":"florencelong@chorizon.com","phone":"+1 (972) 557-3858","address":"519 Hendrickson Street, Templeton, Hawaii, 2389","about":"Quis officia occaecat veniam veniam. Ex minim enim labore cupidatat qui. Proident esse deserunt laborum laboris sunt nostrud.\r\n","registered":"2016-05-02T09:18:59 -02:00","latitude":-27.110866,"longitude":-45.09445,"tags":[]}
{"id":21,"isActive":true,"balance":"$1,440.09","picture":"http://placehold.it/32x32","age":40,"color":"blue","name":"Levy Whitley","gender":"male","email":"levywhitley@chorizon.com","phone":"+1 (911) 458-2411","address":"187 Thomas Street, Hachita, North Carolina, 2989","about":"Velit laboris non minim elit sint deserunt fugiat. Aute minim ex commodo aute cillum aliquip fugiat pariatur nulla eiusmod pariatur consectetur. Qui ex ea qui laborum veniam adipisicing magna minim ut. In irure anim voluptate mollit et. Adipisicing labore ea mollit magna aliqua culpa velit est. Excepteur nisi veniam enim velit in ad officia irure laboris.\r\n","registered":"2014-04-30T07:31:38 -02:00","latitude":-6.537315,"longitude":171.813536,"tags":["bug"]}
{"id":22,"isActive":false,"balance":"$2,938.57","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Bernard Mcfarland","gender":"male","email":"bernardmcfarland@chorizon.com","phone":"+1 (979) 442-3386","address":"409 Hall Street, Keyport, Federated States Of Micronesia, 7011","about":"Reprehenderit irure aute et anim ullamco enim est tempor id ipsum mollit veniam aute ullamco. Consectetur dolor velit tempor est reprehenderit ut id non est ullamco voluptate. Commodo aute ullamco culpa non voluptate incididunt non culpa culpa nisi id proident cupidatat.\r\n","registered":"2017-08-10T10:07:59 -02:00","latitude":63.766795,"longitude":68.177069,"tags":[]}
{"id":23,"isActive":true,"balance":"$1,678.49","picture":"http://placehold.it/32x32","age":31,"color":"brown","name":"Blanca Mcclain","gender":"female","email":"blancamcclain@chorizon.com","phone":"+1 (976) 439-2772","address":"176 Crooke Avenue, Valle, Virginia, 5373","about":"Aliquip sunt irure ut consectetur elit. Cillum amet incididunt et anim elit in incididunt adipisicing fugiat veniam esse veniam. Nisi qui sit occaecat tempor nostrud est aute cillum anim excepteur laboris magna in. Fugiat fugiat veniam cillum laborum ut pariatur amet nulla nulla. Nostrud mollit in laborum minim exercitation aute. Lorem aute ipsum laboris est adipisicing qui ullamco tempor adipisicing cupidatat mollit.\r\n","registered":"2015-10-12T11:57:28 -02:00","latitude":-8.944564,"longitude":-150.711709,"tags":["bug","wontfix","good first issue"]}
{"id":24,"isActive":true,"balance":"$2,276.87","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Espinoza Ford","gender":"male","email":"espinozaford@chorizon.com","phone":"+1 (945) 429-3975","address":"137 Bowery Street, Itmann, District Of Columbia, 1864","about":"Deserunt nisi aliquip esse occaecat laborum qui aliqua excepteur ea cupidatat dolore magna consequat. Culpa aliquip cillum incididunt proident est officia consequat duis. Elit tempor ut cupidatat nisi ea sint non labore aliquip amet. Deserunt labore cupidatat laboris dolor duis occaecat velit aliquip reprehenderit esse. Sit ad qui consectetur id anim nisi amet eiusmod.\r\n","registered":"2014-03-26T02:16:08 -01:00","latitude":-37.137666,"longitude":-51.811757,"tags":["wontfix","bug"]}
{"id":25,"isActive":true,"balance":"$3,973.43","picture":"http://placehold.it/32x32","age":29,"color":"Green","name":"Sykes Conley","gender":"male","email":"sykesconley@chorizon.com","phone":"+1 (851) 401-3916","address":"345 Grand Street, Woodlands, Missouri, 4461","about":"Pariatur ullamco duis reprehenderit ad sit dolore. Dolore ex fugiat labore incididunt nostrud. Minim deserunt officia sunt enim magna elit veniam reprehenderit nisi cupidatat dolor eiusmod. Veniam laboris sint cillum et laboris nostrud culpa laboris anim. Incididunt velit pariatur cupidatat sit dolore in. Voluptate consectetur officia id nostrud velit mollit dolor. Id laboris consectetur culpa sunt pariatur minim sunt laboris sit.\r\n","registered":"2015-09-12T06:03:56 -02:00","latitude":67.282955,"longitude":-64.341323,"tags":["wontfix"]}
{"id":26,"isActive":false,"balance":"$1,431.50","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Barlow Duran","gender":"male","email":"barlowduran@chorizon.com","phone":"+1 (995) 436-2562","address":"481 Everett Avenue, Allison, Nebraska, 3065","about":"Proident quis eu officia adipisicing aliquip. Lorem laborum magna dolor et incididunt cillum excepteur et amet. Veniam consectetur officia fugiat magna consequat dolore elit aute exercitation fugiat excepteur ullamco. Sit qui proident reprehenderit ea ad qui culpa exercitation reprehenderit anim cupidatat. Nulla et duis Lorem cillum duis pariatur amet voluptate labore ut aliqua mollit anim ea. Nostrud incididunt et proident adipisicing non consequat tempor ullamco adipisicing incididunt. Incididunt cupidatat tempor fugiat officia qui eiusmod reprehenderit.\r\n","registered":"2017-06-29T04:28:43 -02:00","latitude":-38.70606,"longitude":55.02816,"tags":["new issue"]}
{"id":27,"isActive":true,"balance":"$3,478.27","picture":"http://placehold.it/32x32","age":31,"color":"blue","name":"Schwartz Morgan","gender":"male","email":"schwartzmorgan@chorizon.com","phone":"+1 (861) 507-2067","address":"451 Lincoln Road, Fairlee, Washington, 2717","about":"Labore eiusmod sint dolore sunt eiusmod esse et in id aliquip. Aliqua consequat occaecat laborum labore ipsum enim non nostrud adipisicing adipisicing cillum occaecat. Duis minim est culpa sunt nulla ullamco adipisicing magna irure. Occaecat quis irure eiusmod fugiat quis commodo reprehenderit labore cillum commodo id et.\r\n","registered":"2016-05-10T08:34:54 -02:00","latitude":-75.886403,"longitude":93.044471,"tags":["bug","bug","wontfix","wontfix"]}
{"id":28,"isActive":true,"balance":"$2,825.59","picture":"http://placehold.it/32x32","age":32,"color":"blue","name":"Kristy Leon","gender":"female","email":"kristyleon@chorizon.com","phone":"+1 (948) 465-2563","address":"594 Macon Street, Floris, South Dakota, 3565","about":"Proident veniam voluptate magna id do. Laboris enim dolor culpa quis. Esse voluptate elit commodo duis incididunt velit aliqua. Qui aute commodo incididunt elit eu Lorem dolore. Non esse duis do reprehenderit culpa minim. Ullamco consequat id do exercitation exercitation mollit ipsum velit eiusmod quis.\r\n","registered":"2014-12-14T04:10:29 -01:00","latitude":-50.01615,"longitude":-68.908804,"tags":["wontfix","good first issue"]}
{"id":29,"isActive":false,"balance":"$3,028.03","picture":"http://placehold.it/32x32","age":39,"color":"blue","name":"Ashley Pittman","gender":"male","email":"ashleypittman@chorizon.com","phone":"+1 (928) 507-3523","address":"646 Adelphi Street, Clara, Colorado, 6056","about":"Incididunt cillum consectetur nulla sit sit labore nulla sit. Ullamco nisi mollit reprehenderit tempor irure in Lorem duis. Sunt eu aute laboris dolore commodo ipsum sint cupidatat veniam amet culpa incididunt aute ad. Quis dolore aliquip id aute mollit eiusmod nisi ipsum ut labore adipisicing do culpa.\r\n","registered":"2016-01-07T10:40:48 -01:00","latitude":-58.766037,"longitude":-124.828485,"tags":["wontfix"]}
{"id":30,"isActive":true,"balance":"$2,021.11","picture":"http://placehold.it/32x32","age":32,"color":"blue","name":"Stacy Espinoza","gender":"female","email":"stacyespinoza@chorizon.com","phone":"+1 (999) 487-3253","address":"931 Alabama Avenue, Bangor, Alaska, 8215","about":"Id reprehenderit cupidatat exercitation anim ad nisi irure. Minim est proident mollit laborum. Duis ad duis eiusmod quis.\r\n","registered":"2014-07-16T06:15:53 -02:00","latitude":41.560197,"longitude":177.697,"tags":["new issue","new issue","bug"]}
{"id":31,"isActive":false,"balance":"$3,609.82","picture":"http://placehold.it/32x32","age":32,"color":"blue","name":"Vilma Garza","gender":"female","email":"vilmagarza@chorizon.com","phone":"+1 (944) 585-2021","address":"565 Tech Place, Sedley, Puerto Rico, 858","about":"Excepteur et fugiat mollit incididunt cupidatat. Mollit nisi veniam sint eu exercitation amet labore. Voluptate est magna est amet qui minim excepteur cupidatat dolor quis id excepteur aliqua reprehenderit. Proident nostrud ex veniam officia nisi enim occaecat ex magna officia id consectetur ad eu. In et est reprehenderit cupidatat ad minim veniam proident nulla elit nisi veniam proident ex. Eu in irure sit veniam amet incididunt fugiat proident quis ullamco laboris.\r\n","registered":"2017-06-30T07:43:52 -02:00","latitude":-12.574889,"longitude":-54.771186,"tags":["new issue","wontfix","wontfix"]}
{"id":32,"isActive":false,"balance":"$2,882.34","picture":"http://placehold.it/32x32","age":38,"color":"brown","name":"June Dunlap","gender":"female","email":"junedunlap@chorizon.com","phone":"+1 (997) 504-2937","address":"353 Cozine Avenue, Goodville, Indiana, 1438","about":"Non dolore ut Lorem dolore amet veniam fugiat reprehenderit ut amet ea ut. Non aliquip cillum ad occaecat non et sint quis proident velit laborum ullamco et. Quis qui tempor eu voluptate et proident duis est commodo laboris ex enim. Nisi aliquip laboris nostrud veniam aliqua ullamco. Et officia proident dolor aliqua incididunt veniam proident.\r\n","registered":"2016-08-23T08:54:11 -02:00","latitude":-27.883363,"longitude":-163.919683,"tags":["new issue","new issue","bug","wontfix"]}
{"id":33,"isActive":true,"balance":"$3,556.54","picture":"http://placehold.it/32x32","age":33,"color":"brown","name":"Cecilia Greer","gender":"female","email":"ceciliagreer@chorizon.com","phone":"+1 (977) 573-3498","address":"696 Withers Street, Lydia, Oklahoma, 3220","about":"Dolor pariatur veniam ad enim eiusmod fugiat ullamco nulla veniam. Dolore dolor sit excepteur veniam adipisicing adipisicing excepteur commodo qui reprehenderit magna exercitation enim reprehenderit. Cupidatat eu ullamco excepteur sint do. Et cupidatat ex adipisicing veniam eu tempor reprehenderit ut eiusmod amet proident veniam nostrud. Tempor ex enim mollit laboris magna tempor. Et aliqua nostrud esse pariatur quis. Ut pariatur ea ipsum pariatur.\r\n","registered":"2017-01-13T11:30:12 -01:00","latitude":60.467215,"longitude":84.684575,"tags":["wontfix","good first issue","good first issue","wontfix"]}
{"id":34,"isActive":true,"balance":"$1,413.35","picture":"http://placehold.it/32x32","age":33,"color":"brown","name":"Mckay Schroeder","gender":"male","email":"mckayschroeder@chorizon.com","phone":"+1 (816) 480-3657","address":"958 Miami Court, Rehrersburg, Northern Mariana Islands, 567","about":"Amet do velit excepteur tempor sit eu voluptate. Excepteur amet culpa ipsum in pariatur mollit amet nisi veniam. Laboris elit consectetur id anim qui laboris. Reprehenderit mollit laboris occaecat esse sunt Lorem Lorem sunt occaecat.\r\n","registered":"2016-02-08T04:50:15 -01:00","latitude":-72.413287,"longitude":-159.254371,"tags":["good first issue"]}
{"id":35,"isActive":true,"balance":"$2,306.53","picture":"http://placehold.it/32x32","age":34,"color":"blue","name":"Sawyer Mccormick","gender":"male","email":"sawyermccormick@chorizon.com","phone":"+1 (829) 569-3012","address":"749 Apollo Street, Eastvale, Texas, 7373","about":"Est irure ex occaecat aute. Lorem ad ullamco esse cillum deserunt qui proident anim officia dolore. Incididunt tempor cupidatat nulla cupidatat ullamco reprehenderit Lorem. Laboris tempor do pariatur sint non officia id qui deserunt amet Lorem pariatur consectetur exercitation. Adipisicing reprehenderit pariatur duis ex cupidatat cillum ad laboris ex. Sunt voluptate pariatur esse amet dolore minim aliquip reprehenderit nisi velit mollit.\r\n","registered":"2019-11-30T11:53:23 -01:00","latitude":-48.978194,"longitude":110.950191,"tags":["good first issue","new issue","new issue","bug"]}
{"id":36,"isActive":false,"balance":"$1,844.54","picture":"http://placehold.it/32x32","age":37,"color":"brown","name":"Barbra Valenzuela","gender":"female","email":"barbravalenzuela@chorizon.com","phone":"+1 (992) 512-2649","address":"617 Schenck Court, Reinerton, Michigan, 2908","about":"Deserunt adipisicing nisi et amet aliqua amet. Veniam occaecat et elit excepteur veniam. Aute irure culpa nostrud occaecat. Excepteur sit aute mollit commodo. Do ex pariatur consequat sint Lorem veniam laborum excepteur. Non voluptate ex laborum enim irure. Adipisicing excepteur anim elit esse.\r\n","registered":"2019-03-29T01:59:31 -01:00","latitude":45.193723,"longitude":-12.486778,"tags":["new issue","new issue","wontfix","wontfix"]}
{"id":37,"isActive":false,"balance":"$3,469.82","picture":"http://placehold.it/32x32","age":39,"color":"brown","name":"Opal Weiss","gender":"female","email":"opalweiss@chorizon.com","phone":"+1 (809) 400-3079","address":"535 Bogart Street, Frizzleburg, Arizona, 5222","about":"Reprehenderit nostrud minim adipisicing voluptate nisi consequat id sint. Proident tempor est esse cupidatat minim irure esse do do sint dolor. In officia duis et voluptate Lorem minim cupidatat ipsum enim qui dolor quis in Lorem. Aliquip commodo ex quis exercitation reprehenderit. Lorem id reprehenderit cillum adipisicing sunt ipsum incididunt incididunt.\r\n","registered":"2019-09-04T07:22:28 -02:00","latitude":72.50376,"longitude":61.656435,"tags":["bug","bug","good first issue","good first issue"]}
{"id":38,"isActive":true,"balance":"$1,992.38","picture":"http://placehold.it/32x32","age":40,"color":"Green","name":"Christina Short","gender":"female","email":"christinashort@chorizon.com","phone":"+1 (884) 589-2705","address":"594 Willmohr Street, Dexter, Montana, 660","about":"Quis commodo eu dolor incididunt. Nisi magna mollit nostrud do consequat irure exercitation mollit aute deserunt. Magna aute quis occaecat incididunt deserunt tempor nostrud sint ullamco ipsum. Anim in occaecat exercitation laborum nostrud eiusmod reprehenderit ea culpa et sit. Culpa voluptate consectetur nostrud do eu fugiat excepteur officia pariatur enim duis amet.\r\n","registered":"2014-01-21T09:31:56 -01:00","latitude":-42.762739,"longitude":77.052349,"tags":["bug","new issue"]}
{"id":39,"isActive":false,"balance":"$1,722.85","picture":"http://placehold.it/32x32","age":29,"color":"brown","name":"Golden Horton","gender":"male","email":"goldenhorton@chorizon.com","phone":"+1 (903) 426-2489","address":"191 Schenck Avenue, Mayfair, North Dakota, 5000","about":"Cillum velit aliqua velit in quis do mollit in et veniam. Nostrud proident non irure commodo. Ea culpa duis enim adipisicing do sint et est culpa reprehenderit officia laborum. Non et nostrud tempor nostrud nostrud ea duis esse laboris occaecat laborum. In eu ipsum sit tempor esse eiusmod enim aliquip aute. Officia ea anim ea ea. Consequat aute deserunt tempor nulla nisi tempor velit.\r\n","registered":"2015-08-19T02:56:41 -02:00","latitude":69.922534,"longitude":9.881433,"tags":["bug"]}
{"id":40,"isActive":false,"balance":"$1,656.54","picture":"http://placehold.it/32x32","age":21,"color":"blue","name":"Stafford Emerson","gender":"male","email":"staffordemerson@chorizon.com","phone":"+1 (992) 455-2573","address":"523 Thornton Street, Conway, Vermont, 6331","about":"Adipisicing cupidatat elit minim elit nostrud elit non eiusmod sunt ut. Enim minim irure officia irure occaecat mollit eu nostrud eiusmod adipisicing sunt. Elit deserunt commodo minim dolor qui. Nostrud officia ex proident mollit et dolor tempor pariatur. Ex consequat tempor eiusmod irure mollit cillum laboris est veniam ea mollit deserunt. Tempor sit voluptate excepteur elit ullamco.\r\n","registered":"2019-02-16T04:07:08 -01:00","latitude":-29.143111,"longitude":-57.207703,"tags":["wontfix","good first issue","good first issue"]}
{"id":41,"isActive":false,"balance":"$1,861.56","picture":"http://placehold.it/32x32","age":21,"color":"brown","name":"Salinas Gamble","gender":"male","email":"salinasgamble@chorizon.com","phone":"+1 (901) 525-2373","address":"991 Nostrand Avenue, Kansas, Mississippi, 6756","about":"Consequat tempor adipisicing cupidatat aliquip. Mollit proident incididunt ad ipsum laborum. Dolor in elit minim aliquip aliquip voluptate reprehenderit mollit eiusmod excepteur aliquip minim nulla cupidatat.\r\n","registered":"2017-08-21T05:47:53 -02:00","latitude":-22.593819,"longitude":-63.613004,"tags":["good first issue","bug","bug","wontfix"]}
{"id":42,"isActive":true,"balance":"$3,179.74","picture":"http://placehold.it/32x32","age":34,"color":"brown","name":"Graciela Russell","gender":"female","email":"gracielarussell@chorizon.com","phone":"+1 (893) 464-3951","address":"361 Greenpoint Avenue, Shrewsbury, New Jersey, 4713","about":"Ex amet duis incididunt consequat minim dolore deserunt reprehenderit adipisicing in mollit aliqua adipisicing sunt. In ullamco eu qui est eiusmod qui. Fugiat esse est Lorem dolore nisi mollit exercitation. Aliquip occaecat esse exercitation ex non aute velit excepteur duis aliquip id. Velit id non aliquip fugiat minim qui exercitation culpa tempor consectetur. Minim dolor labore ea aute aute eu.\r\n","registered":"2015-05-18T09:52:56 -02:00","latitude":-14.634444,"longitude":12.931783,"tags":["wontfix","bug","wontfix"]}
{"id":43,"isActive":true,"balance":"$1,777.38","picture":"http://placehold.it/32x32","age":25,"color":"blue","name":"Arnold Bender","gender":"male","email":"arnoldbender@chorizon.com","phone":"+1 (945) 581-3808","address":"781 Lorraine Street, Gallina, American Samoa, 1832","about":"Et mollit laboris duis ut duis eiusmod aute laborum duis irure labore deserunt. Ut occaecat ullamco quis excepteur. Et commodo non sint laboris tempor laboris aliqua consequat magna ea aute minim tempor pariatur. Dolore occaecat qui irure Lorem nulla consequat non.\r\n","registered":"2018-12-23T02:26:30 -01:00","latitude":41.208579,"longitude":51.948925,"tags":["bug","good first issue","good first issue","wontfix"]}
{"id":44,"isActive":true,"balance":"$2,893.45","picture":"http://placehold.it/32x32","age":22,"color":"Green","name":"Joni Spears","gender":"female","email":"jonispears@chorizon.com","phone":"+1 (916) 565-2124","address":"307 Harwood Place, Canterwood, Maryland, 2047","about":"Dolore consequat deserunt aliquip duis consequat minim occaecat enim est. Nulla aute reprehenderit est enim duis cillum ullamco aliquip eiusmod sunt. Labore eiusmod aliqua Lorem velit aliqua quis ex mollit mollit duis culpa et qui in. Cupidatat est id ullamco irure dolor nulla.\r\n","registered":"2015-03-01T12:38:28 -01:00","latitude":8.19071,"longitude":146.323808,"tags":["wontfix","new issue","good first issue","good first issue"]}
{"id":45,"isActive":true,"balance":"$2,830.36","picture":"http://placehold.it/32x32","age":20,"color":"brown","name":"Irene Bennett","gender":"female","email":"irenebennett@chorizon.com","phone":"+1 (904) 431-2211","address":"353 Ridgecrest Terrace, Springdale, Marshall Islands, 2686","about":"Consectetur Lorem dolor reprehenderit sunt duis. Pariatur non velit velit veniam elit reprehenderit in. Aute quis Lorem quis pariatur Lorem incididunt nulla magna adipisicing. Et id occaecat labore officia occaecat occaecat adipisicing.\r\n","registered":"2018-04-17T05:18:51 -02:00","latitude":-36.435177,"longitude":-127.552573,"tags":["bug","wontfix"]}
{"id":46,"isActive":true,"balance":"$1,348.04","picture":"http://placehold.it/32x32","age":34,"color":"Green","name":"Lawson Curtis","gender":"male","email":"lawsoncurtis@chorizon.com","phone":"+1 (896) 532-2172","address":"942 Gerritsen Avenue, Southmont, Kansas, 8915","about":"Amet consectetur minim aute nostrud excepteur sint labore in culpa. Mollit qui quis ea amet sint ex incididunt nulla. Elit id esse ea consectetur laborum consequat occaecat aute consectetur ex. Commodo duis aute elit occaecat cupidatat non consequat ad officia qui dolore nostrud reprehenderit. Occaecat velit velit adipisicing exercitation consectetur. Incididunt et amet nostrud tempor do esse ullamco est Lorem irure. Eu aliqua eu exercitation sint.\r\n","registered":"2016-08-23T01:41:09 -02:00","latitude":-48.783539,"longitude":20.492944,"tags":[]}
{"id":47,"isActive":true,"balance":"$1,132.41","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Goff May","gender":"male","email":"goffmay@chorizon.com","phone":"+1 (859) 453-3415","address":"225 Rutledge Street, Boonville, Massachusetts, 4081","about":"Sint occaecat velit anim sint reprehenderit est. Adipisicing ea pariatur amet id non ex. Aute id laborum tempor aliquip magna ex eu incididunt aliquip eiusmod elit quis dolor. Anim est minim deserunt amet exercitation nulla elit nulla nulla culpa ullamco. Velit consectetur ipsum amet proident labore excepteur ut id excepteur voluptate commodo. Exercitation et laboris labore esse est laboris consectetur et sint.\r\n","registered":"2014-10-25T07:32:30 -02:00","latitude":13.079225,"longitude":76.215086,"tags":["bug"]}
{"id":48,"isActive":true,"balance":"$1,201.87","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Goodman Becker","gender":"male","email":"goodmanbecker@chorizon.com","phone":"+1 (825) 470-3437","address":"388 Seigel Street, Sisquoc, Kentucky, 8231","about":"Velit excepteur aute esse fugiat laboris aliqua magna. Est ex sit do labore ullamco aliquip. Duis ea commodo nostrud in fugiat. Aliqua consequat mollit dolore excepteur nisi ullamco commodo ea nostrud ea minim. Minim occaecat ut laboris ea consectetur veniam ipsum qui sit tempor incididunt anim amet eu. Velit sint incididunt eu adipisicing ipsum qui labore. Anim commodo labore reprehenderit aliquip labore elit minim deserunt amet exercitation officia non ea consectetur.\r\n","registered":"2019-09-05T04:49:03 -02:00","latitude":-23.792094,"longitude":-13.621221,"tags":["bug","bug","wontfix","new issue"]}
{"id":49,"isActive":true,"balance":"$1,476.39","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Maureen Dale","gender":"female","email":"maureendale@chorizon.com","phone":"+1 (984) 538-3684","address":"817 Newton Street, Bannock, Wyoming, 1468","about":"Tempor mollit exercitation excepteur cupidatat reprehenderit ad ex. Nulla laborum proident incididunt quis. Esse laborum deserunt qui anim. Sunt incididunt pariatur cillum anim proident eu ullamco dolor excepteur. Ullamco amet culpa nostrud adipisicing duis aliqua consequat duis non eu id mollit velit. Deserunt ullamco amet in occaecat.\r\n","registered":"2018-04-26T06:04:40 -02:00","latitude":-64.196802,"longitude":-117.396238,"tags":["wontfix"]}
{"id":50,"isActive":true,"balance":"$1,947.08","picture":"http://placehold.it/32x32","age":21,"color":"Green","name":"Guerra Mcintyre","gender":"male","email":"guerramcintyre@chorizon.com","phone":"+1 (951) 536-2043","address":"423 Lombardy Street, Stewart, West Virginia, 908","about":"Sunt proident proident deserunt exercitation consectetur deserunt labore non commodo amet. Duis aute aliqua amet deserunt consectetur velit. Quis Lorem dolore occaecat deserunt reprehenderit non esse ullamco nostrud enim sunt ea fugiat. Elit amet veniam eu magna tempor. Mollit cupidatat laboris ex deserunt et labore sit tempor nostrud anim. Tempor aliqua occaecat voluptate reprehenderit eiusmod aliqua incididunt officia.\r\n","registered":"2015-07-16T05:11:42 -02:00","latitude":79.733743,"longitude":-20.602356,"tags":["bug","good first issue","good first issue"]}
{"id":51,"isActive":true,"balance":"$2,960.90","picture":"http://placehold.it/32x32","age":23,"color":"blue","name":"Key Cervantes","gender":"male","email":"keycervantes@chorizon.com","phone":"+1 (931) 474-3865","address":"410 Barbey Street, Vernon, Oregon, 2328","about":"Duis amet minim eu consectetur laborum ad exercitation eiusmod nulla velit cillum consectetur. Nostrud aliqua cillum minim veniam quis do cupidatat mollit laborum. Culpa fugiat consectetur cillum non occaecat tempor non fugiat esse pariatur in ullamco. Occaecat amet officia et culpa officia deserunt in qui magna aute consequat eiusmod.\r\n","registered":"2019-12-15T12:13:35 -01:00","latitude":47.627647,"longitude":117.049918,"tags":["new issue"]}
{"id":52,"isActive":false,"balance":"$1,884.02","picture":"http://placehold.it/32x32","age":35,"color":"blue","name":"Karen Nelson","gender":"female","email":"karennelson@chorizon.com","phone":"+1 (993) 528-3607","address":"930 Frank Court, Dunbar, New York, 8810","about":"Occaecat officia veniam consectetur aliqua laboris dolor irure nulla. Lorem ipsum sit nisi veniam mollit ea sint nisi irure. Eiusmod officia do laboris nostrud enim ullamco nulla officia in Lorem qui. Sint sunt incididunt quis reprehenderit incididunt. Sit dolore nulla consequat ea magna.\r\n","registered":"2014-06-23T09:21:44 -02:00","latitude":-59.059033,"longitude":76.565373,"tags":["new issue","bug"]}
{"id":53,"isActive":true,"balance":"$3,559.55","picture":"http://placehold.it/32x32","age":32,"color":"brown","name":"Caitlin Burnett","gender":"female","email":"caitlinburnett@chorizon.com","phone":"+1 (945) 480-2796","address":"516 Senator Street, Emory, Iowa, 4145","about":"In aliqua ea esse in. Magna aute cupidatat culpa enim proident ad adipisicing laborum consequat exercitation nisi. Qui esse aliqua duis anim nulla esse enim nostrud ipsum tempor. Lorem deserunt ullamco do mollit culpa ipsum duis Lorem velit duis occaecat.\r\n","registered":"2019-01-09T02:26:31 -01:00","latitude":-82.774237,"longitude":42.316194,"tags":["bug","good first issue"]}
{"id":54,"isActive":true,"balance":"$2,113.29","picture":"http://placehold.it/32x32","age":28,"color":"Green","name":"Richards Walls","gender":"male","email":"richardswalls@chorizon.com","phone":"+1 (865) 517-2982","address":"959 Brightwater Avenue, Stevens, Nevada, 2968","about":"Ad aute Lorem non pariatur anim ullamco ad amet eiusmod tempor velit. Mollit et tempor nisi aute adipisicing exercitation mollit do amet amet est fugiat enim. Ex voluptate nulla id tempor officia ullamco cillum dolor irure irure mollit et magna nisi. Pariatur voluptate qui laboris dolor id. Eu ipsum nulla dolore aute voluptate deserunt anim aliqua. Ut enim enim velit officia est nisi. Duis amet ut veniam aliquip minim tempor Lorem amet Lorem dolor duis.\r\n","registered":"2014-09-25T06:51:22 -02:00","latitude":80.09202,"longitude":87.49759,"tags":["wontfix","wontfix","bug"]}
{"id":55,"isActive":true,"balance":"$1,977.66","picture":"http://placehold.it/32x32","age":36,"color":"brown","name":"Combs Stanley","gender":"male","email":"combsstanley@chorizon.com","phone":"+1 (827) 419-2053","address":"153 Beverley Road, Siglerville, South Carolina, 3666","about":"Commodo ullamco consequat eu ipsum eiusmod aute voluptate in. Ea laboris id deserunt nostrud pariatur et laboris minim tempor quis qui consequat non esse. Magna elit commodo mollit veniam Lorem enim nisi pariatur. Nisi non nisi adipisicing ea ipsum laborum dolore cillum. Amet do nisi esse laboris ipsum proident non veniam ullamco ea cupidatat sunt. Aliquip aute cillum quis laboris consectetur enim eiusmod nisi non id ullamco cupidatat sunt.\r\n","registered":"2019-08-22T07:53:15 -02:00","latitude":78.386181,"longitude":143.661058,"tags":[]}
{"id":56,"isActive":false,"balance":"$3,886.12","picture":"http://placehold.it/32x32","age":23,"color":"brown","name":"Tucker Barry","gender":"male","email":"tuckerbarry@chorizon.com","phone":"+1 (808) 544-3433","address":"805 Jamaica Avenue, Cornfields, Minnesota, 3689","about":"Enim est sunt ullamco nulla aliqua commodo. Enim minim veniam non fugiat id tempor ad velit quis velit ad sunt consectetur laborum. Cillum deserunt tempor est adipisicing Lorem esse qui. Magna quis sunt cillum ea officia adipisicing eiusmod eu et nisi consectetur.\r\n","registered":"2016-08-29T07:28:00 -02:00","latitude":71.701551,"longitude":9.903068,"tags":[]}
{"id":57,"isActive":false,"balance":"$1,844.56","picture":"http://placehold.it/32x32","age":20,"color":"Green","name":"Kaitlin Conner","gender":"female","email":"kaitlinconner@chorizon.com","phone":"+1 (862) 467-2666","address":"501 Knight Court, Joppa, Rhode Island, 274","about":"Occaecat id reprehenderit pariatur ea. Incididunt laborum reprehenderit ipsum velit labore excepteur nostrud voluptate officia ut culpa. Sint sunt in qui duis cillum aliqua do ullamco. Non do aute excepteur non labore sint consectetur tempor ad ea fugiat commodo labore. Dolor tempor culpa Lorem voluptate esse nostrud anim tempor irure reprehenderit. Deserunt ipsum cillum fugiat ut labore labore anim. In aliqua sunt dolore irure reprehenderit voluptate commodo consequat mollit amet laboris sit anim.\r\n","registered":"2019-05-30T06:38:24 -02:00","latitude":15.613464,"longitude":171.965629,"tags":[]}
{"id":58,"isActive":true,"balance":"$2,876.10","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Mamie Fischer","gender":"female","email":"mamiefischer@chorizon.com","phone":"+1 (948) 545-3901","address":"599 Hunterfly Place, Haena, Georgia, 6005","about":"Cillum eu aliquip ipsum anim in dolore labore ea. Laboris velit esse ea ea aute do adipisicing ullamco elit laborum aute tempor. Esse consectetur quis irure occaecat nisi cillum et consectetur cillum cillum quis quis commodo.\r\n","registered":"2019-05-27T05:07:10 -02:00","latitude":70.915079,"longitude":-48.813584,"tags":["bug","wontfix","wontfix","good first issue"]}
{"id":59,"isActive":true,"balance":"$1,921.58","picture":"http://placehold.it/32x32","age":31,"color":"Green","name":"Harper Carson","gender":"male","email":"harpercarson@chorizon.com","phone":"+1 (912) 430-3243","address":"883 Dennett Place, Knowlton, New Mexico, 9219","about":"Exercitation minim esse proident cillum velit et deserunt incididunt adipisicing minim. Cillum Lorem consectetur laborum id consequat exercitation velit. Magna dolor excepteur sunt deserunt dolor ullamco non sint proident ipsum. Reprehenderit voluptate sit veniam consectetur ea sunt duis labore deserunt ipsum aute. Eiusmod aliqua anim voluptate id duis tempor aliqua commodo sunt. Do officia ea consectetur nostrud eiusmod laborum.\r\n","registered":"2019-12-07T07:33:15 -01:00","latitude":-60.812605,"longitude":-27.129016,"tags":["bug","new issue"]}
{"id":60,"isActive":true,"balance":"$1,770.93","picture":"http://placehold.it/32x32","age":23,"color":"brown","name":"Jody Herrera","gender":"female","email":"jodyherrera@chorizon.com","phone":"+1 (890) 583-3222","address":"261 Jay Street, Strykersville, Ohio, 9248","about":"Sit adipisicing pariatur irure non sint cupidatat ex ipsum pariatur exercitation ea. Enim consequat enim eu eu sint eu elit ex esse aliquip. Pariatur ipsum dolore veniam nisi id tempor elit exercitation dolore ad fugiat labore velit.\r\n","registered":"2016-05-21T01:00:02 -02:00","latitude":-36.846586,"longitude":131.156223,"tags":[]}
{"id":61,"isActive":false,"balance":"$2,813.41","picture":"http://placehold.it/32x32","age":37,"color":"Green","name":"Charles Castillo","gender":"male","email":"charlescastillo@chorizon.com","phone":"+1 (934) 467-2108","address":"675 Morton Street, Rew, Pennsylvania, 137","about":"Velit amet laborum amet sunt sint sit cupidatat deserunt dolor laborum consectetur veniam. Minim cupidatat amet exercitation nostrud ex deserunt ad Lorem amet aute consectetur labore reprehenderit. Minim mollit aliqua et deserunt ex nisi. Id irure dolor labore consequat ipsum consectetur.\r\n","registered":"2019-06-10T02:54:22 -02:00","latitude":-16.423202,"longitude":-146.293752,"tags":["new issue","new issue"]}
{"id":62,"isActive":true,"balance":"$3,341.35","picture":"http://placehold.it/32x32","age":33,"color":"blue","name":"Estelle Ramirez","gender":"female","email":"estelleramirez@chorizon.com","phone":"+1 (816) 459-2073","address":"636 Nolans Lane, Camptown, California, 7794","about":"Dolor proident incididunt ex labore quis ullamco duis. Sit esse laboris nisi eu voluptate nulla cupidatat nulla fugiat veniam. Culpa cillum est esse dolor consequat. Pariatur ex sit irure qui do fugiat. Fugiat culpa veniam est nisi excepteur quis cupidatat et minim in esse minim dolor et. Anim aliquip labore dolor occaecat nisi sunt dolore pariatur veniam nostrud est ut.\r\n","registered":"2015-02-14T01:05:50 -01:00","latitude":-46.591249,"longitude":-83.385587,"tags":["good first issue","bug"]}
{"id":63,"isActive":true,"balance":"$2,478.30","picture":"http://placehold.it/32x32","age":21,"color":"blue","name":"Knowles Hebert","gender":"male","email":"knowleshebert@chorizon.com","phone":"+1 (819) 409-2308","address":"361 Kathleen Court, Gratton, Connecticut, 7254","about":"Esse mollit nulla eiusmod esse duis non proident excepteur labore. Nisi ex culpa do mollit dolor ea deserunt elit anim ipsum nostrud. Cupidatat nostrud duis ipsum dolore amet et. Veniam in cillum ea cillum deserunt excepteur officia laboris nulla. Commodo incididunt aliquip qui sunt dolore occaecat labore do laborum irure. Labore culpa duis pariatur reprehenderit ad laboris occaecat anim cillum et fugiat ea.\r\n","registered":"2016-03-08T08:34:52 -01:00","latitude":71.042482,"longitude":152.460406,"tags":["good first issue","wontfix"]}
{"id":64,"isActive":false,"balance":"$2,559.09","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Thelma Mckenzie","gender":"female","email":"thelmamckenzie@chorizon.com","phone":"+1 (941) 596-2777","address":"202 Leonard Street, Riverton, Illinois, 8577","about":"Non ad ipsum elit commodo fugiat Lorem ipsum reprehenderit. Commodo incididunt officia cillum eiusmod officia proident ea incididunt ullamco magna commodo consectetur dolor. Nostrud esse nisi ea laboris. Veniam et dolore nulla excepteur pariatur laborum non. Eiusmod reprehenderit do tempor esse eu eu aliquip. Magna quis consectetur ipsum adipisicing mollit elit ad elit.\r\n","registered":"2020-04-14T12:43:06 -02:00","latitude":16.026129,"longitude":105.464476,"tags":[]}
{"id":65,"isActive":true,"balance":"$1,025.08","picture":"http://placehold.it/32x32","age":34,"color":"blue","name":"Carole Rowland","gender":"female","email":"carolerowland@chorizon.com","phone":"+1 (862) 558-3448","address":"941 Melba Court, Bluetown, Florida, 9555","about":"Ullamco occaecat ipsum aliqua sit proident eu. Occaecat ut consectetur proident culpa aliqua excepteur quis qui anim irure sit proident mollit irure. Proident cupidatat deserunt dolor adipisicing.\r\n","registered":"2014-12-01T05:55:35 -01:00","latitude":-0.191998,"longitude":43.389652,"tags":["wontfix"]}
{"id":66,"isActive":true,"balance":"$1,061.49","picture":"http://placehold.it/32x32","age":35,"color":"brown","name":"Higgins Aguilar","gender":"male","email":"higginsaguilar@chorizon.com","phone":"+1 (911) 540-3791","address":"132 Sackman Street, Layhill, Guam, 8729","about":"Anim ea dolore exercitation minim. Proident cillum non deserunt cupidatat veniam non occaecat aute ullamco irure velit laboris ex aliquip. Voluptate incididunt non ex nulla est ipsum. Amet anim do velit sunt irure sint minim nisi occaecat proident tempor elit exercitation nostrud.\r\n","registered":"2015-04-05T02:10:07 -02:00","latitude":74.702813,"longitude":151.314972,"tags":["bug"]}
{"id":67,"isActive":true,"balance":"$3,510.14","picture":"http://placehold.it/32x32","age":28,"color":"brown","name":"Ilene Gillespie","gender":"female","email":"ilenegillespie@chorizon.com","phone":"+1 (937) 575-2676","address":"835 Lake Street, Naomi, Alabama, 4131","about":"Quis laborum consequat id cupidatat exercitation aute ad ex nulla dolore velit qui proident minim. Et do consequat nisi eiusmod exercitation exercitation enim voluptate elit ullamco. Cupidatat ut adipisicing consequat aute est voluptate sit ipsum culpa ullamco. Ex pariatur ex qui quis qui.\r\n","registered":"2015-06-28T09:41:45 -02:00","latitude":71.573342,"longitude":-95.295989,"tags":["wontfix","wontfix"]}
{"id":68,"isActive":false,"balance":"$1,539.98","picture":"http://placehold.it/32x32","age":24,"color":"Green","name":"Angelina Dyer","gender":"female","email":"angelinadyer@chorizon.com","phone":"+1 (948) 574-3949","address":"575 Division Place, Gorham, Louisiana, 3458","about":"Cillum magna eu est veniam incididunt laboris laborum elit mollit incididunt proident non mollit. Dolor mollit culpa ullamco dolore aliqua adipisicing culpa officia. Reprehenderit minim nisi fugiat consectetur dolore.\r\n","registered":"2014-07-08T06:34:36 -02:00","latitude":-85.649593,"longitude":66.126018,"tags":["good first issue"]}
{"id":69,"isActive":true,"balance":"$3,367.69","picture":"http://placehold.it/32x32","age":30,"color":"brown","name":"Marks Burt","gender":"male","email":"marksburt@chorizon.com","phone":"+1 (895) 497-3138","address":"819 Village Road, Wadsworth, Delaware, 6099","about":"Fugiat tempor aute voluptate proident exercitation tempor esse dolor id. Duis aliquip exercitation Lorem elit magna sint sit. Culpa adipisicing occaecat aliqua officia reprehenderit laboris sint aliquip. Magna do sunt consequat excepteur nisi do commodo non. Cillum officia nostrud consequat excepteur elit proident in. Tempor ipsum in ut qui cupidatat exercitation est nulla exercitation voluptate.\r\n","registered":"2014-08-31T06:12:18 -02:00","latitude":26.854112,"longitude":-143.313948,"tags":["good first issue"]}
{"id":70,"isActive":false,"balance":"$3,755.72","picture":"http://placehold.it/32x32","age":23,"color":"blue","name":"Glass Perkins","gender":"male","email":"glassperkins@chorizon.com","phone":"+1 (923) 486-3725","address":"899 Roosevelt Court, Belleview, Idaho, 1737","about":"Esse magna id labore sunt qui eu enim esse cillum consequat enim eu culpa enim. Duis veniam cupidatat deserunt sunt irure ad Lorem proident aliqua mollit. Laborum mollit aute nulla est. Sunt id proident incididunt ipsum et dolor consectetur laborum enim dolor officia dolore laborum. Est commodo duis et ea consequat labore id id eu aliqua. Qui veniam sit eu aliquip ad sit dolor ullamco et laborum voluptate quis fugiat ex. Exercitation dolore cillum amet ad nisi consectetur occaecat sit aliqua laborum qui proident aliqua exercitation.\r\n","registered":"2015-05-22T05:44:33 -02:00","latitude":54.27147,"longitude":-65.065604,"tags":["wontfix"]}
{"id":71,"isActive":true,"balance":"$3,381.63","picture":"http://placehold.it/32x32","age":38,"color":"Green","name":"Candace Sawyer","gender":"female","email":"candacesawyer@chorizon.com","phone":"+1 (830) 404-2636","address":"334 Arkansas Drive, Bordelonville, Tennessee, 8449","about":"Et aliqua elit incididunt et aliqua. Deserunt ut elit proident ullamco ut. Ex exercitation amet non eu reprehenderit ea voluptate qui sit reprehenderit ad sint excepteur.\r\n","registered":"2014-04-04T08:45:00 -02:00","latitude":6.484262,"longitude":-37.054928,"tags":["new issue","new issue"]}
{"id":72,"isActive":true,"balance":"$1,640.98","picture":"http://placehold.it/32x32","age":27,"color":"Green","name":"Hendricks Martinez","gender":"male","email":"hendricksmartinez@chorizon.com","phone":"+1 (857) 566-3245","address":"636 Agate Court, Newry, Utah, 3304","about":"Do sit culpa amet incididunt officia enim occaecat incididunt excepteur enim tempor deserunt qui. Excepteur adipisicing anim consectetur adipisicing proident anim laborum qui. Aliquip nostrud cupidatat sit ullamco.\r\n","registered":"2018-06-15T10:36:11 -02:00","latitude":86.746034,"longitude":10.347893,"tags":["new issue"]}
{"id":73,"isActive":false,"balance":"$1,239.74","picture":"http://placehold.it/32x32","age":38,"color":"blue","name":"Eleanor Shepherd","gender":"female","email":"eleanorshepherd@chorizon.com","phone":"+1 (894) 567-2617","address":"670 Lafayette Walk, Darlington, Palau, 8803","about":"Adipisicing ad incididunt id veniam magna cupidatat et labore eu deserunt mollit. Lorem voluptate exercitation elit eu aliquip cupidatat occaecat anim excepteur reprehenderit est est. Ipsum excepteur ea mollit qui nisi laboris ex qui. Cillum velit culpa culpa commodo laboris nisi Lorem non elit deserunt incididunt. Officia quis velit nulla sint incididunt duis mollit tempor adipisicing qui officia eu nisi Lorem. Do proident pariatur ex enim nostrud eu aute esse deserunt eu velit quis culpa exercitation. Occaecat ad cupidatat ullamco consequat duis anim deserunt occaecat aliqua sunt consectetur ipsum magna.\r\n","registered":"2020-02-29T12:15:28 -01:00","latitude":35.749621,"longitude":-94.40842,"tags":["good first issue","new issue","new issue","bug"]}
{"id":74,"isActive":true,"balance":"$1,180.90","picture":"http://placehold.it/32x32","age":36,"color":"Green","name":"Stark Wong","gender":"male","email":"starkwong@chorizon.com","phone":"+1 (805) 575-3055","address":"522 Bond Street, Bawcomville, Wisconsin, 324","about":"Aute qui sit incididunt eu adipisicing exercitation sunt nostrud. Id laborum incididunt proident ipsum est cillum esse. Officia ullamco eu ut Lorem do minim ea dolor consequat sit eu est voluptate. Id commodo cillum enim culpa aliquip ullamco nisi Lorem cillum ipsum cupidatat anim officia eu. Dolore sint elit labore pariatur. Officia duis nulla voluptate et nulla ut voluptate laboris eu commodo veniam qui veniam.\r\n","registered":"2020-01-25T10:47:48 -01:00","latitude":-80.452139,"longitude":160.72546,"tags":["wontfix"]}
{"id":75,"isActive":false,"balance":"$1,913.42","picture":"http://placehold.it/32x32","age":24,"color":"Green","name":"Emma Jacobs","gender":"female","email":"emmajacobs@chorizon.com","phone":"+1 (899) 554-3847","address":"173 Tapscott Street, Esmont, Maine, 7450","about":"Laboris consequat consectetur tempor labore ullamco ullamco voluptate quis quis duis ut ad. In est irure quis amet sunt nulla ad ut sit labore ut eu quis duis. Nostrud cupidatat aliqua sunt occaecat minim id consequat officia deserunt laborum. Ea dolor reprehenderit laborum veniam exercitation est nostrud excepteur laborum minim id qui et.\r\n","registered":"2019-03-29T06:24:13 -01:00","latitude":-35.53722,"longitude":155.703874,"tags":[]}
{"id":76,"isActive":false,"balance":"$1,274.29","picture":"http://placehold.it/32x32","age":25,"color":"Green","name":"Clarice Gardner","gender":"female","email":"claricegardner@chorizon.com","phone":"+1 (810) 407-3258","address":"894 Brooklyn Road, Utting, New Hampshire, 6404","about":"Elit occaecat aute ea adipisicing mollit cupidatat aliquip excepteur veniam minim. Sunt quis dolore in commodo aute esse quis. Lorem in cillum commodo eu anim commodo mollit. Adipisicing enim sunt adipisicing cupidatat adipisicing eiusmod eu do sit nisi.\r\n","registered":"2014-10-20T10:13:32 -02:00","latitude":17.11935,"longitude":65.38197,"tags":["new issue","wontfix"]}

File diff suppressed because it is too large

View File

@@ -73,7 +73,7 @@ static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
json!({"message": "The provided API key is invalid.",
"code": "invalid_api_key",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#invalid-api-key"
"link": "https://docs.meilisearch.com/errors#invalid_api_key"
})
});
@@ -520,7 +520,7 @@ async fn error_creating_index_without_action() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
// try to create a index via add documents route

View File

@@ -37,7 +37,7 @@ async fn error_api_key_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
// patch
let req = test::TestRequest::patch()
@@ -59,7 +59,7 @@ async fn error_api_key_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
}
#[actix_rt::test]
@@ -96,7 +96,7 @@ async fn error_api_key_empty_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
// patch
let req = test::TestRequest::patch()
@@ -118,7 +118,7 @@ async fn error_api_key_empty_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
}
#[actix_rt::test]
@@ -154,7 +154,7 @@ async fn error_api_key_missing_content_types() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
// patch
let req = test::TestRequest::patch()
@@ -175,7 +175,7 @@ async fn error_api_key_missing_content_types() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
}
#[actix_rt::test]
@@ -200,7 +200,7 @@ async fn error_api_key_empty_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
// patch
@@ -217,7 +217,7 @@ async fn error_api_key_empty_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
}
@@ -243,7 +243,7 @@ async fn error_api_key_malformed_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
assert_eq!(
response["message"],
json!(
@@ -265,7 +265,7 @@ async fn error_api_key_malformed_payload() {
assert_eq!(status_code, 400);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
assert_eq!(
response["message"],
json!(

View File

@@ -56,7 +56,7 @@ static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
json!({"message": "The provided API key is invalid.",
"code": "invalid_api_key",
"type": "auth",
"link": "https://docs.meilisearch.com/errors#invalid-api-key"
"link": "https://docs.meilisearch.com/errors#invalid_api_key"
})
});

View File

@@ -25,30 +25,8 @@ impl Index<'_> {
pub async fn load_test_set(&self) -> u64 {
let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
let (response, code) = self
.service
.post_str(
url,
include_str!("../assets/test_set.json"),
("content-type", "application/json"),
)
.await;
assert_eq!(code, 202);
let update_id = response["taskUid"].as_i64().unwrap();
self.wait_task(update_id as u64).await;
update_id as u64
}
pub async fn load_test_set_ndjson(&self) -> u64 {
let url = format!("/indexes/{}/documents", urlencode(self.uid.as_ref()));
let (response, code) = self
.service
.post_str(
url,
include_str!("../assets/test_set.ndjson"),
("content-type", "application/x-ndjson"),
)
.await;
let (response, code) =
self.service.post_str(url, include_str!("../assets/test_set.json")).await;
assert_eq!(code, 202);
let update_id = response["taskUid"].as_i64().unwrap();
self.wait_task(update_id as u64).await;
@@ -132,12 +110,7 @@ impl Index<'_> {
self.service.get(url).await
}
pub async fn filtered_tasks(
&self,
types: &[&str],
statuses: &[&str],
canceled_by: &[&str],
) -> (Value, StatusCode) {
pub async fn filtered_tasks(&self, types: &[&str], statuses: &[&str]) -> (Value, StatusCode) {
let mut url = format!("/tasks?indexUids={}", self.uid);
if !types.is_empty() {
let _ = write!(url, "&types={}", types.join(","));
@@ -145,9 +118,6 @@ impl Index<'_> {
if !statuses.is_empty() {
let _ = write!(url, "&statuses={}", statuses.join(","));
}
if !canceled_by.is_empty() {
let _ = write!(url, "&canceledBy={}", canceled_by.join(","));
}
self.service.get(url).await
}
@@ -163,11 +133,6 @@ impl Index<'_> {
self.service.get(url).await
}
pub async fn get_all_documents_raw(&self, options: &str) -> (Value, StatusCode) {
let url = format!("/indexes/{}/documents{}", urlencode(self.uid.as_ref()), options);
self.service.get(url).await
}
pub async fn get_all_documents(&self, options: GetAllDocumentsOptions) -> (Value, StatusCode) {
let mut url = format!("/indexes/{}/documents?", urlencode(self.uid.as_ref()));
if let Some(limit) = options.limit {
@@ -200,11 +165,6 @@ impl Index<'_> {
self.service.post_encoded(url, serde_json::to_value(&ids).unwrap(), self.encoder).await
}
pub async fn delete_batch_raw(&self, body: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/documents/delete-batch", urlencode(self.uid.as_ref()));
self.service.post_encoded(url, body, self.encoder).await
}
pub async fn settings(&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
self.service.get(url).await
@@ -215,76 +175,6 @@ impl Index<'_> {
self.service.patch_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_displayed_attributes(
&self,
settings: Value,
) -> (Value, StatusCode) {
let url =
format!("/indexes/{}/settings/displayed-attributes", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_searchable_attributes(
&self,
settings: Value,
) -> (Value, StatusCode) {
let url =
format!("/indexes/{}/settings/searchable-attributes", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_filterable_attributes(
&self,
settings: Value,
) -> (Value, StatusCode) {
let url =
format!("/indexes/{}/settings/filterable-attributes", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_sortable_attributes(
&self,
settings: Value,
) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/sortable-attributes", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_ranking_rules(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/ranking-rules", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_stop_words(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/stop-words", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_synonyms(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/synonyms", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_distinct_attribute(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/distinct-attribute", urlencode(self.uid.as_ref()));
self.service.put_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_typo_tolerance(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/typo-tolerance", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_faceting(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/faceting", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, settings, self.encoder).await
}
pub async fn update_settings_pagination(&self, settings: Value) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings/pagination", urlencode(self.uid.as_ref()));
self.service.patch_encoded(url, settings, self.encoder).await
}
pub async fn delete_settings(&self) -> (Value, StatusCode) {
let url = format!("/indexes/{}/settings", urlencode(self.uid.as_ref()));
self.service.delete(url).await
@@ -307,8 +197,8 @@ impl Index<'_> {
eprintln!("Error with post search");
resume_unwind(e);
}
let query = yaup::to_string(&query).unwrap();
let (response, code) = self.search_get(&query).await;
let (response, code) = self.search_get(query).await;
if let Err(e) = catch_unwind(move || test(response, code)) {
eprintln!("Error with get search");
resume_unwind(e);
@@ -320,8 +210,9 @@ impl Index<'_> {
self.service.post_encoded(url, query, self.encoder).await
}
pub async fn search_get(&self, query: &str) -> (Value, StatusCode) {
let url = format!("/indexes/{}/search?{}", urlencode(self.uid.as_ref()), query);
pub async fn search_get(&self, query: Value) -> (Value, StatusCode) {
let params = yaup::to_string(&query).unwrap();
let url = format!("/indexes/{}/search?{}", urlencode(self.uid.as_ref()), params);
self.service.get(url).await
}
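The reworked `search_get` above now takes a `serde_json::Value` and lets `yaup` turn it into URL query parameters (the task helpers in the following file do the same). A minimal sketch of that encoding step, assuming only the `serde_json` and `yaup` crates already pulled in by this diff; the query values are illustrative:

use serde_json::json;

fn main() {
    // Build the query as a JSON value, exactly as the new helpers do.
    let query = json!({ "q": "glass", "limit": 2 });
    // yaup serializes the value into URL query parameters, e.g. "q=glass&limit=2".
    let params = yaup::to_string(&query).unwrap();
    // The helper then appends the parameters to the route.
    println!("/indexes/test/search?{}", params);
}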

View File

@@ -8,7 +8,7 @@ use actix_web::dev::ServiceResponse;
use actix_web::http::StatusCode;
use byte_unit::{Byte, ByteUnit};
use clap::Parser;
use meilisearch::option::{IndexerOpts, MaxMemory, Opt};
use meilisearch::option::{IndexerOpts, MaxMemory, Opt, RateLimiterConfig};
use meilisearch::{analytics, create_app, setup_meilisearch};
use once_cell::sync::Lazy;
use serde_json::{json, Value};
@@ -132,8 +132,8 @@ impl Server {
self.service.get("/tasks").await
}
pub async fn tasks_filter(&self, filter: &str) -> (Value, StatusCode) {
self.service.get(format!("/tasks?{}", filter)).await
pub async fn tasks_filter(&self, filter: Value) -> (Value, StatusCode) {
self.service.get(format!("/tasks?{}", yaup::to_string(&filter).unwrap())).await
}
pub async fn get_dump_status(&self, uid: &str) -> (Value, StatusCode) {
@@ -148,12 +148,14 @@ impl Server {
self.service.post("/swap-indexes", value).await
}
pub async fn cancel_tasks(&self, value: &str) -> (Value, StatusCode) {
self.service.post(format!("/tasks/cancel?{}", value), json!(null)).await
pub async fn cancel_tasks(&self, value: Value) -> (Value, StatusCode) {
self.service
.post(format!("/tasks/cancel?{}", yaup::to_string(&value).unwrap()), json!(null))
.await
}
pub async fn delete_tasks(&self, value: &str) -> (Value, StatusCode) {
self.service.delete(format!("/tasks?{}", value)).await
pub async fn delete_tasks(&self, value: Value) -> (Value, StatusCode) {
self.service.delete(format!("/tasks?{}", yaup::to_string(&value).unwrap())).await
}
pub async fn wait_task(&self, update_id: u64) -> Value {
@@ -190,6 +192,10 @@ pub fn default_settings(dir: impl AsRef<Path>) -> Opt {
max_task_db_size: Byte::from_unit(1.0, ByteUnit::GiB).unwrap(),
http_payload_size_limit: Byte::from_unit(10.0, ByteUnit::MiB).unwrap(),
snapshot_dir: ".".into(),
rate_limiter_options: RateLimiterConfig {
rate_limiting_disable_all: true,
..Parser::parse_from(None as Option<&str>)
},
indexer_options: IndexerOpts {
// memory has to be unlimited because several meilisearch are running in test context.
max_indexing_memory: MaxMemory::unlimited(),
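The `default_settings` hunk above turns rate limiting off for the test servers while keeping every other rate-limiter field at its CLI default. A minimal sketch of that struct-update idiom, assuming `RateLimiterConfig` derives clap's `Parser` as the diff implies; the helper function name is hypothetical:

use clap::Parser;
use meilisearch::option::RateLimiterConfig;

// Hypothetical helper, not part of the diff; it only illustrates the idiom used above.
fn disabled_rate_limiter() -> RateLimiterConfig {
    RateLimiterConfig {
        // Tests opt out of rate limiting so concurrent requests are never rejected.
        rate_limiting_disable_all: true,
        // Parsing an empty argument list yields clap's defaults for every other field,
        // and struct-update syntax copies them in.
        ..Parser::parse_from(None as Option<&str>)
    }
}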

View File

@@ -39,12 +39,11 @@ impl Service {
&self,
url: impl AsRef<str>,
body: impl AsRef<str>,
header: (&str, &str),
) -> (Value, StatusCode) {
let req = test::TestRequest::post()
.uri(url.as_ref())
.set_payload(body.as_ref().to_string())
.insert_header(header);
.insert_header(("content-type", "application/json"));
self.request(req).await
}

View File

@@ -88,7 +88,7 @@ async fn error_json_bad_content_type() {
"message": r#"A Content-Type header is missing. Accepted values for the Content-Type header are: `application/json`"#,
"code": "missing_content_type",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing-content-type",
"link": "https://docs.meilisearch.com/errors#missing_content_type",
}),
"when calling the route `{}` with no content-type",
route,
@@ -117,7 +117,7 @@ async fn error_json_bad_content_type() {
"message": expected_error_message,
"code": "invalid_content_type",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-content-type",
"link": "https://docs.meilisearch.com/errors#invalid_content_type",
}),
"when calling the route `{}` with a content-type of `{}`",
route,

View File

@@ -193,7 +193,7 @@ async fn error_add_documents_test_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
// put
let req = test::TestRequest::put()
@@ -214,7 +214,7 @@ async fn error_add_documents_test_bad_content_types() {
);
assert_eq!(response["code"], "invalid_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#invalid_content_type");
}
/// missing content-type must be refused
@@ -248,7 +248,7 @@ async fn error_add_documents_test_no_content_type() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
// put
let req = test::TestRequest::put()
@@ -268,7 +268,7 @@ async fn error_add_documents_test_no_content_type() {
);
assert_eq!(response["code"], "missing_content_type");
assert_eq!(response["type"], "invalid_request");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing-content-type");
assert_eq!(response["link"], "https://docs.meilisearch.com/errors#missing_content_type");
}
#[actix_rt::test]
@@ -297,7 +297,7 @@ async fn error_add_malformed_csv_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// put
let req = test::TestRequest::put()
@@ -318,7 +318,7 @@ async fn error_add_malformed_csv_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
}
#[actix_rt::test]
@@ -347,7 +347,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// put
let req = test::TestRequest::put()
@@ -368,7 +368,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// truncate
@@ -393,7 +393,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// add one more char to the long string to test if the truncating works.
let document = format!("\"{}m\"", long);
@@ -412,7 +412,7 @@ async fn error_add_malformed_json_documents() {
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
}
#[actix_rt::test]
@@ -436,12 +436,12 @@ async fn error_add_malformed_ndjson_documents() {
assert_eq!(
response["message"],
json!(
r#"The `ndjson` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 2 column 2`."#
r#"The `ndjson` payload provided is malformed. `Couldn't serialize document value: trailing characters at line 2 column 1`."#
)
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
// put
let req = test::TestRequest::put()
@@ -456,11 +456,11 @@ async fn error_add_malformed_ndjson_documents() {
assert_eq!(status_code, 400);
assert_eq!(
response["message"],
json!("The `ndjson` payload provided is malformed. `Couldn't serialize document value: key must be a string at line 2 column 2`.")
json!("The `ndjson` payload provided is malformed. `Couldn't serialize document value: trailing characters at line 2 column 1`.")
);
assert_eq!(response["code"], json!("malformed_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#malformed_payload"));
}
#[actix_rt::test]
@@ -484,7 +484,7 @@ async fn error_add_missing_payload_csv_documents() {
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
// put
let req = test::TestRequest::put()
@@ -500,7 +500,7 @@ async fn error_add_missing_payload_csv_documents() {
assert_eq!(response["message"], json!(r#"A csv payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
}
#[actix_rt::test]
@@ -524,7 +524,7 @@ async fn error_add_missing_payload_json_documents() {
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
// put
let req = test::TestRequest::put()
@@ -540,7 +540,7 @@ async fn error_add_missing_payload_json_documents() {
assert_eq!(response["message"], json!(r#"A json payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
}
#[actix_rt::test]
@@ -564,7 +564,7 @@ async fn error_add_missing_payload_ndjson_documents() {
assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
// put
let req = test::TestRequest::put()
@@ -580,7 +580,7 @@ async fn error_add_missing_payload_ndjson_documents() {
assert_eq!(response["message"], json!(r#"A ndjson payload is missing."#));
assert_eq!(response["code"], json!("missing_payload"));
assert_eq!(response["type"], json!("invalid_request"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing-payload"));
assert_eq!(response["link"], json!("https://docs.meilisearch.com/errors#missing_payload"));
}
#[actix_rt::test]
@@ -639,7 +639,7 @@ async fn error_document_add_create_index_bad_uid() {
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-index-uid"
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
});
assert_eq!(code, 400);
@@ -737,22 +737,6 @@ async fn add_larger_dataset() {
.await;
assert_eq!(code, 200, "failed with `{}`", response);
assert_eq!(response["results"].as_array().unwrap().len(), 77);
// x-ndjson add large test
let server = Server::new().await;
let index = server.index("test");
let update_id = index.load_test_set_ndjson().await;
let (response, code) = index.get_task(update_id).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "succeeded");
assert_eq!(response["type"], "documentAdditionOrUpdate");
assert_eq!(response["details"]["indexedDocuments"], 77);
assert_eq!(response["details"]["receivedDocuments"], 77);
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() })
.await;
assert_eq!(code, 200, "failed with `{}`", response);
assert_eq!(response["results"].as_array().unwrap().len(), 77);
}
#[actix_rt::test]
@@ -781,7 +765,7 @@ async fn error_add_documents_bad_document_id() {
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(
response["error"]["link"],
json!("https://docs.meilisearch.com/errors#invalid-document-id")
json!("https://docs.meilisearch.com/errors#invalid_document_id")
);
}
@@ -809,7 +793,7 @@ async fn error_add_documents_missing_document_id() {
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(
response["error"]["link"],
json!("https://docs.meilisearch.com/errors#missing-document-id")
json!("https://docs.meilisearch.com/errors#missing_document_id")
);
}
@@ -843,7 +827,7 @@ async fn error_document_field_limit_reached() {
"message": "A document cannot contain more than 65,535 fields.",
"code": "document_fields_limit_reached",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#document-fields-limit-reached"
"link": "https://docs.meilisearch.com/errors#document_fields_limit_reached"
});
assert_eq!(response["error"], expected_error);
@@ -889,7 +873,7 @@ async fn error_add_documents_payload_size() {
"message": "The provided payload reached the size limit.",
"code": "payload_too_large",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#payload-too-large"
"link": "https://docs.meilisearch.com/errors#payload_too_large"
});
assert_eq!(response, expected_response);
@@ -912,104 +896,16 @@ async fn error_primary_key_inference() {
index.wait_task(0).await;
let (response, code) = index.get_task(0).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "failed");
insta::assert_json_snapshot!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": 0,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": {
"message": "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.",
"code": "index_primary_key_no_candidate_found",
let expected_error = json!({
"message": r#"The primary key inference process failed because the engine did not find any fields containing `id` substring in their name. If your document identifier does not contain any `id` substring, you can set the primary key of the index."#,
"code": "primary_key_inference_failed",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-primary-key-no-candidate-found"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);
"link": "https://docs.meilisearch.com/errors#primary_key_inference_failed"
});
let documents = json!([
{
"primary_id": "12",
"object_id": "42",
"id": "124",
"title": "11",
"desc": "foobar"
}
]);
index.add_documents(documents, None).await;
index.wait_task(1).await;
let (response, code) = index.get_task(1).await;
assert_eq!(code, 200);
insta::assert_json_snapshot!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": 1,
"indexUid": "test",
"status": "failed",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": {
"message": "The primary key inference failed as the engine found 3 fields ending with `id` in their names: 'id' and 'object_id'. Please specify the primary key manually using the `primaryKey` query parameter.",
"code": "index_primary_key_multiple_candidates_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-primary-key-multiple-candidates-found"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);
let documents = json!([
{
"primary_id": "12",
"title": "11",
"desc": "foobar"
}
]);
index.add_documents(documents, None).await;
index.wait_task(2).await;
let (response, code) = index.get_task(2).await;
assert_eq!(code, 200);
insta::assert_json_snapshot!(response, { ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": 2,
"indexUid": "test",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);
assert_eq!(response["error"], expected_error);
}
#[actix_rt::test]
@@ -1077,7 +973,7 @@ async fn batch_several_documents_addition() {
futures::future::join_all(waiter).await;
index.wait_task(9).await;
let (response, _code) = index.filtered_tasks(&[], &["failed"], &[]).await;
let (response, _code) = index.filtered_tasks(&[], &["failed"]).await;
// Check if only the 6th task failed
println!("{}", &response);

View File

@@ -95,7 +95,7 @@ async fn error_delete_batch_unexisting_index() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(code, 202);

View File

@@ -1,99 +0,0 @@
use meili_snap::*;
use serde_json::json;
use crate::common::Server;
#[actix_rt::test]
async fn get_all_documents_bad_offset() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.get_all_documents_raw("?offset").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `offset`: could not parse `` as a positive integer",
"code": "invalid_document_offset",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-document-offset"
}
"###);
let (response, code) = index.get_all_documents_raw("?offset=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `offset`: could not parse `doggo` as a positive integer",
"code": "invalid_document_offset",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-document-offset"
}
"###);
let (response, code) = index.get_all_documents_raw("?offset=-1").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `offset`: could not parse `-1` as a positive integer",
"code": "invalid_document_offset",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-document-offset"
}
"###);
}
#[actix_rt::test]
async fn get_all_documents_bad_limit() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.get_all_documents_raw("?limit").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `limit`: could not parse `` as a positive integer",
"code": "invalid_document_limit",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-document-limit"
}
"###);
let (response, code) = index.get_all_documents_raw("?limit=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer",
"code": "invalid_document_limit",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-document-limit"
}
"###);
let (response, code) = index.get_all_documents_raw("?limit=-1").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `limit`: could not parse `-1` as a positive integer",
"code": "invalid_document_limit",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-document-limit"
}
"###);
}
#[actix_rt::test]
async fn delete_documents_batch() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.delete_batch_raw(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Json deserialize error: invalid type: string \"doggo\", expected a sequence at line 1 column 7",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
}
"###);
}

View File

@@ -27,7 +27,7 @@ async fn error_get_unexisting_document() {
"message": "Document `1` not found.",
"code": "document_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#document-not-found"
"link": "https://docs.meilisearch.com/errors#document_not_found"
});
assert_eq!(response, expected_response);
@@ -90,7 +90,7 @@ async fn error_get_unexisting_index_all_documents() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(response, expected_response);

View File

@@ -1,5 +1,4 @@
mod add_documents;
mod delete_documents;
mod errors;
mod get_documents;
mod update_documents;

View File

@@ -13,7 +13,7 @@ async fn error_document_update_create_index_bad_uid() {
"message": "`883 fj!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-index-uid"
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
});
assert_eq!(code, 400);
@@ -167,7 +167,7 @@ async fn error_update_documents_bad_document_id() {
assert_eq!(response["error"]["type"], json!("invalid_request"));
assert_eq!(
response["error"]["link"],
json!("https://docs.meilisearch.com/errors#invalid-document-id")
json!("https://docs.meilisearch.com/errors#invalid_document_id")
);
}
@@ -193,6 +193,6 @@ async fn error_update_documents_missing_document_id() {
assert_eq!(response["error"]["type"], "invalid_request");
assert_eq!(
response["error"]["link"],
"https://docs.meilisearch.com/errors#missing-document-id"
"https://docs.meilisearch.com/errors#missing_document_id"
);
}

View File

@@ -1,7 +1,6 @@
use actix_web::http::header::ContentType;
use actix_web::test;
use http::header::ACCEPT_ENCODING;
use meili_snap::{json_string, snapshot};
use serde_json::{json, Value};
use crate::common::encoder::Encoder;
@@ -177,7 +176,7 @@ async fn error_create_existing_index() {
"message": "Index `test` already exists.",
"code": "index_already_exists",
"type": "invalid_request",
"link":"https://docs.meilisearch.com/errors#index-already-exists"
"link":"https://docs.meilisearch.com/errors#index_already_exists"
});
assert_eq!(response["error"], expected_response);
@@ -189,13 +188,13 @@ async fn error_create_with_invalid_index_uid() {
let index = server.index("test test#!");
let (response, code) = index.create(None).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value at `.uid`: `test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-index-uid"
}
"###);
let expected_response = json!({
"message": "`test test#!` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"
});
assert_eq!(response, expected_response);
assert_eq!(code, 400);
}

View File

@@ -35,7 +35,7 @@ async fn error_delete_unexisting_index() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
let response = index.wait_task(0).await;

View File

@@ -1,4 +1,3 @@
use meili_snap::{json_string, snapshot};
use serde_json::{json, Value};
use crate::common::Server;
@@ -35,7 +34,7 @@ async fn error_get_unexisting_index() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(response, expected_response);
@@ -183,13 +182,15 @@ async fn get_invalid_index_uid() {
let index = server.index("this is not a valid index name");
let (response, code) = index.get().await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "`this is not a valid index name` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-index-uid"
}
"###);
assert_eq!(code, 404);
assert_eq!(
response,
json!(
{
"message": "Index `this is not a valid index name` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index_not_found"
})
);
}

View File

@@ -55,7 +55,7 @@ async fn error_get_stats_unexisting_index() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(response, expected_response);

View File

@@ -98,7 +98,7 @@ async fn error_update_existing_primary_key() {
"message": "Index already has a primary key: `id`.",
"code": "index_primary_key_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-primary-key-already-exists"
"link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists"
});
assert_eq!(response["error"], expected_response);
@@ -117,7 +117,7 @@ async fn error_update_unexisting_index() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
assert_eq!(response["error"], expected_response);

View File

@@ -8,7 +8,6 @@ mod search;
mod settings;
mod snapshot;
mod stats;
mod swap_indexes;
mod tasks;
// Tests are isolated by features in different modules to allow better readability, test

View File

@@ -1,4 +1,3 @@
use meili_snap::*;
use serde_json::json;
use super::DOCUMENTS;
@@ -13,7 +12,7 @@ async fn search_unexisting_index() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
});
index
@@ -38,369 +37,25 @@ async fn search_unexisting_parameter() {
}
#[actix_rt::test]
async fn search_bad_q() {
async fn search_invalid_highlight_and_crop_tags() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"q": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.q`: expected a string, but found an array: `[\"doggo\"]`",
"code": "invalid_search_q",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-q"
let fields = &["cropMarker", "highlightPreTag", "highlightPostTag"];
for field in fields {
// object
let (response, code) =
index.search_post(json!({field.to_string(): {"marker": "<crop>"}})).await;
assert_eq!(code, 400, "field {} passing object: {}", &field, response);
assert_eq!(response["code"], "bad_request");
// array
let (response, code) =
index.search_post(json!({field.to_string(): ["marker", "<crop>"]})).await;
assert_eq!(code, 400, "field {} passing array: {}", &field, response);
assert_eq!(response["code"], "bad_request");
}
"###);
// Can't make the `q` fail with a get search since it'll accept anything as a string.
}
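
// A minimal sketch of the point made in the comment above (illustration only, not part of
// Meilisearch's code; assumes the `serde` and `serde_urlencoded` crates): in a GET query
// string every value arrives as text, so `q=doggo` can only ever deserialize as a `String`.
#[derive(serde::Deserialize)]
struct SketchSearchQueryGet {
    q: Option<String>,
}

#[test]
fn sketch_get_q_is_always_a_string() {
    // The query-string form of the request cannot express a JSON array for `q`.
    let parsed: SketchSearchQueryGet = serde_urlencoded::from_str("q=doggo").unwrap();
    assert_eq!(parsed.q.as_deref(), Some("doggo"));
}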
#[actix_rt::test]
async fn search_bad_offset() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"offset": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.offset`: expected a positive integer, but found a string: `\"doggo\"`",
"code": "invalid_search_offset",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-offset"
}
"###);
let (response, code) = index.search_get("offset=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `offset`: could not parse `doggo` as a positive integer",
"code": "invalid_search_offset",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-offset"
}
"###);
}
#[actix_rt::test]
async fn search_bad_limit() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"limit": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.limit`: expected a positive integer, but found a string: `\"doggo\"`",
"code": "invalid_search_limit",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-limit"
}
"###);
let (response, code) = index.search_get("limit=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer",
"code": "invalid_search_limit",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-limit"
}
"###);
}
#[actix_rt::test]
async fn search_bad_page() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"page": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.page`: expected a positive integer, but found a string: `\"doggo\"`",
"code": "invalid_search_page",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-page"
}
"###);
let (response, code) = index.search_get("page=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `page`: could not parse `doggo` as a positive integer",
"code": "invalid_search_page",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-page"
}
"###);
}
#[actix_rt::test]
async fn search_bad_hits_per_page() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"hitsPerPage": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.hitsPerPage`: expected a positive integer, but found a string: `\"doggo\"`",
"code": "invalid_search_hits_per_page",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-hits-per-page"
}
"###);
let (response, code) = index.search_get("hitsPerPage=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `hitsPerPage`: could not parse `doggo` as a positive integer",
"code": "invalid_search_hits_per_page",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-hits-per-page"
}
"###);
}
#[actix_rt::test]
async fn search_bad_attributes_to_crop() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"attributesToCrop": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.attributesToCrop`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_search_attributes_to_crop",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-attributes-to-crop"
}
"###);
// Can't make the `attributes_to_crop` fail with a get search since it'll accept anything as an array of strings.
}
#[actix_rt::test]
async fn search_bad_crop_length() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"cropLength": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.cropLength`: expected a positive integer, but found a string: `\"doggo\"`",
"code": "invalid_search_crop_length",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-crop-length"
}
"###);
let (response, code) = index.search_get("cropLength=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `cropLength`: could not parse `doggo` as a positive integer",
"code": "invalid_search_crop_length",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-crop-length"
}
"###);
}
#[actix_rt::test]
async fn search_bad_attributes_to_highlight() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"attributesToHighlight": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.attributesToHighlight`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_search_attributes_to_highlight",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-attributes-to-highlight"
}
"###);
// Can't make the `attributes_to_highlight` fail with a get search since it'll accept anything as an array of strings.
}
#[actix_rt::test]
async fn search_bad_filter() {
// Since a filter is deserialized as a json Value, it will never fail to deserialize.
// Thus the error message is not generated by deserr but written by us.
let server = Server::new().await;
let index = server.index("test");
// Also, to trigger the error message we need to actually create the index first, or else it'll
// throw an index-not-found error.
let (_, code) = index.create(None).await;
server.wait_task(0).await;
snapshot!(code, @"202 Accepted");
let (response, code) = index.search_post(json!({ "filter": true })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid syntax for the filter parameter: `expected String, Array, found: true`.",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
}
"###);
// Can't make the `filter` fail with a get search since it'll accept anything as a string.
}
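
// Minimal sketch of the idea in the comments above (illustration only, not Meilisearch's
// implementation, and assuming only `serde_json`): because the filter arrives as an arbitrary
// `serde_json::Value`, the type check and its error message are written by hand instead of
// being produced by the deserialization layer.
fn sketch_check_filter_type(filter: &serde_json::Value) -> Result<(), String> {
    use serde_json::Value;
    match filter {
        // Strings and arrays are handed over to the filter parser.
        Value::String(_) | Value::Array(_) => Ok(()),
        // Anything else produces the hand-written message shown in the test above.
        other => Err(format!(
            "Invalid syntax for the filter parameter: `expected String, Array, found: {other}`."
        )),
    }
}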
#[actix_rt::test]
async fn search_bad_sort() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"sort": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.sort`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_search_sort",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-sort"
}
"###);
// Can't make the `sort` fail with a get search since it'll accept anything as a string.
}
#[actix_rt::test]
async fn search_bad_show_matches_position() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"showMatchesPosition": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.showMatchesPosition`: expected a boolean, but found a string: `\"doggo\"`",
"code": "invalid_search_show_matches_position",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-show-matches-position"
}
"###);
let (response, code) = index.search_get("showMatchesPosition=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `showMatchesPosition`: could not parse `doggo` as a boolean, expected either `true` or `false`",
"code": "invalid_search_show_matches_position",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-show-matches-position"
}
"###);
}
#[actix_rt::test]
async fn search_bad_facets() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"facets": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.facets`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_search_facets",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-facets"
}
"###);
// Can't make the `facets` fail with a get search since it'll accept anything as an array of strings.
}
#[actix_rt::test]
async fn search_bad_highlight_pre_tag() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"highlightPreTag": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.highlightPreTag`: expected a string, but found an array: `[\"doggo\"]`",
"code": "invalid_search_highlight_pre_tag",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-highlight-pre-tag"
}
"###);
// Can't make the `highlight_pre_tag` fail with a get search since it'll accept anything as a string.
}
#[actix_rt::test]
async fn search_bad_highlight_post_tag() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"highlightPostTag": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.highlightPostTag`: expected a string, but found an array: `[\"doggo\"]`",
"code": "invalid_search_highlight_post_tag",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-highlight-post-tag"
}
"###);
// Can't make the `highlight_post_tag` fail with a get search since it'll accept anything as a string.
}
#[actix_rt::test]
async fn search_bad_crop_marker() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"cropMarker": ["doggo"]})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.cropMarker`: expected a string, but found an array: `[\"doggo\"]`",
"code": "invalid_search_crop_marker",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-crop-marker"
}
"###);
// Can't make the `crop_marker` fail with a get search since it'll accept anything as a string.
}
#[actix_rt::test]
async fn search_bad_matching_strategy() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.search_post(json!({"matchingStrategy": "doggo"})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown value `doggo` at `.matchingStrategy`: expected one of `last`, `all`",
"code": "invalid_search_matching_strategy",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy"
}
"###);
let (response, code) = index.search_get("matchingStrategy=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown value `doggo` for parameter `matchingStrategy`: expected one of `last`, `all`",
"code": "invalid_search_matching_strategy",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-matching-strategy"
}
"###);
}
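
// Hedged sketch, assuming only `serde` (the real route relies on its own deserialization
// machinery): restricting `matchingStrategy` to `last` and `all` is the kind of closed enum
// that turns any other value, such as `doggo`, into the error shown above.
#[allow(dead_code)]
#[derive(Debug, serde::Deserialize)]
#[serde(rename_all = "lowercase")]
enum SketchMatchingStrategy {
    Last,
    All,
}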
#[actix_rt::test]
@@ -416,9 +71,9 @@ async fn filter_invalid_syntax_object() {
let expected_response = json!({
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_search_filter",
"code": "invalid_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": "title & Glass"}), |response, code| {
@@ -441,9 +96,9 @@ async fn filter_invalid_syntax_array() {
let expected_response = json!({
"message": "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `title & Glass`.\n1:14 title & Glass",
"code": "invalid_search_filter",
"code": "invalid_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": ["title & Glass"]}), |response, code| {
@@ -466,9 +121,9 @@ async fn filter_invalid_syntax_string() {
let expected_response = json!({
"message": "Found unexpected characters at the end of the filter: `XOR title = Glass`. You probably forgot an `OR` or an `AND` rule.\n15:32 title = Glass XOR title = Glass",
"code": "invalid_search_filter",
"code": "invalid_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": "title = Glass XOR title = Glass"}), |response, code| {
@@ -491,9 +146,9 @@ async fn filter_invalid_attribute_array() {
let expected_response = json!({
"message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"code": "invalid_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": ["many = Glass"]}), |response, code| {
@@ -516,9 +171,9 @@ async fn filter_invalid_attribute_string() {
let expected_response = json!({
"message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass",
"code": "invalid_search_filter",
"code": "invalid_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": "many = Glass"}), |response, code| {
@@ -541,9 +196,9 @@ async fn filter_reserved_geo_attribute_array() {
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass",
"code": "invalid_search_filter",
"code": "invalid_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": ["_geo = Glass"]}), |response, code| {
@@ -566,9 +221,9 @@ async fn filter_reserved_geo_attribute_string() {
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a filter expression. Use the _geoRadius(latitude, longitude, distance) built-in rule to filter on _geo field coordinates.\n1:5 _geo = Glass",
"code": "invalid_search_filter",
"code": "invalid_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": "_geo = Glass"}), |response, code| {
@@ -591,9 +246,9 @@ async fn filter_reserved_attribute_array() {
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass",
"code": "invalid_search_filter",
"code": "invalid_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": ["_geoDistance = Glass"]}), |response, code| {
@@ -616,9 +271,9 @@ async fn filter_reserved_attribute_string() {
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a filter expression.\n1:13 _geoDistance = Glass",
"code": "invalid_search_filter",
"code": "invalid_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-filter"
"link": "https://docs.meilisearch.com/errors#invalid_filter"
});
index
.search(json!({"filter": "_geoDistance = Glass"}), |response, code| {
@@ -641,9 +296,9 @@ async fn sort_geo_reserved_attribute() {
let expected_response = json!({
"message": "`_geo` is a reserved keyword and thus can't be used as a sort expression. Use the _geoPoint(latitude, longitude) built-in rule to sort on _geo field coordinates.",
"code": "invalid_search_sort",
"code": "invalid_sort",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-sort"
"link": "https://docs.meilisearch.com/errors#invalid_sort"
});
index
.search(
@@ -671,9 +326,9 @@ async fn sort_reserved_attribute() {
let expected_response = json!({
"message": "`_geoDistance` is a reserved keyword and thus can't be used as a sort expression.",
"code": "invalid_search_sort",
"code": "invalid_sort",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-sort"
"link": "https://docs.meilisearch.com/errors#invalid_sort"
});
index
.search(
@@ -701,9 +356,9 @@ async fn sort_unsortable_attribute() {
let expected_response = json!({
"message": "Attribute `title` is not sortable. Available sortable attributes are: `id`.",
"code": "invalid_search_sort",
"code": "invalid_sort",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-sort"
"link": "https://docs.meilisearch.com/errors#invalid_sort"
});
index
.search(
@@ -731,9 +386,9 @@ async fn sort_invalid_syntax() {
let expected_response = json!({
"message": "Invalid syntax for the sort parameter: expected expression ending by `:asc` or `:desc`, found `title`.",
"code": "invalid_search_sort",
"code": "invalid_sort",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-sort"
"link": "https://docs.meilisearch.com/errors#invalid_sort"
});
index
.search(
@@ -765,9 +420,9 @@ async fn sort_unset_ranking_rule() {
let expected_response = json!({
"message": "The sort ranking rule must be specified in the ranking rules settings to use the sort parameter at search time.",
"code": "invalid_search_sort",
"code": "invalid_sort",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-search-sort"
"link": "https://docs.meilisearch.com/errors#invalid_sort"
});
index
.search(

View File

@@ -200,14 +200,11 @@ async fn search_with_filter_string_notation() {
let server = Server::new().await;
let index = server.index("test");
let (_, code) = index.update_settings(json!({"filterableAttributes": ["title"]})).await;
meili_snap::snapshot!(code, @"202 Accepted");
index.update_settings(json!({"filterableAttributes": ["title"]})).await;
let documents = DOCUMENTS.clone();
let (_, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(1).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);
index.add_documents(documents, None).await;
index.wait_task(1).await;
index
.search(
@@ -223,15 +220,11 @@ async fn search_with_filter_string_notation() {
let index = server.index("nested");
let (_, code) =
index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
meili_snap::snapshot!(code, @"202 Accepted");
index.update_settings(json!({"filterableAttributes": ["cattos", "doggos.age"]})).await;
let documents = NESTED_DOCUMENTS.clone();
let (_, code) = index.add_documents(documents, None).await;
meili_snap::snapshot!(code, @"202 Accepted");
let res = index.wait_task(3).await;
meili_snap::snapshot!(res["status"], @r###""succeeded""###);
index.add_documents(documents, None).await;
index.wait_task(3).await;
index
.search(

View File

@@ -111,56 +111,3 @@ async fn hits_per_page_0_should_not_return_any_result() {
})
.await;
}
#[actix_rt::test]
async fn ensure_placeholder_search_hit_count_valid() {
let server = Server::new().await;
let index = server.index("basic");
let documents = json!([
{
"title": "Shazam!",
"id": "287947",
"distinct": 1,
},
{
"title": "Captain Marvel",
"id": "299537",
"distinct": 4,
},
{
"title": "Escape Room",
"id": "522681",
"distinct": 2,
},
{
"title": "How to Train Your Dragon: The Hidden World",
"id": "166428",
"distinct": 3,
},
{
"title": "Glass",
"id": "450465",
"distinct": 3,
}
]);
index.add_documents(documents, None).await;
index.wait_task(0).await;
let (_response, _code) = index
.update_settings(
json!({ "rankingRules": ["distinct:asc"], "distinctAttribute": "distinct"}),
)
.await;
index.wait_task(1).await;
for page in 0..=4 {
index
.search(json!({"page": page, "hitsPerPage": 1}), |response, code| {
assert_eq!(code, 200, "{}", response);
assert_eq!(response["totalHits"], 4);
assert_eq!(response["totalPages"], 4);
})
.await;
}
}

View File

@@ -1,339 +0,0 @@
use meili_snap::*;
use serde_json::json;
use crate::common::Server;
#[actix_rt::test]
async fn settings_bad_displayed_attributes() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "displayedAttributes": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.displayedAttributes`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_displayed_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes"
}
"###);
let (response, code) = index.update_settings_displayed_attributes(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_displayed_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-displayed-attributes"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_searchable_attributes() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "searchableAttributes": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.searchableAttributes`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_searchable_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes"
}
"###);
let (response, code) = index.update_settings_searchable_attributes(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_searchable_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-searchable-attributes"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_filterable_attributes() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "filterableAttributes": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.filterableAttributes`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_filterable_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes"
}
"###);
let (response, code) = index.update_settings_filterable_attributes(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_filterable_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-filterable-attributes"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_sortable_attributes() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "sortableAttributes": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.sortableAttributes`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_sortable_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes"
}
"###);
let (response, code) = index.update_settings_sortable_attributes(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_sortable_attributes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-sortable-attributes"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_ranking_rules() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "rankingRules": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.rankingRules`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_ranking_rules",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
}
"###);
let (response, code) = index.update_settings_ranking_rules(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_ranking_rules",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_stop_words() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "stopWords": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.stopWords`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_stop_words",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words"
}
"###);
let (response, code) = index.update_settings_stop_words(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_settings_stop_words",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-stop-words"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_synonyms() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "synonyms": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.synonyms`: expected an object, but found a string: `\"doggo\"`",
"code": "invalid_settings_synonyms",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms"
}
"###);
let (response, code) = index.update_settings_synonyms(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an object, but found a string: `\"doggo\"`",
"code": "invalid_settings_synonyms",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-synonyms"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_distinct_attribute() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "distinctAttribute": ["doggo"] })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.distinctAttribute`: expected a string, but found an array: `[\"doggo\"]`",
"code": "invalid_settings_distinct_attribute",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute"
}
"###);
let (response, code) = index.update_settings_distinct_attribute(json!(["doggo"])).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected a string, but found an array: `[\"doggo\"]`",
"code": "invalid_settings_distinct_attribute",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-distinct-attribute"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_typo_tolerance() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "typoTolerance": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.typoTolerance`: expected an object, but found a string: `\"doggo\"`",
"code": "invalid_settings_typo_tolerance",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
}
"###);
let (response, code) =
index.update_settings(json!({ "typoTolerance": { "minWordSizeForTypos": "doggo" }})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.typoTolerance.minWordSizeForTypos`: expected an object, but found a string: `\"doggo\"`",
"code": "invalid_settings_typo_tolerance",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
}
"###);
let (response, code) = index.update_settings_typo_tolerance(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an object, but found a string: `\"doggo\"`",
"code": "invalid_settings_typo_tolerance",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
}
"###);
let (response, code) = index
.update_settings_typo_tolerance(
json!({ "typoTolerance": { "minWordSizeForTypos": "doggo" }}),
)
.await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown field `typoTolerance`: expected one of `enabled`, `minWordSizeForTypos`, `disableOnWords`, `disableOnAttributes`",
"code": "invalid_settings_typo_tolerance",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-typo-tolerance"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_faceting() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "faceting": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.faceting`: expected an object, but found a string: `\"doggo\"`",
"code": "invalid_settings_faceting",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-faceting"
}
"###);
let (response, code) = index.update_settings_faceting(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an object, but found a string: `\"doggo\"`",
"code": "invalid_settings_faceting",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-faceting"
}
"###);
}
#[actix_rt::test]
async fn settings_bad_pagination() {
let server = Server::new().await;
let index = server.index("test");
let (response, code) = index.update_settings(json!({ "pagination": "doggo" })).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `.pagination`: expected an object, but found a string: `\"doggo\"`",
"code": "invalid_settings_pagination",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-pagination"
}
"###);
let (response, code) = index.update_settings_pagination(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an object, but found a string: `\"doggo\"`",
"code": "invalid_settings_pagination",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-pagination"
}
"###);
}

View File

@@ -179,15 +179,15 @@ async fn error_update_setting_unexisting_index_invalid_uid() {
let server = Server::new().await;
let index = server.index("test##! ");
let (response, code) = index.update_settings(json!({})).await;
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-index-uid"
}
"###);
assert_eq!(code, 400);
let expected = json!({
"message": "`test##! ` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_index_uid"});
assert_eq!(response, expected);
}
macro_rules! test_setting_routes {
@@ -278,16 +278,22 @@ async fn error_set_invalid_ranking_rules() {
let index = server.index("test");
index.create(None).await;
let (response, code) = index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Invalid value at `.rankingRules[0]`: `manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
"code": "invalid_settings_ranking_rules",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
}
"###);
let (_response, _code) =
index.update_settings(json!({ "rankingRules": [ "manyTheFish"]})).await;
index.wait_task(1).await;
let (response, code) = index.get_task(1).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "failed");
let expected_error = json!({
"message": r#"`manyTheFish` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules."#,
"code": "invalid_ranking_rule",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_ranking_rule"
});
assert_eq!(response["error"], expected_error);
}
#[actix_rt::test]

View File

@@ -1,3 +1,2 @@
mod distinct;
mod errors;
mod get_settings;

View File

@@ -1,6 +1,5 @@
use std::time::Duration;
use meilisearch::option::ScheduleSnapshot;
use meilisearch::Opt;
use tokio::time::sleep;
@@ -37,7 +36,8 @@ async fn perform_snapshot() {
let options = Opt {
snapshot_dir: snapshot_dir.path().to_owned(),
schedule_snapshot: ScheduleSnapshot::Enabled(1),
snapshot_interval_sec: 1,
schedule_snapshot: true,
..default_settings(temp.path())
};

View File

@@ -1,94 +0,0 @@
use meili_snap::*;
use serde_json::json;
use crate::common::Server;
#[actix_rt::test]
async fn swap_indexes_bad_format() {
let server = Server::new().await;
let (response, code) = server.index_swap(json!("doggo")).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type: expected an array, but found a string: `\"doggo\"`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
}
"###);
let (response, code) = server.index_swap(json!(["doggo"])).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `[0]`: expected an object, but found a string: `\"doggo\"`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
}
"###);
}
#[actix_rt::test]
async fn swap_indexes_bad_indexes() {
let server = Server::new().await;
let (response, code) = server.index_swap(json!([{ "indexes": "doggo"}])).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value type at `[0].indexes`: expected an array, but found a string: `\"doggo\"`",
"code": "invalid_swap_indexes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-swap-indexes"
}
"###);
let (response, code) = server.index_swap(json!([{ "indexes": ["doggo"]}])).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Two indexes must be given for each swap. The list `[\"doggo\"]` contains 1 indexes.",
"code": "invalid_swap_indexes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-swap-indexes"
}
"###);
let (response, code) =
server.index_swap(json!([{ "indexes": ["doggo", "crabo", "croco"]}])).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Two indexes must be given for each swap. The list `[\"doggo\", \"crabo\", \"croco\"]` contains 3 indexes.",
"code": "invalid_swap_indexes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-swap-indexes"
}
"###);
let (response, code) = server.index_swap(json!([{ "indexes": ["doggo", "doggo"]}])).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Indexes must be declared only once during a swap. `doggo` was specified several times.",
"code": "invalid_swap_duplicate_index_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-swap-duplicate-index-found"
}
"###);
let (response, code) = server
.index_swap(json!([{ "indexes": ["doggo", "catto"]}, { "indexes": ["girafo", "doggo"]}]))
.await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Indexes must be declared only once during a swap. `doggo` was specified several times.",
"code": "invalid_swap_duplicate_index_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-swap-duplicate-index-found"
}
"###);
}
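
// Hedged sketch of the validation rules the error messages above describe (illustration only,
// not the actual index-swap code): every swap entry must list exactly two index uids, and no
// uid may appear more than once across all swaps.
fn sketch_validate_swaps(swaps: &[Vec<String>]) -> Result<(), String> {
    let mut seen = std::collections::HashSet::new();
    for indexes in swaps {
        if indexes.len() != 2 {
            return Err(format!(
                "Two indexes must be given for each swap. The list `{indexes:?}` contains {} indexes.",
                indexes.len()
            ));
        }
        for uid in indexes {
            // A uid that was already seen in any swap of the payload is rejected.
            if !seen.insert(uid.clone()) {
                return Err(format!(
                    "Indexes must be declared only once during a swap. `{uid}` was specified several times."
                ));
            }
        }
    }
    Ok(())
}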

View File

@@ -1,357 +0,0 @@
mod errors;
use meili_snap::{json_string, snapshot};
use serde_json::json;
use crate::common::{GetAllDocumentsOptions, Server};
#[actix_rt::test]
async fn swap_indexes() {
let server = Server::new().await;
let a = server.index("a");
let (_, code) = a.add_documents(json!({ "id": 1, "index": "a"}), None).await;
snapshot!(code, @"202 Accepted");
let b = server.index("b");
let (res, code) = b.add_documents(json!({ "id": 1, "index": "b"}), None).await;
snapshot!(code, @"202 Accepted");
snapshot!(res["taskUid"], @"1");
server.wait_task(1).await;
let (tasks, code) = server.tasks().await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(tasks, { ".results[].duration" => "[duration]", ".results[].enqueuedAt" => "[date]", ".results[].startedAt" => "[date]", ".results[].finishedAt" => "[date]" }), @r###"
{
"results": [
{
"uid": 1,
"indexUid": "b",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
},
{
"uid": 0,
"indexUid": "a",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
],
"limit": 20,
"from": 1,
"next": null
}
"###);
let (res, code) = server.index_swap(json!([{ "indexes": ["a", "b"] }])).await;
snapshot!(code, @"202 Accepted");
snapshot!(res["taskUid"], @"2");
server.wait_task(2).await;
let (tasks, code) = server.tasks().await;
snapshot!(code, @"200 OK");
// Notice how task 0, which initially represented the creation of index `a`, now represents the creation of index `b`.
snapshot!(json_string!(tasks, { ".results[].duration" => "[duration]", ".results[].enqueuedAt" => "[date]", ".results[].startedAt" => "[date]", ".results[].finishedAt" => "[date]" }), @r###"
{
"results": [
{
"uid": 2,
"indexUid": null,
"status": "succeeded",
"type": "indexSwap",
"canceledBy": null,
"details": {
"swaps": [
{
"indexes": [
"a",
"b"
]
}
]
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
},
{
"uid": 1,
"indexUid": "a",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
},
{
"uid": 0,
"indexUid": "b",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
],
"limit": 20,
"from": 2,
"next": null
}
"###);
// BUT, the data in `a` should now point to the data that was in `b`,
// and the opposite is true as well.
let (res, _) = a.get_all_documents(GetAllDocumentsOptions::default()).await;
snapshot!(res["results"], @r###"[{"id":1,"index":"b"}]"###);
let (res, _) = b.get_all_documents(GetAllDocumentsOptions::default()).await;
snapshot!(res["results"], @r###"[{"id":1,"index":"a"}]"###);
// ================
// And now we're going to attempt the famous and dangerous DOUBLE index swap 🤞
let c = server.index("c");
let (res, code) = c.add_documents(json!({ "id": 1, "index": "c"}), None).await;
snapshot!(code, @"202 Accepted");
snapshot!(res["taskUid"], @"3");
let d = server.index("d");
let (res, code) = d.add_documents(json!({ "id": 1, "index": "d"}), None).await;
snapshot!(code, @"202 Accepted");
snapshot!(res["taskUid"], @"4");
server.wait_task(4).await;
// ensure the index creation worked properly
let (tasks, code) = server.tasks_filter("limit=2").await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(tasks, { ".results[].duration" => "[duration]", ".results[].enqueuedAt" => "[date]", ".results[].startedAt" => "[date]", ".results[].finishedAt" => "[date]" }), @r###"
{
"results": [
{
"uid": 4,
"indexUid": "d",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
},
{
"uid": 3,
"indexUid": "c",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
],
"limit": 2,
"from": 4,
"next": 2
}
"###);
// It's happening 😲
let (res, code) =
server.index_swap(json!([{ "indexes": ["a", "b"] }, { "indexes": ["c", "d"] } ])).await;
snapshot!(res["taskUid"], @"5");
snapshot!(code, @"202 Accepted");
server.wait_task(5).await;
// ensure the index swap worked properly
let (tasks, code) = server.tasks().await;
snapshot!(code, @"200 OK");
// What should we check for each task in this test:
// Task number:
// 0. should have the indexUid `a` again
// 1. should have the indexUid `b` again
// 2. stays unchanged
// 3. now have the indexUid `d` instead of `c`
// 4. now have the indexUid `c` instead of `d`
snapshot!(json_string!(tasks, { ".results[].duration" => "[duration]", ".results[].enqueuedAt" => "[date]", ".results[].startedAt" => "[date]", ".results[].finishedAt" => "[date]" }), @r###"
{
"results": [
{
"uid": 5,
"indexUid": null,
"status": "succeeded",
"type": "indexSwap",
"canceledBy": null,
"details": {
"swaps": [
{
"indexes": [
"a",
"b"
]
},
{
"indexes": [
"c",
"d"
]
}
]
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
},
{
"uid": 4,
"indexUid": "c",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
},
{
"uid": 3,
"indexUid": "d",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
},
{
"uid": 2,
"indexUid": null,
"status": "succeeded",
"type": "indexSwap",
"canceledBy": null,
"details": {
"swaps": [
{
"indexes": [
"b",
"a"
]
}
]
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
},
{
"uid": 1,
"indexUid": "b",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
},
{
"uid": 0,
"indexUid": "a",
"status": "succeeded",
"type": "documentAdditionOrUpdate",
"canceledBy": null,
"details": {
"receivedDocuments": 1,
"indexedDocuments": 1
},
"error": null,
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
],
"limit": 20,
"from": 5,
"next": null
}
"###);
// - The data in `a` should point to `a`.
// - The data in `b` should point to `b`.
// - The data in `c` should point to `d`.
// - The data in `d` should point to `c`.
let (res, _) = a.get_all_documents(GetAllDocumentsOptions::default()).await;
snapshot!(res["results"], @r###"[{"id":1,"index":"a"}]"###);
let (res, _) = b.get_all_documents(GetAllDocumentsOptions::default()).await;
snapshot!(res["results"], @r###"[{"id":1,"index":"b"}]"###);
let (res, _) = c.get_all_documents(GetAllDocumentsOptions::default()).await;
snapshot!(res["results"], @r###"[{"id":1,"index":"d"}]"###);
let (res, _) = d.get_all_documents(GetAllDocumentsOptions::default()).await;
snapshot!(res["results"], @r###"[{"id":1,"index":"c"}]"###);
}
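
// Minimal sketch of the swap semantics exercised above (not the real scheduler): swapping two
// indexes exchanges their documents, and the `indexUid` recorded on already-finished tasks is
// rewritten as well, which is why earlier tasks change the index they report after each swap.
fn sketch_swap_indexes(
    indexes: &mut std::collections::HashMap<String, Vec<serde_json::Value>>,
    a: &str,
    b: &str,
) {
    // Take both sides out first so a missing index simply ends up absent after the swap.
    let left = indexes.remove(a);
    let right = indexes.remove(b);
    if let Some(docs) = left {
        indexes.insert(b.to_string(), docs);
    }
    if let Some(docs) = right {
        indexes.insert(a.to_string(), docs);
    }
}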

View File

@@ -1,508 +0,0 @@
use meili_snap::*;
use crate::common::Server;
#[actix_rt::test]
async fn task_bad_uids() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("uids=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
}
"###);
let (response, code) = server.cancel_tasks("uids=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
}
"###);
let (response, code) = server.delete_tasks("uids=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `doggo` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
}
"###);
let (response, code) = server.delete_tasks("uids=1,dogo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `uids[1]`: could not parse `dogo` as a positive integer",
"code": "invalid_task_uids",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
}
"###);
}
#[actix_rt::test]
async fn task_bad_canceled_by() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("canceledBy=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer",
"code": "invalid_task_canceled_by",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by"
}
"###);
let (response, code) = server.cancel_tasks("canceledBy=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer",
"code": "invalid_task_canceled_by",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by"
}
"###);
let (response, code) = server.delete_tasks("canceledBy=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `canceledBy`: could not parse `doggo` as a positive integer",
"code": "invalid_task_canceled_by",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-canceled-by"
}
"###);
}
#[actix_rt::test]
async fn task_bad_types() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("types=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-types"
}
"###);
let (response, code) = server.cancel_tasks("types=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-types"
}
"###);
let (response, code) = server.delete_tasks("types=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `types`: `doggo` is not a valid task type. Available types are `documentAdditionOrUpdate`, `documentDeletion`, `settingsUpdate`, `indexCreation`, `indexDeletion`, `indexUpdate`, `indexSwap`, `taskCancelation`, `taskDeletion`, `dumpCreation`, `snapshotCreation`.",
"code": "invalid_task_types",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-types"
}
"###);
}
#[actix_rt::test]
async fn task_bad_statuses() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("statuses=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
"code": "invalid_task_statuses",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-statuses"
}
"###);
let (response, code) = server.cancel_tasks("statuses=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
"code": "invalid_task_statuses",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-statuses"
}
"###);
let (response, code) = server.delete_tasks("statuses=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `statuses`: `doggo` is not a valid task status. Available statuses are `enqueued`, `processing`, `succeeded`, `failed`, `canceled`.",
"code": "invalid_task_statuses",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-statuses"
}
"###);
}

#[actix_rt::test]
async fn task_bad_index_uids() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("indexUids=the%20good%20doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-index-uid"
}
"###);
let (response, code) = server.cancel_tasks("indexUids=the%20good%20doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-index-uid"
}
"###);
let (response, code) = server.delete_tasks("indexUids=the%20good%20doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `indexUids`: `the good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_index_uid",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-index-uid"
}
"###);
}

#[actix_rt::test]
async fn task_bad_limit() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("limit=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `limit`: could not parse `doggo` as a positive integer",
"code": "invalid_task_limit",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-limit"
}
"###);
let (response, code) = server.cancel_tasks("limit=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
}
"###);
let (response, code) = server.delete_tasks("limit=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown parameter `limit`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
}
"###);
}

#[actix_rt::test]
async fn task_bad_from() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("from=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `from`: could not parse `doggo` as a positive integer",
"code": "invalid_task_from",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-from"
}
"###);
let (response, code) = server.cancel_tasks("from=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
}
"###);
let (response, code) = server.delete_tasks("from=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown parameter `from`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
}
"###);
}

#[actix_rt::test]
async fn task_bad_after_enqueued_at() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("afterEnqueuedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at"
}
"###);
let (response, code) = server.cancel_tasks("afterEnqueuedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at"
}
"###);
let (response, code) = server.delete_tasks("afterEnqueuedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `afterEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-after-enqueued-at"
}
"###);
}

#[actix_rt::test]
async fn task_bad_before_enqueued_at() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("beforeEnqueuedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at"
}
"###);
let (response, code) = server.cancel_tasks("beforeEnqueuedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at"
}
"###);
let (response, code) = server.delete_tasks("beforeEnqueuedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `beforeEnqueuedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_enqueued_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-enqueued-at"
}
"###);
}

#[actix_rt::test]
async fn task_bad_after_started_at() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("afterStartedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at"
}
"###);
let (response, code) = server.cancel_tasks("afterStartedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at"
}
"###);
let (response, code) = server.delete_tasks("afterStartedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `afterStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-after-started-at"
}
"###);
}

#[actix_rt::test]
async fn task_bad_before_started_at() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("beforeStartedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
}
"###);
let (response, code) = server.cancel_tasks("beforeStartedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
}
"###);
let (response, code) = server.delete_tasks("beforeStartedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `beforeStartedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
}
"###);
}

#[actix_rt::test]
async fn task_bad_after_finished_at() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("afterFinishedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at"
}
"###);
let (response, code) = server.cancel_tasks("afterFinishedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at"
}
"###);
let (response, code) = server.delete_tasks("afterFinishedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `afterFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_after_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-after-finished-at"
}
"###);
}

#[actix_rt::test]
async fn task_bad_before_finished_at() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("beforeFinishedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at"
}
"###);
let (response, code) = server.cancel_tasks("beforeFinishedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at"
}
"###);
let (response, code) = server.delete_tasks("beforeFinishedAt=doggo").await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value in parameter `beforeFinishedAt`: `doggo` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_finished_at",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-finished-at"
}
"###);
}

View File

@@ -1,6 +1,4 @@
mod errors;
use meili_snap::insta::assert_json_snapshot;
use meili_snap::insta::{self, assert_json_snapshot};
use serde_json::json;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime;
@@ -19,7 +17,7 @@ async fn error_get_unexisting_task_status() {
"message": "Task `1` not found.",
"code": "task_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#task-not-found"
"link": "https://docs.meilisearch.com/errors#task_not_found"
});
assert_eq!(response, expected_response);
@@ -115,7 +113,7 @@ async fn list_tasks_status_filtered() {
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
@@ -126,7 +124,7 @@ async fn list_tasks_status_filtered() {
index.wait_task(1).await;
let (response, code) = index.filtered_tasks(&[], &["succeeded"], &[]).await;
let (response, code) = index.filtered_tasks(&[], &["succeeded"]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
@@ -141,31 +139,16 @@ async fn list_tasks_type_filtered() {
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &[], &[]).await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 1);
let (response, code) =
index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[], &[]).await;
index.filtered_tasks(&["indexCreation", "documentAdditionOrUpdate"], &[]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 2);
}
#[actix_rt::test]
async fn list_tasks_invalid_canceled_by_filter() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.wait_task(0).await;
index
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&[], &[], &["0"]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 0);
}
#[actix_rt::test]
async fn list_tasks_status_and_type_filtered() {
let server = Server::new().await;
@@ -176,7 +159,7 @@ async fn list_tasks_status_and_type_filtered() {
.add_documents(serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(), None)
.await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"], &[]).await;
let (response, code) = index.filtered_tasks(&["indexCreation"], &["failed"]).await;
assert_eq!(code, 200, "{}", response);
assert_eq!(response["results"].as_array().unwrap().len(), 0);
@@ -184,7 +167,6 @@ async fn list_tasks_status_and_type_filtered() {
.filtered_tasks(
&["indexCreation", "documentAdditionOrUpdate"],
&["succeeded", "processing", "enqueued"],
&[],
)
.await;
assert_eq!(code, 200, "{}", response);
@@ -195,47 +177,47 @@ async fn list_tasks_status_and_type_filtered() {
async fn get_task_filter_error() {
let server = Server::new().await;
let (response, code) = server.tasks_filter("lol=pied").await;
let (response, code) = server.tasks_filter(json!( { "lol": "pied" })).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Unknown parameter `lol`: expected one of `limit`, `from`, `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"message": "Query deserialize error: unknown field `lol`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
let (response, code) = server.tasks_filter("uids=pied").await;
let (response, code) = server.tasks_filter(json!( { "uids": "pied" })).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer",
"code": "invalid_task_uids",
"message": "Task uid `pied` is invalid. It should only contain numeric characters.",
"code": "invalid_task_uids_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
"link": "https://docs.meilisearch.com/errors#invalid_task_uids_filter"
}
"###);
let (response, code) = server.tasks_filter("from=pied").await;
let (response, code) = server.tasks_filter(json!( { "from": "pied" })).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Invalid value in parameter `from`: could not parse `pied` as a positive integer",
"code": "invalid_task_from",
"message": "Query deserialize error: invalid digit found in string",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-from"
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
let (response, code) = server.tasks_filter("beforeStartedAt=pied").await;
let (response, code) = server.tasks_filter(json!( { "beforeStartedAt": "pied" })).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Invalid value in parameter `beforeStartedAt`: `pied` is an invalid date-time. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_before_started_at",
"message": "Task `beforeStartedAt` `pied` is invalid. It should follow the YYYY-MM-DD or RFC 3339 date-time format.",
"code": "invalid_task_date_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-before-started-at"
"link": "https://docs.meilisearch.com/errors#invalid_task_date_filter"
}
"###);
}
@@ -244,36 +226,36 @@ async fn get_task_filter_error() {
async fn delete_task_filter_error() {
let server = Server::new().await;
let (response, code) = server.delete_tasks("").await;
let (response, code) = server.delete_tasks(json!(null)).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
"code": "missing_task_filters",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing-task-filters"
"link": "https://docs.meilisearch.com/errors#missing_task_filters"
}
"###);
let (response, code) = server.delete_tasks("lol=pied").await;
let (response, code) = server.delete_tasks(json!({ "lol": "pied" })).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Unknown parameter `lol`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"message": "Query deserialize error: unknown field `lol`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
let (response, code) = server.delete_tasks("uids=pied").await;
let (response, code) = server.delete_tasks(json!({ "uids": "pied" })).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer",
"code": "invalid_task_uids",
"message": "Task uid `pied` is invalid. It should only contain numeric characters.",
"code": "invalid_task_uids_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
"link": "https://docs.meilisearch.com/errors#invalid_task_uids_filter"
}
"###);
}
@@ -282,36 +264,36 @@ async fn delete_task_filter_error() {
async fn cancel_task_filter_error() {
let server = Server::new().await;
let (response, code) = server.cancel_tasks("").await;
let (response, code) = server.cancel_tasks(json!(null)).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.",
"code": "missing_task_filters",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#missing-task-filters"
"link": "https://docs.meilisearch.com/errors#missing_task_filters"
}
"###);
let (response, code) = server.cancel_tasks("lol=pied").await;
let (response, code) = server.cancel_tasks(json!({ "lol": "pied" })).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Unknown parameter `lol`: expected one of `uids`, `canceledBy`, `types`, `statuses`, `indexUids`, `afterEnqueuedAt`, `beforeEnqueuedAt`, `afterStartedAt`, `beforeStartedAt`, `afterFinishedAt`, `beforeFinishedAt`",
"message": "Query deserialize error: unknown field `lol`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad-request"
"link": "https://docs.meilisearch.com/errors#bad_request"
}
"###);
let (response, code) = server.cancel_tasks("uids=pied").await;
let (response, code) = server.cancel_tasks(json!({ "uids": "pied" })).await;
assert_eq!(code, 400, "{}", response);
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
insta::assert_json_snapshot!(response, @r###"
{
"message": "Invalid value in parameter `uids`: could not parse `pied` as a positive integer",
"code": "invalid_task_uids",
"message": "Task uid `pied` is invalid. It should only contain numeric characters.",
"code": "invalid_task_uids_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-task-uids"
"link": "https://docs.meilisearch.com/errors#invalid_task_uids_filter"
}
"###);
}
@@ -436,7 +418,7 @@ async fn test_summarized_delete_batch() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
@@ -495,7 +477,7 @@ async fn test_summarized_delete_document() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
@@ -535,26 +517,46 @@ async fn test_summarized_settings_update() {
let server = Server::new().await;
let index = server.index("test");
// here we should find my payload even in the failed task.
let (response, code) = index.update_settings(json!({ "rankingRules": ["custom"] })).await;
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
{
"message": "Invalid value at `.rankingRules[0]`: `custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
"code": "invalid_settings_ranking_rules",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid-settings-ranking-rules"
}
"###);
index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
index.update_settings(json!({ "rankingRules": ["custom"] })).await;
index.wait_task(0).await;
let (task, _) = index.get_task(0).await;
dbg!(&task);
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": 0,
"indexUid": "test",
"status": "failed",
"type": "settingsUpdate",
"canceledBy": null,
"details": {
"rankingRules": [
"custom"
]
},
"error": {
"message": "`custom` ranking rule is invalid. Valid ranking rules are words, typo, sort, proximity, attribute, exactness and custom ranking rules.",
"code": "invalid_ranking_rule",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_ranking_rule"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
"startedAt": "[date]",
"finishedAt": "[date]"
}
"###);
index.update_settings(json!({ "displayedAttributes": ["doggos", "name"], "filterableAttributes": ["age", "nb_paw_pads"], "sortableAttributes": ["iq"] })).await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
assert_json_snapshot!(task,
{ ".duration" => "[duration]", ".enqueuedAt" => "[date]", ".startedAt" => "[date]", ".finishedAt" => "[date]" },
@r###"
{
"uid": 1,
"indexUid": "test",
"status": "succeeded",
"type": "settingsUpdate",
"canceledBy": null,
@@ -626,7 +628,7 @@ async fn test_summarized_index_creation() {
"message": "Index `test` already exists.",
"code": "index_already_exists",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-already-exists"
"link": "https://docs.meilisearch.com/errors#index_already_exists"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
@@ -659,7 +661,7 @@ async fn test_summarized_index_deletion() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
@@ -742,7 +744,7 @@ async fn test_summarized_index_update() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
@@ -770,7 +772,7 @@ async fn test_summarized_index_update() {
"message": "Index `test` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
@@ -862,7 +864,7 @@ async fn test_summarized_index_swap() {
"message": "Indexes `cattos`, `doggos` not found.",
"code": "index_not_found",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#index-not-found"
"link": "https://docs.meilisearch.com/errors#index_not_found"
},
"duration": "[duration]",
"enqueuedAt": "[date]",
@@ -915,7 +917,7 @@ async fn test_summarized_task_cancelation() {
// to avoid being flaky we're only going to cancel an already finished task :(
index.create(None).await;
index.wait_task(0).await;
server.cancel_tasks("uids=0").await;
server.cancel_tasks(json!({ "uids": [0] })).await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
assert_json_snapshot!(task,
@@ -948,7 +950,7 @@ async fn test_summarized_task_deletion() {
// to avoid being flaky we're only going to delete an already finished task :(
index.create(None).await;
index.wait_task(0).await;
server.delete_tasks("uids=0").await;
server.delete_tasks(json!({ "uids": [0] })).await;
index.wait_task(1).await;
let (task, _) = index.get_task(1).await;
assert_json_snapshot!(task,