Compare commits


14 Commits

Author SHA1 Message Date
Louis Dureuil 15bac3755a Fix analytics 2023-02-14 09:41:34 +01:00
Louis Dureuil 23c320262c Update tests 2023-02-14 08:57:32 +01:00
Louis Dureuil cf45c4cca5 Use IndexUid type 2023-02-14 08:56:00 +01:00
Louis Dureuil 52b46e0d6c Wrap queries and results in JSON objects 2023-02-14 08:55:27 +01:00
Louis Dureuil 878c9f173a Add basic analytics 2023-02-10 15:59:20 +01:00
Louis Dureuil 837387f2d1 Add multi search tests 2023-02-10 15:59:20 +01:00
Louis Dureuil 10d4b0b4fa Add test server search method for multi search 2023-02-10 15:59:20 +01:00
Louis Dureuil dbbd2ae91b Add search with an array of indexes 2023-02-10 15:59:20 +01:00
Louis Dureuil d7d581636d Add note in code so one does not forget next time 2023-02-10 15:50:21 +01:00
Louis Dureuil 1a2d6da24f Change Dockerfile to also pass the VERGEN_GIT_SEMVER_LIGHTWEIGHT when building 2023-02-10 15:48:54 +01:00
Louis Dureuil 1cde517a13 Ignore -dirty flag 2023-02-09 18:54:18 +01:00
Louis Dureuil e906b2c0be Update contributing.md 2023-02-09 18:54:18 +01:00
Louis Dureuil f2d0672453 Add prototype to analytics if any 2023-02-09 13:35:06 +01:00
Louis Dureuil 59cf58e773 If using a prototype, display its name at Meilisearch startup 2023-02-09 13:35:05 +01:00
64 changed files with 1009 additions and 2403 deletions

View File

@@ -3,7 +3,7 @@
# check_tag $current_tag $file_tag $file_name
function check_tag {
if [[ "$1" != "$2" ]]; then
echo "Error: the current tag does not match the version in Cargo.toml: found $2 - expected $1"
echo "Error: the current tag does not match the version in $3: found $2 - expected $1"
ret=1
fi
}
@@ -11,8 +11,12 @@ function check_tag {
ret=0
current_tag=${GITHUB_REF#'refs/tags/v'}
file_tag="$(grep '^version = ' Cargo.toml | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
check_tag $current_tag $file_tag
toml_files='*/Cargo.toml'
for toml_file in $toml_files;
do
file_tag="$(grep '^version = ' $toml_file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
check_tag $current_tag $file_tag $toml_file
done
lock_file='Cargo.lock'
lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')

View File

@@ -92,7 +92,7 @@ jobs:
build-args: |
COMMIT_SHA=${{ github.sha }}
COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
GIT_TAG=${{ github.ref_name }}
GIT_TAG=$(printf "%q" ${{ github.ref_name }})
# /!\ Don't touch this without checking with Cloud team
- name: Send CI information to Cloud team

View File

@@ -2,9 +2,6 @@ name: Rust
on:
workflow_dispatch:
schedule:
# Every day at 5:00am
- cron: '0 5 * * *'
pull_request:
push:
# trying and staging branches are for Bors config
@@ -30,18 +27,10 @@ jobs:
run: |
apt-get update && apt-get install -y curl
apt-get install build-essential -y
- name: Run test with Rust stable
if: github.event_name != 'schedule'
uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@v1
with:
toolchain: stable
override: true
- name: Run test with Rust nightly
if: github.event_name == 'schedule'
uses: actions-rs/toolchain@v1
with:
toolchain: nightly
override: true
# Disable cache due to disk space issues with Windows workers in CI
# - name: Cache dependencies
# uses: Swatinem/rust-cache@v2.2.0

View File

@@ -29,7 +29,7 @@ jobs:
run: |
raw_new_version=$(echo $NEW_VERSION | cut -d 'v' -f 2)
new_string="version = \"$raw_new_version\""
sd '^version = "\d+.\d+.\w+"$' "$new_string" Cargo.toml
sd '^version = "\d+.\d+.\w+"$' "$new_string" */Cargo.toml
- name: Build Meilisearch to update Cargo.lock
run: cargo build
- name: Commit and push the changes to the ${{ env.NEW_BRANCH }} branch

.gitignore (vendored, 2 changes)
View File

@@ -1,5 +1,3 @@
.idea/
.vscode/
/target
**/*.csv
**/*.json_lines

Cargo.lock (generated, 60 changes)
View File

@@ -36,9 +36,9 @@ dependencies = [
[[package]]
name = "actix-http"
version = "3.3.0"
version = "3.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0070905b2c4a98d184c4e81025253cb192aa8a73827553f38e9410801ceb35bb"
checksum = "0c83abf9903e1f0ad9973cc4f7b9767fd5a03a583f51a5b7a339e07987cd2724"
dependencies = [
"actix-codec",
"actix-rt",
@@ -46,7 +46,7 @@ dependencies = [
"actix-tls",
"actix-utils",
"ahash",
"base64 0.21.0",
"base64 0.13.1",
"bitflags",
"brotli",
"bytes",
@@ -68,10 +68,7 @@ dependencies = [
"rand",
"sha1",
"smallvec",
"tokio",
"tokio-util",
"tracing",
"zstd 0.12.3+zstd.1.5.2",
]
[[package]]
@@ -167,9 +164,9 @@ dependencies = [
[[package]]
name = "actix-web"
version = "4.3.0"
version = "4.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "464e0fddc668ede5f26ec1f9557a8d44eda948732f40c6b0ad79126930eb775f"
checksum = "d48f7b6534e06c7bfc72ee91db7917d4af6afe23e7d223b51e68fffbb21e96b9"
dependencies = [
"actix-codec",
"actix-http",
@@ -609,9 +606,9 @@ dependencies = [
[[package]]
name = "cargo_toml"
version = "0.14.1"
version = "0.13.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bfbc36312494041e2cdd5f06697b7e89d4b76f42773a0b5556ac290ff22acc2"
checksum = "497049e9477329f8f6a559972ee42e117487d01d1e8c2cc9f836ea6fa23a9e1a"
dependencies = [
"serde",
"toml",
@@ -1113,26 +1110,20 @@ dependencies = [
[[package]]
name = "deserr"
version = "0.4.1"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6eee2844f21cf7fb5693aae1fb8f1658127acfdb2fc072167d68a9152584ae64"
checksum = "28380303ca15ec07e1d5b079baf19cf849b09edad5cab219c1c51b2bd07523de"
dependencies = [
"actix-http",
"actix-utils",
"actix-web",
"deserr-internal",
"futures",
"serde-cs",
"serde_json",
"serde_urlencoded",
"strsim",
]
[[package]]
name = "deserr-internal"
version = "0.4.1"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c27246f8ca9eeba9dd70d614b664dc43b529251ed7bd9e633131010d340da4b9"
checksum = "860928cd8af78d223a3d70dd581f21d7c3de8aa2eecd938e0c0a399ded7c1451"
dependencies = [
"convert_case 0.5.0",
"proc-macro2",
@@ -1921,7 +1912,6 @@ dependencies = [
"insta",
"log",
"meili-snap",
"meilisearch-auth",
"meilisearch-types",
"nelson",
"page_size 0.5.0",
@@ -2537,7 +2527,6 @@ dependencies = [
"base64 0.13.1",
"enum-iterator",
"hmac",
"maplit",
"meilisearch-types",
"rand",
"roaring",
@@ -4433,7 +4422,7 @@ dependencies = [
"pbkdf2",
"sha1",
"time",
"zstd 0.11.2+zstd.1.5.2",
"zstd",
]
[[package]]
@@ -4442,16 +4431,7 @@ version = "0.11.2+zstd.1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4"
dependencies = [
"zstd-safe 5.0.2+zstd.1.5.2",
]
[[package]]
name = "zstd"
version = "0.12.3+zstd.1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806"
dependencies = [
"zstd-safe 6.0.4+zstd.1.5.4",
"zstd-safe",
]
[[package]]
@@ -4464,21 +4444,11 @@ dependencies = [
"zstd-sys",
]
[[package]]
name = "zstd-safe"
version = "6.0.4+zstd.1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7afb4b54b8910cf5447638cb54bf4e8a65cbedd783af98b98c62ffe91f185543"
dependencies = [
"libc",
"zstd-sys",
]
[[package]]
name = "zstd-sys"
version = "2.0.7+zstd.1.5.4"
version = "2.0.5+zstd.1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5"
checksum = "edc50ffce891ad571e9f9afe5039c4837bede781ac4bb13052ed7ae695518596"
dependencies = [
"cc",
"libc",

View File

@@ -16,15 +16,6 @@ members = [
"benchmarks"
]
[workspace.package]
version = "1.0.0"
authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
description = "Meilisearch HTTP server"
homepage = "https://meilisearch.com"
readme = "README.md"
edition = "2021"
license = "MIT"
[profile.release]
codegen-units = 1

View File

@@ -1,15 +1,9 @@
[package]
name = "benchmarks"
version = "1.0.0"
edition = "2018"
publish = false
version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
anyhow = "1.0.65"
csv = "1.1.6"

View File

@@ -1,14 +1,7 @@
[package]
name = "dump"
publish = false
version.workspace = true
authors.workspace = true
description.workspace = true
edition.workspace = true
homepage.workspace = true
readme.workspace = true
license.workspace = true
version = "1.0.0"
edition = "2021"
[dependencies]
anyhow = "1.0.65"

View File

@@ -203,11 +203,12 @@ pub(crate) mod test {
use big_s::S;
use maplit::btreeset;
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::{Action, Key};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::{self};
use meilisearch_types::settings::{Checked, Settings};
use meilisearch_types::star_or::StarOr;
use meilisearch_types::tasks::{Details, Status};
use serde_json::{json, Map, Value};
use time::macros::datetime;
@@ -340,7 +341,7 @@ pub(crate) mod test {
name: Some(S("doggos_key")),
uid: Uuid::from_str("9f8a34da-b6b2-42f0-939b-dbd4c3448655").unwrap(),
actions: vec![Action::DocumentsAll],
indexes: vec![IndexUidPattern::from_str("doggos").unwrap()],
indexes: vec![StarOr::Other(IndexUid::from_str("doggos").unwrap())],
expires_at: Some(datetime!(4130-03-14 12:21 UTC)),
created_at: datetime!(1960-11-15 0:00 UTC),
updated_at: datetime!(2022-11-10 0:00 UTC),
@@ -350,7 +351,7 @@ pub(crate) mod test {
name: Some(S("master_key")),
uid: Uuid::from_str("4622f717-1c00-47bb-a494-39d76a49b591").unwrap(),
actions: vec![Action::All],
indexes: vec![IndexUidPattern::all()],
indexes: vec![StarOr::Star],
expires_at: None,
created_at: datetime!(0000-01-01 00:01 UTC),
updated_at: datetime!(1964-05-04 17:25 UTC),

View File

@@ -181,8 +181,10 @@ impl CompatV5ToV6 {
.indexes
.into_iter()
.map(|index| match index {
v5::StarOr::Star => v6::IndexUidPattern::all(),
v5::StarOr::Other(uid) => v6::IndexUidPattern::new_unchecked(uid.as_str()),
v5::StarOr::Star => v6::StarOr::Star,
v5::StarOr::Other(uid) => {
v6::StarOr::Other(v6::IndexUid::new_unchecked(uid.as_str()))
}
})
.collect(),
expires_at: key.expires_at,
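The hunk above maps each key's index entries between two representations: a `StarOr` enum, where the star is a distinct variant, and an `IndexUidPattern`, where `*` is itself a valid pattern. A self-contained sketch of one direction of that mapping, using local stand-in types rather than Meilisearch's actual definitions:

```rust
// Stand-in types for illustration only; the real definitions live in
// meilisearch_types and the dump compat modules.
#[derive(Debug, Clone)]
enum StarOr<T> {
    Star,
    Other(T),
}

#[derive(Debug, Clone)]
struct IndexUidPattern(String);

impl IndexUidPattern {
    /// The `*` pattern, which matches every index.
    fn all() -> Self {
        IndexUidPattern("*".to_string())
    }
    fn new_unchecked(s: &str) -> Self {
        IndexUidPattern(s.to_string())
    }
}

// Collapse `StarOr` into a pattern: the star becomes the `*` pattern.
fn star_or_to_pattern(index: StarOr<String>) -> IndexUidPattern {
    match index {
        StarOr::Star => IndexUidPattern::all(),
        StarOr::Other(uid) => IndexUidPattern::new_unchecked(&uid),
    }
}
```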

View File

@@ -34,7 +34,8 @@ pub type PaginationSettings = meilisearch_types::settings::PaginationSettings;
// everything related to the api keys
pub type Action = meilisearch_types::keys::Action;
pub type IndexUidPattern = meilisearch_types::index_uid_pattern::IndexUidPattern;
pub type StarOr<T> = meilisearch_types::star_or::StarOr<T>;
pub type IndexUid = meilisearch_types::index_uid::IndexUid;
// everything related to the errors
pub type ResponseError = meilisearch_types::error::ResponseError;

View File

@@ -1,14 +1,7 @@
[package]
name = "file-store"
publish = false
version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true
version = "1.0.0"
edition = "2021"
[dependencies]
tempfile = "3.3.0"

View File

@@ -1,16 +1,10 @@
[package]
name = "filter-parser"
version = "1.0.0"
edition = "2021"
description = "The parser for the Meilisearch filter syntax"
publish = false
version.workspace = true
authors.workspace = true
# description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
nom = "7.1.1"
nom_locate = "4.0.0"

View File

@@ -1,17 +1,11 @@
[package]
name = "flatten-serde-json"
version = "1.0.0"
edition = "2021"
description = "Flatten serde-json objects like elastic search"
readme = "README.md"
publish = false
version.workspace = true
authors.workspace = true
# description.workspace = true
homepage.workspace = true
# readme.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
serde_json = "1.0"

View File

@@ -1,14 +1,7 @@
[package]
name = "index-scheduler"
publish = false
version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true
version = "1.0.0"
edition = "2021"
[dependencies]
anyhow = "1.0.64"
@@ -19,7 +12,6 @@ dump = { path = "../dump" }
enum-iterator = "1.1.3"
file-store = { path = "../file-store" }
log = "0.4.14"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
page_size = "0.5.0"
roaring = { version = "0.10.0", features = ["serde"] }

View File

@@ -630,13 +630,13 @@ impl IndexScheduler {
&self,
rtxn: &RoTxn,
query: &Query,
filters: &meilisearch_auth::AuthFilter,
authorized_indexes: &Option<Vec<String>>,
) -> Result<RoaringBitmap> {
let mut tasks = self.get_task_ids(rtxn, query)?;
// If the query contains a list of index uids or there is a finite list of authorized indexes,
// then we must exclude all the kinds that aren't associated with exactly one index.
if query.index_uids.is_some() || !filters.all_indexes_authorized() {
if query.index_uids.is_some() || authorized_indexes.is_some() {
for kind in enum_iterator::all::<Kind>().filter(|kind| !kind.related_to_one_index()) {
tasks -= self.get_kind(rtxn, kind)?;
}
@@ -644,11 +644,11 @@ impl IndexScheduler {
// Any task that is internally associated with a non-authorized index
// must be discarded.
if !filters.all_indexes_authorized() {
if let Some(authorized_indexes) = authorized_indexes {
let all_indexes_iter = self.index_tasks.iter(rtxn)?;
for result in all_indexes_iter {
let (index, index_tasks) = result?;
if !filters.is_index_authorized(index) {
if !authorized_indexes.contains(&index.to_owned()) {
tasks -= index_tasks;
}
}
@@ -668,11 +668,12 @@ impl IndexScheduler {
pub fn get_tasks_from_authorized_indexes(
&self,
query: Query,
filters: &meilisearch_auth::AuthFilter,
authorized_indexes: Option<Vec<String>>,
) -> Result<Vec<Task>> {
let rtxn = self.env.read_txn()?;
let tasks = self.get_task_ids_from_authorized_indexes(&rtxn, &query, filters)?;
let tasks =
self.get_task_ids_from_authorized_indexes(&rtxn, &query, &authorized_indexes)?;
let tasks = self.get_existing_tasks(
&rtxn,
@@ -1092,9 +1093,7 @@ mod tests {
use crossbeam::channel::RecvTimeoutError;
use file_store::File;
use meili_snap::snapshot;
use meilisearch_auth::AuthFilter;
use meilisearch_types::document_formats::DocumentFormatError;
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::milli::obkv_to_json;
use meilisearch_types::milli::update::IndexDocumentsMethod::{
ReplaceDocuments, UpdateDocuments,
@@ -2246,45 +2245,38 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap();
let query = Query { limit: Some(0), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[]");
let query = Query { limit: Some(1), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[2,]");
let query = Query { limit: Some(2), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[1,2,]");
let query = Query { from: Some(1), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");
let query = Query { from: Some(2), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]");
let query = Query { from: Some(1), limit: Some(1), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[1,]");
let query = Query { from: Some(1), limit: Some(2), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");
}
@@ -2309,24 +2301,21 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap();
let query = Query { statuses: Some(vec![Status::Processing]), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,]"); // only the processing tasks in the first tick
let query = Query { statuses: Some(vec![Status::Enqueued]), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); // only the enqueued tasks in the first tick
let query = Query {
statuses: Some(vec![Status::Enqueued, Status::Processing]),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]"); // both enqueued and processing tasks in the first tick
let query = Query {
@@ -2334,9 +2323,8 @@ mod tests {
after_started_at: Some(start_time),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// both enqueued and processing tasks in the first tick, but limited to those with a started_at
// that comes after the start of the test, which should exclude the enqueued tasks
snapshot!(snapshot_bitmap(&tasks), @"[0,]");
@@ -2346,9 +2334,8 @@ mod tests {
before_started_at: Some(start_time),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// both enqueued and processing tasks in the first tick, but limited to those with a started_at
// that comes before the start of the test, which should exclude all of them
snapshot!(snapshot_bitmap(&tasks), @"[]");
@@ -2359,9 +2346,8 @@ mod tests {
before_started_at: Some(start_time + Duration::minutes(1)),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// both enqueued and processing tasks in the first tick, but limited to those with a started_at
// that comes after the start of the test and before one minute after the start of the test,
// which should exclude the enqueued tasks and include the only processing task
@@ -2386,9 +2372,8 @@ mod tests {
before_started_at: Some(start_time + Duration::minutes(1)),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// both succeeded and processing tasks in the first tick, but limited to those with a started_at
// that comes after the start of the test and before one minute after the start of the test,
// which should include all tasks
@@ -2399,9 +2384,8 @@ mod tests {
before_started_at: Some(start_time),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// both succeeded and processing tasks in the first tick, but limited to those with a started_at
// that comes before the start of the test, which should exclude all tasks
snapshot!(snapshot_bitmap(&tasks), @"[]");
@@ -2412,9 +2396,8 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// both succeeded and processing tasks in the first tick, but limited to those with a started_at
// that comes after the start of the second part of the test and before one minute after the
// second start of the test, which should exclude all tasks
@@ -2432,9 +2415,8 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap();
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// we run the same query to verify that, and indeed find that the last task is matched
snapshot!(snapshot_bitmap(&tasks), @"[2,]");
@@ -2444,9 +2426,8 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// enqueued, succeeded, or processing tasks started after the second part of the test, should
// again only return the last task
snapshot!(snapshot_bitmap(&tasks), @"[2,]");
@@ -2456,9 +2437,8 @@ mod tests {
// now the last task should have failed
snapshot!(snapshot_index_scheduler(&index_scheduler), name: "end");
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// so running the last query should return nothing
snapshot!(snapshot_bitmap(&tasks), @"[]");
@@ -2468,9 +2448,8 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// but the same query on failed tasks should return the last task
snapshot!(snapshot_bitmap(&tasks), @"[2,]");
@@ -2480,9 +2459,8 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// but the same query on failed tasks should return the last task
snapshot!(snapshot_bitmap(&tasks), @"[2,]");
@@ -2493,9 +2471,8 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// same query but with an invalid uid
snapshot!(snapshot_bitmap(&tasks), @"[]");
@@ -2506,9 +2483,8 @@ mod tests {
before_started_at: Some(second_start_time + Duration::minutes(1)),
..Default::default()
};
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// same query but with a valid uid
snapshot!(snapshot_bitmap(&tasks), @"[2,]");
}
@@ -2538,21 +2514,14 @@ mod tests {
let rtxn = index_scheduler.env.read_txn().unwrap();
let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// only the first task associated with catto is returned, the indexSwap tasks are excluded!
snapshot!(snapshot_bitmap(&tasks), @"[0,]");
let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(
&rtxn,
&query,
&AuthFilter::with_allowed_indexes(
vec![IndexUidPattern::new_unchecked("doggo")].into_iter().collect(),
),
)
.get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()]))
.unwrap();
// we have asked for only the tasks associated with catto, but are only authorized to retrieve the tasks
// associated with doggo -> empty result
@@ -2560,13 +2529,7 @@ mod tests {
let query = Query::default();
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(
&rtxn,
&query,
&AuthFilter::with_allowed_indexes(
vec![IndexUidPattern::new_unchecked("doggo")].into_iter().collect(),
),
)
.get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()]))
.unwrap();
// we asked for all the tasks, but we are only authorized to retrieve the doggo tasks
// -> only the index creation of doggo should be returned
@@ -2577,14 +2540,7 @@ mod tests {
.get_task_ids_from_authorized_indexes(
&rtxn,
&query,
&AuthFilter::with_allowed_indexes(
vec![
IndexUidPattern::new_unchecked("catto"),
IndexUidPattern::new_unchecked("doggo"),
]
.into_iter()
.collect(),
),
&Some(vec!["catto".to_owned(), "doggo".to_owned()]),
)
.unwrap();
// we asked for all the tasks, but we are only authorized to retrieve the doggo and catto tasks
@@ -2592,9 +2548,8 @@ mod tests {
snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");
let query = Query::default();
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// we asked for all the tasks with all index authorized -> all tasks returned
snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,3,]");
}
@@ -2625,22 +2580,15 @@ mod tests {
let rtxn = index_scheduler.read_txn().unwrap();
let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
.unwrap();
let tasks =
index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
// 0 is not returned because it was not canceled, 3 is not returned because it is the uid of the
// taskCancelation itself
snapshot!(snapshot_bitmap(&tasks), @"[1,2,]");
let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() };
let tasks = index_scheduler
.get_task_ids_from_authorized_indexes(
&rtxn,
&query,
&AuthFilter::with_allowed_indexes(
vec![IndexUidPattern::new_unchecked("doggo")].into_iter().collect(),
),
)
.get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_string()]))
.unwrap();
// Return only 1 because the user is not authorized to see task 2
snapshot!(snapshot_bitmap(&tasks), @"[1,]");

View File

@@ -1,16 +1,10 @@
[package]
name = "json-depth-checker"
version = "1.0.0"
edition = "2021"
description = "A library that indicates if a JSON must be flattened"
publish = false
version.workspace = true
authors.workspace = true
# description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
serde_json = "1.0"

View File

@@ -1,14 +1,7 @@
[package]
name = "meili-snap"
publish = false
version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true
version = "1.0.0"
edition = "2021"
[dependencies]
insta = { version = "^1.19.1", features = ["json", "redactions"] }

View File

@@ -1,20 +1,12 @@
[package]
name = "meilisearch-auth"
publish = false
version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true
version = "1.0.0"
edition = "2021"
[dependencies]
base64 = "0.13.1"
enum-iterator = "1.1.3"
hmac = "0.12.1"
maplit = "1.0.2"
meilisearch-types = { path = "../meilisearch-types" }
rand = "0.8.5"
roaring = { version = "0.10.0", features = ["serde"] }

View File

@@ -7,10 +7,9 @@ use std::path::Path;
use std::sync::Arc;
use error::{AuthControllerError, Result};
use maplit::hashset;
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::star_or::StarOr;
use serde::{Deserialize, Serialize};
pub use store::open_auth_store_env;
use store::{generate_key_as_hexa, HeedAuthStore};
@@ -85,13 +84,34 @@ impl AuthController {
uid: Uuid,
search_rules: Option<SearchRules>,
) -> Result<AuthFilter> {
let key = self.get_key(uid)?;
let mut filters = AuthFilter::default();
let key = self
.store
.get_api_key(uid)?
.ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))?;
let key_authorized_indexes = SearchRules::Set(key.indexes.into_iter().collect());
if !key.indexes.iter().any(|i| i == &StarOr::Star) {
filters.search_rules = match search_rules {
// Intersect search_rules with parent key authorized indexes.
Some(search_rules) => SearchRules::Map(
key.indexes
.into_iter()
.filter_map(|index| {
search_rules.get_index_search_rules(&format!("{index}")).map(
|index_search_rules| (index.to_string(), Some(index_search_rules)),
)
})
.collect(),
),
None => SearchRules::Set(key.indexes.into_iter().map(|x| x.to_string()).collect()),
};
} else if let Some(search_rules) = search_rules {
filters.search_rules = search_rules;
}
let allow_index_creation = self.is_key_authorized(uid, Action::IndexesAdd, None)?;
filters.allow_index_creation = self.is_key_authorized(uid, Action::IndexesAdd, None)?;
Ok(AuthFilter { search_rules, key_authorized_indexes, allow_index_creation })
Ok(filters)
}
pub fn list_keys(&self) -> Result<Vec<Key>> {
@@ -130,7 +150,9 @@ impl AuthController {
.get_expiration_date(uid, action, None)?
.or(match index {
// else check if the key has access to the requested index.
Some(index) => self.store.get_expiration_date(uid, action, Some(index))?,
Some(index) => {
self.store.get_expiration_date(uid, action, Some(index.as_bytes()))?
}
// or to any index if no index has been requested.
None => self.store.prefix_first_expiration_date(uid, action)?,
}) {
@@ -156,59 +178,13 @@ impl AuthController {
}
pub struct AuthFilter {
search_rules: Option<SearchRules>,
key_authorized_indexes: SearchRules,
allow_index_creation: bool,
pub search_rules: SearchRules,
pub allow_index_creation: bool,
}
impl Default for AuthFilter {
fn default() -> Self {
Self {
search_rules: None,
key_authorized_indexes: SearchRules::default(),
allow_index_creation: true,
}
}
}
impl AuthFilter {
#[inline]
pub fn allow_index_creation(&self) -> bool {
self.allow_index_creation
}
pub fn with_allowed_indexes(allowed_indexes: HashSet<IndexUidPattern>) -> Self {
Self {
search_rules: None,
key_authorized_indexes: SearchRules::Set(allowed_indexes),
allow_index_creation: false,
}
}
pub fn all_indexes_authorized(&self) -> bool {
self.key_authorized_indexes.all_indexes_authorized()
&& self
.search_rules
.as_ref()
.map(|search_rules| search_rules.all_indexes_authorized())
.unwrap_or(true)
}
pub fn is_index_authorized(&self, index: &str) -> bool {
self.key_authorized_indexes.is_index_authorized(index)
&& self
.search_rules
.as_ref()
.map(|search_rules| search_rules.is_index_authorized(index))
.unwrap_or(true)
}
pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
if !self.is_index_authorized(index) {
return None;
}
let search_rules = self.search_rules.as_ref().unwrap_or(&self.key_authorized_indexes);
search_rules.get_index_search_rules(index)
Self { search_rules: SearchRules::default(), allow_index_creation: true }
}
}
@@ -216,61 +192,63 @@ impl AuthFilter {
#[derive(Debug, Serialize, Deserialize, Clone)]
#[serde(untagged)]
pub enum SearchRules {
Set(HashSet<IndexUidPattern>),
Map(HashMap<IndexUidPattern, Option<IndexSearchRules>>),
Set(HashSet<String>),
Map(HashMap<String, Option<IndexSearchRules>>),
}
impl Default for SearchRules {
fn default() -> Self {
Self::Set(hashset! { IndexUidPattern::all() })
Self::Set(Some("*".to_string()).into_iter().collect())
}
}
impl SearchRules {
fn is_index_authorized(&self, index: &str) -> bool {
pub fn is_index_authorized(&self, index: &str) -> bool {
match self {
Self::Set(set) => {
set.contains("*")
|| set.contains(index)
|| set.iter().any(|pattern| pattern.matches_str(index))
}
Self::Map(map) => {
map.contains_key("*")
|| map.contains_key(index)
|| map.keys().any(|pattern| pattern.matches_str(index))
}
Self::Set(set) => set.contains("*") || set.contains(index),
Self::Map(map) => map.contains_key("*") || map.contains_key(index),
}
}
fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
match self {
Self::Set(_) => {
if self.is_index_authorized(index) {
Self::Set(set) => {
if set.contains("*") || set.contains(index) {
Some(IndexSearchRules::default())
} else {
None
}
}
Self::Map(map) => {
// We must take the most restrictive rule from this set of index uid pattern rules.
map.iter()
.filter(|(pattern, _)| pattern.matches_str(index))
.max_by_key(|(pattern, _)| (pattern.is_exact(), pattern.len()))
.and_then(|(_, rule)| rule.clone())
map.get(index).or_else(|| map.get("*")).map(|isr| isr.clone().unwrap_or_default())
}
}
}
fn all_indexes_authorized(&self) -> bool {
/// Return the list of indexes such that `self.is_index_authorized(index) == true`,
/// or `None` if all indexes satisfy this condition.
pub fn authorized_indexes(&self) -> Option<Vec<String>> {
match self {
SearchRules::Set(set) => set.contains("*"),
SearchRules::Map(map) => map.contains_key("*"),
SearchRules::Set(set) => {
if set.contains("*") {
None
} else {
Some(set.iter().cloned().collect())
}
}
SearchRules::Map(map) => {
if map.contains_key("*") {
None
} else {
Some(map.keys().cloned().collect())
}
}
}
}
}
impl IntoIterator for SearchRules {
type Item = (IndexUidPattern, IndexSearchRules);
type Item = (String, IndexSearchRules);
type IntoIter = Box<dyn Iterator<Item = Self::Item>>;
fn into_iter(self) -> Self::IntoIter {

View File

@@ -5,21 +5,20 @@ use std::convert::{TryFrom, TryInto};
use std::fs::create_dir_all;
use std::path::Path;
use std::str;
use std::str::FromStr;
use std::sync::Arc;
use hmac::{Hmac, Mac};
use meilisearch_types::index_uid_pattern::IndexUidPattern;
use meilisearch_types::keys::KeyId;
use meilisearch_types::milli;
use meilisearch_types::milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
use meilisearch_types::milli::heed::{Database, Env, EnvOpenOptions, RwTxn};
use meilisearch_types::star_or::StarOr;
use sha2::Sha256;
use time::OffsetDateTime;
use uuid::fmt::Hyphenated;
use uuid::Uuid;
use super::error::{AuthControllerError, Result};
use super::error::Result;
use super::{Action, Key};
const AUTH_STORE_SIZE: usize = 1_073_741_824; // 1 GiB
@@ -130,7 +129,7 @@ impl HeedAuthStore {
}
}
let no_index_restriction = key.indexes.iter().any(|p| p.matches_all());
let no_index_restriction = key.indexes.contains(&StarOr::Star);
for action in actions {
if no_index_restriction {
// If there is no index restriction we put None.
@@ -215,28 +214,11 @@ impl HeedAuthStore {
&self,
uid: Uuid,
action: Action,
index: Option<&str>,
index: Option<&[u8]>,
) -> Result<Option<Option<OffsetDateTime>>> {
let rtxn = self.env.read_txn()?;
let tuple = (&uid, &action, index.map(|s| s.as_bytes()));
match self.action_keyid_index_expiration.get(&rtxn, &tuple)? {
Some(expiration) => Ok(Some(expiration)),
None => {
let tuple = (&uid, &action, None);
for result in self.action_keyid_index_expiration.prefix_iter(&rtxn, &tuple)? {
let ((_, _, index_uid_pattern), expiration) = result?;
if let Some((pattern, index)) = index_uid_pattern.zip(index) {
let index_uid_pattern = str::from_utf8(pattern)?;
let pattern = IndexUidPattern::from_str(index_uid_pattern)
.map_err(|e| AuthControllerError::Internal(Box::new(e)))?;
if pattern.matches_str(index) {
return Ok(Some(expiration));
}
}
}
Ok(None)
}
}
let tuple = (&uid, &action, index);
Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
}
pub fn prefix_first_expiration_date(

View File

@@ -1,21 +1,15 @@
[package]
name = "meilisearch-types"
publish = false
version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true
version = "1.0.0"
authors = ["marin <postma.marin@protonmail.com>"]
edition = "2021"
[dependencies]
actix-web = { version = "4.2.1", default-features = false }
anyhow = "1.0.65"
convert_case = "0.6.0"
csv = "1.1.6"
deserr = "0.4.1"
deserr = "0.3.0"
either = { version = "1.6.1", features = ["serde"] }
enum-iterator = "1.1.3"
file-store = { path = "../file-store" }

View File

@@ -0,0 +1,328 @@
/*!
This module implements the error messages of deserialization errors.
We try to:
1. Give a human-readable description of where the error originated.
2. Use the correct terms depending on the format of the request (json/query param)
3. Categorise the type of the error (e.g. missing field, wrong value type, unexpected error, etc.)
*/
use deserr::{ErrorKind, IntoValue, ValueKind, ValuePointerRef};
use super::{DeserrJsonError, DeserrQueryParamError};
use crate::error::{Code, ErrorCode};
/// Return a description of the given location in a Json, preceded by the given article.
/// e.g. `at .key1[8].key2`. If the location is the origin, the given article will not be
/// included in the description.
pub fn location_json_description(location: ValuePointerRef, article: &str) -> String {
fn rec(location: ValuePointerRef) -> String {
match location {
ValuePointerRef::Origin => String::new(),
ValuePointerRef::Key { key, prev } => rec(*prev) + "." + key,
ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
}
}
match location {
ValuePointerRef::Origin => String::new(),
_ => {
format!("{article} `{}`", rec(location))
}
}
}
/// Return a description of the list of value kinds for a Json payload.
fn value_kinds_description_json(kinds: &[ValueKind]) -> String {
// Rank each value kind so that they can be sorted (and deduplicated)
// Having a predictable order helps with pattern matching
fn order(kind: &ValueKind) -> u8 {
match kind {
ValueKind::Null => 0,
ValueKind::Boolean => 1,
ValueKind::Integer => 2,
ValueKind::NegativeInteger => 3,
ValueKind::Float => 4,
ValueKind::String => 5,
ValueKind::Sequence => 6,
ValueKind::Map => 7,
}
}
// Return a description of a single value kind, preceded by an article
fn single_description(kind: &ValueKind) -> &'static str {
match kind {
ValueKind::Null => "null",
ValueKind::Boolean => "a boolean",
ValueKind::Integer => "a positive integer",
ValueKind::NegativeInteger => "a negative integer",
ValueKind::Float => "a number",
ValueKind::String => "a string",
ValueKind::Sequence => "an array",
ValueKind::Map => "an object",
}
}
fn description_rec(kinds: &[ValueKind], count_items: &mut usize, message: &mut String) {
let (msg_part, rest): (_, &[ValueKind]) = match kinds {
[] => (String::new(), &[]),
[ValueKind::Integer | ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
("a number".to_owned(), rest)
}
[ValueKind::Integer, ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
("a number".to_owned(), rest)
}
[ValueKind::Integer, ValueKind::NegativeInteger, rest @ ..] => {
("an integer".to_owned(), rest)
}
[a] => (single_description(a).to_owned(), &[]),
[a, rest @ ..] => (single_description(a).to_owned(), rest),
};
if rest.is_empty() {
if *count_items == 0 {
message.push_str(&msg_part);
} else if *count_items == 1 {
message.push_str(&format!(" or {msg_part}"));
} else {
message.push_str(&format!(", or {msg_part}"));
}
} else {
if *count_items == 0 {
message.push_str(&msg_part);
} else {
message.push_str(&format!(", {msg_part}"));
}
*count_items += 1;
description_rec(rest, count_items, message);
}
}
let mut kinds = kinds.to_owned();
kinds.sort_by_key(order);
kinds.dedup();
if kinds.is_empty() {
// Should not happen ideally
"a different value".to_owned()
} else {
let mut message = String::new();
description_rec(kinds.as_slice(), &mut 0, &mut message);
message
}
}
/// Return the JSON string of the value preceded by a description of its kind
fn value_description_with_kind_json(v: &serde_json::Value) -> String {
match v.kind() {
ValueKind::Null => "null".to_owned(),
kind => {
format!(
"{}: `{}`",
value_kinds_description_json(&[kind]),
serde_json::to_string(v).unwrap()
)
}
}
}
impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrJsonError<C> {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let mut message = String::new();
message.push_str(&match error {
ErrorKind::IncorrectValueKind { actual, accepted } => {
let expected = value_kinds_description_json(accepted);
let received = value_description_with_kind_json(&serde_json::Value::from(actual));
let location = location_json_description(location, " at");
format!("Invalid value type{location}: expected {expected}, but found {received}")
}
ErrorKind::MissingField { field } => {
let location = location_json_description(location, " inside");
format!("Missing field `{field}`{location}")
}
ErrorKind::UnknownKey { key, accepted } => {
let location = location_json_description(location, " inside");
format!(
"Unknown field `{}`{location}: expected one of {}",
key,
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", ")
)
}
ErrorKind::UnknownValue { value, accepted } => {
let location = location_json_description(location, " at");
format!(
"Unknown value `{}`{location}: expected one of {}",
value,
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", "),
)
}
ErrorKind::Unexpected { msg } => {
let location = location_json_description(location, " at");
format!("Invalid value{location}: {msg}")
}
});
Err(DeserrJsonError::new(message, C::default().error_code()))
}
}
pub fn immutable_field_error(field: &str, accepted: &[&str], code: Code) -> DeserrJsonError {
let msg = format!(
"Immutable field `{field}`: expected one of {}",
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", ")
);
DeserrJsonError::new(msg, code)
}
/// Return a description of the given location in query parameters, preceded by the
/// given article. e.g. `at key5[2]`. If the location is the origin, the given article
/// will not be included in the description.
pub fn location_query_param_description(location: ValuePointerRef, article: &str) -> String {
fn rec(location: ValuePointerRef) -> String {
match location {
ValuePointerRef::Origin => String::new(),
ValuePointerRef::Key { key, prev } => {
if matches!(prev, ValuePointerRef::Origin) {
key.to_owned()
} else {
rec(*prev) + "." + key
}
}
ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
}
}
match location {
ValuePointerRef::Origin => String::new(),
_ => {
format!("{article} `{}`", rec(location))
}
}
}
impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrQueryParamError<C> {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> Result<Self, Self> {
let mut message = String::new();
message.push_str(&match error {
ErrorKind::IncorrectValueKind { actual, accepted } => {
let expected = value_kinds_description_query_param(accepted);
let received = value_description_with_kind_query_param(actual);
let location = location_query_param_description(location, " for parameter");
format!("Invalid value type{location}: expected {expected}, but found {received}")
}
ErrorKind::MissingField { field } => {
let location = location_query_param_description(location, " inside");
format!("Missing parameter `{field}`{location}")
}
ErrorKind::UnknownKey { key, accepted } => {
let location = location_query_param_description(location, " inside");
format!(
"Unknown parameter `{}`{location}: expected one of {}",
key,
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", ")
)
}
ErrorKind::UnknownValue { value, accepted } => {
let location = location_query_param_description(location, " for parameter");
format!(
"Unknown value `{}`{location}: expected one of {}",
value,
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", "),
)
}
ErrorKind::Unexpected { msg } => {
let location = location_query_param_description(location, " in parameter");
format!("Invalid value{location}: {msg}")
}
});
Err(DeserrQueryParamError::new(message, C::default().error_code()))
}
}
/// Return a description of the list of value kinds for query parameters
/// Since query parameters are always treated as strings, we always return
/// "a string" for now.
fn value_kinds_description_query_param(_accepted: &[ValueKind]) -> String {
"a string".to_owned()
}
fn value_description_with_kind_query_param<V: IntoValue>(actual: deserr::Value<V>) -> String {
match actual {
deserr::Value::Null => "null".to_owned(),
deserr::Value::Boolean(x) => format!("a boolean: `{x}`"),
deserr::Value::Integer(x) => format!("an integer: `{x}`"),
deserr::Value::NegativeInteger(x) => {
format!("an integer: `{x}`")
}
deserr::Value::Float(x) => {
format!("a number: `{x}`")
}
deserr::Value::String(x) => {
format!("a string: `{x}`")
}
deserr::Value::Sequence(_) => "multiple values".to_owned(),
deserr::Value::Map(_) => "multiple parameters".to_owned(),
}
}
#[cfg(test)]
mod tests {
use deserr::ValueKind;
use crate::deserr::error_messages::value_kinds_description_json;
#[test]
fn test_value_kinds_description_json() {
insta::assert_display_snapshot!(value_kinds_description_json(&[]), @"a different value");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"a negative integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Map]), @"an object");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"a negative integer or an array");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
}
}
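To complement the snapshot tests above, here is a test-style sketch exercising `location_json_description` from the same module; it assumes `ValuePointerRef`'s fields are as the pattern matches in the module suggest (`key: &str`, `index: usize`, `prev: &ValuePointerRef`):

```rust
#[cfg(test)]
mod location_tests {
    use deserr::ValuePointerRef;

    use crate::deserr::error_messages::location_json_description;

    #[test]
    fn renders_nested_locations() {
        // Build the location `.key1[8].key2` from the origin outward.
        let origin = ValuePointerRef::Origin;
        let key1 = ValuePointerRef::Key { key: "key1", prev: &origin };
        let idx8 = ValuePointerRef::Index { index: 8, prev: &key1 };
        let key2 = ValuePointerRef::Key { key: "key2", prev: &idx8 };
        assert_eq!(location_json_description(key2, " at"), " at `.key1[8].key2`");
        // At the origin, the article is omitted entirely.
        assert_eq!(location_json_description(origin, " at"), "");
    }
}
```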

View File

@@ -1,19 +1,18 @@
use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;
use std::ops::ControlFlow;
use deserr::errors::{JsonError, QueryParamError};
use deserr::{take_cf_content, DeserializeError, IntoValue, MergeWithError, ValuePointerRef};
use deserr::{DeserializeError, MergeWithError, ValuePointerRef};
use crate::error::deserr_codes::*;
use crate::error::deserr_codes::{self, *};
use crate::error::{
Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError,
unwrap_any, Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError,
ParseOffsetDateTimeError,
};
use crate::index_uid::IndexUidFormatError;
use crate::tasks::{ParseTaskKindError, ParseTaskStatusError};
pub mod error_messages;
pub mod query_params;
/// Marker type for the Json format
@@ -21,8 +20,8 @@ pub struct DeserrJson;
/// Marker type for the Query Parameter format
pub struct DeserrQueryParam;
pub type DeserrJsonError<C = BadRequest> = DeserrError<DeserrJson, C>;
pub type DeserrQueryParamError<C = BadRequest> = DeserrError<DeserrQueryParam, C>;
pub type DeserrJsonError<C = deserr_codes::BadRequest> = DeserrError<DeserrJson, C>;
pub type DeserrQueryParamError<C = deserr_codes::BadRequest> = DeserrError<DeserrQueryParam, C>;
/// A request deserialization error.
///
@@ -38,7 +37,6 @@ impl<Format, C: Default + ErrorCode> DeserrError<Format, C> {
Self { msg, code, _phantom: PhantomData }
}
}
impl<Format, C: Default + ErrorCode> std::fmt::Debug for DeserrError<Format, C> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish()
@@ -51,16 +49,6 @@ impl<Format, C: Default + ErrorCode> std::fmt::Display for DeserrError<Format, C
}
}
impl<F, C: Default + ErrorCode> actix_web::ResponseError for DeserrError<F, C> {
fn status_code(&self) -> actix_web::http::StatusCode {
self.code.http()
}
fn error_response(&self) -> actix_web::HttpResponse<actix_web::body::BoxBody> {
crate::error::ResponseError::from_msg(self.msg.to_string(), self.code).error_response()
}
}
impl<Format, C: Default + ErrorCode> std::error::Error for DeserrError<Format, C> {}
impl<Format, C: Default + ErrorCode> ErrorCode for DeserrError<Format, C> {
fn error_code(&self) -> Code {
@@ -76,8 +64,8 @@ impl<Format, C1: Default + ErrorCode, C2: Default + ErrorCode>
_self_: Option<Self>,
other: DeserrError<Format, C2>,
_merge_location: ValuePointerRef,
) -> ControlFlow<Self, Self> {
ControlFlow::Break(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
) -> Result<Self, Self> {
Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
}
}
@@ -86,56 +74,17 @@ impl<Format, C: Default + ErrorCode> MergeWithError<Infallible> for DeserrError<
_self_: Option<Self>,
_other: Infallible,
_merge_location: ValuePointerRef,
) -> ControlFlow<Self, Self> {
) -> Result<Self, Self> {
unreachable!()
}
}
impl<C: Default + ErrorCode> DeserializeError for DeserrJsonError<C> {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> ControlFlow<Self, Self> {
ControlFlow::Break(DeserrJsonError::new(
take_cf_content(JsonError::error(None, error, location)).to_string(),
C::default().error_code(),
))
}
}
impl<C: Default + ErrorCode> DeserializeError for DeserrQueryParamError<C> {
fn error<V: IntoValue>(
_self_: Option<Self>,
error: deserr::ErrorKind<V>,
location: ValuePointerRef,
) -> ControlFlow<Self, Self> {
ControlFlow::Break(DeserrQueryParamError::new(
take_cf_content(QueryParamError::error(None, error, location)).to_string(),
C::default().error_code(),
))
}
}
pub fn immutable_field_error(field: &str, accepted: &[&str], code: Code) -> DeserrJsonError {
let msg = format!(
"Immutable field `{field}`: expected one of {}",
accepted
.iter()
.map(|accepted| format!("`{}`", accepted))
.collect::<Vec<String>>()
.join(", ")
);
DeserrJsonError::new(msg, code)
}
// Implement a convenience function to build a `missing_field` error
macro_rules! make_missing_field_convenience_builder {
($err_code:ident, $fn_name:ident) => {
impl DeserrJsonError<$err_code> {
pub fn $fn_name(field: &str, location: ValuePointerRef) -> Self {
let x = deserr::take_cf_content(Self::error::<Infallible>(
let x = unwrap_any(Self::error::<Infallible>(
None,
deserr::ErrorKind::MissingField { field },
location,
@@ -163,7 +112,7 @@ macro_rules! merge_with_error_impl_take_error_message {
_self_: Option<Self>,
other: $err_type,
merge_location: ValuePointerRef,
) -> ControlFlow<Self, Self> {
) -> Result<Self, Self> {
DeserrError::<Format, C>::error::<Infallible>(
None,
deserr::ErrorKind::Unexpected { msg: other.to_string() },

View File

@@ -15,9 +15,10 @@ use std::convert::Infallible;
use std::ops::Deref;
use std::str::FromStr;
use deserr::{DeserializeError, Deserr, MergeWithError, ValueKind};
use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
use super::{DeserrParseBoolError, DeserrParseIntError};
use crate::error::unwrap_any;
use crate::index_uid::IndexUid;
use crate::tasks::{Kind, Status};
@@ -37,7 +38,7 @@ impl<T> Deref for Param<T> {
}
}
impl<T, E> Deserr<E> for Param<T>
impl<T, E> DeserializeFromValue<E> for Param<T>
where
E: DeserializeError + MergeWithError<T::Err>,
T: FromQueryParameter,
@@ -49,9 +50,9 @@ where
match value {
deserr::Value::String(s) => match T::from_query_param(&s) {
Ok(x) => Ok(Param(x)),
Err(e) => Err(deserr::take_cf_content(E::merge(None, e, location))),
Err(e) => Err(unwrap_any(E::merge(None, e, location))),
},
_ => Err(deserr::take_cf_content(E::error(
_ => Err(unwrap_any(E::error(
None,
deserr::ErrorKind::IncorrectValueKind {
actual: value,

View File

@@ -127,7 +127,7 @@ macro_rules! make_error_codes {
}
impl Code {
/// return the HTTP status code associated with the `Code`
pub fn http(&self) -> StatusCode {
fn http(&self) -> StatusCode {
match self {
$(
Code::$code_ident => StatusCode::$status
@@ -381,6 +381,14 @@ impl ErrorCode for io::Error {
}
}
/// Unwrap a result, either its Ok or Err value.
pub fn unwrap_any<T>(any: Result<T, T>) -> T {
match any {
Ok(any) => any,
Err(any) => any,
}
}
/// Deserialization when `deserr` cannot parse an API key date.
#[derive(Debug)]
pub struct ParseOffsetDateTimeError(pub String);
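`unwrap_any` exists because the older deserr hooks in this diff return `Result<Self, Self>`, where both sides carry a value of the same type. A minimal runnable sketch of its behavior:

```rust
/// Unwrap a result where both sides carry the same type (as defined above).
fn unwrap_any<T>(any: Result<T, T>) -> T {
    match any {
        Ok(any) => any,
        Err(any) => any,
    }
}

fn main() {
    // deserr's merge hooks return Result<Self, Self>: either side is usable.
    assert_eq!(unwrap_any::<i32>(Ok(1)), 1);
    assert_eq!(unwrap_any::<i32>(Err(2)), 2);
}
```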

View File

@@ -2,14 +2,14 @@ use std::error::Error;
use std::fmt;
use std::str::FromStr;
use deserr::Deserr;
use deserr::DeserializeFromValue;
use crate::error::{Code, ErrorCode};
/// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400
/// bytes long
#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
#[deserr(try_from(String) = IndexUid::try_from -> IndexUidFormatError)]
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(from(String) = IndexUid::try_from -> IndexUidFormatError)]
pub struct IndexUid(String);
impl IndexUid {

View File

@@ -1,124 +0,0 @@
use std::borrow::Borrow;
use std::error::Error;
use std::fmt;
use std::ops::Deref;
use std::str::FromStr;
use deserr::Deserr;
use serde::{Deserialize, Serialize};
use crate::error::{Code, ErrorCode};
use crate::index_uid::{IndexUid, IndexUidFormatError};
/// An index uid pattern is composed of only ascii alphanumeric characters, - and _, between 1 and 400
/// bytes long and optionally ending with a *.
#[derive(Serialize, Deserialize, Deserr, Debug, Clone, PartialEq, Eq, Hash)]
#[deserr(try_from(&String) = FromStr::from_str -> IndexUidPatternFormatError)]
pub struct IndexUidPattern(String);
impl IndexUidPattern {
pub fn new_unchecked(s: impl AsRef<str>) -> Self {
Self(s.as_ref().to_string())
}
/// Matches any index name.
pub fn all() -> Self {
IndexUidPattern::from_str("*").unwrap()
}
/// Returns `true` if it matches any index.
pub fn matches_all(&self) -> bool {
self.0 == "*"
}
/// Returns `true` if the pattern matches a specific index name.
pub fn is_exact(&self) -> bool {
!self.0.ends_with('*')
}
/// Returns whether this index uid matches this index uid pattern.
pub fn matches(&self, uid: &IndexUid) -> bool {
self.matches_str(uid.as_str())
}
/// Returns whether this string matches this index uid pattern.
pub fn matches_str(&self, uid: &str) -> bool {
match self.0.strip_suffix('*') {
Some(prefix) => uid.starts_with(prefix),
None => self.0 == uid,
}
}
}
impl Deref for IndexUidPattern {
type Target = str;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Borrow<str> for IndexUidPattern {
fn borrow(&self) -> &str {
&self.0
}
}
impl TryFrom<String> for IndexUidPattern {
type Error = IndexUidPatternFormatError;
fn try_from(uid: String) -> Result<Self, Self::Error> {
let result = match uid.strip_suffix('*') {
Some("") => Ok(IndexUidPattern(uid)),
Some(prefix) => IndexUid::from_str(prefix).map(|_| IndexUidPattern(uid)),
None => IndexUid::try_from(uid).map(IndexUid::into_inner).map(IndexUidPattern),
};
match result {
Ok(index_uid_pattern) => Ok(index_uid_pattern),
Err(IndexUidFormatError { invalid_uid }) => {
Err(IndexUidPatternFormatError { invalid_uid })
}
}
}
}
impl FromStr for IndexUidPattern {
type Err = IndexUidPatternFormatError;
fn from_str(uid: &str) -> Result<IndexUidPattern, IndexUidPatternFormatError> {
uid.to_string().try_into()
}
}
impl From<IndexUidPattern> for String {
fn from(IndexUidPattern(uid): IndexUidPattern) -> Self {
uid
}
}
#[derive(Debug)]
pub struct IndexUidPatternFormatError {
pub invalid_uid: String,
}
impl fmt::Display for IndexUidPatternFormatError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
"`{}` is not a valid index uid pattern. Index uid patterns \
can be an integer or a string containing only alphanumeric \
characters, hyphens (-), underscores (_), and \
can optionally end with a star (*).",
self.invalid_uid,
)
}
}
impl Error for IndexUidPatternFormatError {}
impl ErrorCode for IndexUidPatternFormatError {
fn error_code(&self) -> Code {
Code::InvalidIndexUid
}
}
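
For reference, a self-contained sketch of the matching rule `matches_str` implements above: a trailing `*` turns the pattern into a prefix match, otherwise the comparison is exact.

```rust
// Same strip-suffix logic as `IndexUidPattern::matches_str` above.
fn matches_str(pattern: &str, uid: &str) -> bool {
    match pattern.strip_suffix('*') {
        Some(prefix) => uid.starts_with(prefix),
        None => pattern == uid,
    }
}

fn main() {
    assert!(matches_str("*", "movies"));             // matches any index
    assert!(matches_str("movies-*", "movies-2023")); // prefix match
    assert!(matches_str("movies", "movies"));        // exact match
    assert!(!matches_str("movies", "movies-2023"));  // no star, no prefix match
}
```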

View File

@@ -2,7 +2,7 @@ use std::convert::Infallible;
use std::hash::Hash;
use std::str::FromStr;
use deserr::{DeserializeError, Deserr, MergeWithError, ValuePointerRef};
use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
use enum_iterator::Sequence;
use milli::update::Setting;
use serde::{Deserialize, Serialize};
@@ -11,44 +11,31 @@ use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};
use uuid::Uuid;
use crate::deserr::{immutable_field_error, DeserrError, DeserrJsonError};
use crate::deserr::error_messages::immutable_field_error;
use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::error::{Code, ErrorCode, ParseOffsetDateTimeError};
use crate::index_uid_pattern::{IndexUidPattern, IndexUidPatternFormatError};
use crate::error::{unwrap_any, Code, ParseOffsetDateTimeError};
use crate::index_uid::IndexUid;
use crate::star_or::StarOr;
pub type KeyId = Uuid;
impl<C: Default + ErrorCode> MergeWithError<IndexUidPatternFormatError> for DeserrJsonError<C> {
fn merge(
_self_: Option<Self>,
other: IndexUidPatternFormatError,
merge_location: deserr::ValuePointerRef,
) -> std::ops::ControlFlow<Self, Self> {
DeserrError::error::<Infallible>(
None,
deserr::ErrorKind::Unexpected { msg: other.to_string() },
merge_location,
)
}
}
#[derive(Debug, Deserr)]
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct CreateApiKey {
#[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
pub description: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidApiKeyName>)]
pub name: Option<String>,
#[deserr(default = Uuid::new_v4(), error = DeserrJsonError<InvalidApiKeyUid>, try_from(&String) = Uuid::from_str -> uuid::Error)]
#[deserr(default = Uuid::new_v4(), error = DeserrJsonError<InvalidApiKeyUid>, from(&String) = Uuid::from_str -> uuid::Error)]
pub uid: KeyId,
#[deserr(error = DeserrJsonError<InvalidApiKeyActions>, missing_field_error = DeserrJsonError::missing_api_key_actions)]
pub actions: Vec<Action>,
#[deserr(error = DeserrJsonError<InvalidApiKeyIndexes>, missing_field_error = DeserrJsonError::missing_api_key_indexes)]
pub indexes: Vec<IndexUidPattern>,
#[deserr(error = DeserrJsonError<InvalidApiKeyExpiresAt>, try_from(Option<String>) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)]
pub indexes: Vec<StarOr<IndexUid>>,
#[deserr(error = DeserrJsonError<InvalidApiKeyExpiresAt>, from(Option<String>) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)]
pub expires_at: Option<OffsetDateTime>,
}
impl CreateApiKey {
pub fn to_key(self) -> Key {
let CreateApiKey { description, name, uid, actions, indexes, expires_at } = self;
@@ -78,7 +65,7 @@ fn deny_immutable_fields_api_key(
"expiresAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyExpiresAt),
"createdAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyCreatedAt),
"updatedAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyUpdatedAt),
_ => deserr::take_cf_content(DeserrJsonError::<BadRequest>::error::<Infallible>(
_ => unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
None,
deserr::ErrorKind::UnknownKey { key: field, accepted },
location,
@@ -86,7 +73,7 @@ fn deny_immutable_fields_api_key(
}
}
#[derive(Debug, Deserr)]
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
pub struct PatchApiKey {
#[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
@@ -103,7 +90,7 @@ pub struct Key {
pub name: Option<String>,
pub uid: KeyId,
pub actions: Vec<Action>,
pub indexes: Vec<IndexUidPattern>,
pub indexes: Vec<StarOr<IndexUid>>,
#[serde(with = "time::serde::rfc3339::option")]
pub expires_at: Option<OffsetDateTime>,
#[serde(with = "time::serde::rfc3339")]
@@ -121,7 +108,7 @@ impl Key {
description: Some("Use it for anything that is not a search operation. Caution! Do not expose it on a public frontend".to_string()),
uid,
actions: vec![Action::All],
indexes: vec![IndexUidPattern::all()],
indexes: vec![StarOr::Star],
expires_at: None,
created_at: now,
updated_at: now,
@@ -136,7 +123,7 @@ impl Key {
description: Some("Use it to search from the frontend".to_string()),
uid,
actions: vec![Action::Search],
indexes: vec![IndexUidPattern::all()],
indexes: vec![StarOr::Star],
expires_at: None,
created_at: now,
updated_at: now,
@@ -181,7 +168,9 @@ fn parse_expiration_date(
}
}
#[derive(Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, Deserr)]
#[derive(
Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, DeserializeFromValue,
)]
#[repr(u8)]
pub enum Action {
#[serde(rename = "*")]

View File

@@ -3,7 +3,6 @@ pub mod deserr;
pub mod document_formats;
pub mod error;
pub mod index_uid;
pub mod index_uid_pattern;
pub mod keys;
pub mod settings;
pub mod star_or;

View File

@@ -3,10 +3,9 @@ use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;
use std::num::NonZeroUsize;
use std::ops::ControlFlow;
use std::str::FromStr;
use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
use deserr::{DeserializeError, DeserializeFromValue, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
use milli::update::Setting;
use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
@@ -14,6 +13,7 @@ use serde::{Deserialize, Serialize, Serializer};
use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::error::unwrap_any;
/// The maximum number of results that the engine
/// will be able to return in one search call.
@@ -41,7 +41,7 @@ pub struct Checked;
#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct Unchecked;
impl<E> Deserr<E> for Unchecked
impl<E> DeserializeFromValue<E> for Unchecked
where
E: DeserializeError,
{
@@ -59,13 +59,13 @@ fn validate_min_word_size_for_typo_setting<E: DeserializeError>(
) -> Result<MinWordSizeTyposSetting, E> {
if let (Setting::Set(one), Setting::Set(two)) = (s.one_typo, s.two_typos) {
if one > two {
return Err(deserr::take_cf_content(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater or equals to `oneTypo` but found `oneTypo: {one}` and twoTypos: {two}`.") }, location)));
return Err(unwrap_any(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater or equals to `oneTypo` but found `oneTypo: {one}` and twoTypos: {two}`.") }, location)));
}
}
Ok(s)
}
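
A minimal sketch of the invariant this validator enforces, with the `Setting` wrapper replaced by `Option` for brevity:

```rust
// `oneTypo` must not exceed `twoTypos`; unset fields are not checked.
fn validate(one_typo: Option<u8>, two_typos: Option<u8>) -> Result<(), String> {
    if let (Some(one), Some(two)) = (one_typo, two_typos) {
        if one > two {
            return Err(format!("`oneTypo` ({one}) must not exceed `twoTypos` ({two})"));
        }
    }
    Ok(())
}

fn main() {
    assert!(validate(Some(2), Some(4)).is_ok());
    assert!(validate(Some(5), Some(4)).is_err());
    assert!(validate(None, Some(4)).is_ok());
}
```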
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError<InvalidSettingsTypoTolerance>)]
pub struct MinWordSizeTyposSetting {
@@ -77,7 +77,7 @@ pub struct MinWordSizeTyposSetting {
pub two_typos: Setting<u8>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrJsonError<InvalidSettingsTypoTolerance>>)]
pub struct TypoSettings {
@@ -95,7 +95,7 @@ pub struct TypoSettings {
pub disable_on_attributes: Setting<BTreeSet<String>>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct FacetingSettings {
@@ -104,7 +104,7 @@ pub struct FacetingSettings {
pub max_values_per_facet: Setting<usize>,
}
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct PaginationSettings {
@@ -118,7 +118,7 @@ impl MergeWithError<milli::CriterionError> for DeserrJsonError<InvalidSettingsRa
_self_: Option<Self>,
other: milli::CriterionError,
merge_location: ValuePointerRef,
) -> ControlFlow<Self, Self> {
) -> Result<Self, Self> {
Self::error::<Infallible>(
None,
ErrorKind::Unexpected { msg: other.to_string() },
@@ -130,7 +130,7 @@ impl MergeWithError<milli::CriterionError> for DeserrJsonError<InvalidSettingsRa
/// Holds all the settings for an index. `T` can either be `Checked` if they represent settings
/// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the latter case, a
/// call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[serde(
deny_unknown_fields,
rename_all = "camelCase",
@@ -509,8 +509,8 @@ pub fn settings(
})
}
#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
#[deserr(try_from(&String) = FromStr::from_str -> CriterionError)]
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(from(&String) = FromStr::from_str -> CriterionError)]
pub enum RankingRuleView {
/// Sorted by decreasing number of matched query terms.
/// Query words at the front of an attribute are considered better than if they were at the back.
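
For context, a minimal sketch of the `Checked`/`Unchecked` typestate pattern the `Settings<T>` doc comment above describes; the field and method names here are illustrative, not the real ones.

```rust
use std::marker::PhantomData;

struct Checked;
struct Unchecked;

// The state parameter exists only at the type level.
struct Settings<T> {
    max_total_hits: Option<usize>,
    _state: PhantomData<T>,
}

impl Settings<Unchecked> {
    // Validation is the only way to obtain a `Settings<Checked>`.
    fn check(self) -> Settings<Checked> {
        // Real validation would happen here; this sketch only re-tags the state.
        Settings { max_total_hits: self.max_total_hits, _state: PhantomData }
    }
}

fn main() {
    let unchecked = Settings::<Unchecked> { max_total_hits: Some(1000), _state: PhantomData };
    let _checked: Settings<Checked> = unchecked.check();
}
```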

View File

@@ -1,13 +1,13 @@
use std::fmt;
use std::marker::PhantomData;
use std::ops::ControlFlow;
use std::str::FromStr;
use deserr::{DeserializeError, Deserr, MergeWithError, ValueKind};
use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
use serde::de::Visitor;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use crate::deserr::query_params::FromQueryParameter;
use crate::error::unwrap_any;
/// A type that tries to match either a star (*) or
/// any other thing that implements `FromStr`.
@@ -111,7 +111,7 @@ where
}
}
impl<T, E> Deserr<E> for StarOr<T>
impl<T, E> DeserializeFromValue<E> for StarOr<T>
where
T: FromStr,
E: DeserializeError + MergeWithError<T::Err>,
@@ -127,11 +127,11 @@ where
} else {
match T::from_str(&v) {
Ok(parsed) => Ok(StarOr::Other(parsed)),
Err(e) => Err(deserr::take_cf_content(E::merge(None, e, location))),
Err(e) => Err(unwrap_any(E::merge(None, e, location))),
}
}
}
_ => Err(deserr::take_cf_content(E::error::<V>(
_ => Err(unwrap_any(E::error::<V>(
None,
deserr::ErrorKind::IncorrectValueKind {
actual: value,
@@ -191,7 +191,7 @@ where
}
}
impl<T, E> Deserr<E> for OptionStarOr<T>
impl<T, E> DeserializeFromValue<E> for OptionStarOr<T>
where
E: DeserializeError + MergeWithError<T::Err>,
T: FromQueryParameter,
@@ -205,10 +205,10 @@ where
"*" => Ok(OptionStarOr::Star),
s => match T::from_query_param(s) {
Ok(x) => Ok(OptionStarOr::Other(x)),
Err(e) => Err(deserr::take_cf_content(E::merge(None, e, location))),
Err(e) => Err(unwrap_any(E::merge(None, e, location))),
},
},
_ => Err(deserr::take_cf_content(E::error::<V>(
_ => Err(unwrap_any(E::error::<V>(
None,
deserr::ErrorKind::IncorrectValueKind {
actual: value,
@@ -271,7 +271,7 @@ impl<T> OptionStarOrList<T> {
}
}
impl<T, E> Deserr<E> for OptionStarOrList<T>
impl<T, E> DeserializeFromValue<E> for OptionStarOrList<T>
where
E: DeserializeError + MergeWithError<T::Err>,
T: FromQueryParameter,
@@ -299,10 +299,7 @@ where
Err(e) => {
let location =
if len_cs > 1 { location.push_index(i) } else { location };
error = match E::merge(error, e, location) {
ControlFlow::Continue(e) => Some(e),
ControlFlow::Break(e) => return Err(e),
};
error = Some(E::merge(error, e, location)?);
}
}
}
@@ -317,7 +314,7 @@ where
Ok(OptionStarOrList::List(els))
}
}
_ => Err(deserr::take_cf_content(E::error::<V>(
_ => Err(unwrap_any(E::error::<V>(
None,
deserr::ErrorKind::IncorrectValueKind {
actual: value,
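
A condensed sketch of the parsing rule the `StarOr` impls above apply: the literal string `*` maps to the `Star` variant, anything else goes through `FromStr`.

```rust
use std::str::FromStr;

#[derive(Debug, PartialEq)]
enum StarOr<T> {
    Star,
    Other(T),
}

fn parse_star_or<T: FromStr>(s: &str) -> Result<StarOr<T>, T::Err> {
    if s == "*" {
        Ok(StarOr::Star)
    } else {
        s.parse().map(StarOr::Other)
    }
}

fn main() {
    assert_eq!(parse_star_or::<u32>("*"), Ok(StarOr::Star));
    assert_eq!(parse_star_or::<u32>("42"), Ok(StarOr::Other(42)));
    assert!(parse_star_or::<u32>("not-a-number").is_err());
}
```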

View File

@@ -1,16 +1,10 @@
[package]
authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
description = "Meilisearch HTTP server"
edition = "2021"
license = "MIT"
name = "meilisearch"
publish = false
version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true
default-run = "meilisearch"
version = "1.0.0"
[dependencies]
actix-cors = "0.6.3"
@@ -25,7 +19,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", "
bytes = "1.2.1"
clap = { version = "4.0.9", features = ["derive", "env"] }
crossbeam-channel = "0.5.6"
deserr = "0.4.1"
deserr = "0.3.0"
dump = { path = "../dump" }
either = "1.8.0"
env_logger = "0.9.1"
@@ -96,7 +90,7 @@ yaup = "0.2.1"
[build-dependencies]
anyhow = { version = "1.0.65", optional = true }
cargo_toml = { version = "0.14.0", optional = true }
cargo_toml = { version = "0.13.0", optional = true }
hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.11.12", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
sha-1 = { version = "0.10.0", optional = true }

View File

@@ -9,7 +9,7 @@ use actix_web::HttpRequest;
use byte_unit::Byte;
use http::header::CONTENT_TYPE;
use index_scheduler::IndexScheduler;
use meilisearch_auth::{AuthController, AuthFilter};
use meilisearch_auth::{AuthController, SearchRules};
use meilisearch_types::InstanceUid;
use once_cell::sync::Lazy;
use regex::Regex;
@@ -407,7 +407,7 @@ impl Segment {
auth_controller: AuthController,
) {
if let Ok(stats) =
create_all_stats(index_scheduler.into(), auth_controller, &AuthFilter::default())
create_all_stats(index_scheduler.into(), auth_controller, &SearchRules::default())
{
// Replace the version number with the prototype name if any.
let version = if let Some(prototype) = crate::prototype_name() {
@@ -435,7 +435,7 @@ impl Segment {
let post_search = std::mem::take(&mut self.post_search_aggregator)
.into_event(&self.user, "Documents Searched POST");
let post_multi_search = std::mem::take(&mut self.post_multi_search_aggregator)
.into_event(&self.user, "Documents Searched by Multi-Search POST");
.into_event(&self.user, "Documents Searched By Array of Queries POST");
let add_documents = std::mem::take(&mut self.add_documents_aggregator)
.into_event(&self.user, "Documents Added");
let delete_documents = std::mem::take(&mut self.delete_documents_aggregator)

View File

@@ -136,13 +136,6 @@ pub mod policies {
use crate::extractors::authentication::Policy;
enum TenantTokenOutcome {
NotATenantToken,
Invalid,
Expired,
Valid(Uuid, SearchRules),
}
fn tenant_token_validation() -> Validation {
let mut validation = Validation::default();
validation.validate_exp = false;
@@ -171,42 +164,29 @@ pub mod policies {
pub struct ActionPolicy<const A: u8>;
impl<const A: u8> Policy for ActionPolicy<A> {
/// Attempts to grant authentication from a bearer token (that can be a tenant token or an API key), the requested Action,
/// and a list of requested indexes.
///
/// If the bearer token is not allowed for the specified indexes and action, returns `None`.
/// Otherwise, returns an object containing the generated permissions: the search filters to add to a search, and the list of allowed indexes
/// (that may contain more indexes than requested).
fn authenticate(
auth: AuthController,
token: &str,
index: Option<&str>,
) -> Option<AuthFilter> {
// authenticate if token is the master key.
// Without a master key, all routes are accessible except the key-related routes.
// master key can only have access to keys routes.
// if master key is None only keys routes are inaccessible.
if auth.get_master_key().map_or_else(|| !is_keys_action(A), |mk| mk == token) {
return Some(AuthFilter::default());
}
let (key_uuid, search_rules) =
match ActionPolicy::<A>::authenticate_tenant_token(&auth, token) {
TenantTokenOutcome::Valid(key_uuid, search_rules) => {
(key_uuid, Some(search_rules))
}
TenantTokenOutcome::Expired => return None,
TenantTokenOutcome::Invalid => return None,
TenantTokenOutcome::NotATenantToken => {
(auth.get_optional_uid_from_encoded_key(token.as_bytes()).ok()??, None)
}
};
// check that the indexes are allowed
let action = Action::from_repr(A)?;
let auth_filter = auth.get_key_filters(key_uuid, search_rules).ok()?;
if auth.is_key_authorized(key_uuid, action, index).unwrap_or(false)
&& index.map(|index| auth_filter.is_index_authorized(index)).unwrap_or(true)
// Tenant token
if let Some(filters) = ActionPolicy::<A>::authenticate_tenant_token(&auth, token, index)
{
return Some(auth_filter);
return Some(filters);
} else if let Some(action) = Action::from_repr(A) {
// API key
if let Ok(Some(uid)) = auth.get_optional_uid_from_encoded_key(token.as_bytes()) {
if let Ok(true) = auth.is_key_authorized(uid, action, index) {
return auth.get_key_filters(uid, None).ok();
}
}
}
None
@@ -214,43 +194,46 @@ pub mod policies {
}
impl<const A: u8> ActionPolicy<A> {
fn authenticate_tenant_token(auth: &AuthController, token: &str) -> TenantTokenOutcome {
fn authenticate_tenant_token(
auth: &AuthController,
token: &str,
index: Option<&str>,
) -> Option<AuthFilter> {
// Only search action can be accessed by a tenant token.
if A != actions::SEARCH {
return TenantTokenOutcome::NotATenantToken;
return None;
}
let uid = if let Some(uid) = extract_key_id(token) {
uid
} else {
return TenantTokenOutcome::NotATenantToken;
};
let uid = extract_key_id(token)?;
// check if parent key is authorized to do the action.
if auth.is_key_authorized(uid, Action::Search, index).ok()? {
// Check if tenant token is valid.
let key = auth.generate_key(uid)?;
let data = decode::<Claims>(
token,
&DecodingKey::from_secret(key.as_bytes()),
&tenant_token_validation(),
)
.ok()?;
// Check if tenant token is valid.
let key = if let Some(key) = auth.generate_key(uid) {
key
} else {
return TenantTokenOutcome::Invalid;
};
let data = if let Ok(data) = decode::<Claims>(
token,
&DecodingKey::from_secret(key.as_bytes()),
&tenant_token_validation(),
) {
data
} else {
return TenantTokenOutcome::Invalid;
};
// Check if token is expired.
if let Some(exp) = data.claims.exp {
if OffsetDateTime::now_utc().unix_timestamp() > exp {
return TenantTokenOutcome::Expired;
// Check index access if an index restriction is provided.
if let Some(index) = index {
if !data.claims.search_rules.is_index_authorized(index) {
return None;
}
}
// Check if token is expired.
if let Some(exp) = data.claims.exp {
if OffsetDateTime::now_utc().unix_timestamp() > exp {
return None;
}
}
return auth.get_key_filters(uid, Some(data.claims.search_rules)).ok();
}
TenantTokenOutcome::Valid(uid, data.claims.search_rules)
None
}
}
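
A condensed, hypothetical sketch of the decision ladder the `TenantTokenOutcome`-based version above encodes (the real `Valid` variant carries the key uuid and `SearchRules`):

```rust
#[derive(Debug, PartialEq)]
enum TenantTokenOutcome {
    NotATenantToken,
    Invalid,
    Expired,
    Valid,
}

// Inputs are pre-computed flags and claims to keep the sketch self-contained.
fn classify(is_search_action: bool, signature_ok: bool, exp: Option<i64>, now: i64) -> TenantTokenOutcome {
    // Only the search action can be carried by a tenant token.
    if !is_search_action {
        return TenantTokenOutcome::NotATenantToken;
    }
    // A token whose signature does not verify against the parent key is rejected.
    if !signature_ok {
        return TenantTokenOutcome::Invalid;
    }
    // An `exp` claim in the past invalidates the token; no claim means no expiry.
    match exp {
        Some(exp) if now > exp => TenantTokenOutcome::Expired,
        _ => TenantTokenOutcome::Valid,
    }
}

fn main() {
    assert_eq!(classify(false, true, None, 0), TenantTokenOutcome::NotATenantToken);
    assert_eq!(classify(true, false, None, 0), TenantTokenOutcome::Invalid);
    assert_eq!(classify(true, true, Some(10), 20), TenantTokenOutcome::Expired);
    assert_eq!(classify(true, true, Some(30), 20), TenantTokenOutcome::Valid);
}
```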

View File

@@ -0,0 +1,78 @@
use std::fmt::Debug;
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};
use actix_web::dev::Payload;
use actix_web::web::Json;
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use futures::ready;
use meilisearch_types::error::{ErrorCode, ResponseError};
/// Extractor for typed data from Json request payloads
/// deserialised by deserr.
///
/// # Extractor
/// To extract typed data from a request body, the inner type `T` must implement the
/// [`deserr::DeserializeFromValue<E>`] trait. The inner type `E` must implement the
/// [`ErrorCode`](meilisearch_error::ErrorCode) trait.
#[derive(Debug)]
pub struct ValidatedJson<T, E>(pub T, PhantomData<*const E>);
impl<T, E> ValidatedJson<T, E> {
pub fn new(data: T) -> Self {
ValidatedJson(data, PhantomData)
}
pub fn into_inner(self) -> T {
self.0
}
}
impl<T, E> FromRequest for ValidatedJson<T, E>
where
E: DeserializeError + ErrorCode + std::error::Error + 'static,
T: DeserializeFromValue<E>,
{
type Error = actix_web::Error;
type Future = ValidatedJsonExtractFut<T, E>;
#[inline]
fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
ValidatedJsonExtractFut {
fut: Json::<serde_json::Value>::from_request(req, payload),
_phantom: PhantomData,
}
}
}
pub struct ValidatedJsonExtractFut<T, E> {
fut: <Json<serde_json::Value> as FromRequest>::Future,
_phantom: PhantomData<*const (T, E)>,
}
impl<T, E> Future for ValidatedJsonExtractFut<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
type Output = Result<ValidatedJson<T, E>, actix_web::Error>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let ValidatedJsonExtractFut { fut, .. } = self.get_mut();
let fut = Pin::new(fut);
let res = ready!(fut.poll(cx));
let res = match res {
Err(err) => Err(err),
Ok(data) => match deserr::deserialize::<_, _, E>(data.into_inner()) {
Ok(data) => Ok(ValidatedJson::new(data)),
Err(e) => Err(ResponseError::from(e).into()),
},
};
Poll::Ready(res)
}
}
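
Outside of actix-web, the two-stage flow this extractor performs can be sketched in a few lines: parse the body into a `serde_json::Value`, then run type-directed validation that yields a domain error code. `CreateThing` and its error string are hypothetical stand-ins.

```rust
use serde_json::Value;

struct CreateThing {
    name: String,
}

// Stand-in for the deserr step: turn a generic JSON value into a typed struct,
// reporting a code-like error on failure.
fn validate(value: Value) -> Result<CreateThing, String> {
    let name = value
        .get("name")
        .and_then(Value::as_str)
        .ok_or_else(|| "missing_thing_name".to_string())?;
    Ok(CreateThing { name: name.to_string() })
}

fn main() {
    let body: Value = serde_json::from_str(r#"{ "name": "demo" }"#).unwrap();
    assert_eq!(validate(body).unwrap().name, "demo");
    assert!(validate(serde_json::json!({})).is_err());
}
```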

View File

@@ -1,4 +1,6 @@
pub mod payload;
#[macro_use]
pub mod authentication;
pub mod json;
pub mod query_parameters;
pub mod sequential_extractor;

View File

@@ -0,0 +1,70 @@
//! A module to parse query parameters with deserr
use std::marker::PhantomData;
use std::{fmt, ops};
use actix_http::Payload;
use actix_utils::future::{err, ok, Ready};
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct QueryParameter<T, E>(pub T, PhantomData<*const E>);
impl<T, E> QueryParameter<T, E> {
/// Unwrap into inner `T` value.
pub fn into_inner(self) -> T {
self.0
}
}
impl<T, E> QueryParameter<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
pub fn from_query(query_str: &str) -> Result<Self, actix_web::Error> {
let value = serde_urlencoded::from_str::<serde_json::Value>(query_str)
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
match deserr::deserialize::<_, _, E>(value) {
Ok(data) => Ok(QueryParameter(data, PhantomData)),
Err(e) => Err(ResponseError::from(e).into()),
}
}
}
impl<T, E> ops::Deref for QueryParameter<T, E> {
type Target = T;
fn deref(&self) -> &T {
&self.0
}
}
impl<T, E> ops::DerefMut for QueryParameter<T, E> {
fn deref_mut(&mut self) -> &mut T {
&mut self.0
}
}
impl<T: fmt::Display, E> fmt::Display for QueryParameter<T, E> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl<T, E> FromRequest for QueryParameter<T, E>
where
T: DeserializeFromValue<E>,
E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
type Error = actix_web::Error;
type Future = Ready<Result<Self, actix_web::Error>>;
#[inline]
fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
QueryParameter::from_query(req.query_string()).map(ok).unwrap_or_else(err)
}
}
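
The first half of `from_query` above is easy to check in isolation: `serde_urlencoded` decodes the raw query string into a `serde_json::Value` whose leaves are all strings, which is why deserr's query-param types (`Param<T>`, `OptionStarOr<T>`) parse from strings afterwards.

```rust
// Requires the `serde_urlencoded` and `serde_json` crates.
fn main() {
    let query_str = "limit=20&offset=0";
    let value: serde_json::Value =
        serde_urlencoded::from_str(query_str).expect("valid query string");
    // Every decoded value is a string at this stage; typed parsing happens later.
    assert_eq!(value["limit"], "20");
    assert_eq!(value["offset"], "0");
}
```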

View File

@@ -1,8 +1,7 @@
use std::str;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::actix_web::{AwebJson, AwebQueryParameter};
use deserr::Deserr;
use deserr::DeserializeFromValue;
use meilisearch_auth::error::AuthControllerError;
use meilisearch_auth::AuthController;
use meilisearch_types::deserr::query_params::Param;
@@ -17,6 +16,8 @@ use uuid::Uuid;
use super::PAGINATION_DEFAULT_LIMIT;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::Pagination;
@@ -36,7 +37,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
pub async fn create_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
body: AwebJson<CreateApiKey, DeserrJsonError>,
body: ValidatedJson<CreateApiKey, DeserrJsonError>,
_req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
let v = body.into_inner();
@@ -50,7 +51,7 @@ pub async fn create_api_key(
Ok(HttpResponse::Created().json(res))
}
#[derive(Deserr, Debug, Clone, Copy)]
#[derive(DeserializeFromValue, Debug, Clone, Copy)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct ListApiKeys {
#[deserr(default, error = DeserrQueryParamError<InvalidApiKeyOffset>)]
@@ -58,7 +59,6 @@ pub struct ListApiKeys {
#[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidApiKeyLimit>)]
pub limit: Param<usize>,
}
impl ListApiKeys {
fn as_pagination(self) -> Pagination {
Pagination { offset: self.offset.0, limit: self.limit.0 }
@@ -67,7 +67,7 @@ impl ListApiKeys {
pub async fn list_api_keys(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
list_api_keys: AwebQueryParameter<ListApiKeys, DeserrQueryParamError>,
list_api_keys: QueryParameter<ListApiKeys, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
let paginate = list_api_keys.into_inner().as_pagination();
let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
@@ -104,7 +104,7 @@ pub async fn get_api_key(
pub async fn patch_api_key(
auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
body: AwebJson<PatchApiKey, DeserrJsonError>,
body: ValidatedJson<PatchApiKey, DeserrJsonError>,
path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
let key = path.into_inner().key;

View File

@@ -4,8 +4,7 @@ use actix_web::http::header::CONTENT_TYPE;
use actix_web::web::Data;
use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
use bstr::ByteSlice;
use deserr::actix_web::AwebQueryParameter;
use deserr::Deserr;
use deserr::DeserializeFromValue;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
use log::debug;
@@ -34,6 +33,7 @@ use crate::error::PayloadError::ReceivePayload;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::payload::Payload;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
@@ -80,7 +80,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
#[derive(Debug, Deserr)]
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct GetDocument {
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
@@ -90,7 +90,7 @@ pub struct GetDocument {
pub async fn get_document(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
document_param: web::Path<DocumentParam>,
params: AwebQueryParameter<GetDocument, DeserrQueryParamError>,
params: QueryParameter<GetDocument, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
let DocumentParam { index_uid, document_id } = document_param.into_inner();
let index_uid = IndexUid::try_from(index_uid)?;
@@ -125,7 +125,7 @@ pub async fn delete_document(
Ok(HttpResponse::Accepted().json(task))
}
#[derive(Debug, Deserr)]
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct BrowseQuery {
#[deserr(default, error = DeserrQueryParamError<InvalidDocumentOffset>)]
@@ -139,7 +139,7 @@ pub struct BrowseQuery {
pub async fn get_all_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: AwebQueryParameter<BrowseQuery, DeserrQueryParamError>,
params: QueryParameter<BrowseQuery, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
debug!("called with params: {:?}", params);
@@ -155,7 +155,7 @@ pub async fn get_all_documents(
Ok(HttpResponse::Ok().json(ret))
}
#[derive(Deserialize, Debug, Deserr)]
#[derive(Deserialize, Debug, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct UpdateDocumentsQuery {
#[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
@@ -165,7 +165,7 @@ pub struct UpdateDocumentsQuery {
pub async fn add_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: AwebQueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
params: QueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
body: Payload,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
@@ -177,7 +177,7 @@ pub async fn add_documents(
analytics.add_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
let allow_index_creation = index_scheduler.filters().allow_index_creation();
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let task = document_addition(
extract_mime_type(&req)?,
index_scheduler,
@@ -195,7 +195,7 @@ pub async fn add_documents(
pub async fn update_documents(
index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: AwebQueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
params: QueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
body: Payload,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
@@ -206,7 +206,7 @@ pub async fn update_documents(
analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);
let allow_index_creation = index_scheduler.filters().allow_index_creation();
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let task = document_addition(
extract_mime_type(&req)?,
index_scheduler,

View File

@@ -2,14 +2,14 @@ use std::convert::Infallible;
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::actix_web::{AwebJson, AwebQueryParameter};
use deserr::{DeserializeError, Deserr, ValuePointerRef};
use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::error_messages::immutable_field_error;
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::{immutable_field_error, DeserrJsonError, DeserrQueryParamError};
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
use meilisearch_types::error::deserr_codes::*;
use meilisearch_types::error::{Code, ResponseError};
use meilisearch_types::error::{unwrap_any, Code, ResponseError};
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::milli::{self, FieldDistribution, Index};
use meilisearch_types::tasks::KindWithContent;
@@ -21,6 +21,8 @@ use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
pub mod documents;
@@ -71,7 +73,7 @@ impl IndexView {
}
}
#[derive(Deserr, Debug, Clone, Copy)]
#[derive(DeserializeFromValue, Debug, Clone, Copy)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct ListIndexes {
#[deserr(default, error = DeserrQueryParamError<InvalidIndexOffset>)]
@@ -87,13 +89,13 @@ impl ListIndexes {
pub async fn list_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
paginate: AwebQueryParameter<ListIndexes, DeserrQueryParamError>,
paginate: QueryParameter<ListIndexes, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
let filters = index_scheduler.filters();
let search_rules = &index_scheduler.filters().search_rules;
let indexes: Vec<_> = index_scheduler.indexes()?;
let indexes = indexes
.into_iter()
.filter(|(name, _)| filters.is_index_authorized(name))
.filter(|(name, _)| search_rules.is_index_authorized(name))
.map(|(name, index)| IndexView::new(name, &index))
.collect::<Result<Vec<_>, _>>()?;
@@ -103,7 +105,7 @@ pub async fn list_indexes(
Ok(HttpResponse::Ok().json(ret))
}
#[derive(Deserr, Debug)]
#[derive(DeserializeFromValue, Debug)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct IndexCreateRequest {
#[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)]
@@ -114,14 +116,13 @@ pub struct IndexCreateRequest {
pub async fn create_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
body: AwebJson<IndexCreateRequest, DeserrJsonError>,
body: ValidatedJson<IndexCreateRequest, DeserrJsonError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let IndexCreateRequest { primary_key, uid } = body.into_inner();
// FIXME: allow_index_creation?
let allow_index_creation = index_scheduler.filters().is_index_authorized(&uid);
let allow_index_creation = index_scheduler.filters().search_rules.is_index_authorized(&uid);
if allow_index_creation {
analytics.publish(
"Index Created".to_string(),
@@ -148,7 +149,7 @@ fn deny_immutable_fields_index(
"uid" => immutable_field_error(field, accepted, Code::ImmutableIndexUid),
"createdAt" => immutable_field_error(field, accepted, Code::ImmutableIndexCreatedAt),
"updatedAt" => immutable_field_error(field, accepted, Code::ImmutableIndexUpdatedAt),
_ => deserr::take_cf_content(DeserrJsonError::<BadRequest>::error::<Infallible>(
_ => unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
None,
deserr::ErrorKind::UnknownKey { key: field, accepted },
location,
@@ -156,7 +157,7 @@ fn deny_immutable_fields_index(
}
}
#[derive(Deserr, Debug)]
#[derive(DeserializeFromValue, Debug)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)]
pub struct UpdateIndexRequest {
#[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
@@ -180,7 +181,7 @@ pub async fn get_index(
pub async fn update_index(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
body: AwebJson<UpdateIndexRequest, DeserrJsonError>,
body: ValidatedJson<UpdateIndexRequest, DeserrJsonError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {

View File

@@ -1,6 +1,5 @@
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::actix_web::{AwebJson, AwebQueryParameter};
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::query_params::Param;
@@ -14,6 +13,8 @@ use serde_json::Value;
use crate::analytics::{Analytics, SearchAggregator};
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::search::{
add_search_rules, perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH,
@@ -29,7 +30,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
);
}
#[derive(Debug, deserr::Deserr)]
#[derive(Debug, deserr::DeserializeFromValue)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQueryGet {
#[deserr(default, error = DeserrQueryParamError<InvalidSearchQ>)]
@@ -128,7 +129,7 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
pub async fn search_with_url_query(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: AwebQueryParameter<SearchQueryGet, DeserrQueryParamError>,
params: QueryParameter<SearchQueryGet, DeserrQueryParamError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -138,7 +139,9 @@ pub async fn search_with_url_query(
let mut query: SearchQuery = params.into_inner().into();
// Tenant token search_rules.
if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
if let Some(search_rules) =
index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
{
add_search_rules(&mut query, search_rules);
}
@@ -160,7 +163,7 @@ pub async fn search_with_url_query(
pub async fn search_with_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: AwebJson<SearchQuery, DeserrJsonError>,
params: ValidatedJson<SearchQuery, DeserrJsonError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -170,7 +173,9 @@ pub async fn search_with_post(
debug!("search called with params: {:?}", query);
// Tenant token search_rules.
if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
if let Some(search_rules) =
index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
{
add_search_rules(&mut query, search_rules);
}

View File

@@ -1,6 +1,5 @@
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::actix_web::AwebJson;
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::DeserrJsonError;
@@ -13,6 +12,7 @@ use serde_json::json;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::routes::SummarizedTaskView;
#[macro_export]
@@ -45,7 +45,7 @@ macro_rules! make_setting_route {
let new_settings = Settings { $attr: Setting::Reset.into(), ..Default::default() };
let allow_index_creation = index_scheduler.filters().allow_index_creation();
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let task = KindWithContent::SettingsUpdate {
index_uid: index_uid.to_string(),
@@ -68,7 +68,7 @@ macro_rules! make_setting_route {
Data<IndexScheduler>,
>,
index_uid: actix_web::web::Path<String>,
body: deserr::actix_web::AwebJson<Option<$type>, $err_ty>,
body: $crate::routes::indexes::ValidatedJson<Option<$type>, $err_ty>,
req: HttpRequest,
$analytics_var: web::Data<dyn Analytics>,
) -> std::result::Result<HttpResponse, ResponseError> {
@@ -86,7 +86,7 @@ macro_rules! make_setting_route {
..Default::default()
};
let allow_index_creation = index_scheduler.filters().allow_index_creation();
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let task = KindWithContent::SettingsUpdate {
index_uid: index_uid.to_string(),
@@ -468,7 +468,7 @@ generate_configure!(
pub async fn update_all(
index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
body: AwebJson<Settings<Unchecked>, DeserrJsonError>,
body: ValidatedJson<Settings<Unchecked>, DeserrJsonError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -560,7 +560,7 @@ pub async fn update_all(
Some(&req),
);
let allow_index_creation = index_scheduler.filters().allow_index_creation();
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
let task = KindWithContent::SettingsUpdate {
index_uid,
@@ -596,7 +596,7 @@ pub async fn delete_all(
let new_settings = Settings::cleared().into_unchecked();
let allow_index_creation = index_scheduler.filters().allow_index_creation();
let allow_index_creation = index_scheduler.filters().allow_index_creation;
let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
let task = KindWithContent::SettingsUpdate {
index_uid,

View File

@@ -17,8 +17,6 @@ use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
const PAGINATION_DEFAULT_LIMIT: usize = 20;
mod api_key;
mod dump;
pub mod indexes;
@@ -33,10 +31,12 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::resource("/stats").route(web::get().to(get_stats)))
.service(web::resource("/version").route(web::get().to(get_version)))
.service(web::scope("/indexes").configure(indexes::configure))
.service(web::scope("/multi-search").configure(multi_search::configure))
.service(web::scope("/search").configure(multi_search::configure))
.service(web::scope("/swap-indexes").configure(swap_indexes::configure));
}
const PAGINATION_DEFAULT_LIMIT: usize = 20;
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SummarizedTaskView {
@@ -60,7 +60,6 @@ impl From<Task> for SummarizedTaskView {
}
}
}
pub struct Pagination {
pub offset: usize,
pub limit: usize,
@@ -238,9 +237,10 @@ async fn get_stats(
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": false }), Some(&req));
let filters = index_scheduler.filters();
let search_rules = &index_scheduler.filters().search_rules;
let stats = create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), filters)?;
let stats =
create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), search_rules)?;
debug!("returns: {:?}", stats);
Ok(HttpResponse::Ok().json(stats))
@@ -249,19 +249,19 @@ async fn get_stats(
pub fn create_all_stats(
index_scheduler: Data<IndexScheduler>,
auth_controller: AuthController,
filters: &meilisearch_auth::AuthFilter,
search_rules: &meilisearch_auth::SearchRules,
) -> Result<Stats, ResponseError> {
let mut last_task: Option<OffsetDateTime> = None;
let mut indexes = BTreeMap::new();
let mut database_size = 0;
let processing_task = index_scheduler.get_tasks_from_authorized_indexes(
Query { statuses: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() },
filters,
search_rules.authorized_indexes(),
)?;
// accumulate the size of each indexes
let processing_index = processing_task.first().and_then(|task| task.index_uid());
for (name, index) in index_scheduler.indexes()? {
if !filters.is_index_authorized(&name) {
if !search_rules.is_index_authorized(&name) {
continue;
}

View File

@@ -1,6 +1,5 @@
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use deserr::actix_web::AwebJson;
use index_scheduler::IndexScheduler;
use log::debug;
use meilisearch_types::deserr::DeserrJsonError;
@@ -10,14 +9,15 @@ use serde::Serialize;
use crate::analytics::{Analytics, MultiSearchAggregator};
use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::search::{
add_search_rules, perform_search, SearchQueryWithIndex, SearchResultWithIndex,
};
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(multi_search_with_post))));
cfg.service(web::resource("").route(web::post().to(SeqHandler(search_with_post))));
}
#[derive(Serialize)]
@@ -25,15 +25,15 @@ struct SearchResults {
results: Vec<SearchResultWithIndex>,
}
#[derive(Debug, deserr::Deserr)]
#[derive(Debug, deserr::DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQueries {
queries: Vec<SearchQueryWithIndex>,
}
pub async fn multi_search_with_post(
pub async fn search_with_post(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
params: AwebJson<SearchQueries, DeserrJsonError>,
params: ValidatedJson<SearchQueries, DeserrJsonError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -49,13 +49,9 @@ pub async fn multi_search_with_post(
{
debug!("search called with params: {:?}", query);
// Check index from API key
if !index_scheduler.filters().is_index_authorized(&index_uid) {
return Err(AuthenticationError::InvalidToken.into());
}
// Apply search rules from tenant token
// Tenant token search_rules.
if let Some(search_rules) =
index_scheduler.filters().get_index_search_rules(&index_uid)
index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
{
add_search_rules(&mut query, search_rules);
}
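
For illustration, a hypothetical request body matching the `SearchQueries { queries }` shape above; with `rename_all = camelCase`, each entry carries its own `indexUid`.

```rust
// Builds the wrapped payload shape accepted by the multi-search route.
fn main() {
    let body = serde_json::json!({
        "queries": [
            { "indexUid": "movies", "q": "batman" },
            { "indexUid": "comics", "q": "batman" }
        ]
    });
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}
```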

View File

@@ -1,7 +1,6 @@
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::actix_web::AwebJson;
use deserr::Deserr;
use deserr::DeserializeFromValue;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::deserr_codes::InvalidSwapIndexes;
@@ -15,13 +14,14 @@ use crate::analytics::Analytics;
use crate::error::MeilisearchHttpError;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::{AuthenticationError, GuardedData};
use crate::extractors::json::ValidatedJson;
use crate::extractors::sequential_extractor::SeqHandler;
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(swap_indexes))));
}
#[derive(Deserr, Debug, Clone, PartialEq, Eq)]
#[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct SwapIndexesPayload {
#[deserr(error = DeserrJsonError<InvalidSwapIndexes>, missing_field_error = DeserrJsonError::missing_swap_indexes)]
@@ -30,7 +30,7 @@ pub struct SwapIndexesPayload {
pub async fn swap_indexes(
index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
params: AwebJson<Vec<SwapIndexesPayload>, DeserrJsonError>,
params: ValidatedJson<Vec<SwapIndexesPayload>, DeserrJsonError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -42,7 +42,7 @@ pub async fn swap_indexes(
}),
Some(&req),
);
let filters = index_scheduler.filters();
let search_rules = &index_scheduler.filters().search_rules;
let mut swaps = vec![];
for SwapIndexesPayload { indexes } in params.into_iter() {
@@ -53,7 +53,7 @@ pub async fn swap_indexes(
return Err(MeilisearchHttpError::SwapIndexPayloadWrongLength(indexes).into());
}
};
if !filters.is_index_authorized(lhs) || !filters.is_index_authorized(rhs) {
if !search_rules.is_index_authorized(lhs) || !search_rules.is_index_authorized(rhs) {
return Err(AuthenticationError::InvalidToken.into());
}
swaps.push(IndexSwap { indexes: (lhs.to_string(), rhs.to_string()) });

View File

@@ -1,7 +1,6 @@
use actix_web::web::Data;
use actix_web::{web, HttpRequest, HttpResponse};
use deserr::actix_web::AwebQueryParameter;
use deserr::Deserr;
use deserr::DeserializeFromValue;
use index_scheduler::{IndexScheduler, Query, TaskId};
use meilisearch_types::deserr::query_params::Param;
use meilisearch_types::deserr::DeserrQueryParamError;
@@ -24,6 +23,7 @@ use super::SummarizedTaskView;
use crate::analytics::Analytics;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
const DEFAULT_LIMIT: u32 = 20;
@@ -162,7 +162,7 @@ impl From<Details> for DetailsView {
}
}
#[derive(Debug, Deserr)]
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct TasksFilterQuery {
#[deserr(default = Param(DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidTaskLimit>)]
@@ -181,20 +181,19 @@ pub struct TasksFilterQuery {
#[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
pub index_uids: OptionStarOrList<IndexUid>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
pub after_started_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
pub before_started_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
pub after_finished_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
pub before_finished_at: OptionStarOr<OffsetDateTime>,
}
impl TasksFilterQuery {
fn into_query(self) -> Query {
Query {
@@ -236,7 +235,7 @@ impl TaskDeletionOrCancelationQuery {
}
}
#[derive(Debug, Deserr)]
#[derive(Debug, DeserializeFromValue)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct TaskDeletionOrCancelationQuery {
#[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
@@ -250,20 +249,19 @@ pub struct TaskDeletionOrCancelationQuery {
#[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
pub index_uids: OptionStarOrList<IndexUid>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
pub after_started_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
pub before_started_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
pub after_finished_at: OptionStarOr<OffsetDateTime>,
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
pub before_finished_at: OptionStarOr<OffsetDateTime>,
}
impl TaskDeletionOrCancelationQuery {
fn into_query(self) -> Query {
Query {
@@ -286,7 +284,7 @@ impl TaskDeletionOrCancelationQuery {
async fn cancel_tasks(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -319,7 +317,7 @@ async fn cancel_tasks(
let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
&index_scheduler.read_txn()?,
&query,
index_scheduler.filters(),
&index_scheduler.filters().search_rules.authorized_indexes(),
)?;
let task_cancelation =
KindWithContent::TaskCancelation { query: format!("?{}", req.query_string()), tasks };
@@ -332,7 +330,7 @@ async fn cancel_tasks(
async fn delete_tasks(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_DELETE }>, Data<IndexScheduler>>,
params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -364,7 +362,7 @@ async fn delete_tasks(
let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
&index_scheduler.read_txn()?,
&query,
index_scheduler.filters(),
&index_scheduler.filters().search_rules.authorized_indexes(),
)?;
let task_deletion =
KindWithContent::TaskDeletion { query: format!("?{}", req.query_string()), tasks };
@@ -385,7 +383,7 @@ pub struct AllTasks {
async fn get_tasks(
index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
params: AwebQueryParameter<TasksFilterQuery, DeserrQueryParamError>,
params: QueryParameter<TasksFilterQuery, DeserrQueryParamError>,
req: HttpRequest,
analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
@@ -398,7 +396,10 @@ async fn get_tasks(
let query = params.into_query();
let mut tasks_results: Vec<TaskView> = index_scheduler
.get_tasks_from_authorized_indexes(query, index_scheduler.filters())?
.get_tasks_from_authorized_indexes(
query,
index_scheduler.filters().search_rules.authorized_indexes(),
)?
.into_iter()
.map(|t| TaskView::from_task(&t))
.collect();
@@ -436,8 +437,12 @@ async fn get_task(
let query = index_scheduler::Query { uids: Some(vec![task_uid]), ..Query::default() };
if let Some(task) =
index_scheduler.get_tasks_from_authorized_indexes(query, index_scheduler.filters())?.first()
if let Some(task) = index_scheduler
.get_tasks_from_authorized_indexes(
query,
index_scheduler.filters().search_rules.authorized_indexes(),
)?
.first()
{
let task_view = TaskView::from_task(task);
Ok(HttpResponse::Ok().json(task_view))
@@ -493,7 +498,7 @@ pub fn deserialize_date_before(
#[cfg(test)]
mod tests {
use deserr::Deserr;
use deserr::DeserializeFromValue;
use meili_snap::snapshot;
use meilisearch_types::deserr::DeserrQueryParamError;
use meilisearch_types::error::{Code, ResponseError};
@@ -502,7 +507,7 @@ mod tests {
fn deserr_query_params<T>(j: &str) -> Result<T, ResponseError>
where
T: Deserr<DeserrQueryParamError>,
T: DeserializeFromValue<DeserrQueryParamError>,
{
let value = serde_urlencoded::from_str::<serde_json::Value>(j)
.map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;

View File

@@ -3,7 +3,7 @@ use std::collections::{BTreeMap, BTreeSet, HashSet};
use std::str::FromStr;
use std::time::Instant;
use deserr::Deserr;
use deserr::DeserializeFromValue;
use either::Either;
use meilisearch_auth::IndexSearchRules;
use meilisearch_types::deserr::DeserrJsonError;
@@ -31,7 +31,7 @@ pub const DEFAULT_CROP_MARKER: fn() -> String = || "…".to_string();
pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();
#[derive(Debug, Clone, Default, PartialEq, Eq, Deserr)]
#[derive(Debug, Clone, Default, PartialEq, Eq, DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQuery {
#[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
@@ -80,7 +80,7 @@ impl SearchQuery {
// This struct contains the fields of `SearchQuery` inline.
// This is because neither deserr nor serde supports `flatten` when using `deny_unknown_fields`.
// The `From<SearchQueryWithIndex>` implementation ensures both structs remain up to date.
#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
#[derive(Debug, deserr::DeserializeFromValue)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct SearchQueryWithIndex {
#[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)]
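The comment above is worth unpacking: because `flatten` is unavailable under `deny_unknown_fields`, the shared fields are written out twice and a conversion keeps the two structs in lockstep. A minimal sketch with illustrative fields (not the real definitions):

struct SearchQuery {
    q: Option<String>,
    limit: usize,
}

// Same fields plus the index uid, repeated inline instead of flattened.
struct SearchQueryWithIndex {
    index_uid: String,
    q: Option<String>,
    limit: usize,
}

impl SearchQueryWithIndex {
    // Exhaustive destructuring means adding a field to one struct but not
    // the other becomes a compile error, which keeps them in sync.
    fn into_index_query(self) -> (String, SearchQuery) {
        let SearchQueryWithIndex { index_uid, q, limit } = self;
        (index_uid, SearchQuery { q, limit })
    }
}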
@@ -170,7 +170,7 @@ impl SearchQueryWithIndex {
}
}
#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(rename_all = camelCase)]
pub enum MatchingStrategy {
/// Remove query words from last to first

View File

@@ -377,7 +377,7 @@ async fn error_add_api_key_invalid_index_uids() {
meili_snap::snapshot!(code, @"400 Bad Request");
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
{
"message": "Invalid value at `.indexes[0]`: `invalid index # / \\name with spaces` is not a valid index uid pattern. Index uid patterns can be an integer or a string containing only alphanumeric characters, hyphens (-), underscores (_), and optionally end with a star (*).",
"message": "Invalid value at `.indexes[0]`: `invalid index # / \\name with spaces` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_api_key_indexes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"

View File

@@ -11,7 +11,6 @@ use crate::common::Server;
pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
Lazy::new(|| {
let mut authorizations = hashmap! {
("POST", "/multi-search") => hashset!{"search", "*"},
("POST", "/indexes/products/search") => hashset!{"search", "*"},
("GET", "/indexes/products/search") => hashset!{"search", "*"},
("POST", "/indexes/products/documents") => hashset!{"documents.add", "documents.*", "*"},
@@ -78,14 +77,12 @@ static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
})
});
const MASTER_KEY: &str = "MASTER_KEY";
#[actix_rt::test]
async fn error_access_expired_key() {
use std::{thread, time};
let mut server = Server::new_auth().await;
server.use_api_key(MASTER_KEY);
server.use_api_key("MASTER_KEY");
let content = json!({
"indexes": ["products"],
@@ -114,7 +111,7 @@ async fn error_access_expired_key() {
#[actix_rt::test]
async fn error_access_unauthorized_index() {
let mut server = Server::new_auth().await;
server.use_api_key(MASTER_KEY);
server.use_api_key("MASTER_KEY");
let content = json!({
"indexes": ["sales"],
@@ -147,7 +144,7 @@ async fn error_access_unauthorized_action() {
for ((method, route), action) in AUTHORIZATIONS.iter() {
// create a new API key letting only the needed action.
server.use_api_key(MASTER_KEY);
server.use_api_key("MASTER_KEY");
let content = json!({
"indexes": ["products"],
@@ -171,7 +168,7 @@ async fn error_access_unauthorized_action() {
#[actix_rt::test]
async fn access_authorized_master_key() {
let mut server = Server::new_auth().await;
server.use_api_key(MASTER_KEY);
server.use_api_key("MASTER_KEY");
// master key must have access to all routes.
for ((method, route), _) in AUTHORIZATIONS.iter() {
@@ -188,7 +185,7 @@ async fn access_authorized_restricted_index() {
for ((method, route), actions) in AUTHORIZATIONS.iter() {
for action in actions {
// create a new API key letting only the needed action.
server.use_api_key(MASTER_KEY);
server.use_api_key("MASTER_KEY");
let content = json!({
"indexes": ["products"],
@@ -225,7 +222,7 @@ async fn access_authorized_no_index_restriction() {
for ((method, route), actions) in AUTHORIZATIONS.iter() {
for action in actions {
// create a new API key letting only the needed action.
server.use_api_key(MASTER_KEY);
server.use_api_key("MASTER_KEY");
let content = json!({
"indexes": ["*"],
@@ -258,7 +255,7 @@ async fn access_authorized_no_index_restriction() {
#[actix_rt::test]
async fn access_authorized_stats_restricted_index() {
let mut server = Server::new_auth().await;
server.use_admin_key(MASTER_KEY).await;
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
@@ -298,7 +295,7 @@ async fn access_authorized_stats_restricted_index() {
#[actix_rt::test]
async fn access_authorized_stats_no_index_restriction() {
let mut server = Server::new_auth().await;
server.use_admin_key(MASTER_KEY).await;
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
@@ -338,7 +335,7 @@ async fn access_authorized_stats_no_index_restriction() {
#[actix_rt::test]
async fn list_authorized_indexes_restricted_index() {
let mut server = Server::new_auth().await;
server.use_admin_key(MASTER_KEY).await;
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
@@ -379,7 +376,7 @@ async fn list_authorized_indexes_restricted_index() {
#[actix_rt::test]
async fn list_authorized_indexes_no_index_restriction() {
let mut server = Server::new_auth().await;
server.use_admin_key(MASTER_KEY).await;
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
@@ -417,194 +414,10 @@ async fn list_authorized_indexes_no_index_restriction() {
assert!(response.iter().any(|index| index["uid"] == "test"));
}
#[actix_rt::test]
async fn access_authorized_index_patterns() {
let mut server = Server::new_auth().await;
server.use_admin_key(MASTER_KEY).await;
// create products_1 index
let index_1 = server.index("products_1");
let (response, code) = index_1.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create products index
let index_ = server.index("products");
let (response, code) = index_.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create key with all document access on indices matching the products_* pattern.
let content = json!({
"indexes": ["products_*"],
"actions": ["documents.*"],
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
// Register the key
let (response, code) = server.add_api_key(content).await;
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
let key = response["key"].as_str().unwrap();
server.use_api_key(key);
// refer to products_1 and products with modified api key.
let index_1 = server.index("products_1");
let index_ = server.index("products");
// try to create an index via the add documents route
let documents = json!([
{
"id": 1,
"content": "foo",
}
]);
// Adding document to products_1 index. Should succeed with 202
let (response, code) = index_1.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
// Adding a document to the products index. Should fail with 403 -- invalid_api_key
let (response, code) = index_.add_documents(documents, None).await;
assert_eq!(403, code, "{:?}", &response);
server.use_api_key(MASTER_KEY);
// refer to products_1 with modified api key.
let index_1 = server.index("products_1");
index_1.wait_task(task_id).await;
let (response, code) = index_1.get_task(task_id).await;
assert_eq!(200, code, "{:?}", &response);
assert_eq!(response["status"], "succeeded");
}
#[actix_rt::test]
async fn raise_error_non_authorized_index_patterns() {
let mut server = Server::new_auth().await;
server.use_admin_key(MASTER_KEY).await;
// create products_1 index
let product_1_index = server.index("products_1");
let (response, code) = product_1_index.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create products_2 index
let product_2_index = server.index("products_2");
let (response, code) = product_2_index.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create test index
let test_index = server.index("test");
let (response, code) = test_index.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// create key with all document access on indices matching the products_* pattern.
let content = json!({
"indexes": ["products_*"],
"actions": ["documents.*"],
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
// Register the key
let (response, code) = server.add_api_key(content).await;
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
let key = response["key"].as_str().unwrap();
server.use_api_key(key);
// refer to products_1 and products_2 with modified api key.
let product_1_index = server.index("products_1");
let product_2_index = server.index("products_2");
// refer to test index
let test_index = server.index("test");
// try to create an index via the add documents route
let documents = json!([
{
"id": 1,
"content": "foo",
}
]);
// Adding document to products_1 index. Should succeed with 202
let (response, code) = product_1_index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{:?}", &response);
let task1_id = response["taskUid"].as_u64().unwrap();
// Adding document to products_2 index. Should succeed with 202
let (response, code) = product_2_index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{:?}", &response);
let task2_id = response["taskUid"].as_u64().unwrap();
// Adding a document to the test index. Should fail with 403 -- invalid_api_key
let (response, code) = test_index.add_documents(documents, None).await;
assert_eq!(403, code, "{:?}", &response);
server.use_api_key(MASTER_KEY);
// refer to products_1 with modified api key.
let product_1_index = server.index("products_1");
// refer to products_2 with modified api key.
let product_2_index = server.index("products_2");
product_1_index.wait_task(task1_id).await;
product_2_index.wait_task(task2_id).await;
let (response, code) = product_1_index.get_task(task1_id).await;
assert_eq!(200, code, "{:?}", &response);
assert_eq!(response["status"], "succeeded");
let (response, code) = product_1_index.get_task(task2_id).await;
assert_eq!(200, code, "{:?}", &response);
assert_eq!(response["status"], "succeeded");
}
#[actix_rt::test]
async fn pattern_indexes() {
// Create server with master key
let mut server = Server::new_auth().await;
server.use_admin_key(MASTER_KEY).await;
// indexes.* permissions on the products_* index pattern
let content = json!({
"indexes": ["products_*"],
"actions": ["indexes.*"],
"expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
});
// Generate and use the api key
let (response, code) = server.add_api_key(content).await;
assert_eq!(201, code, "{:?}", &response);
let key = response["key"].as_str().expect("Key is not string");
server.use_api_key(key);
// Create Index products_1 using generated api key
let products_1 = server.index("products_1");
let (response, code) = products_1.create(Some("id")).await;
assert_eq!(202, code, "{:?}", &response);
// Fail to create products_* using generated api key
let products_1 = server.index("products_*");
let (response, code) = products_1.create(Some("id")).await;
assert_eq!(400, code, "{:?}", &response);
// Fail to create test_1 using generated api key
let products_1 = server.index("test_1");
let (response, code) = products_1.create(Some("id")).await;
assert_eq!(403, code, "{:?}", &response);
}
#[actix_rt::test]
async fn list_authorized_tasks_restricted_index() {
let mut server = Server::new_auth().await;
server.use_admin_key(MASTER_KEY).await;
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
@@ -633,6 +446,7 @@ async fn list_authorized_tasks_restricted_index() {
let (response, code) = server.service.get("/tasks").await;
assert_eq!(200, code, "{:?}", &response);
println!("{}", response);
let response = response["results"].as_array().unwrap();
// key should have access to the `products` index.
assert!(response.iter().any(|task| task["indexUid"] == "products"));
@@ -644,7 +458,7 @@ async fn list_authorized_tasks_restricted_index() {
#[actix_rt::test]
async fn list_authorized_tasks_no_index_restriction() {
let mut server = Server::new_auth().await;
server.use_admin_key(MASTER_KEY).await;
server.use_admin_key("MASTER_KEY").await;
// create index `test`
let index = server.index("test");
@@ -685,7 +499,7 @@ async fn list_authorized_tasks_no_index_restriction() {
#[actix_rt::test]
async fn error_creating_index_without_action() {
let mut server = Server::new_auth().await;
server.use_api_key(MASTER_KEY);
server.use_api_key("MASTER_KEY");
// create key with access to all indexes.
let content = json!({
@@ -773,7 +587,7 @@ async fn lazy_create_index() {
];
for content in contents {
server.use_api_key(MASTER_KEY);
server.use_api_key("MASTER_KEY");
let (response, code) = server.add_api_key(content).await;
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
@@ -829,114 +643,14 @@ async fn lazy_create_index() {
}
}
#[actix_rt::test]
async fn lazy_create_index_from_pattern() {
let mut server = Server::new_auth().await;
// create keys with access restricted to the `products_*` index pattern.
let contents = vec![
json!({
"indexes": ["products_*"],
"actions": ["*"],
"expiresAt": "2050-11-13T00:00:00Z"
}),
json!({
"indexes": ["products_*"],
"actions": ["indexes.*", "documents.*", "settings.*", "tasks.*"],
"expiresAt": "2050-11-13T00:00:00Z"
}),
json!({
"indexes": ["products_*"],
"actions": ["indexes.create", "documents.add", "settings.update", "tasks.get"],
"expiresAt": "2050-11-13T00:00:00Z"
}),
];
for content in contents {
server.use_api_key(MASTER_KEY);
let (response, code) = server.add_api_key(content).await;
assert_eq!(201, code, "{:?}", &response);
assert!(response["key"].is_string());
// use created key.
let key = response["key"].as_str().unwrap();
server.use_api_key(key);
// try to create an index via the add documents route
let index = server.index("products_1");
let test = server.index("test");
let documents = json!([
{
"id": 1,
"content": "foo",
}
]);
let (response, code) = index.add_documents(documents.clone(), None).await;
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
index.wait_task(task_id).await;
let (response, code) = index.get_task(task_id).await;
assert_eq!(200, code, "{:?}", &response);
assert_eq!(response["status"], "succeeded");
// Fail to create test index
let (response, code) = test.add_documents(documents, None).await;
assert_eq!(403, code, "{:?}", &response);
// try to create an index via the add settings route
let index = server.index("products_2");
let settings = json!({ "distinctAttribute": "test"});
let (response, code) = index.update_settings(settings).await;
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
index.wait_task(task_id).await;
let (response, code) = index.get_task(task_id).await;
assert_eq!(200, code, "{:?}", &response);
assert_eq!(response["status"], "succeeded");
// Fail to create test index
let index = server.index("test");
let settings = json!({ "distinctAttribute": "test"});
let (response, code) = index.update_settings(settings).await;
assert_eq!(403, code, "{:?}", &response);
// try to create an index via a specialized settings route
let index = server.index("products_3");
let (response, code) = index.update_distinct_attribute(json!("test")).await;
assert_eq!(202, code, "{:?}", &response);
let task_id = response["taskUid"].as_u64().unwrap();
index.wait_task(task_id).await;
let (response, code) = index.get_task(task_id).await;
assert_eq!(200, code, "{:?}", &response);
assert_eq!(response["status"], "succeeded");
// Fail to create test index
let index = server.index("test");
let settings = json!({ "distinctAttribute": "test"});
let (response, code) = index.update_settings(settings).await;
assert_eq!(403, code, "{:?}", &response);
}
}
#[actix_rt::test]
async fn error_creating_index_without_index() {
let mut server = Server::new_auth().await;
server.use_api_key(MASTER_KEY);
server.use_api_key("MASTER_KEY");
// create a key restricted to specific indexes.
let content = json!({
"indexes": ["unexpected","products_*"],
"indexes": ["unexpected"],
"actions": ["*"],
"expiresAt": "2050-11-13T00:00:00Z"
});
@@ -976,32 +690,4 @@ async fn error_creating_index_without_index() {
let index = server.index("test3");
let (response, code) = index.create(None).await;
assert_eq!(403, code, "{:?}", &response);
// try to create an index via the add documents route
let index = server.index("products");
let documents = json!([
{
"id": 1,
"content": "foo",
}
]);
let (response, code) = index.add_documents(documents, None).await;
assert_eq!(403, code, "{:?}", &response);
// try to create an index via the add settings route
let index = server.index("products");
let settings = json!({ "distinctAttribute": "test"});
let (response, code) = index.update_settings(settings).await;
assert_eq!(403, code, "{:?}", &response);
// try to create an index via a specialized settings route
let index = server.index("products");
let (response, code) = index.update_distinct_attribute(json!("test")).await;
assert_eq!(403, code, "{:?}", &response);
// try to create an index via the create index route
let index = server.index("products");
let (response, code) = index.create(None).await;
assert_eq!(403, code, "{:?}", &response);
}

View File

@@ -120,7 +120,7 @@ async fn create_api_key_bad_indexes() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Invalid value at `.indexes[0]`: `good doggo` is not a valid index uid pattern. Index uid patterns can be an integer or a string containing only alphanumeric characters, hyphens (-), underscores (_), and optionally end with a star (*).",
"message": "Invalid value at `.indexes[0]`: `good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
"code": "invalid_api_key_indexes",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"
@@ -138,7 +138,7 @@ async fn create_api_key_bad_expires_at() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown field `expires_at`: did you mean `expiresAt`? expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`",
"message": "Unknown field `expires_at`: expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"
@@ -150,7 +150,7 @@ async fn create_api_key_bad_expires_at() {
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
"message": "Unknown field `expires_at`: did you mean `expiresAt`? expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`",
"message": "Unknown field `expires_at`: expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`",
"code": "bad_request",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#bad_request"

View File

@@ -4,8 +4,6 @@ mod errors;
mod payload;
mod tenant_token;
mod tenant_token_multi_search;
use actix_web::http::StatusCode;
use serde_json::{json, Value};

View File

@@ -82,11 +82,6 @@ static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
"actions": ["search"],
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}),
json!({
"indexes": ["sal*", "prod*"],
"actions": ["search"],
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}),
]
});
@@ -109,11 +104,6 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
"actions": ["*"],
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}),
json!({
"indexes": ["prod*", "p*"],
"actions": ["*"],
"expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
}),
json!({
"indexes": ["products"],
"actions": ["search"],
@@ -255,10 +245,6 @@ async fn search_authorized_simple_token() {
"searchRules" => json!(["sales"]),
"exp" => Value::Null
},
hashmap! {
"searchRules" => json!(["sa*"]),
"exp" => Value::Null
},
];
compute_authorized_search!(tenant_tokens, {}, 5);
@@ -365,19 +351,11 @@ async fn filter_search_authorized_filter_token() {
}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
},
hashmap! {
"searchRules" => json!({
"*": {},
"sal*": {"filter": ["color = blue"]}
}),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
},
];
compute_authorized_search!(tenant_tokens, "color = yellow", 1);
}
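As the fixtures above show, `searchRules` accepts two shapes: an array of index uids or patterns, or an object mapping each uid/pattern to per-index options such as a forced `filter`. Illustrative values only:

use serde_json::json;

fn main() {
    // Array form: plain access to the listed indexes or patterns.
    let rules = json!(["sales", "sa*"]);
    // Object form: searches on a matching index get the filter applied.
    let rules_with_filter = json!({ "*": {}, "sal*": { "filter": ["color = blue"] } });
    println!("{rules} {rules_with_filter}");
}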
/// Tests that these tenant tokens are incompatible with the REFUSED_KEYS defined above.
#[actix_rt::test]
async fn error_search_token_forbidden_parent_key() {
let tenant_tokens = vec![
@@ -405,10 +383,6 @@ async fn error_search_token_forbidden_parent_key() {
"searchRules" => json!(["sales"]),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
},
hashmap! {
"searchRules" => json!(["sali*", "s*", "sales*"]),
"exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
},
];
compute_forbidden_search!(tenant_tokens, REFUSED_KEYS);

File diff suppressed because it is too large

View File

@@ -103,8 +103,8 @@ impl Server {
Index { uid: uid.as_ref().to_string(), service: &self.service, encoder }
}
pub async fn multi_search(&self, queries: Value) -> (Value, StatusCode) {
self.service.post("/multi-search", queries).await
pub async fn search(&self, queries: Value) -> (Value, StatusCode) {
self.service.post("/search", queries).await
}
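For reference, the wrapped body this helper posts looks like the following: queries travel inside a JSON object rather than as a bare top-level array (illustrative payload):

use serde_json::json;

fn main() {
    let payload = json!({
        "queries": [
            { "indexUid": "test", "q": "glass" },
            { "indexUid": "nested", "q": "pesti" }
        ]
    });
    println!("{payload}");
}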
pub async fn list_indexes_raw(&self, parameters: &str) -> (Value, StatusCode) {

View File

@@ -8,7 +8,7 @@ use crate::common::Server;
async fn search_empty_list() {
let server = Server::new().await;
let (response, code) = server.multi_search(json!({"queries": []})).await;
let (response, code) = server.search(json!({"queries": []})).await;
snapshot!(code, @"200 OK");
snapshot!(json_string!(response), @r###"
{
@@ -21,7 +21,7 @@ async fn search_empty_list() {
async fn search_json_object() {
let server = Server::new().await;
let (response, code) = server.multi_search(json!({})).await;
let (response, code) = server.search(json!({})).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
@@ -37,7 +37,7 @@ async fn search_json_object() {
async fn search_json_array() {
let server = Server::new().await;
let (response, code) = server.multi_search(json!([])).await;
let (response, code) = server.search(json!([])).await;
snapshot!(code, @"400 Bad Request");
snapshot!(json_string!(response), @r###"
{
@@ -59,7 +59,7 @@ async fn simple_search_single_index() {
index.wait_task(0).await;
let (response, code) = server
.multi_search(json!({"queries": [
.search(json!({"queries": [
{"indexUid" : "test", "q": "glass"},
{"indexUid": "test", "q": "captain"},
]}))
@@ -109,7 +109,7 @@ async fn simple_search_missing_index_uid() {
index.wait_task(0).await;
let (response, code) = server
.multi_search(json!({"queries": [
.search(json!({"queries": [
{"q": "glass"},
]}))
.await;
@@ -134,7 +134,7 @@ async fn simple_search_illegal_index_uid() {
index.wait_task(0).await;
let (response, code) = server
.multi_search(json!({"queries": [
.search(json!({"queries": [
{"indexUid": "", "q": "glass"},
]}))
.await;
@@ -164,7 +164,7 @@ async fn simple_search_two_indexes() {
index.wait_task(1).await;
let (response, code) = server
.multi_search(json!({"queries": [
.search(json!({"queries": [
{"indexUid" : "test", "q": "glass"},
{"indexUid": "nested", "q": "pesti"},
]}))
@@ -241,7 +241,7 @@ async fn search_one_index_doesnt_exist() {
index.wait_task(0).await;
let (response, code) = server
.multi_search(json!({"queries": [
.search(json!({"queries": [
{"indexUid" : "test", "q": "glass"},
{"indexUid": "nested", "q": "pesti"},
]}))
@@ -262,7 +262,7 @@ async fn search_multiple_indexes_dont_exist() {
let server = Server::new().await;
let (response, code) = server
.multi_search(json!({"queries": [
.search(json!({"queries": [
{"indexUid" : "test", "q": "glass"},
{"indexUid": "nested", "q": "pesti"},
]}))
@@ -294,7 +294,7 @@ async fn search_one_query_error() {
index.wait_task(1).await;
let (response, code) = server
.multi_search(json!({"queries": [
.search(json!({"queries": [
{"indexUid" : "test", "q": "glass", "facets": ["title"]},
{"indexUid": "nested", "q": "pesti"},
]}))
@@ -326,7 +326,7 @@ async fn search_multiple_query_errors() {
index.wait_task(1).await;
let (response, code) = server
.multi_search(json!({"queries": [
.search(json!({"queries": [
{"indexUid" : "test", "q": "glass", "facets": ["title"]},
{"indexUid": "nested", "q": "pesti", "facets": ["doggos"]},
]}))

View File

@@ -1,15 +1,8 @@
[package]
name = "milli"
version = "1.0.0"
authors = ["Kerollmops <clement@meilisearch.com>"]
edition = "2018"
publish = false
version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
# edition.workspace = true
license.workspace = true
[dependencies]
bimap = { version = "0.6.2", features = ["serde"] }
@@ -19,7 +12,7 @@ byteorder = "1.4.3"
charabia = { version = "0.7.0", default-features = false }
concat-arrays = "0.1.2"
crossbeam-channel = "0.5.6"
deserr = "0.4.1"
deserr = "0.3.0"
either = "1.8.0"
flatten-serde-json = { path = "../flatten-serde-json" }
fst = "0.4.7"

View File

@@ -7,31 +7,45 @@ use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::error::is_reserved_keyword;
use crate::search::facet::BadGeoError;
use crate::{CriterionError, Error, UserError};
/// This error type is never supposed to be shown to the end user.
/// You must always cast it to a sort error or a criterion error.
#[derive(Error, Debug)]
#[derive(Debug)]
pub enum AscDescError {
#[error(transparent)]
GeoError(BadGeoError),
#[error("Invalid syntax for the asc/desc parameter: expected expression ending by `:asc` or `:desc`, found `{name}`.")]
InvalidLatitude,
InvalidLongitude,
InvalidSyntax { name: String },
#[error("`{name}` is a reserved keyword and thus can't be used as a asc/desc rule.")]
ReservedKeyword { name: String },
}
impl From<BadGeoError> for AscDescError {
fn from(geo_error: BadGeoError) -> Self {
AscDescError::GeoError(geo_error)
impl fmt::Display for AscDescError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Self::InvalidLatitude => {
write!(f, "Latitude must be contained between -90 and 90 degrees.",)
}
Self::InvalidLongitude => {
write!(f, "Longitude must be contained between -180 and 180 degrees.",)
}
Self::InvalidSyntax { name } => {
write!(f, "Invalid syntax for the asc/desc parameter: expected expression ending by `:asc` or `:desc`, found `{}`.", name)
}
Self::ReservedKeyword { name } => {
write!(
f,
"`{}` is a reserved keyword and thus can't be used as a asc/desc rule.",
name
)
}
}
}
}
impl From<AscDescError> for CriterionError {
fn from(error: AscDescError) -> Self {
match error {
AscDescError::GeoError(_) => {
AscDescError::InvalidLatitude | AscDescError::InvalidLongitude => {
CriterionError::ReservedNameForSort { name: "_geoPoint".to_string() }
}
AscDescError::InvalidSyntax { name } => CriterionError::InvalidName { name },
@@ -71,9 +85,9 @@ impl FromStr for Member {
.map_err(|_| AscDescError::ReservedKeyword { name: text.to_string() })
})?;
if !(-90.0..=90.0).contains(&lat) {
return Err(BadGeoError::Lat(lat))?;
return Err(AscDescError::InvalidLatitude)?;
} else if !(-180.0..=180.0).contains(&lng) {
return Err(BadGeoError::Lng(lng))?;
return Err(AscDescError::InvalidLongitude)?;
}
Ok(Member::Geo([lat, lng]))
}
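The range checks above amount to a small helper; a standalone sketch of the same bounds (illustrative, not shared code):

fn check_geo_point(lat: f64, lng: f64) -> Result<(), String> {
    if !(-90.0..=90.0).contains(&lat) {
        return Err(format!("Bad latitude `{lat}`: must be between -90 and 90 degrees."));
    }
    if !(-180.0..=180.0).contains(&lng) {
        return Err(format!("Bad longitude `{lng}`: must be between -180 and 180 degrees."));
    }
    Ok(())
}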
@@ -148,8 +162,10 @@ impl FromStr for AscDesc {
#[derive(Error, Debug)]
pub enum SortError {
#[error(transparent)]
ParseGeoError { error: BadGeoError },
#[error("{}", AscDescError::InvalidLatitude)]
InvalidLatitude,
#[error("{}", AscDescError::InvalidLongitude)]
InvalidLongitude,
#[error("Invalid syntax for the geo parameter: expected expression formated like \
`_geoPoint(latitude, longitude)` and ending by `:asc` or `:desc`, found `{name}`.")]
BadGeoPointUsage { name: String },
@@ -168,7 +184,8 @@ pub enum SortError {
impl From<AscDescError> for SortError {
fn from(error: AscDescError) -> Self {
match error {
AscDescError::GeoError(error) => SortError::ParseGeoError { error },
AscDescError::InvalidLatitude => SortError::InvalidLatitude,
AscDescError::InvalidLongitude => SortError::InvalidLongitude,
AscDescError::InvalidSyntax { name } => SortError::InvalidName { name },
AscDescError::ReservedKeyword { name } if name.starts_with("_geoPoint") => {
SortError::BadGeoPointUsage { name }
@@ -260,11 +277,11 @@ mod tests {
),
("_geoPoint(35, 85, 75):asc", ReservedKeyword { name: S("_geoPoint(35, 85, 75)") }),
("_geoPoint(18):asc", ReservedKeyword { name: S("_geoPoint(18)") }),
("_geoPoint(200, 200):asc", GeoError(BadGeoError::Lat(200.))),
("_geoPoint(90.000001, 0):asc", GeoError(BadGeoError::Lat(90.000001))),
("_geoPoint(0, -180.000001):desc", GeoError(BadGeoError::Lng(-180.000001))),
("_geoPoint(159.256, 130):asc", GeoError(BadGeoError::Lat(159.256))),
("_geoPoint(12, -2021):desc", GeoError(BadGeoError::Lng(-2021.))),
("_geoPoint(200, 200):asc", InvalidLatitude),
("_geoPoint(90.000001, 0):asc", InvalidLatitude),
("_geoPoint(0, -180.000001):desc", InvalidLongitude),
("_geoPoint(159.256, 130):asc", InvalidLatitude),
("_geoPoint(12, -2021):desc", InvalidLongitude),
];
for (req, expected_error) in invalid_req {

View File

@@ -123,7 +123,7 @@ impl<'t> Criterion for Attribute<'t> {
None => {
return Ok(Some(CriterionResult {
query_tree: Some(query_tree),
candidates: Some(allowed_candidates),
candidates: Some(RoaringBitmap::new()),
filtered_candidates: None,
initial_candidates: Some(self.initial_candidates.take()),
}));

View File

@@ -21,51 +21,18 @@ pub struct Filter<'a> {
condition: FilterCondition<'a>,
}
#[derive(Debug)]
pub enum BadGeoError {
Lat(f64),
Lng(f64),
BoundingBoxTopIsBelowBottom(f64, f64),
}
impl std::error::Error for BadGeoError {}
impl Display for BadGeoError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::BoundingBoxTopIsBelowBottom(top, bottom) => {
write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`.")
}
Self::Lat(lat) => write!(
f,
"Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ",
lat
),
Self::Lng(lng) => write!(
f,
"Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ",
lng
),
}
}
}
#[derive(Debug)]
enum FilterError<'a> {
AttributeNotFilterable { attribute: &'a str, filterable_fields: HashSet<String> },
ParseGeoError(BadGeoError),
ReservedGeo(&'a str),
BadGeo(&'a str),
BadGeoLat(f64),
BadGeoLng(f64),
BadGeoBoundingBoxTopIsBelowBottom(f64, f64),
Reserved(&'a str),
TooDeep,
}
impl<'a> std::error::Error for FilterError<'a> {}
impl<'a> From<BadGeoError> for FilterError<'a> {
fn from(geo_error: BadGeoError) -> Self {
FilterError::ParseGeoError(geo_error)
}
}
impl<'a> Display for FilterError<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
@@ -77,11 +44,7 @@ impl<'a> Display for FilterError<'a> {
attribute,
)
} else {
let filterables_list = filterable_fields
.iter()
.map(AsRef::as_ref)
.collect::<Vec<&str>>()
.join(" ");
let filterables_list = filterable_fields.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(" ");
write!(
f,
@@ -90,19 +53,20 @@ impl<'a> Display for FilterError<'a> {
filterables_list,
)
}
}
Self::TooDeep => write!(
f,
},
Self::TooDeep => write!(f,
"Too many filter conditions, can't process more than {} filters.",
MAX_FILTER_DEPTH
),
Self::ReservedGeo(keyword) => write!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.", keyword),
Self::Reserved(keyword) => write!(
f,
"`{}` is a reserved keyword and thus can't be used as a filter expression.",
keyword
),
Self::ParseGeoError(error) => write!(f, "{}", error),
Self::BadGeo(keyword) => write!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.", keyword),
Self::BadGeoBoundingBoxTopIsBelowBottom(top, bottom) => write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`."),
Self::BadGeoLat(lat) => write!(f, "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ", lat),
Self::BadGeoLng(lng) => write!(f, "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ", lng),
}
}
}
@@ -334,10 +298,10 @@ impl<'a> Filter<'a> {
} else {
match fid.value() {
attribute @ "_geo" => {
Err(fid.as_external_error(FilterError::ReservedGeo(attribute)))?
Err(fid.as_external_error(FilterError::BadGeo(attribute)))?
}
attribute if attribute.starts_with("_geoPoint(") => {
Err(fid.as_external_error(FilterError::ReservedGeo("_geoPoint")))?
Err(fid.as_external_error(FilterError::BadGeo("_geoPoint")))?
}
attribute @ "_geoDistance" => {
Err(fid.as_external_error(FilterError::Reserved(attribute)))?
@@ -389,10 +353,14 @@ impl<'a> Filter<'a> {
let base_point: [f64; 2] =
[point[0].parse_finite_float()?, point[1].parse_finite_float()?];
if !(-90.0..=90.0).contains(&base_point[0]) {
return Err(point[0].as_external_error(BadGeoError::Lat(base_point[0])))?;
return Err(
point[0].as_external_error(FilterError::BadGeoLat(base_point[0]))
)?;
}
if !(-180.0..=180.0).contains(&base_point[1]) {
return Err(point[1].as_external_error(BadGeoError::Lng(base_point[1])))?;
return Err(
point[1].as_external_error(FilterError::BadGeoLng(base_point[1]))
)?;
}
let radius = radius.parse_finite_float()?;
let rtree = match index.geo_rtree(rtxn)? {
@@ -430,26 +398,27 @@ impl<'a> Filter<'a> {
bottom_right_point[1].parse_finite_float()?,
];
if !(-90.0..=90.0).contains(&top_left[0]) {
return Err(
top_left_point[0].as_external_error(BadGeoError::Lat(top_left[0]))
)?;
return Err(top_left_point[0]
.as_external_error(FilterError::BadGeoLat(top_left[0])))?;
}
if !(-180.0..=180.0).contains(&top_left[1]) {
return Err(
top_left_point[1].as_external_error(BadGeoError::Lng(top_left[1]))
)?;
return Err(top_left_point[1]
.as_external_error(FilterError::BadGeoLng(top_left[1])))?;
}
if !(-90.0..=90.0).contains(&bottom_right[0]) {
return Err(bottom_right_point[0]
.as_external_error(BadGeoError::Lat(bottom_right[0])))?;
.as_external_error(FilterError::BadGeoLat(bottom_right[0])))?;
}
if !(-180.0..=180.0).contains(&bottom_right[1]) {
return Err(bottom_right_point[1]
.as_external_error(BadGeoError::Lng(bottom_right[1])))?;
.as_external_error(FilterError::BadGeoLng(bottom_right[1])))?;
}
if top_left[0] < bottom_right[0] {
return Err(bottom_right_point[1].as_external_error(
BadGeoError::BoundingBoxTopIsBelowBottom(top_left[0], bottom_right[0]),
FilterError::BadGeoBoundingBoxTopIsBelowBottom(
top_left[0],
bottom_right[0],
),
))?;
}

View File

@@ -4,7 +4,7 @@ use heed::types::{ByteSlice, DecodeIgnore};
use heed::{BytesDecode, RoTxn};
pub use self::facet_distribution::{FacetDistribution, DEFAULT_VALUES_PER_FACET};
pub use self::filter::{BadGeoError, Filter};
pub use self::filter::Filter;
use crate::heed_codec::facet::{FacetGroupKeyCodec, FacetGroupValueCodec};
use crate::heed_codec::ByteSliceRefCodec;
mod facet_distribution;

View File

@@ -2,7 +2,7 @@ use std::collections::{BTreeSet, HashMap, HashSet};
use std::result::Result as StdResult;
use charabia::{Tokenizer, TokenizerBuilder};
use deserr::{DeserializeError, Deserr};
use deserr::{DeserializeError, DeserializeFromValue};
use itertools::Itertools;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use time::OffsetDateTime;
@@ -23,9 +23,9 @@ pub enum Setting<T> {
NotSet,
}
impl<T, E> Deserr<E> for Setting<T>
impl<T, E> DeserializeFromValue<E> for Setting<T>
where
T: Deserr<E>,
T: DeserializeFromValue<E>,
E: DeserializeError,
{
fn deserialize_from_value<V: deserr::IntoValue>(
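For context, `Setting<T>` is a tri-state: a present value sets, an explicit `null` resets, and an absent field leaves the setting untouched. A minimal serde-based sketch of that idea (the real impl goes through deserr and is truncated above):

enum Setting<T> {
    Set(T),
    Reset,
    NotSet,
}

// Hypothetical JSON-level illustration: `None` models an absent field.
fn setting_from_json<T: serde::de::DeserializeOwned>(
    value: Option<serde_json::Value>,
) -> Result<Setting<T>, serde_json::Error> {
    match value {
        None => Ok(Setting::NotSet),
        Some(serde_json::Value::Null) => Ok(Setting::Reset),
        Some(other) => serde_json::from_value(other).map(Setting::Set),
    }
}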

View File

@@ -1,16 +1,9 @@
[package]
name = "permissive-json-pointer"
version = "1.0.0"
edition = "2021"
description = "A permissive json pointer"
readme = "README.md"
publish = false
version.workspace = true
authors.workspace = true
# description.workspace = true
homepage.workspace = true
# readme.workspace = true
edition.workspace = true
license.workspace = true
[dependencies]
serde_json = "1.0"