Mirror of https://github.com/meilisearch/meilisearch.git
Synced 2025-11-29 09:15:38 +00:00

Compare commits
66 commits, comparing prototype-... to prototype-...
| Author | SHA1 | Date |
|---|---|---|
| | 6d8c88b129 | |
| | 7510e3d684 | |
| | 797da246a4 | |
| | e19bd82202 | |
| | 9edda9a1e8 | |
| | 2bffc0b32a | |
| | 34ac3c0535 | |
| | 9ce65b3aaa | |
| | 809847f138 | |
| | e83bc57b18 | |
| | b83b67fe12 | |
| | 421666a64d | |
| | 181a86305e | |
| | d1e66f687e | |
| | 2b9cea271b | |
| | c91bfeaf15 | |
| | 28961b2ad1 | |
| | f11c7d4b62 | |
| | 49e18da23e | |
| | 54240db495 | |
| | e1ed4bc750 | |
| | 9bd1cfb3a3 | |
| | a341c94871 | |
| | f46cf46b8c | |
| | c3a30a5a91 | |
| | 143e3cf948 | |
| | ab2adba183 | |
| | 74d1a67a99 | |
| | 91ce8a5e67 | |
| | fd7ae1883b | |
| | 42a3cdca66 | |
| | a43765d454 | |
| | 769576fd94 | |
| | 8fb7b1d10f | |
| | d494c29768 | |
| | f3b54337f9 | |
| | 7f3ae40204 | |
| | a53536836b | |
| | b095325bf8 | |
| | d7ad39ad77 | |
| | 849de089d2 | |
| | 7f25007d31 | |
| | c810af3ebf | |
| | c0b77773ba | |
| | 7481559e8b | |
| | 83c765ce6c | |
| | 4c91037602 | |
| | 825923f6fc | |
| | e405702733 | |
| | 6fa877efb0 | |
| | 4b1cd10653 | |
| | 47748395dc | |
| | ff595156d7 | |
| | 8770088df3 | |
| | 827c1c8447 | |
| | 764df24b7d | |
| | 4570d5bf3a | |
| | c690c4fec4 | |
| | 7b4b57ecc8 | |
| | a36b1dbd70 | |
| | d563ed8a39 | |
| | ec7de4bae7 | |
| | 184b8afd9e | |
| | 29961b8c6b | |
| | 0b08413c98 | |
| | 474d4ec498 | |
.github/scripts/check-release.sh (vendored, 10 changed lines)

@@ -3,7 +3,7 @@
 # check_tag $current_tag $file_tag $file_name
 function check_tag {
   if [[ "$1" != "$2" ]]; then
-    echo "Error: the current tag does not match the version in $3: found $2 - expected $1"
+    echo "Error: the current tag does not match the version in Cargo.toml: found $2 - expected $1"
     ret=1
   fi
 }
@@ -11,12 +11,8 @@ function check_tag {
 ret=0
 current_tag=${GITHUB_REF#'refs/tags/v'}

-toml_files='*/Cargo.toml'
-for toml_file in $toml_files;
-do
-    file_tag="$(grep '^version = ' $toml_file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
-    check_tag $current_tag $file_tag $toml_file
-done
+file_tag="$(grep '^version = ' Cargo.toml | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
+check_tag $current_tag $file_tag

 lock_file='Cargo.lock'
 lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')
.github/workflows/publish-docker-images.yml (vendored, 2 changed lines)

@@ -92,7 +92,7 @@ jobs:
           build-args: |
             COMMIT_SHA=${{ github.sha }}
             COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
-            GIT_TAG=$(printf "%q" ${{ github.ref_name }})
+            GIT_TAG=${{ github.ref_name }}

       # /!\ Don't touch this without checking with Cloud team
       - name: Send CI information to Cloud team
.github/workflows/rust.yml (vendored, 13 changed lines)

@@ -2,6 +2,9 @@ name: Rust

 on:
   workflow_dispatch:
+  schedule:
+    # Every day at 5:00am
+    - cron: '0 5 * * *'
   pull_request:
   push:
     # trying and staging branches are for Bors config
@@ -27,10 +30,18 @@ jobs:
       run: |
         apt-get update && apt-get install -y curl
         apt-get install build-essential -y
-    - uses: actions-rs/toolchain@v1
+    - name: Run test with Rust stable
+      if: github.event_name != 'schedule'
+      uses: actions-rs/toolchain@v1
       with:
         toolchain: stable
         override: true
+    - name: Run test with Rust nightly
+      if: github.event_name == 'schedule'
+      uses: actions-rs/toolchain@v1
+      with:
+        toolchain: nightly
+        override: true
     # Disable cache due to disk space issues with Windows workers in CI
     # - name: Cache dependencies
     #   uses: Swatinem/rust-cache@v2.2.0
@@ -29,7 +29,7 @@ jobs:
       run: |
         raw_new_version=$(echo $NEW_VERSION | cut -d 'v' -f 2)
         new_string="version = \"$raw_new_version\""
-        sd '^version = "\d+.\d+.\w+"$' "$new_string" */Cargo.toml
+        sd '^version = "\d+.\d+.\w+"$' "$new_string" Cargo.toml
     - name: Build Meilisearch to update Cargo.lock
       run: cargo build
     - name: Commit and push the changes to the ${{ env.NEW_BRANCH }} branch
.gitignore (vendored, 2 changed lines)

@@ -1,3 +1,5 @@
 .idea/
+.vscode/
 /target
 **/*.csv
+**/*.json_lines
Cargo.lock (generated, 62 changed lines)

@@ -36,9 +36,9 @@ dependencies = [

 [[package]]
 name = "actix-http"
-version = "3.2.2"
+version = "3.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c83abf9903e1f0ad9973cc4f7b9767fd5a03a583f51a5b7a339e07987cd2724"
+checksum = "0070905b2c4a98d184c4e81025253cb192aa8a73827553f38e9410801ceb35bb"
 dependencies = [
  "actix-codec",
  "actix-rt",
@@ -46,7 +46,7 @@ dependencies = [
  "actix-tls",
  "actix-utils",
  "ahash",
- "base64 0.13.1",
+ "base64 0.21.0",
  "bitflags",
  "brotli",
  "bytes",
@@ -68,7 +68,10 @@ dependencies = [
  "rand",
  "sha1",
+ "smallvec",
  "tokio",
+ "tokio-util",
  "tracing",
+ "zstd 0.12.3+zstd.1.5.2",
 ]

 [[package]]
@@ -164,9 +167,9 @@ dependencies = [

 [[package]]
 name = "actix-web"
-version = "4.2.1"
+version = "4.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d48f7b6534e06c7bfc72ee91db7917d4af6afe23e7d223b51e68fffbb21e96b9"
+checksum = "464e0fddc668ede5f26ec1f9557a8d44eda948732f40c6b0ad79126930eb775f"
 dependencies = [
  "actix-codec",
  "actix-http",
@@ -606,9 +609,9 @@ dependencies = [

 [[package]]
 name = "cargo_toml"
-version = "0.13.3"
+version = "0.14.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "497049e9477329f8f6a559972ee42e117487d01d1e8c2cc9f836ea6fa23a9e1a"
+checksum = "2bfbc36312494041e2cdd5f06697b7e89d4b76f42773a0b5556ac290ff22acc2"
 dependencies = [
  "serde",
  "toml",
@@ -1110,20 +1113,26 @@ dependencies = [

 [[package]]
 name = "deserr"
-version = "0.3.0"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28380303ca15ec07e1d5b079baf19cf849b09edad5cab219c1c51b2bd07523de"
+checksum = "6eee2844f21cf7fb5693aae1fb8f1658127acfdb2fc072167d68a9152584ae64"
 dependencies = [
+ "actix-http",
+ "actix-utils",
+ "actix-web",
  "deserr-internal",
+ "futures",
  "serde-cs",
  "serde_json",
+ "serde_urlencoded",
+ "strsim",
 ]

 [[package]]
 name = "deserr-internal"
-version = "0.3.0"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "860928cd8af78d223a3d70dd581f21d7c3de8aa2eecd938e0c0a399ded7c1451"
+checksum = "c27246f8ca9eeba9dd70d614b664dc43b529251ed7bd9e633131010d340da4b9"
 dependencies = [
  "convert_case 0.5.0",
  "proc-macro2",
@@ -1912,6 +1921,7 @@ dependencies = [
  "insta",
  "log",
  "meili-snap",
+ "meilisearch-auth",
  "meilisearch-types",
  "nelson",
  "page_size 0.5.0",
@@ -2527,6 +2537,7 @@ dependencies = [
  "base64 0.13.1",
  "enum-iterator",
  "hmac",
+ "maplit",
  "meilisearch-types",
  "rand",
  "roaring",
@@ -4422,7 +4433,7 @@ dependencies = [
  "pbkdf2",
  "sha1",
  "time",
- "zstd",
+ "zstd 0.11.2+zstd.1.5.2",
 ]

 [[package]]
@@ -4431,7 +4442,16 @@ version = "0.11.2+zstd.1.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4"
 dependencies = [
- "zstd-safe",
+ "zstd-safe 5.0.2+zstd.1.5.2",
 ]

+[[package]]
+name = "zstd"
+version = "0.12.3+zstd.1.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806"
+dependencies = [
+ "zstd-safe 6.0.4+zstd.1.5.4",
+]
+
 [[package]]
@@ -4445,10 +4465,20 @@ dependencies = [
 ]

 [[package]]
-name = "zstd-sys"
-version = "2.0.5+zstd.1.5.2"
+name = "zstd-safe"
+version = "6.0.4+zstd.1.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edc50ffce891ad571e9f9afe5039c4837bede781ac4bb13052ed7ae695518596"
+checksum = "7afb4b54b8910cf5447638cb54bf4e8a65cbedd783af98b98c62ffe91f185543"
 dependencies = [
+ "libc",
+ "zstd-sys",
+]
+
+[[package]]
+name = "zstd-sys"
+version = "2.0.7+zstd.1.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5"
+dependencies = [
  "cc",
  "libc",
@@ -16,6 +16,15 @@ members = [
   "benchmarks"
 ]

+[workspace.package]
+version = "1.0.0"
+authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
+description = "Meilisearch HTTP server"
+homepage = "https://meilisearch.com"
+readme = "README.md"
+edition = "2021"
+license = "MIT"
+
 [profile.release]
 codegen-units = 1
@@ -1,9 +1,15 @@
 [package]
 name = "benchmarks"
-version = "1.0.0"
-edition = "2018"
 publish = false

+version.workspace = true
+authors.workspace = true
+description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 anyhow = "1.0.65"
 csv = "1.1.6"
@@ -1,7 +1,14 @@
 [package]
 name = "dump"
-version = "1.0.0"
-edition = "2021"
 publish = false

+version.workspace = true
+authors.workspace = true
+description.workspace = true
+edition.workspace = true
+homepage.workspace = true
+readme.workspace = true
+license.workspace = true
+
 [dependencies]
 anyhow = "1.0.65"
@@ -203,12 +203,11 @@ pub(crate) mod test {

     use big_s::S;
     use maplit::btreeset;
-    use meilisearch_types::index_uid::IndexUid;
+    use meilisearch_types::index_uid_pattern::IndexUidPattern;
     use meilisearch_types::keys::{Action, Key};
     use meilisearch_types::milli::update::Setting;
     use meilisearch_types::milli::{self};
     use meilisearch_types::settings::{Checked, Settings};
-    use meilisearch_types::star_or::StarOr;
     use meilisearch_types::tasks::{Details, Status};
     use serde_json::{json, Map, Value};
     use time::macros::datetime;
@@ -341,7 +340,7 @@ pub(crate) mod test {
             name: Some(S("doggos_key")),
             uid: Uuid::from_str("9f8a34da-b6b2-42f0-939b-dbd4c3448655").unwrap(),
             actions: vec![Action::DocumentsAll],
-            indexes: vec![StarOr::Other(IndexUid::from_str("doggos").unwrap())],
+            indexes: vec![IndexUidPattern::from_str("doggos").unwrap()],
             expires_at: Some(datetime!(4130-03-14 12:21 UTC)),
             created_at: datetime!(1960-11-15 0:00 UTC),
             updated_at: datetime!(2022-11-10 0:00 UTC),
@@ -351,7 +350,7 @@ pub(crate) mod test {
             name: Some(S("master_key")),
             uid: Uuid::from_str("4622f717-1c00-47bb-a494-39d76a49b591").unwrap(),
             actions: vec![Action::All],
-            indexes: vec![StarOr::Star],
+            indexes: vec![IndexUidPattern::all()],
             expires_at: None,
             created_at: datetime!(0000-01-01 00:01 UTC),
             updated_at: datetime!(1964-05-04 17:25 UTC),
@@ -181,10 +181,8 @@ impl CompatV5ToV6 {
             .indexes
             .into_iter()
             .map(|index| match index {
-                v5::StarOr::Star => v6::StarOr::Star,
-                v5::StarOr::Other(uid) => {
-                    v6::StarOr::Other(v6::IndexUid::new_unchecked(uid.as_str()))
-                }
+                v5::StarOr::Star => v6::IndexUidPattern::all(),
+                v5::StarOr::Other(uid) => v6::IndexUidPattern::new_unchecked(uid.as_str()),
             })
             .collect(),
         expires_at: key.expires_at,
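The whole migration hinges on replacing the two-variant StarOr enum with the single IndexUidPattern type. A self-contained sketch of that conversion, using local stand-in types for illustration rather than the real dump-reader v5/v6 modules:

```rust
// Local stand-ins for illustration only; the real StarOr and IndexUidPattern
// live in the dump reader's v5/v6 modules and in meilisearch_types.
enum StarOr<T> {
    Star,
    Other(T),
}

#[derive(Debug, PartialEq)]
struct IndexUidPattern(String);

impl IndexUidPattern {
    fn all() -> Self {
        Self("*".to_owned())
    }
    fn new_unchecked(s: &str) -> Self {
        Self(s.to_owned())
    }
}

// The v5 -> v6 key migration collapses the two variants into one pattern type.
fn convert(index: StarOr<String>) -> IndexUidPattern {
    match index {
        StarOr::Star => IndexUidPattern::all(),
        StarOr::Other(uid) => IndexUidPattern::new_unchecked(&uid),
    }
}

fn main() {
    assert_eq!(convert(StarOr::Star), IndexUidPattern("*".to_owned()));
    assert_eq!(convert(StarOr::Other("doggos".into())), IndexUidPattern("doggos".to_owned()));
}
```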
@@ -34,8 +34,7 @@ pub type PaginationSettings = meilisearch_types::settings::PaginationSettings;

 // everything related to the api keys
 pub type Action = meilisearch_types::keys::Action;
-pub type StarOr<T> = meilisearch_types::star_or::StarOr<T>;
-pub type IndexUid = meilisearch_types::index_uid::IndexUid;
+pub type IndexUidPattern = meilisearch_types::index_uid_pattern::IndexUidPattern;

 // everything related to the errors
 pub type ResponseError = meilisearch_types::error::ResponseError;
@@ -1,7 +1,14 @@
 [package]
 name = "file-store"
-version = "1.0.0"
-edition = "2021"
 publish = false

+version.workspace = true
+authors.workspace = true
+description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 tempfile = "3.3.0"
@@ -1,10 +1,16 @@
 [package]
 name = "filter-parser"
-version = "1.0.0"
-edition = "2021"
 description = "The parser for the Meilisearch filter syntax"
 publish = false

+version.workspace = true
+authors.workspace = true
+# description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 nom = "7.1.1"
 nom_locate = "4.0.0"
@@ -1,11 +1,17 @@
 [package]
 name = "flatten-serde-json"
-version = "1.0.0"
-edition = "2021"
 description = "Flatten serde-json objects like elastic search"
 readme = "README.md"
 publish = false

+version.workspace = true
+authors.workspace = true
+# description.workspace = true
+homepage.workspace = true
+# readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 serde_json = "1.0"
@@ -1,7 +1,14 @@
 [package]
 name = "index-scheduler"
-version = "1.0.0"
-edition = "2021"
 publish = false

+version.workspace = true
+authors.workspace = true
+description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 anyhow = "1.0.64"
@@ -12,6 +19,7 @@ dump = { path = "../dump" }
 enum-iterator = "1.1.3"
 file-store = { path = "../file-store" }
 log = "0.4.14"
+meilisearch-auth = { path = "../meilisearch-auth" }
 meilisearch-types = { path = "../meilisearch-types" }
 page_size = "0.5.0"
 roaring = { version = "0.10.0", features = ["serde"] }
@@ -630,13 +630,13 @@ impl IndexScheduler {
         &self,
         rtxn: &RoTxn,
         query: &Query,
-        authorized_indexes: &Option<Vec<String>>,
+        filters: &meilisearch_auth::AuthFilter,
     ) -> Result<RoaringBitmap> {
         let mut tasks = self.get_task_ids(rtxn, query)?;

         // If the query contains a list of index uid or there is a finite list of authorized indexes,
         // then we must exclude all the kinds that aren't associated to one and only one index.
-        if query.index_uids.is_some() || authorized_indexes.is_some() {
+        if query.index_uids.is_some() || !filters.all_indexes_authorized() {
             for kind in enum_iterator::all::<Kind>().filter(|kind| !kind.related_to_one_index()) {
                 tasks -= self.get_kind(rtxn, kind)?;
             }
@@ -644,11 +644,11 @@ impl IndexScheduler {

         // Any task that is internally associated with a non-authorized index
         // must be discarded.
-        if let Some(authorized_indexes) = authorized_indexes {
+        if !filters.all_indexes_authorized() {
             let all_indexes_iter = self.index_tasks.iter(rtxn)?;
             for result in all_indexes_iter {
                 let (index, index_tasks) = result?;
-                if !authorized_indexes.contains(&index.to_owned()) {
+                if !filters.is_index_authorized(index) {
                     tasks -= index_tasks;
                 }
             }
@@ -668,12 +668,11 @@ impl IndexScheduler {
     pub fn get_tasks_from_authorized_indexes(
         &self,
         query: Query,
-        authorized_indexes: Option<Vec<String>>,
+        filters: &meilisearch_auth::AuthFilter,
     ) -> Result<Vec<Task>> {
         let rtxn = self.env.read_txn()?;

-        let tasks =
-            self.get_task_ids_from_authorized_indexes(&rtxn, &query, &authorized_indexes)?;
+        let tasks = self.get_task_ids_from_authorized_indexes(&rtxn, &query, filters)?;

         let tasks = self.get_existing_tasks(
             &rtxn,
@@ -1093,7 +1092,9 @@ mod tests {
     use crossbeam::channel::RecvTimeoutError;
     use file_store::File;
     use meili_snap::snapshot;
+    use meilisearch_auth::AuthFilter;
     use meilisearch_types::document_formats::DocumentFormatError;
+    use meilisearch_types::index_uid_pattern::IndexUidPattern;
     use meilisearch_types::milli::obkv_to_json;
     use meilisearch_types::milli::update::IndexDocumentsMethod::{
         ReplaceDocuments, UpdateDocuments,
@@ -2245,38 +2246,45 @@ mod tests {

         let rtxn = index_scheduler.env.read_txn().unwrap();
         let query = Query { limit: Some(0), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[]");

         let query = Query { limit: Some(1), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[2,]");

         let query = Query { limit: Some(2), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[1,2,]");

         let query = Query { from: Some(1), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");

         let query = Query { from: Some(2), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]");

         let query = Query { from: Some(1), limit: Some(1), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[1,]");

         let query = Query { from: Some(1), limit: Some(2), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");
     }
@@ -2301,21 +2309,24 @@ mod tests {
         let rtxn = index_scheduler.env.read_txn().unwrap();

         let query = Query { statuses: Some(vec![Status::Processing]), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[0,]"); // only the processing tasks in the first tick

         let query = Query { statuses: Some(vec![Status::Enqueued]), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[1,2,]"); // only the enqueued tasks in the first tick

         let query = Query {
             statuses: Some(vec![Status::Enqueued, Status::Processing]),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,]"); // both enqueued and processing tasks in the first tick

         let query = Query {
@@ -2323,8 +2334,9 @@ mod tests {
             after_started_at: Some(start_time),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // both enqueued and processing tasks in the first tick, but limited to those with a started_at
         // that comes after the start of the test, which should exclude the enqueued tasks
         snapshot!(snapshot_bitmap(&tasks), @"[0,]");
@@ -2334,8 +2346,9 @@ mod tests {
             before_started_at: Some(start_time),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // both enqueued and processing tasks in the first tick, but limited to those with a started_at
         // that comes before the start of the test, which should exclude all of them
         snapshot!(snapshot_bitmap(&tasks), @"[]");
@@ -2346,8 +2359,9 @@ mod tests {
             before_started_at: Some(start_time + Duration::minutes(1)),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // both enqueued and processing tasks in the first tick, but limited to those with a started_at
         // that comes after the start of the test and before one minute after the start of the test,
         // which should exclude the enqueued tasks and include the only processing task
@@ -2372,8 +2386,9 @@ mod tests {
             before_started_at: Some(start_time + Duration::minutes(1)),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // both succeeded and processing tasks in the first tick, but limited to those with a started_at
         // that comes after the start of the test and before one minute after the start of the test,
         // which should include all tasks
@@ -2384,8 +2399,9 @@ mod tests {
             before_started_at: Some(start_time),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // both succeeded and processing tasks in the first tick, but limited to those with a started_at
         // that comes before the start of the test, which should exclude all tasks
         snapshot!(snapshot_bitmap(&tasks), @"[]");
@@ -2396,8 +2412,9 @@ mod tests {
             before_started_at: Some(second_start_time + Duration::minutes(1)),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // both succeeded and processing tasks in the first tick, but limited to those with a started_at
         // that comes after the start of the second part of the test and before one minute after the
         // second start of the test, which should exclude all tasks
@@ -2415,8 +2432,9 @@ mod tests {

         let rtxn = index_scheduler.env.read_txn().unwrap();

-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // we run the same query to verify that, and indeed find that the last task is matched
         snapshot!(snapshot_bitmap(&tasks), @"[2,]");
@@ -2426,8 +2444,9 @@ mod tests {
             before_started_at: Some(second_start_time + Duration::minutes(1)),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // enqueued, succeeded, or processing tasks started after the second part of the test, should
         // again only return the last task
         snapshot!(snapshot_bitmap(&tasks), @"[2,]");
@@ -2437,8 +2456,9 @@ mod tests {

         // now the last task should have failed
         snapshot!(snapshot_index_scheduler(&index_scheduler), name: "end");
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // so running the last query should return nothing
         snapshot!(snapshot_bitmap(&tasks), @"[]");
@@ -2448,8 +2468,9 @@ mod tests {
             before_started_at: Some(second_start_time + Duration::minutes(1)),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // but the same query on failed tasks should return the last task
         snapshot!(snapshot_bitmap(&tasks), @"[2,]");
@@ -2459,8 +2480,9 @@ mod tests {
             before_started_at: Some(second_start_time + Duration::minutes(1)),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // but the same query on failed tasks should return the last task
         snapshot!(snapshot_bitmap(&tasks), @"[2,]");
@@ -2471,8 +2493,9 @@ mod tests {
             before_started_at: Some(second_start_time + Duration::minutes(1)),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // same query but with an invalid uid
         snapshot!(snapshot_bitmap(&tasks), @"[]");
@@ -2483,8 +2506,9 @@ mod tests {
             before_started_at: Some(second_start_time + Duration::minutes(1)),
             ..Default::default()
         };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // same query but with a valid uid
         snapshot!(snapshot_bitmap(&tasks), @"[2,]");
     }
@@ -2514,14 +2538,21 @@ mod tests {
         let rtxn = index_scheduler.env.read_txn().unwrap();

         let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // only the first task associated with catto is returned, the indexSwap tasks are excluded!
         snapshot!(snapshot_bitmap(&tasks), @"[0,]");

         let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() };
         let tasks = index_scheduler
-            .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()]))
+            .get_task_ids_from_authorized_indexes(
+                &rtxn,
+                &query,
+                &AuthFilter::with_allowed_indexes(
+                    vec![IndexUidPattern::new_unchecked("doggo")].into_iter().collect(),
+                ),
+            )
             .unwrap();
         // we have asked for only the tasks associated with catto, but are only authorized to retrieve the tasks
         // associated with doggo -> empty result
@@ -2529,7 +2560,13 @@ mod tests {

         let query = Query::default();
         let tasks = index_scheduler
-            .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()]))
+            .get_task_ids_from_authorized_indexes(
+                &rtxn,
+                &query,
+                &AuthFilter::with_allowed_indexes(
+                    vec![IndexUidPattern::new_unchecked("doggo")].into_iter().collect(),
+                ),
+            )
             .unwrap();
         // we asked for all the tasks, but we are only authorized to retrieve the doggo tasks
         // -> only the index creation of doggo should be returned
@@ -2540,7 +2577,14 @@ mod tests {
             .get_task_ids_from_authorized_indexes(
                 &rtxn,
                 &query,
-                &Some(vec!["catto".to_owned(), "doggo".to_owned()]),
+                &AuthFilter::with_allowed_indexes(
+                    vec![
+                        IndexUidPattern::new_unchecked("catto"),
+                        IndexUidPattern::new_unchecked("doggo"),
+                    ]
+                    .into_iter()
+                    .collect(),
+                ),
             )
             .unwrap();
         // we asked for all the tasks, but we are only authorized to retrieve the doggo and catto tasks
@@ -2548,8 +2592,9 @@ mod tests {
         snapshot!(snapshot_bitmap(&tasks), @"[0,1,]");

         let query = Query::default();
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // we asked for all the tasks with all index authorized -> all tasks returned
         snapshot!(snapshot_bitmap(&tasks), @"[0,1,2,3,]");
     }
@@ -2580,15 +2625,22 @@ mod tests {

         let rtxn = index_scheduler.read_txn().unwrap();
         let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() };
-        let tasks =
-            index_scheduler.get_task_ids_from_authorized_indexes(&rtxn, &query, &None).unwrap();
+        let tasks = index_scheduler
+            .get_task_ids_from_authorized_indexes(&rtxn, &query, &AuthFilter::default())
+            .unwrap();
         // 0 is not returned because it was not canceled, 3 is not returned because it is the uid of the
         // taskCancelation itself
         snapshot!(snapshot_bitmap(&tasks), @"[1,2,]");

         let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() };
         let tasks = index_scheduler
-            .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_string()]))
+            .get_task_ids_from_authorized_indexes(
+                &rtxn,
+                &query,
+                &AuthFilter::with_allowed_indexes(
+                    vec![IndexUidPattern::new_unchecked("doggo")].into_iter().collect(),
+                ),
+            )
             .unwrap();
         // Return only 1 because the user is not authorized to see task 2
         snapshot!(snapshot_bitmap(&tasks), @"[1,]");
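All of these call sites now funnel authorization through an AuthFilter instead of an Option<Vec<String>> of index names. A minimal sketch of the filtering rule the scheduler applies, with a stand-in AuthFilter reduced to the two methods used above:

```rust
use std::collections::HashSet;

// Stand-in for meilisearch_auth::AuthFilter, reduced to what the scheduler needs.
struct AuthFilter {
    // None means every index is authorized (the `AuthFilter::default()` case).
    allowed: Option<HashSet<String>>,
}

impl AuthFilter {
    fn all_indexes_authorized(&self) -> bool {
        self.allowed.is_none()
    }
    fn is_index_authorized(&self, index: &str) -> bool {
        self.allowed.as_ref().map_or(true, |set| set.contains(index))
    }
}

// Discard tasks that belong to indexes the caller cannot see.
fn filter_tasks(tasks_by_index: &[(String, Vec<u32>)], filters: &AuthFilter) -> Vec<u32> {
    tasks_by_index
        .iter()
        .filter(|(index, _)| filters.is_index_authorized(index))
        .flat_map(|(_, ids)| ids.iter().copied())
        .collect()
}

fn main() {
    let tasks = vec![("catto".to_owned(), vec![0]), ("doggo".to_owned(), vec![1])];
    let only_doggo = AuthFilter { allowed: Some(["doggo".to_owned()].into_iter().collect()) };
    assert_eq!(filter_tasks(&tasks, &only_doggo), vec![1]);
    assert!(AuthFilter { allowed: None }.all_indexes_authorized());
}
```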
@@ -1,10 +1,16 @@
 [package]
 name = "json-depth-checker"
-version = "1.0.0"
-edition = "2021"
 description = "A library that indicates if a JSON must be flattened"
 publish = false

+version.workspace = true
+authors.workspace = true
+# description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 serde_json = "1.0"
@@ -1,7 +1,14 @@
 [package]
 name = "meili-snap"
-version = "1.0.0"
-edition = "2021"
 publish = false

+version.workspace = true
+authors.workspace = true
+description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 insta = { version = "^1.19.1", features = ["json", "redactions"] }
@@ -1,12 +1,20 @@
 [package]
 name = "meilisearch-auth"
-version = "1.0.0"
-edition = "2021"
 publish = false

+version.workspace = true
+authors.workspace = true
+description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 base64 = "0.13.1"
 enum-iterator = "1.1.3"
 hmac = "0.12.1"
+maplit = "1.0.2"
 meilisearch-types = { path = "../meilisearch-types" }
 rand = "0.8.5"
 roaring = { version = "0.10.0", features = ["serde"] }
@@ -7,9 +7,10 @@ use std::path::Path;
 use std::sync::Arc;

 use error::{AuthControllerError, Result};
+use maplit::hashset;
+use meilisearch_types::index_uid_pattern::IndexUidPattern;
 use meilisearch_types::keys::{Action, CreateApiKey, Key, PatchApiKey};
 use meilisearch_types::milli::update::Setting;
-use meilisearch_types::star_or::StarOr;
 use serde::{Deserialize, Serialize};
 pub use store::open_auth_store_env;
 use store::{generate_key_as_hexa, HeedAuthStore};
@@ -84,34 +85,13 @@ impl AuthController {
         uid: Uuid,
         search_rules: Option<SearchRules>,
     ) -> Result<AuthFilter> {
-        let mut filters = AuthFilter::default();
-        let key = self
-            .store
-            .get_api_key(uid)?
-            .ok_or_else(|| AuthControllerError::ApiKeyNotFound(uid.to_string()))?;
+        let key = self.get_key(uid)?;

-        if !key.indexes.iter().any(|i| i == &StarOr::Star) {
-            filters.search_rules = match search_rules {
-                // Intersect search_rules with parent key authorized indexes.
-                Some(search_rules) => SearchRules::Map(
-                    key.indexes
-                        .into_iter()
-                        .filter_map(|index| {
-                            search_rules.get_index_search_rules(&format!("{index}")).map(
-                                |index_search_rules| (index.to_string(), Some(index_search_rules)),
-                            )
-                        })
-                        .collect(),
-                ),
-                None => SearchRules::Set(key.indexes.into_iter().map(|x| x.to_string()).collect()),
-            };
-        } else if let Some(search_rules) = search_rules {
-            filters.search_rules = search_rules;
-        }
+        let key_authorized_indexes = SearchRules::Set(key.indexes.into_iter().collect());

-        filters.allow_index_creation = self.is_key_authorized(uid, Action::IndexesAdd, None)?;
+        let allow_index_creation = self.is_key_authorized(uid, Action::IndexesAdd, None)?;

-        Ok(filters)
+        Ok(AuthFilter { search_rules, key_authorized_indexes, allow_index_creation })
     }

     pub fn list_keys(&self) -> Result<Vec<Key>> {
@@ -150,9 +130,7 @@ impl AuthController {
             .get_expiration_date(uid, action, None)?
             .or(match index {
                 // else check if the key has access to the requested index.
-                Some(index) => {
-                    self.store.get_expiration_date(uid, action, Some(index.as_bytes()))?
-                }
+                Some(index) => self.store.get_expiration_date(uid, action, Some(index))?,
                 // or to any index if no index has been requested.
                 None => self.store.prefix_first_expiration_date(uid, action)?,
             }) {
@@ -178,13 +156,59 @@ impl AuthController {
 }

 pub struct AuthFilter {
-    pub search_rules: SearchRules,
-    pub allow_index_creation: bool,
+    search_rules: Option<SearchRules>,
+    key_authorized_indexes: SearchRules,
+    allow_index_creation: bool,
 }

 impl Default for AuthFilter {
     fn default() -> Self {
-        Self { search_rules: SearchRules::default(), allow_index_creation: true }
+        Self {
+            search_rules: None,
+            key_authorized_indexes: SearchRules::default(),
+            allow_index_creation: true,
+        }
+    }
+}
+
+impl AuthFilter {
+    #[inline]
+    pub fn allow_index_creation(&self) -> bool {
+        self.allow_index_creation
+    }
+
+    pub fn with_allowed_indexes(allowed_indexes: HashSet<IndexUidPattern>) -> Self {
+        Self {
+            search_rules: None,
+            key_authorized_indexes: SearchRules::Set(allowed_indexes),
+            allow_index_creation: false,
+        }
+    }
+
+    pub fn all_indexes_authorized(&self) -> bool {
+        self.key_authorized_indexes.all_indexes_authorized()
+            && self
+                .search_rules
+                .as_ref()
+                .map(|search_rules| search_rules.all_indexes_authorized())
+                .unwrap_or(true)
+    }
+
+    pub fn is_index_authorized(&self, index: &str) -> bool {
+        self.key_authorized_indexes.is_index_authorized(index)
+            && self
+                .search_rules
+                .as_ref()
+                .map(|search_rules| search_rules.is_index_authorized(index))
+                .unwrap_or(true)
+    }
+
+    pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
+        if !self.is_index_authorized(index) {
+            return None;
+        }
+        let search_rules = self.search_rules.as_ref().unwrap_or(&self.key_authorized_indexes);
+        search_rules.get_index_search_rules(index)
+    }
 }
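The refactored AuthFilter keeps the key's own authorized indexes separate from the optional tenant-token search rules, and an index counts as authorized only when both layers agree. A reduced sketch of that AND logic, using plain string sets in place of the real SearchRules type:

```rust
use std::collections::HashSet;

// Reduced model of the new AuthFilter: both layers must authorize an index.
struct AuthFilter {
    search_rules: Option<HashSet<String>>, // tenant-token rules, if any
    key_authorized_indexes: HashSet<String>, // what the API key allows
}

impl AuthFilter {
    fn is_index_authorized(&self, index: &str) -> bool {
        let key_ok = self.key_authorized_indexes.contains("*")
            || self.key_authorized_indexes.contains(index);
        let rules_ok = self
            .search_rules
            .as_ref()
            .map(|rules| rules.contains("*") || rules.contains(index))
            .unwrap_or(true); // no token rules: the key alone decides
        key_ok && rules_ok
    }
}

fn main() {
    let filter = AuthFilter {
        search_rules: Some(["doggos".to_owned()].into_iter().collect()),
        key_authorized_indexes: ["doggos".to_owned(), "cattos".to_owned()].into_iter().collect(),
    };
    assert!(filter.is_index_authorized("doggos"));
    // The key alone would allow cattos, but the token rules do not.
    assert!(!filter.is_index_authorized("cattos"));
}
```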
@@ -192,63 +216,61 @@ impl Default for AuthFilter {
 #[derive(Debug, Serialize, Deserialize, Clone)]
 #[serde(untagged)]
 pub enum SearchRules {
-    Set(HashSet<String>),
-    Map(HashMap<String, Option<IndexSearchRules>>),
+    Set(HashSet<IndexUidPattern>),
+    Map(HashMap<IndexUidPattern, Option<IndexSearchRules>>),
 }

 impl Default for SearchRules {
     fn default() -> Self {
-        Self::Set(Some("*".to_string()).into_iter().collect())
+        Self::Set(hashset! { IndexUidPattern::all() })
     }
 }

 impl SearchRules {
-    pub fn is_index_authorized(&self, index: &str) -> bool {
+    fn is_index_authorized(&self, index: &str) -> bool {
         match self {
-            Self::Set(set) => set.contains("*") || set.contains(index),
-            Self::Map(map) => map.contains_key("*") || map.contains_key(index),
+            Self::Set(set) => {
+                set.contains("*")
+                    || set.contains(index)
+                    || set.iter().any(|pattern| pattern.matches_str(index))
+            }
+            Self::Map(map) => {
+                map.contains_key("*")
+                    || map.contains_key(index)
+                    || map.keys().any(|pattern| pattern.matches_str(index))
+            }
         }
     }

-    pub fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
+    fn get_index_search_rules(&self, index: &str) -> Option<IndexSearchRules> {
         match self {
-            Self::Set(set) => {
-                if set.contains("*") || set.contains(index) {
+            Self::Set(_) => {
+                if self.is_index_authorized(index) {
                     Some(IndexSearchRules::default())
                 } else {
                     None
                 }
             }
             Self::Map(map) => {
-                map.get(index).or_else(|| map.get("*")).map(|isr| isr.clone().unwrap_or_default())
+                // We must take the most restrictive rule of this set of index uid patterns.
+                map.iter()
+                    .filter(|(pattern, _)| pattern.matches_str(index))
+                    .max_by_key(|(pattern, _)| (pattern.is_exact(), pattern.len()))
+                    .and_then(|(_, rule)| rule.clone())
             }
         }
     }

-    /// Return the list of indexes such that `self.is_index_authorized(index) == true`,
-    /// or `None` if all indexes satisfy this condition.
-    pub fn authorized_indexes(&self) -> Option<Vec<String>> {
+    fn all_indexes_authorized(&self) -> bool {
         match self {
-            SearchRules::Set(set) => {
-                if set.contains("*") {
-                    None
-                } else {
-                    Some(set.iter().cloned().collect())
-                }
-            }
-            SearchRules::Map(map) => {
-                if map.contains_key("*") {
-                    None
-                } else {
-                    Some(map.keys().cloned().collect())
-                }
-            }
+            SearchRules::Set(set) => set.contains("*"),
+            SearchRules::Map(map) => map.contains_key("*"),
         }
     }
 }

 impl IntoIterator for SearchRules {
-    type Item = (String, IndexSearchRules);
+    type Item = (IndexUidPattern, IndexSearchRules);
     type IntoIter = Box<dyn Iterator<Item = Self::Item>>;

     fn into_iter(self) -> Self::IntoIter {
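When the Map variant holds several patterns that all match one index, the new code picks the most restrictive rule: an exact uid beats a pattern, and a longer pattern beats a shorter one. A standalone sketch of that selection, with plain strings standing in for IndexUidPattern and the rule type:

```rust
use std::collections::HashMap;

// A pattern is "exact" when it does not end in a `*` wildcard.
fn is_exact(pattern: &str) -> bool {
    !pattern.ends_with('*')
}

// Match exactly, or by prefix when the pattern ends with `*`.
fn matches(pattern: &str, index: &str) -> bool {
    pattern.strip_suffix('*').map_or(pattern == index, |p| index.starts_with(p))
}

// Pick the rule of the best-matching pattern: exact beats prefix,
// and a longer prefix beats a shorter one, mirroring the
// `max_by_key(|(pattern, _)| (pattern.is_exact(), pattern.len()))` in the diff.
fn best_rule<'a>(map: &'a HashMap<String, &'a str>, index: &str) -> Option<&'a str> {
    map.iter()
        .filter(|(pattern, _)| matches(pattern, index))
        .max_by_key(|(pattern, _)| (is_exact(pattern), pattern.len()))
        .map(|(_, rule)| *rule)
}

fn main() {
    let mut map = HashMap::new();
    map.insert("*".to_owned(), "loose");
    map.insert("doggos-*".to_owned(), "tighter");
    map.insert("doggos-2023".to_owned(), "exact");
    assert_eq!(best_rule(&map, "doggos-2023"), Some("exact"));
    assert_eq!(best_rule(&map, "doggos-2024"), Some("tighter"));
    assert_eq!(best_rule(&map, "cattos"), Some("loose"));
}
```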
@@ -5,20 +5,21 @@ use std::convert::{TryFrom, TryInto};
 use std::fs::create_dir_all;
 use std::path::Path;
 use std::str;
+use std::str::FromStr;
 use std::sync::Arc;

 use hmac::{Hmac, Mac};
+use meilisearch_types::index_uid_pattern::IndexUidPattern;
 use meilisearch_types::keys::KeyId;
 use meilisearch_types::milli;
 use meilisearch_types::milli::heed::types::{ByteSlice, DecodeIgnore, SerdeJson};
 use meilisearch_types::milli::heed::{Database, Env, EnvOpenOptions, RwTxn};
-use meilisearch_types::star_or::StarOr;
 use sha2::Sha256;
 use time::OffsetDateTime;
 use uuid::fmt::Hyphenated;
 use uuid::Uuid;

-use super::error::Result;
+use super::error::{AuthControllerError, Result};
 use super::{Action, Key};

 const AUTH_STORE_SIZE: usize = 1_073_741_824; //1GiB
@@ -129,7 +130,7 @@ impl HeedAuthStore {
             }
         }

-        let no_index_restriction = key.indexes.contains(&StarOr::Star);
+        let no_index_restriction = key.indexes.iter().any(|p| p.matches_all());
         for action in actions {
             if no_index_restriction {
                 // If there is no index restriction we put None.
@@ -214,11 +215,28 @@ impl HeedAuthStore {
         &self,
         uid: Uuid,
         action: Action,
-        index: Option<&[u8]>,
+        index: Option<&str>,
     ) -> Result<Option<Option<OffsetDateTime>>> {
         let rtxn = self.env.read_txn()?;
-        let tuple = (&uid, &action, index);
-        Ok(self.action_keyid_index_expiration.get(&rtxn, &tuple)?)
+        let tuple = (&uid, &action, index.map(|s| s.as_bytes()));
+        match self.action_keyid_index_expiration.get(&rtxn, &tuple)? {
+            Some(expiration) => Ok(Some(expiration)),
+            None => {
+                let tuple = (&uid, &action, None);
+                for result in self.action_keyid_index_expiration.prefix_iter(&rtxn, &tuple)? {
+                    let ((_, _, index_uid_pattern), expiration) = result?;
+                    if let Some((pattern, index)) = index_uid_pattern.zip(index) {
+                        let index_uid_pattern = str::from_utf8(pattern)?;
+                        let pattern = IndexUidPattern::from_str(index_uid_pattern)
+                            .map_err(|e| AuthControllerError::Internal(Box::new(e)))?;
+                        if pattern.matches_str(index) {
+                            return Ok(Some(expiration));
+                        }
+                    }
+                }
+                Ok(None)
+            }
+        }
     }

     pub fn prefix_first_expiration_date(
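The store lookup now falls back to a prefix scan when no exact (key, action, index) entry exists, testing each stored pattern against the requested index. A sketch of that strategy with an in-memory map standing in for the heed database; the types and key layout here are illustrative, not the real store schema:

```rust
use std::collections::BTreeMap;

type Action = &'static str;

// Hypothetical stand-in for the heed `action_keyid_index_expiration` database:
// maps (key uid, action, optional index pattern) to an optional expiration.
type Store = BTreeMap<(u64, Action, Option<String>), Option<i64>>;

// Match exactly, or by prefix when the pattern ends with `*`.
fn matches(pattern: &str, index: &str) -> bool {
    pattern.strip_suffix('*').map_or(pattern == index, |p| index.starts_with(p))
}

fn get_expiration(store: &Store, uid: u64, action: Action, index: &str) -> Option<Option<i64>> {
    // Exact (uid, action, index) entry first.
    if let Some(exp) = store.get(&(uid, action, Some(index.to_owned()))) {
        return Some(*exp);
    }
    // Otherwise scan this key's entries and keep the first matching pattern,
    // mirroring the `prefix_iter` fallback in the diff.
    store
        .iter()
        .filter(|((k, a, _), _)| *k == uid && *a == action)
        .find(|((_, _, pat), _)| pat.as_deref().map_or(false, |p| matches(p, index)))
        .map(|(_, exp)| *exp)
}

fn main() {
    let mut store = Store::new();
    store.insert((1, "search", Some("doggos-*".to_owned())), None);
    assert_eq!(get_expiration(&store, 1, "search", "doggos-2023"), Some(None));
    assert_eq!(get_expiration(&store, 1, "search", "cattos"), None);
}
```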
@@ -1,15 +1,21 @@
 [package]
 name = "meilisearch-types"
-version = "1.0.0"
-authors = ["marin <postma.marin@protonmail.com>"]
-edition = "2021"
 publish = false

+version.workspace = true
+authors.workspace = true
+description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 actix-web = { version = "4.2.1", default-features = false }
 anyhow = "1.0.65"
 convert_case = "0.6.0"
 csv = "1.1.6"
-deserr = "0.3.0"
+deserr = "0.4.1"
 either = { version = "1.6.1", features = ["serde"] }
 enum-iterator = "1.1.3"
 file-store = { path = "../file-store" }
@@ -1,328 +0,0 @@
|
||||
/*!
|
||||
This module implements the error messages of deserialization errors.
|
||||
|
||||
We try to:
|
||||
1. Give a human-readable description of where the error originated.
|
||||
2. Use the correct terms depending on the format of the request (json/query param)
|
||||
3. Categorise the type of the error (e.g. missing field, wrong value type, unexpected error, etc.)
|
||||
*/
|
||||
use deserr::{ErrorKind, IntoValue, ValueKind, ValuePointerRef};
|
||||
|
||||
use super::{DeserrJsonError, DeserrQueryParamError};
|
||||
use crate::error::{Code, ErrorCode};
|
||||
|
||||
/// Return a description of the given location in a Json, preceded by the given article.
|
||||
/// e.g. `at .key1[8].key2`. If the location is the origin, the given article will not be
|
||||
/// included in the description.
|
||||
pub fn location_json_description(location: ValuePointerRef, article: &str) -> String {
|
||||
fn rec(location: ValuePointerRef) -> String {
|
||||
match location {
|
||||
ValuePointerRef::Origin => String::new(),
|
||||
ValuePointerRef::Key { key, prev } => rec(*prev) + "." + key,
|
||||
ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
|
||||
}
|
||||
}
|
||||
match location {
|
||||
ValuePointerRef::Origin => String::new(),
|
||||
_ => {
|
||||
format!("{article} `{}`", rec(location))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Return a description of the list of value kinds for a Json payload.
|
||||
fn value_kinds_description_json(kinds: &[ValueKind]) -> String {
|
||||
// Rank each value kind so that they can be sorted (and deduplicated)
|
||||
// Having a predictable order helps with pattern matching
|
||||
fn order(kind: &ValueKind) -> u8 {
|
||||
match kind {
|
||||
ValueKind::Null => 0,
|
||||
ValueKind::Boolean => 1,
|
||||
ValueKind::Integer => 2,
|
||||
ValueKind::NegativeInteger => 3,
|
||||
ValueKind::Float => 4,
|
||||
ValueKind::String => 5,
|
||||
ValueKind::Sequence => 6,
|
||||
ValueKind::Map => 7,
|
||||
}
|
||||
}
|
||||
// Return a description of a single value kind, preceded by an article
|
||||
fn single_description(kind: &ValueKind) -> &'static str {
|
||||
match kind {
|
||||
ValueKind::Null => "null",
|
||||
ValueKind::Boolean => "a boolean",
|
||||
ValueKind::Integer => "a positive integer",
|
||||
ValueKind::NegativeInteger => "a negative integer",
|
||||
ValueKind::Float => "a number",
|
||||
ValueKind::String => "a string",
|
||||
ValueKind::Sequence => "an array",
|
||||
ValueKind::Map => "an object",
|
||||
}
|
||||
}
|
||||
|
||||
fn description_rec(kinds: &[ValueKind], count_items: &mut usize, message: &mut String) {
|
||||
let (msg_part, rest): (_, &[ValueKind]) = match kinds {
|
||||
[] => (String::new(), &[]),
|
||||
[ValueKind::Integer | ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
|
||||
("a number".to_owned(), rest)
|
||||
}
|
||||
[ValueKind::Integer, ValueKind::NegativeInteger, ValueKind::Float, rest @ ..] => {
|
||||
("a number".to_owned(), rest)
|
||||
}
|
||||
[ValueKind::Integer, ValueKind::NegativeInteger, rest @ ..] => {
|
||||
("an integer".to_owned(), rest)
|
||||
}
|
||||
[a] => (single_description(a).to_owned(), &[]),
|
||||
[a, rest @ ..] => (single_description(a).to_owned(), rest),
|
||||
};
|
||||
|
||||
if rest.is_empty() {
|
||||
if *count_items == 0 {
|
||||
message.push_str(&msg_part);
|
||||
} else if *count_items == 1 {
|
||||
message.push_str(&format!(" or {msg_part}"));
|
||||
} else {
|
||||
message.push_str(&format!(", or {msg_part}"));
|
||||
}
|
||||
} else {
|
||||
if *count_items == 0 {
|
||||
message.push_str(&msg_part);
|
||||
} else {
|
||||
message.push_str(&format!(", {msg_part}"));
|
||||
}
|
||||
|
||||
*count_items += 1;
|
||||
description_rec(rest, count_items, message);
|
||||
}
|
||||
}
|
||||
|
||||
let mut kinds = kinds.to_owned();
|
||||
kinds.sort_by_key(order);
|
||||
kinds.dedup();
|
||||
|
||||
if kinds.is_empty() {
|
||||
// Should not happen ideally
|
||||
"a different value".to_owned()
|
||||
} else {
|
||||
let mut message = String::new();
|
||||
description_rec(kinds.as_slice(), &mut 0, &mut message);
|
||||
message
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the JSON string of the value preceded by a description of its kind
|
||||
fn value_description_with_kind_json(v: &serde_json::Value) -> String {
|
||||
match v.kind() {
|
||||
ValueKind::Null => "null".to_owned(),
|
||||
kind => {
|
||||
format!(
|
||||
"{}: `{}`",
|
||||
value_kinds_description_json(&[kind]),
|
||||
serde_json::to_string(v).unwrap()
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrJsonError<C> {
|
||||
fn error<V: IntoValue>(
|
||||
_self_: Option<Self>,
|
||||
error: deserr::ErrorKind<V>,
|
||||
location: ValuePointerRef,
|
||||
) -> Result<Self, Self> {
|
||||
let mut message = String::new();
|
||||
|
||||
message.push_str(&match error {
|
||||
ErrorKind::IncorrectValueKind { actual, accepted } => {
|
||||
let expected = value_kinds_description_json(accepted);
|
||||
let received = value_description_with_kind_json(&serde_json::Value::from(actual));
|
||||
|
||||
let location = location_json_description(location, " at");
|
||||
|
||||
format!("Invalid value type{location}: expected {expected}, but found {received}")
|
||||
}
|
||||
ErrorKind::MissingField { field } => {
|
||||
let location = location_json_description(location, " inside");
|
||||
format!("Missing field `{field}`{location}")
|
||||
}
|
||||
ErrorKind::UnknownKey { key, accepted } => {
|
||||
let location = location_json_description(location, " inside");
|
||||
format!(
|
||||
"Unknown field `{}`{location}: expected one of {}",
|
||||
key,
|
||||
accepted
|
||||
.iter()
|
||||
.map(|accepted| format!("`{}`", accepted))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
)
|
||||
}
|
||||
ErrorKind::UnknownValue { value, accepted } => {
|
||||
let location = location_json_description(location, " at");
|
||||
format!(
|
||||
"Unknown value `{}`{location}: expected one of {}",
|
||||
value,
|
||||
accepted
|
||||
.iter()
|
||||
.map(|accepted| format!("`{}`", accepted))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", "),
|
||||
)
|
||||
}
|
||||
ErrorKind::Unexpected { msg } => {
|
||||
let location = location_json_description(location, " at");
|
||||
format!("Invalid value{location}: {msg}")
|
||||
}
|
||||
});
|
||||
|
||||
Err(DeserrJsonError::new(message, C::default().error_code()))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn immutable_field_error(field: &str, accepted: &[&str], code: Code) -> DeserrJsonError {
|
||||
let msg = format!(
|
||||
"Immutable field `{field}`: expected one of {}",
|
||||
accepted
|
||||
.iter()
|
||||
.map(|accepted| format!("`{}`", accepted))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
);
|
||||
|
||||
DeserrJsonError::new(msg, code)
|
||||
}
|
||||
|
||||
/// Return a description of the given location in query parameters, preceded by the
|
||||
/// given article. e.g. `at key5[2]`. If the location is the origin, the given article
|
||||
/// will not be included in the description.
|
||||
pub fn location_query_param_description(location: ValuePointerRef, article: &str) -> String {
|
||||
fn rec(location: ValuePointerRef) -> String {
|
||||
match location {
|
||||
ValuePointerRef::Origin => String::new(),
|
||||
ValuePointerRef::Key { key, prev } => {
|
||||
if matches!(prev, ValuePointerRef::Origin) {
|
||||
key.to_owned()
|
||||
} else {
|
||||
rec(*prev) + "." + key
|
||||
}
|
||||
}
|
||||
ValuePointerRef::Index { index, prev } => format!("{}[{index}]", rec(*prev)),
|
||||
}
|
||||
}
|
||||
match location {
|
||||
ValuePointerRef::Origin => String::new(),
|
||||
_ => {
|
||||
format!("{article} `{}`", rec(location))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<C: Default + ErrorCode> deserr::DeserializeError for DeserrQueryParamError<C> {
|
||||
fn error<V: IntoValue>(
|
||||
_self_: Option<Self>,
|
||||
error: deserr::ErrorKind<V>,
|
||||
location: ValuePointerRef,
|
||||
) -> Result<Self, Self> {
|
||||
let mut message = String::new();
|
||||
|
||||
message.push_str(&match error {
|
||||
ErrorKind::IncorrectValueKind { actual, accepted } => {
|
||||
let expected = value_kinds_description_query_param(accepted);
|
||||
let received = value_description_with_kind_query_param(actual);
|
||||
|
||||
let location = location_query_param_description(location, " for parameter");
|
||||
|
||||
format!("Invalid value type{location}: expected {expected}, but found {received}")
|
||||
}
|
||||
ErrorKind::MissingField { field } => {
|
||||
let location = location_query_param_description(location, " inside");
|
||||
format!("Missing parameter `{field}`{location}")
|
||||
}
|
||||
ErrorKind::UnknownKey { key, accepted } => {
|
||||
let location = location_query_param_description(location, " inside");
|
||||
format!(
|
||||
"Unknown parameter `{}`{location}: expected one of {}",
|
||||
key,
|
||||
accepted
|
||||
.iter()
|
||||
.map(|accepted| format!("`{}`", accepted))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", ")
|
||||
)
|
||||
}
|
||||
ErrorKind::UnknownValue { value, accepted } => {
|
||||
let location = location_query_param_description(location, " for parameter");
|
||||
format!(
|
||||
"Unknown value `{}`{location}: expected one of {}",
|
||||
value,
|
||||
accepted
|
||||
.iter()
|
||||
.map(|accepted| format!("`{}`", accepted))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", "),
|
||||
)
|
||||
}
|
||||
ErrorKind::Unexpected { msg } => {
|
||||
let location = location_query_param_description(location, " in parameter");
|
||||
format!("Invalid value{location}: {msg}")
|
||||
}
|
||||
});
|
||||
|
||||
Err(DeserrQueryParamError::new(message, C::default().error_code()))
|
||||
}
|
||||
}

/// Return a description of the list of value kinds for query parameters
/// Since query parameters are always treated as strings, we always return
/// "a string" for now.
fn value_kinds_description_query_param(_accepted: &[ValueKind]) -> String {
    "a string".to_owned()
}

fn value_description_with_kind_query_param<V: IntoValue>(actual: deserr::Value<V>) -> String {
    match actual {
        deserr::Value::Null => "null".to_owned(),
        deserr::Value::Boolean(x) => format!("a boolean: `{x}`"),
        deserr::Value::Integer(x) => format!("an integer: `{x}`"),
        deserr::Value::NegativeInteger(x) => {
            format!("an integer: `{x}`")
        }
        deserr::Value::Float(x) => {
            format!("a number: `{x}`")
        }
        deserr::Value::String(x) => {
            format!("a string: `{x}`")
        }
        deserr::Value::Sequence(_) => "multiple values".to_owned(),
        deserr::Value::Map(_) => "multiple parameters".to_owned(),
    }
}

#[cfg(test)]
mod tests {
    use deserr::ValueKind;

    use crate::deserr::error_messages::value_kinds_description_json;

    #[test]
    fn test_value_kinds_description_json() {
        insta::assert_display_snapshot!(value_kinds_description_json(&[]), @"a different value");

        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean]), @"a boolean");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::NegativeInteger]), @"a negative integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer]), @"a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::String]), @"a string");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence]), @"an array");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Map]), @"an object");

        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Boolean]), @"a boolean or a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Integer]), @"null or a positive integer");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Sequence, ValueKind::NegativeInteger]), @"a negative integer or an array");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float]), @"a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger]), @"a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null or a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
        insta::assert_display_snapshot!(value_kinds_description_json(&[ValueKind::Null, ValueKind::Boolean, ValueKind::Integer, ValueKind::Float, ValueKind::NegativeInteger, ValueKind::Null]), @"null, a boolean, or a number");
    }
}

@@ -1,18 +1,19 @@
use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;
use std::ops::ControlFlow;

use deserr::{DeserializeError, MergeWithError, ValuePointerRef};
use deserr::errors::{JsonError, QueryParamError};
use deserr::{take_cf_content, DeserializeError, IntoValue, MergeWithError, ValuePointerRef};

use crate::error::deserr_codes::{self, *};
use crate::error::deserr_codes::*;
use crate::error::{
    unwrap_any, Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError,
    Code, DeserrParseBoolError, DeserrParseIntError, ErrorCode, InvalidTaskDateError,
    ParseOffsetDateTimeError,
};
use crate::index_uid::IndexUidFormatError;
use crate::tasks::{ParseTaskKindError, ParseTaskStatusError};

pub mod error_messages;
pub mod query_params;

/// Marker type for the Json format
@@ -20,8 +21,8 @@ pub struct DeserrJson;
/// Marker type for the Query Parameter format
pub struct DeserrQueryParam;

pub type DeserrJsonError<C = deserr_codes::BadRequest> = DeserrError<DeserrJson, C>;
pub type DeserrQueryParamError<C = deserr_codes::BadRequest> = DeserrError<DeserrQueryParam, C>;
pub type DeserrJsonError<C = BadRequest> = DeserrError<DeserrJson, C>;
pub type DeserrQueryParamError<C = BadRequest> = DeserrError<DeserrQueryParam, C>;

/// A request deserialization error.
///
@@ -37,6 +38,7 @@ impl<Format, C: Default + ErrorCode> DeserrError<Format, C> {
        Self { msg, code, _phantom: PhantomData }
    }
}

impl<Format, C: Default + ErrorCode> std::fmt::Debug for DeserrError<Format, C> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DeserrError").field("msg", &self.msg).field("code", &self.code).finish()
@@ -49,6 +51,16 @@ impl<Format, C: Default + ErrorCode> std::fmt::Display for DeserrError<Format, C
    }
}

impl<F, C: Default + ErrorCode> actix_web::ResponseError for DeserrError<F, C> {
    fn status_code(&self) -> actix_web::http::StatusCode {
        self.code.http()
    }

    fn error_response(&self) -> actix_web::HttpResponse<actix_web::body::BoxBody> {
        crate::error::ResponseError::from_msg(self.msg.to_string(), self.code).error_response()
    }
}

impl<Format, C: Default + ErrorCode> std::error::Error for DeserrError<Format, C> {}
impl<Format, C: Default + ErrorCode> ErrorCode for DeserrError<Format, C> {
    fn error_code(&self) -> Code {
@@ -64,8 +76,8 @@ impl<Format, C1: Default + ErrorCode, C2: Default + ErrorCode>
        _self_: Option<Self>,
        other: DeserrError<Format, C2>,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
        Err(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
    ) -> ControlFlow<Self, Self> {
        ControlFlow::Break(DeserrError { msg: other.msg, code: other.code, _phantom: PhantomData })
    }
}

@@ -74,17 +86,56 @@ impl<Format, C: Default + ErrorCode> MergeWithError<Infallible> for DeserrError<
        _self_: Option<Self>,
        _other: Infallible,
        _merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
    ) -> ControlFlow<Self, Self> {
        unreachable!()
    }
}

impl<C: Default + ErrorCode> DeserializeError for DeserrJsonError<C> {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: deserr::ErrorKind<V>,
        location: ValuePointerRef,
    ) -> ControlFlow<Self, Self> {
        ControlFlow::Break(DeserrJsonError::new(
            take_cf_content(JsonError::error(None, error, location)).to_string(),
            C::default().error_code(),
        ))
    }
}

impl<C: Default + ErrorCode> DeserializeError for DeserrQueryParamError<C> {
    fn error<V: IntoValue>(
        _self_: Option<Self>,
        error: deserr::ErrorKind<V>,
        location: ValuePointerRef,
    ) -> ControlFlow<Self, Self> {
        ControlFlow::Break(DeserrQueryParamError::new(
            take_cf_content(QueryParamError::error(None, error, location)).to_string(),
            C::default().error_code(),
        ))
    }
}
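
For context, the two impls above delegate message formatting to deserr's built-in `JsonError`/`QueryParamError` and keep only the Meilisearch error code. A hedged sketch of how this path is reached (the probe type is hypothetical; `serde_json::Value` feeds `deserr::deserialize` the same way the extractors later in this diff do):

use serde_json::json;

// Hypothetical probe type; any deserr_codes marker from this diff works.
#[derive(Debug, deserr::Deserr)]
#[deserr(error = DeserrJsonError)]
struct Probe {
    limit: usize,
}

// The failed deserialization goes through DeserrJsonError::error above,
// which formats the message with deserr's JsonError and attaches the Code.
let res = deserr::deserialize::<Probe, _, DeserrJsonError>(json!({ "limit": [] }));
assert!(res.is_err());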

pub fn immutable_field_error(field: &str, accepted: &[&str], code: Code) -> DeserrJsonError {
    let msg = format!(
        "Immutable field `{field}`: expected one of {}",
        accepted
            .iter()
            .map(|accepted| format!("`{}`", accepted))
            .collect::<Vec<String>>()
            .join(", ")
    );

    DeserrJsonError::new(msg, code)
}

// Implement a convenience function to build a `missing_field` error
macro_rules! make_missing_field_convenience_builder {
    ($err_code:ident, $fn_name:ident) => {
        impl DeserrJsonError<$err_code> {
            pub fn $fn_name(field: &str, location: ValuePointerRef) -> Self {
                let x = unwrap_any(Self::error::<Infallible>(
                let x = deserr::take_cf_content(Self::error::<Infallible>(
                    None,
                    deserr::ErrorKind::MissingField { field },
                    location,
@@ -112,7 +163,7 @@ macro_rules! merge_with_error_impl_take_error_message {
        _self_: Option<Self>,
        other: $err_type,
        merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
    ) -> ControlFlow<Self, Self> {
        DeserrError::<Format, C>::error::<Infallible>(
            None,
            deserr::ErrorKind::Unexpected { msg: other.to_string() },
@@ -15,10 +15,9 @@ use std::convert::Infallible;
use std::ops::Deref;
use std::str::FromStr;

use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
use deserr::{DeserializeError, Deserr, MergeWithError, ValueKind};

use super::{DeserrParseBoolError, DeserrParseIntError};
use crate::error::unwrap_any;
use crate::index_uid::IndexUid;
use crate::tasks::{Kind, Status};

@@ -38,7 +37,7 @@ impl<T> Deref for Param<T> {
    }
}

impl<T, E> DeserializeFromValue<E> for Param<T>
impl<T, E> Deserr<E> for Param<T>
where
    E: DeserializeError + MergeWithError<T::Err>,
    T: FromQueryParameter,
@@ -50,9 +49,9 @@ where
        match value {
            deserr::Value::String(s) => match T::from_query_param(&s) {
                Ok(x) => Ok(Param(x)),
                Err(e) => Err(unwrap_any(E::merge(None, e, location))),
                Err(e) => Err(deserr::take_cf_content(E::merge(None, e, location))),
            },
            _ => Err(unwrap_any(E::error(
            _ => Err(deserr::take_cf_content(E::error(
                None,
                deserr::ErrorKind::IncorrectValueKind {
                    actual: value,
@@ -127,7 +127,7 @@ macro_rules! make_error_codes {
    }
    impl Code {
        /// return the HTTP status code associated with the `Code`
        fn http(&self) -> StatusCode {
        pub fn http(&self) -> StatusCode {
            match self {
                $(
                    Code::$code_ident => StatusCode::$status
@@ -381,14 +381,6 @@ impl ErrorCode for io::Error {
    }
}

/// Unwrap a result, either its Ok or Err value.
pub fn unwrap_any<T>(any: Result<T, T>) -> T {
    match any {
        Ok(any) => any,
        Err(any) => any,
    }
}
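
The removed helper is superseded by `deserr::take_cf_content` (imported earlier in this diff), which plays the same role for deserr 0.4's `ControlFlow`-based API. A sketch of the equivalence; the 0.4 body shown here is an assumption about the library, not copied from it:

use std::ops::ControlFlow;

// deserr 0.3: fallible paths carried Result<T, T>, collapsed by unwrap_any.
// deserr 0.4: they carry ControlFlow<T, T>; take_cf_content extracts the
// payload whether the deserialization decided to continue or to break.
fn take_cf_content_sketch<T>(cf: ControlFlow<T, T>) -> T {
    match cf {
        ControlFlow::Continue(t) | ControlFlow::Break(t) => t,
    }
}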

/// Deserialization when `deserr` cannot parse an API key date.
#[derive(Debug)]
pub struct ParseOffsetDateTimeError(pub String);

@@ -2,14 +2,14 @@ use std::error::Error;
use std::fmt;
use std::str::FromStr;

use deserr::DeserializeFromValue;
use deserr::Deserr;

use crate::error::{Code, ErrorCode};

/// An index uid is composed of only ascii alphanumeric characters, - and _, between 1 and 400
/// bytes long
#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(from(String) = IndexUid::try_from -> IndexUidFormatError)]
#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
#[deserr(try_from(String) = IndexUid::try_from -> IndexUidFormatError)]
pub struct IndexUid(String);

impl IndexUid {
124
meilisearch-types/src/index_uid_pattern.rs
Normal file
@@ -0,0 +1,124 @@
use std::borrow::Borrow;
use std::error::Error;
use std::fmt;
use std::ops::Deref;
use std::str::FromStr;

use deserr::Deserr;
use serde::{Deserialize, Serialize};

use crate::error::{Code, ErrorCode};
use crate::index_uid::{IndexUid, IndexUidFormatError};

/// An index uid pattern is composed of only ascii alphanumeric characters, - and _, between 1 and 400
/// bytes long and optionally ending with a *.
#[derive(Serialize, Deserialize, Deserr, Debug, Clone, PartialEq, Eq, Hash)]
#[deserr(try_from(&String) = FromStr::from_str -> IndexUidPatternFormatError)]
pub struct IndexUidPattern(String);

impl IndexUidPattern {
    pub fn new_unchecked(s: impl AsRef<str>) -> Self {
        Self(s.as_ref().to_string())
    }

    /// Matches any index name.
    pub fn all() -> Self {
        IndexUidPattern::from_str("*").unwrap()
    }

    /// Returns `true` if it matches any index.
    pub fn matches_all(&self) -> bool {
        self.0 == "*"
    }

    /// Returns `true` if the pattern matches a specific index name.
    pub fn is_exact(&self) -> bool {
        !self.0.ends_with('*')
    }

    /// Returns whether this index uid matches this index uid pattern.
    pub fn matches(&self, uid: &IndexUid) -> bool {
        self.matches_str(uid.as_str())
    }

    /// Returns whether this string matches this index uid pattern.
    pub fn matches_str(&self, uid: &str) -> bool {
        match self.0.strip_suffix('*') {
            Some(prefix) => uid.starts_with(prefix),
            None => self.0 == uid,
        }
    }
}
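
A usage sketch (hypothetical, not part of the new file) of the matching rules above:

use std::str::FromStr;

// "movies-*" matches any uid starting with "movies-"; an exact pattern
// matches only itself; "*" matches everything.
let pattern = IndexUidPattern::from_str("movies-*").unwrap();
assert!(pattern.matches_str("movies-2023"));
assert!(!pattern.matches_str("books"));
assert!(!pattern.is_exact());
assert!(IndexUidPattern::all().matches_all());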

impl Deref for IndexUidPattern {
    type Target = str;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl Borrow<str> for IndexUidPattern {
    fn borrow(&self) -> &str {
        &self.0
    }
}

impl TryFrom<String> for IndexUidPattern {
    type Error = IndexUidPatternFormatError;

    fn try_from(uid: String) -> Result<Self, Self::Error> {
        let result = match uid.strip_suffix('*') {
            Some("") => Ok(IndexUidPattern(uid)),
            Some(prefix) => IndexUid::from_str(prefix).map(|_| IndexUidPattern(uid)),
            None => IndexUid::try_from(uid).map(IndexUid::into_inner).map(IndexUidPattern),
        };

        match result {
            Ok(index_uid_pattern) => Ok(index_uid_pattern),
            Err(IndexUidFormatError { invalid_uid }) => {
                Err(IndexUidPatternFormatError { invalid_uid })
            }
        }
    }
}
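
And a sketch of what the conversion accepts and rejects: a bare star is always valid, a starred pattern is valid only if the prefix is itself a valid `IndexUid`, and the star is only recognized as a suffix:

// Valid: "*" (the Some("") arm) and "movies-*" (prefix checked as an IndexUid).
assert!(IndexUidPattern::try_from("*".to_string()).is_ok());
assert!(IndexUidPattern::try_from("movies-*".to_string()).is_ok());
// Invalid: "mov*es" falls through to IndexUid::try_from, which rejects `*`.
assert!(IndexUidPattern::try_from("mov*es".to_string()).is_err());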

impl FromStr for IndexUidPattern {
    type Err = IndexUidPatternFormatError;

    fn from_str(uid: &str) -> Result<IndexUidPattern, IndexUidPatternFormatError> {
        uid.to_string().try_into()
    }
}

impl From<IndexUidPattern> for String {
    fn from(IndexUidPattern(uid): IndexUidPattern) -> Self {
        uid
    }
}

#[derive(Debug)]
pub struct IndexUidPatternFormatError {
    pub invalid_uid: String,
}

impl fmt::Display for IndexUidPatternFormatError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "`{}` is not a valid index uid pattern. Index uid patterns \
            can be an integer or a string containing only alphanumeric \
            characters, hyphens (-), underscores (_), and \
            optionally end with a star (*).",
            self.invalid_uid,
        )
    }
}

impl Error for IndexUidPatternFormatError {}

impl ErrorCode for IndexUidPatternFormatError {
    fn error_code(&self) -> Code {
        Code::InvalidIndexUid
    }
}

@@ -2,7 +2,7 @@ use std::convert::Infallible;
use std::hash::Hash;
use std::str::FromStr;

use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
use deserr::{DeserializeError, Deserr, MergeWithError, ValuePointerRef};
use enum_iterator::Sequence;
use milli::update::Setting;
use serde::{Deserialize, Serialize};
@@ -11,31 +11,44 @@ use time::macros::{format_description, time};
use time::{Date, OffsetDateTime, PrimitiveDateTime};
use uuid::Uuid;

use crate::deserr::error_messages::immutable_field_error;
use crate::deserr::DeserrJsonError;
use crate::deserr::{immutable_field_error, DeserrError, DeserrJsonError};
use crate::error::deserr_codes::*;
use crate::error::{unwrap_any, Code, ParseOffsetDateTimeError};
use crate::index_uid::IndexUid;
use crate::star_or::StarOr;
use crate::error::{Code, ErrorCode, ParseOffsetDateTimeError};
use crate::index_uid_pattern::{IndexUidPattern, IndexUidPatternFormatError};

pub type KeyId = Uuid;

#[derive(Debug, DeserializeFromValue)]
impl<C: Default + ErrorCode> MergeWithError<IndexUidPatternFormatError> for DeserrJsonError<C> {
    fn merge(
        _self_: Option<Self>,
        other: IndexUidPatternFormatError,
        merge_location: deserr::ValuePointerRef,
    ) -> std::ops::ControlFlow<Self, Self> {
        DeserrError::error::<Infallible>(
            None,
            deserr::ErrorKind::Unexpected { msg: other.to_string() },
            merge_location,
        )
    }
}

#[derive(Debug, Deserr)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct CreateApiKey {
    #[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
    pub description: Option<String>,
    #[deserr(default, error = DeserrJsonError<InvalidApiKeyName>)]
    pub name: Option<String>,
    #[deserr(default = Uuid::new_v4(), error = DeserrJsonError<InvalidApiKeyUid>, from(&String) = Uuid::from_str -> uuid::Error)]
    #[deserr(default = Uuid::new_v4(), error = DeserrJsonError<InvalidApiKeyUid>, try_from(&String) = Uuid::from_str -> uuid::Error)]
    pub uid: KeyId,
    #[deserr(error = DeserrJsonError<InvalidApiKeyActions>, missing_field_error = DeserrJsonError::missing_api_key_actions)]
    pub actions: Vec<Action>,
    #[deserr(error = DeserrJsonError<InvalidApiKeyIndexes>, missing_field_error = DeserrJsonError::missing_api_key_indexes)]
    pub indexes: Vec<StarOr<IndexUid>>,
    #[deserr(error = DeserrJsonError<InvalidApiKeyExpiresAt>, from(Option<String>) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)]
    pub indexes: Vec<IndexUidPattern>,
    #[deserr(error = DeserrJsonError<InvalidApiKeyExpiresAt>, try_from(Option<String>) = parse_expiration_date -> ParseOffsetDateTimeError, missing_field_error = DeserrJsonError::missing_api_key_expires_at)]
    pub expires_at: Option<OffsetDateTime>,
}
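
For reference, a hedged sketch of a JSON body that deserializes into `CreateApiKey` with the new `indexes` field, assuming the same `deserr::deserialize` entry point used by the `AwebJson` extractor later in this diff (the concrete values are hypothetical):

use serde_json::json;

// `indexes` now accepts uid patterns such as "products-*" instead of
// only exact uids or a bare "*".
let body = json!({
    "name": "products key",
    "actions": ["search"],
    "indexes": ["products-*"],
    "expiresAt": null,
});
let key = deserr::deserialize::<CreateApiKey, _, DeserrJsonError>(body);
assert!(key.is_ok());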

impl CreateApiKey {
    pub fn to_key(self) -> Key {
        let CreateApiKey { description, name, uid, actions, indexes, expires_at } = self;
@@ -65,7 +78,7 @@ fn deny_immutable_fields_api_key(
        "expiresAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyExpiresAt),
        "createdAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyCreatedAt),
        "updatedAt" => immutable_field_error(field, accepted, Code::ImmutableApiKeyUpdatedAt),
        _ => unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
        _ => deserr::take_cf_content(DeserrJsonError::<BadRequest>::error::<Infallible>(
            None,
            deserr::ErrorKind::UnknownKey { key: field, accepted },
            location,
@@ -73,7 +86,7 @@
    }
}

#[derive(Debug, DeserializeFromValue)]
#[derive(Debug, Deserr)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_api_key)]
pub struct PatchApiKey {
    #[deserr(default, error = DeserrJsonError<InvalidApiKeyDescription>)]
@@ -90,7 +103,7 @@ pub struct Key {
    pub name: Option<String>,
    pub uid: KeyId,
    pub actions: Vec<Action>,
    pub indexes: Vec<StarOr<IndexUid>>,
    pub indexes: Vec<IndexUidPattern>,
    #[serde(with = "time::serde::rfc3339::option")]
    pub expires_at: Option<OffsetDateTime>,
    #[serde(with = "time::serde::rfc3339")]
@@ -108,7 +121,7 @@ impl Key {
            description: Some("Use it for anything that is not a search operation. Caution! Do not expose it on a public frontend".to_string()),
            uid,
            actions: vec![Action::All],
            indexes: vec![StarOr::Star],
            indexes: vec![IndexUidPattern::all()],
            expires_at: None,
            created_at: now,
            updated_at: now,
@@ -123,7 +136,7 @@ impl Key {
            description: Some("Use it to search from the frontend".to_string()),
            uid,
            actions: vec![Action::Search],
            indexes: vec![StarOr::Star],
            indexes: vec![IndexUidPattern::all()],
            expires_at: None,
            created_at: now,
            updated_at: now,
@@ -168,9 +181,7 @@ fn parse_expiration_date(
    }
}

#[derive(
    Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, DeserializeFromValue,
)]
#[derive(Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, Deserr)]
#[repr(u8)]
pub enum Action {
    #[serde(rename = "*")]
@@ -3,6 +3,7 @@ pub mod deserr;
pub mod document_formats;
pub mod error;
pub mod index_uid;
pub mod index_uid_pattern;
pub mod keys;
pub mod settings;
pub mod star_or;
@@ -3,9 +3,10 @@ use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;
use std::num::NonZeroUsize;
use std::ops::ControlFlow;
use std::str::FromStr;

use deserr::{DeserializeError, DeserializeFromValue, ErrorKind, MergeWithError, ValuePointerRef};
use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
use milli::update::Setting;
use milli::{Criterion, CriterionError, Index, DEFAULT_VALUES_PER_FACET};
@@ -13,7 +14,6 @@ use serde::{Deserialize, Serialize, Serializer};

use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::error::unwrap_any;

/// The maximum number of results that the engine
/// will be able to return in one search call.
@@ -41,7 +41,7 @@ pub struct Checked;
#[derive(Clone, Default, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct Unchecked;

impl<E> DeserializeFromValue<E> for Unchecked
impl<E> Deserr<E> for Unchecked
where
    E: DeserializeError,
{
@@ -59,13 +59,13 @@ fn validate_min_word_size_for_typo_setting<E: DeserializeError>(
) -> Result<MinWordSizeTyposSetting, E> {
    if let (Setting::Set(one), Setting::Set(two)) = (s.one_typo, s.two_typos) {
        if one > two {
            return Err(unwrap_any(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater or equals to `oneTypo` but found `oneTypo: {one}` and twoTypos: {two}`.") }, location)));
            return Err(deserr::take_cf_content(E::error::<Infallible>(None, ErrorKind::Unexpected { msg: format!("`minWordSizeForTypos` setting is invalid. `oneTypo` and `twoTypos` fields should be between `0` and `255`, and `twoTypos` should be greater or equals to `oneTypo` but found `oneTypo: {one}` and twoTypos: {two}`.") }, location)));
        }
    }
    Ok(s)
}
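
A sketch of a payload that trips this check (hypothetical values; the error type mirrors the `validate =` attribute on the struct below):

use serde_json::json;

// oneTypo > twoTypos is rejected by validate_min_word_size_for_typo_setting.
let res = deserr::deserialize::<MinWordSizeTyposSetting, _, DeserrJsonError<InvalidSettingsTypoTolerance>>(
    json!({ "oneTypo": 9, "twoTypos": 2 }),
);
assert!(res.is_err());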

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, validate = validate_min_word_size_for_typo_setting -> DeserrJsonError<InvalidSettingsTypoTolerance>)]
pub struct MinWordSizeTyposSetting {
@@ -77,7 +77,7 @@ pub struct MinWordSizeTyposSetting {
    pub two_typos: Setting<u8>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrJsonError<InvalidSettingsTypoTolerance>>)]
pub struct TypoSettings {
@@ -95,7 +95,7 @@ pub struct TypoSettings {
    pub disable_on_attributes: Setting<BTreeSet<String>>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct FacetingSettings {
@@ -104,7 +104,7 @@ pub struct FacetingSettings {
    pub max_values_per_facet: Setting<usize>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(rename_all = camelCase, deny_unknown_fields)]
pub struct PaginationSettings {
@@ -118,7 +118,7 @@ impl MergeWithError<milli::CriterionError> for DeserrJsonError<InvalidSettingsRa
        _self_: Option<Self>,
        other: milli::CriterionError,
        merge_location: ValuePointerRef,
    ) -> Result<Self, Self> {
    ) -> ControlFlow<Self, Self> {
        Self::error::<Infallible>(
            None,
            ErrorKind::Unexpected { msg: other.to_string() },
@@ -130,7 +130,7 @@ impl MergeWithError<milli::CriterionError> for DeserrJsonError<InvalidSettingsRa
/// Holds all the settings for an index. `T` can either be `Checked` if they represent settings
/// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the latter case, a
/// call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, DeserializeFromValue)]
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr)]
#[serde(
    deny_unknown_fields,
    rename_all = "camelCase",
@@ -509,8 +509,8 @@ pub fn settings(
    })
}

#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
#[deserr(from(&String) = FromStr::from_str -> CriterionError)]
#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
#[deserr(try_from(&String) = FromStr::from_str -> CriterionError)]
pub enum RankingRuleView {
    /// Sorted by decreasing number of matched query terms.
    /// Query words at the front of an attribute are considered better than if they were at the back.
@@ -1,13 +1,13 @@
use std::fmt;
use std::marker::PhantomData;
use std::ops::ControlFlow;
use std::str::FromStr;

use deserr::{DeserializeError, DeserializeFromValue, MergeWithError, ValueKind};
use deserr::{DeserializeError, Deserr, MergeWithError, ValueKind};
use serde::de::Visitor;
use serde::{Deserialize, Deserializer, Serialize, Serializer};

use crate::deserr::query_params::FromQueryParameter;
use crate::error::unwrap_any;

/// A type that tries to match either a star (*) or
/// any other thing that implements `FromStr`.
@@ -111,7 +111,7 @@ where
    }
}

impl<T, E> DeserializeFromValue<E> for StarOr<T>
impl<T, E> Deserr<E> for StarOr<T>
where
    T: FromStr,
    E: DeserializeError + MergeWithError<T::Err>,
@@ -127,11 +127,11 @@ where
            } else {
                match T::from_str(&v) {
                    Ok(parsed) => Ok(StarOr::Other(parsed)),
                    Err(e) => Err(unwrap_any(E::merge(None, e, location))),
                    Err(e) => Err(deserr::take_cf_content(E::merge(None, e, location))),
                }
            }
        }
        _ => Err(unwrap_any(E::error::<V>(
        _ => Err(deserr::take_cf_content(E::error::<V>(
            None,
            deserr::ErrorKind::IncorrectValueKind {
                actual: value,
@@ -191,7 +191,7 @@ where
    }
}

impl<T, E> DeserializeFromValue<E> for OptionStarOr<T>
impl<T, E> Deserr<E> for OptionStarOr<T>
where
    E: DeserializeError + MergeWithError<T::Err>,
    T: FromQueryParameter,
@@ -205,10 +205,10 @@ where
            "*" => Ok(OptionStarOr::Star),
            s => match T::from_query_param(s) {
                Ok(x) => Ok(OptionStarOr::Other(x)),
                Err(e) => Err(unwrap_any(E::merge(None, e, location))),
                Err(e) => Err(deserr::take_cf_content(E::merge(None, e, location))),
            },
        },
        _ => Err(unwrap_any(E::error::<V>(
        _ => Err(deserr::take_cf_content(E::error::<V>(
            None,
            deserr::ErrorKind::IncorrectValueKind {
                actual: value,
@@ -271,7 +271,7 @@ impl<T> OptionStarOrList<T> {
    }
}

impl<T, E> DeserializeFromValue<E> for OptionStarOrList<T>
impl<T, E> Deserr<E> for OptionStarOrList<T>
where
    E: DeserializeError + MergeWithError<T::Err>,
    T: FromQueryParameter,
@@ -299,7 +299,10 @@ where
                Err(e) => {
                    let location =
                        if len_cs > 1 { location.push_index(i) } else { location };
                    error = Some(E::merge(error, e, location)?);
                    error = match E::merge(error, e, location) {
                        ControlFlow::Continue(e) => Some(e),
                        ControlFlow::Break(e) => return Err(e),
                    };
                }
            }
        }
@@ -314,7 +317,7 @@ where
            Ok(OptionStarOrList::List(els))
        }
    }
    _ => Err(unwrap_any(E::error::<V>(
    _ => Err(deserr::take_cf_content(E::error::<V>(
        None,
        deserr::ErrorKind::IncorrectValueKind {
            actual: value,

@@ -1,10 +1,16 @@
[package]
authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
description = "Meilisearch HTTP server"
edition = "2021"
license = "MIT"
name = "meilisearch"
version = "1.0.0"
publish = false

version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
edition.workspace = true
license.workspace = true

default-run = "meilisearch"

[dependencies]
actix-cors = "0.6.3"
@@ -19,7 +25,7 @@ byte-unit = { version = "4.0.14", default-features = false, features = ["std", "
bytes = "1.2.1"
clap = { version = "4.0.9", features = ["derive", "env"] }
crossbeam-channel = "0.5.6"
deserr = "0.3.0"
deserr = "0.4.1"
dump = { path = "../dump" }
either = "1.8.0"
env_logger = "0.9.1"
@@ -90,7 +96,7 @@ yaup = "0.2.1"

[build-dependencies]
anyhow = { version = "1.0.65", optional = true }
cargo_toml = { version = "0.13.0", optional = true }
cargo_toml = { version = "0.14.0", optional = true }
hex = { version = "0.4.3", optional = true }
reqwest = { version = "0.11.12", features = ["blocking", "rustls-tls"], default-features = false, optional = true }
sha-1 = { version = "0.10.0", optional = true }
@@ -9,7 +9,7 @@ use actix_web::HttpRequest;
use byte_unit::Byte;
use http::header::CONTENT_TYPE;
use index_scheduler::IndexScheduler;
use meilisearch_auth::{AuthController, SearchRules};
use meilisearch_auth::{AuthController, AuthFilter};
use meilisearch_types::InstanceUid;
use once_cell::sync::Lazy;
use regex::Regex;
@@ -407,7 +407,7 @@ impl Segment {
    auth_controller: AuthController,
) {
    if let Ok(stats) =
        create_all_stats(index_scheduler.into(), auth_controller, &SearchRules::default())
        create_all_stats(index_scheduler.into(), auth_controller, &AuthFilter::default())
    {
        // Replace the version number with the prototype name if any.
        let version = if let Some(prototype) = crate::prototype_name() {
@@ -435,7 +435,7 @@ impl Segment {
    let post_search = std::mem::take(&mut self.post_search_aggregator)
        .into_event(&self.user, "Documents Searched POST");
    let post_multi_search = std::mem::take(&mut self.post_multi_search_aggregator)
        .into_event(&self.user, "Documents Searched By Array of Queries POST");
        .into_event(&self.user, "Documents Searched by Multi-Search POST");
    let add_documents = std::mem::take(&mut self.add_documents_aggregator)
        .into_event(&self.user, "Documents Added");
    let delete_documents = std::mem::take(&mut self.delete_documents_aggregator)
@@ -136,6 +136,13 @@ pub mod policies {

    use crate::extractors::authentication::Policy;

    enum TenantTokenOutcome {
        NotATenantToken,
        Invalid,
        Expired,
        Valid(Uuid, SearchRules),
    }

    fn tenant_token_validation() -> Validation {
        let mut validation = Validation::default();
        validation.validate_exp = false;
@@ -164,29 +171,42 @@ pub mod policies {
    pub struct ActionPolicy<const A: u8>;

    impl<const A: u8> Policy for ActionPolicy<A> {
        /// Attempts to grant authentication from a bearer token (that can be a tenant token or an API key), the requested Action,
        /// and a list of requested indexes.
        ///
        /// If the bearer token is not allowed for the specified indexes and action, returns `None`.
        /// Otherwise, returns an object containing the generated permissions: the search filters to add to a search, and the list of allowed indexes
        /// (that may contain more indexes than requested).
        fn authenticate(
            auth: AuthController,
            token: &str,
            index: Option<&str>,
        ) -> Option<AuthFilter> {
            // authenticate if token is the master key.
            // master key can only have access to keys routes.
            // if master key is None only keys routes are inaccessible.
            // Without a master key, all routes are accessible except the key-related routes.
            if auth.get_master_key().map_or_else(|| !is_keys_action(A), |mk| mk == token) {
                return Some(AuthFilter::default());
            }

            // Tenant token
            if let Some(filters) = ActionPolicy::<A>::authenticate_tenant_token(&auth, token, index)
            {
                return Some(filters);
            } else if let Some(action) = Action::from_repr(A) {
                // API key
                if let Ok(Some(uid)) = auth.get_optional_uid_from_encoded_key(token.as_bytes()) {
                    if let Ok(true) = auth.is_key_authorized(uid, action, index) {
                        return auth.get_key_filters(uid, None).ok();
            let (key_uuid, search_rules) =
                match ActionPolicy::<A>::authenticate_tenant_token(&auth, token) {
                    TenantTokenOutcome::Valid(key_uuid, search_rules) => {
                        (key_uuid, Some(search_rules))
                    }
                }
            }
                    TenantTokenOutcome::Expired => return None,
                    TenantTokenOutcome::Invalid => return None,
                    TenantTokenOutcome::NotATenantToken => {
                        (auth.get_optional_uid_from_encoded_key(token.as_bytes()).ok()??, None)
                    }
                };

            // check that the indexes are allowed
            let action = Action::from_repr(A)?;
            let auth_filter = auth.get_key_filters(key_uuid, search_rules).ok()?;
            if auth.is_key_authorized(key_uuid, action, index).unwrap_or(false)
                && index.map(|index| auth_filter.is_index_authorized(index)).unwrap_or(true)
            {
                return Some(auth_filter);
            }

            None
@@ -194,46 +214,43 @@ pub mod policies {
    }

    impl<const A: u8> ActionPolicy<A> {
        fn authenticate_tenant_token(
            auth: &AuthController,
            token: &str,
            index: Option<&str>,
        ) -> Option<AuthFilter> {
        fn authenticate_tenant_token(auth: &AuthController, token: &str) -> TenantTokenOutcome {
            // Only search action can be accessed by a tenant token.
            if A != actions::SEARCH {
                return None;
                return TenantTokenOutcome::NotATenantToken;
            }

            let uid = extract_key_id(token)?;
            // check if parent key is authorized to do the action.
            if auth.is_key_authorized(uid, Action::Search, index).ok()? {
                // Check if tenant token is valid.
                let key = auth.generate_key(uid)?;
                let data = decode::<Claims>(
                    token,
                    &DecodingKey::from_secret(key.as_bytes()),
                    &tenant_token_validation(),
                )
                .ok()?;
            let uid = if let Some(uid) = extract_key_id(token) {
                uid
            } else {
                return TenantTokenOutcome::NotATenantToken;
            };

                // Check index access if an index restriction is provided.
                if let Some(index) = index {
                    if !data.claims.search_rules.is_index_authorized(index) {
                        return None;
                    }
            // Check if tenant token is valid.
            let key = if let Some(key) = auth.generate_key(uid) {
                key
            } else {
                return TenantTokenOutcome::Invalid;
            };

            let data = if let Ok(data) = decode::<Claims>(
                token,
                &DecodingKey::from_secret(key.as_bytes()),
                &tenant_token_validation(),
            ) {
                data
            } else {
                return TenantTokenOutcome::Invalid;
            };

            // Check if token is expired.
            if let Some(exp) = data.claims.exp {
                if OffsetDateTime::now_utc().unix_timestamp() > exp {
                    return TenantTokenOutcome::Expired;
                }

                // Check if token is expired.
                if let Some(exp) = data.claims.exp {
                    if OffsetDateTime::now_utc().unix_timestamp() > exp {
                        return None;
                    }
                }

                return auth.get_key_filters(uid, Some(data.claims.search_rules)).ok();
            }

            None
            TenantTokenOutcome::Valid(uid, data.claims.search_rules)
        }
    }
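
For orientation, a sketch of the shape of the `Claims` payload decoded above (field names follow Meilisearch's tenant-token documentation; the exact definition lives outside this diff):

// Hypothetical mirror of the decoded tenant-token claims:
// the code above reads `claims.exp` and `claims.search_rules`.
#[derive(serde::Deserialize)]
#[serde(rename_all = "camelCase")]
struct Claims {
    search_rules: SearchRules,
    api_key_uid: Uuid,
    exp: Option<i64>,
}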

@@ -1,78 +0,0 @@
use std::fmt::Debug;
use std::future::Future;
use std::marker::PhantomData;
use std::pin::Pin;
use std::task::{Context, Poll};

use actix_web::dev::Payload;
use actix_web::web::Json;
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use futures::ready;
use meilisearch_types::error::{ErrorCode, ResponseError};

/// Extractor for typed data from Json request payloads
/// deserialised by deserr.
///
/// # Extractor
/// To extract typed data from a request body, the inner type `T` must implement the
/// [`deserr::DeserializeFromError<E>`] trait. The inner type `E` must implement the
/// [`ErrorCode`](meilisearch_error::ErrorCode) trait.
#[derive(Debug)]
pub struct ValidatedJson<T, E>(pub T, PhantomData<*const E>);

impl<T, E> ValidatedJson<T, E> {
    pub fn new(data: T) -> Self {
        ValidatedJson(data, PhantomData)
    }
    pub fn into_inner(self) -> T {
        self.0
    }
}

impl<T, E> FromRequest for ValidatedJson<T, E>
where
    E: DeserializeError + ErrorCode + std::error::Error + 'static,
    T: DeserializeFromValue<E>,
{
    type Error = actix_web::Error;
    type Future = ValidatedJsonExtractFut<T, E>;

    #[inline]
    fn from_request(req: &HttpRequest, payload: &mut Payload) -> Self::Future {
        ValidatedJsonExtractFut {
            fut: Json::<serde_json::Value>::from_request(req, payload),
            _phantom: PhantomData,
        }
    }
}

pub struct ValidatedJsonExtractFut<T, E> {
    fut: <Json<serde_json::Value> as FromRequest>::Future,
    _phantom: PhantomData<*const (T, E)>,
}

impl<T, E> Future for ValidatedJsonExtractFut<T, E>
where
    T: DeserializeFromValue<E>,
    E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
    type Output = Result<ValidatedJson<T, E>, actix_web::Error>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let ValidatedJsonExtractFut { fut, .. } = self.get_mut();
        let fut = Pin::new(fut);

        let res = ready!(fut.poll(cx));

        let res = match res {
            Err(err) => Err(err),
            Ok(data) => match deserr::deserialize::<_, _, E>(data.into_inner()) {
                Ok(data) => Ok(ValidatedJson::new(data)),
                Err(e) => Err(ResponseError::from(e).into()),
            },
        };

        Poll::Ready(res)
    }
}
@@ -1,6 +1,4 @@
pub mod payload;
#[macro_use]
pub mod authentication;
pub mod json;
pub mod query_parameters;
pub mod sequential_extractor;

@@ -1,70 +0,0 @@
//! A module to parse query parameter with deserr

use std::marker::PhantomData;
use std::{fmt, ops};

use actix_http::Payload;
use actix_utils::future::{err, ok, Ready};
use actix_web::{FromRequest, HttpRequest};
use deserr::{DeserializeError, DeserializeFromValue};
use meilisearch_types::error::{Code, ErrorCode, ResponseError};

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct QueryParameter<T, E>(pub T, PhantomData<*const E>);

impl<T, E> QueryParameter<T, E> {
    /// Unwrap into inner `T` value.
    pub fn into_inner(self) -> T {
        self.0
    }
}

impl<T, E> QueryParameter<T, E>
where
    T: DeserializeFromValue<E>,
    E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
    pub fn from_query(query_str: &str) -> Result<Self, actix_web::Error> {
        let value = serde_urlencoded::from_str::<serde_json::Value>(query_str)
            .map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;

        match deserr::deserialize::<_, _, E>(value) {
            Ok(data) => Ok(QueryParameter(data, PhantomData)),
            Err(e) => Err(ResponseError::from(e).into()),
        }
    }
}

impl<T, E> ops::Deref for QueryParameter<T, E> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T, E> ops::DerefMut for QueryParameter<T, E> {
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}

impl<T: fmt::Display, E> fmt::Display for QueryParameter<T, E> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

impl<T, E> FromRequest for QueryParameter<T, E>
where
    T: DeserializeFromValue<E>,
    E: DeserializeError + ErrorCode + std::error::Error + 'static,
{
    type Error = actix_web::Error;
    type Future = Ready<Result<Self, actix_web::Error>>;

    #[inline]
    fn from_request(req: &HttpRequest, _: &mut Payload) -> Self::Future {
        QueryParameter::from_query(req.query_string()).map(ok).unwrap_or_else(err)
    }
}
@@ -1,7 +1,8 @@
use std::str;

use actix_web::{web, HttpRequest, HttpResponse};
use deserr::DeserializeFromValue;
use deserr::actix_web::{AwebJson, AwebQueryParameter};
use deserr::Deserr;
use meilisearch_auth::error::AuthControllerError;
use meilisearch_auth::AuthController;
use meilisearch_types::deserr::query_params::Param;
@@ -16,8 +17,6 @@ use uuid::Uuid;
use super::PAGINATION_DEFAULT_LIMIT;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::json::ValidatedJson;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::Pagination;

@@ -37,7 +36,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {

pub async fn create_api_key(
    auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_CREATE }>, AuthController>,
    body: ValidatedJson<CreateApiKey, DeserrJsonError>,
    body: AwebJson<CreateApiKey, DeserrJsonError>,
    _req: HttpRequest,
) -> Result<HttpResponse, ResponseError> {
    let v = body.into_inner();
@@ -51,7 +50,7 @@ pub async fn create_api_key(
    Ok(HttpResponse::Created().json(res))
}

#[derive(DeserializeFromValue, Debug, Clone, Copy)]
#[derive(Deserr, Debug, Clone, Copy)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct ListApiKeys {
    #[deserr(default, error = DeserrQueryParamError<InvalidApiKeyOffset>)]
@@ -59,6 +58,7 @@ pub struct ListApiKeys {
    #[deserr(default = Param(PAGINATION_DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidApiKeyLimit>)]
    pub limit: Param<usize>,
}

impl ListApiKeys {
    fn as_pagination(self) -> Pagination {
        Pagination { offset: self.offset.0, limit: self.limit.0 }
@@ -67,7 +67,7 @@ impl ListApiKeys {

pub async fn list_api_keys(
    auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_GET }>, AuthController>,
    list_api_keys: QueryParameter<ListApiKeys, DeserrQueryParamError>,
    list_api_keys: AwebQueryParameter<ListApiKeys, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
    let paginate = list_api_keys.into_inner().as_pagination();
    let page_view = tokio::task::spawn_blocking(move || -> Result<_, AuthControllerError> {
@@ -104,7 +104,7 @@ pub async fn get_api_key(

pub async fn patch_api_key(
    auth_controller: GuardedData<ActionPolicy<{ actions::KEYS_UPDATE }>, AuthController>,
    body: ValidatedJson<PatchApiKey, DeserrJsonError>,
    body: AwebJson<PatchApiKey, DeserrJsonError>,
    path: web::Path<AuthParam>,
) -> Result<HttpResponse, ResponseError> {
    let key = path.into_inner().key;
@@ -4,7 +4,8 @@ use actix_web::http::header::CONTENT_TYPE;
use actix_web::web::Data;
use actix_web::{web, HttpMessage, HttpRequest, HttpResponse};
use bstr::ByteSlice;
use deserr::DeserializeFromValue;
use deserr::actix_web::AwebQueryParameter;
use deserr::Deserr;
use futures::StreamExt;
use index_scheduler::IndexScheduler;
use log::debug;
@@ -33,7 +34,6 @@ use crate::error::PayloadError::ReceivePayload;
use crate::extractors::authentication::policies::*;
use crate::extractors::authentication::GuardedData;
use crate::extractors::payload::Payload;
use crate::extractors::query_parameters::QueryParameter;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::routes::{PaginationView, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};

@@ -80,7 +80,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
    );
}

#[derive(Debug, DeserializeFromValue)]
#[derive(Debug, Deserr)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct GetDocument {
    #[deserr(default, error = DeserrQueryParamError<InvalidDocumentFields>)]
@@ -90,7 +90,7 @@ pub struct GetDocument {

pub async fn get_document(
    index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
    document_param: web::Path<DocumentParam>,
    params: QueryParameter<GetDocument, DeserrQueryParamError>,
    params: AwebQueryParameter<GetDocument, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
    let DocumentParam { index_uid, document_id } = document_param.into_inner();
    let index_uid = IndexUid::try_from(index_uid)?;
@@ -125,7 +125,7 @@ pub async fn delete_document(
    Ok(HttpResponse::Accepted().json(task))
}

#[derive(Debug, DeserializeFromValue)]
#[derive(Debug, Deserr)]
#[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
pub struct BrowseQuery {
    #[deserr(default, error = DeserrQueryParamError<InvalidDocumentOffset>)]
@@ -139,7 +139,7 @@ pub struct BrowseQuery {
pub async fn get_all_documents(
    index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_GET }>, Data<IndexScheduler>>,
    index_uid: web::Path<String>,
    params: QueryParameter<BrowseQuery, DeserrQueryParamError>,
    params: AwebQueryParameter<BrowseQuery, DeserrQueryParamError>,
) -> Result<HttpResponse, ResponseError> {
    let index_uid = IndexUid::try_from(index_uid.into_inner())?;
    debug!("called with params: {:?}", params);
@@ -155,7 +155,7 @@ pub async fn get_all_documents(
    Ok(HttpResponse::Ok().json(ret))
}

#[derive(Deserialize, Debug, DeserializeFromValue)]
#[derive(Deserialize, Debug, Deserr)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct UpdateDocumentsQuery {
    #[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
@@ -165,7 +165,7 @@ pub struct UpdateDocumentsQuery {
pub async fn add_documents(
    index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
    index_uid: web::Path<String>,
    params: QueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
    params: AwebQueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
    body: Payload,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
@@ -177,7 +177,7 @@ pub async fn add_documents(

    analytics.add_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);

    let allow_index_creation = index_scheduler.filters().allow_index_creation;
    let allow_index_creation = index_scheduler.filters().allow_index_creation();
    let task = document_addition(
        extract_mime_type(&req)?,
        index_scheduler,
@@ -195,7 +195,7 @@ pub async fn add_documents(
pub async fn update_documents(
    index_scheduler: GuardedData<ActionPolicy<{ actions::DOCUMENTS_ADD }>, Data<IndexScheduler>>,
    index_uid: web::Path<String>,
    params: QueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
    params: AwebQueryParameter<UpdateDocumentsQuery, DeserrJsonError>,
    body: Payload,
    req: HttpRequest,
    analytics: web::Data<dyn Analytics>,
@@ -206,7 +206,7 @@ pub async fn update_documents(

    analytics.update_documents(&params, index_scheduler.index(&index_uid).is_err(), &req);

    let allow_index_creation = index_scheduler.filters().allow_index_creation;
    let allow_index_creation = index_scheduler.filters().allow_index_creation();
    let task = document_addition(
        extract_mime_type(&req)?,
        index_scheduler,
|
||||
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{web, HttpRequest, HttpResponse};
|
||||
use deserr::{DeserializeError, DeserializeFromValue, ValuePointerRef};
|
||||
use deserr::actix_web::{AwebJson, AwebQueryParameter};
|
||||
use deserr::{DeserializeError, Deserr, ValuePointerRef};
|
||||
use index_scheduler::IndexScheduler;
|
||||
use log::debug;
|
||||
use meilisearch_types::deserr::error_messages::immutable_field_error;
|
||||
use meilisearch_types::deserr::query_params::Param;
|
||||
use meilisearch_types::deserr::{DeserrJsonError, DeserrQueryParamError};
|
||||
use meilisearch_types::deserr::{immutable_field_error, DeserrJsonError, DeserrQueryParamError};
|
||||
use meilisearch_types::error::deserr_codes::*;
|
||||
use meilisearch_types::error::{unwrap_any, Code, ResponseError};
|
||||
use meilisearch_types::error::{Code, ResponseError};
|
||||
use meilisearch_types::index_uid::IndexUid;
|
||||
use meilisearch_types::milli::{self, FieldDistribution, Index};
|
||||
use meilisearch_types::tasks::KindWithContent;
|
||||
@@ -21,8 +21,6 @@ use super::{Pagination, SummarizedTaskView, PAGINATION_DEFAULT_LIMIT};
|
||||
use crate::analytics::Analytics;
|
||||
use crate::extractors::authentication::policies::*;
|
||||
use crate::extractors::authentication::{AuthenticationError, GuardedData};
|
||||
use crate::extractors::json::ValidatedJson;
|
||||
use crate::extractors::query_parameters::QueryParameter;
|
||||
use crate::extractors::sequential_extractor::SeqHandler;
|
||||
|
||||
pub mod documents;
|
||||
@@ -73,7 +71,7 @@ impl IndexView {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(DeserializeFromValue, Debug, Clone, Copy)]
|
 #[derive(Deserr, Debug, Clone, Copy)]
 #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct ListIndexes {
     #[deserr(default, error = DeserrQueryParamError<InvalidIndexOffset>)]
@@ -89,13 +87,13 @@ impl ListIndexes {

 pub async fn list_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_GET }>, Data<IndexScheduler>>,
-    paginate: QueryParameter<ListIndexes, DeserrQueryParamError>,
+    paginate: AwebQueryParameter<ListIndexes, DeserrQueryParamError>,
 ) -> Result<HttpResponse, ResponseError> {
-    let search_rules = &index_scheduler.filters().search_rules;
+    let filters = index_scheduler.filters();
     let indexes: Vec<_> = index_scheduler.indexes()?;
     let indexes = indexes
         .into_iter()
-        .filter(|(name, _)| search_rules.is_index_authorized(name))
+        .filter(|(name, _)| filters.is_index_authorized(name))
         .map(|(name, index)| IndexView::new(name, &index))
         .collect::<Result<Vec<_>, _>>()?;
@@ -105,7 +103,7 @@ pub async fn list_indexes(
     Ok(HttpResponse::Ok().json(ret))
 }

-#[derive(DeserializeFromValue, Debug)]
+#[derive(Deserr, Debug)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct IndexCreateRequest {
     #[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)]
@@ -116,13 +114,14 @@ pub struct IndexCreateRequest {

 pub async fn create_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_CREATE }>, Data<IndexScheduler>>,
-    body: ValidatedJson<IndexCreateRequest, DeserrJsonError>,
+    body: AwebJson<IndexCreateRequest, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     let IndexCreateRequest { primary_key, uid } = body.into_inner();

-    let allow_index_creation = index_scheduler.filters().search_rules.is_index_authorized(&uid);
+    // FIXME: allow_index_creation?
+    let allow_index_creation = index_scheduler.filters().is_index_authorized(&uid);
     if allow_index_creation {
         analytics.publish(
             "Index Created".to_string(),
@@ -149,7 +148,7 @@ fn deny_immutable_fields_index(
         "uid" => immutable_field_error(field, accepted, Code::ImmutableIndexUid),
         "createdAt" => immutable_field_error(field, accepted, Code::ImmutableIndexCreatedAt),
         "updatedAt" => immutable_field_error(field, accepted, Code::ImmutableIndexUpdatedAt),
-        _ => unwrap_any(DeserrJsonError::<BadRequest>::error::<Infallible>(
+        _ => deserr::take_cf_content(DeserrJsonError::<BadRequest>::error::<Infallible>(
             None,
             deserr::ErrorKind::UnknownKey { key: field, accepted },
             location,
@@ -157,7 +156,7 @@ fn deny_immutable_fields_index(
     }
 }

-#[derive(DeserializeFromValue, Debug)]
+#[derive(Deserr, Debug)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields = deny_immutable_fields_index)]
 pub struct UpdateIndexRequest {
     #[deserr(default, error = DeserrJsonError<InvalidIndexPrimaryKey>)]
@@ -181,7 +180,7 @@ pub async fn get_index(
 pub async fn update_index(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_UPDATE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    body: ValidatedJson<UpdateIndexRequest, DeserrJsonError>,
+    body: AwebJson<UpdateIndexRequest, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
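A note on the pattern running through these hunks: handlers stop reaching into `filters().search_rules` and instead ask the `AuthFilter` returned by `index_scheduler.filters()` directly, and `is_index_authorized` now also understands `*`-suffixed index patterns. A minimal, std-only sketch of that filtering step; the `AuthFilter` struct below is a hypothetical stand-in for the real `meilisearch_auth` type, not a copy of it.

struct AuthFilter {
    // Hypothetical representation, e.g. ["products_*", "sales"].
    authorized_patterns: Vec<String>,
}

impl AuthFilter {
    fn is_index_authorized(&self, uid: &str) -> bool {
        self.authorized_patterns.iter().any(|p| match p.strip_suffix('*') {
            // A trailing star means "any uid with this prefix".
            Some(prefix) => uid.starts_with(prefix),
            None => p.as_str() == uid,
        })
    }
}

fn main() {
    let filters = AuthFilter { authorized_patterns: vec!["products_*".into()] };
    let indexes = vec!["products_1".to_string(), "test".to_string()];
    // Same shape as the `.filter(...)` step in `list_indexes` above.
    let visible: Vec<String> =
        indexes.into_iter().filter(|name| filters.is_index_authorized(name)).collect();
    assert_eq!(visible, ["products_1"]);
}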
@@ -1,5 +1,6 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
+use deserr::actix_web::{AwebJson, AwebQueryParameter};
 use index_scheduler::IndexScheduler;
 use log::debug;
 use meilisearch_types::deserr::query_params::Param;
@@ -13,8 +14,6 @@ use serde_json::Value;
 use crate::analytics::{Analytics, SearchAggregator};
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
-use crate::extractors::json::ValidatedJson;
-use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::search::{
     add_search_rules, perform_search, MatchingStrategy, SearchQuery, DEFAULT_CROP_LENGTH,
@@ -30,7 +29,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     );
 }

-#[derive(Debug, deserr::DeserializeFromValue)]
+#[derive(Debug, deserr::Deserr)]
 #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQueryGet {
     #[deserr(default, error = DeserrQueryParamError<InvalidSearchQ>)]
@@ -129,7 +128,7 @@ fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
 pub async fn search_with_url_query(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    params: QueryParameter<SearchQueryGet, DeserrQueryParamError>,
+    params: AwebQueryParameter<SearchQueryGet, DeserrQueryParamError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -139,9 +138,7 @@ pub async fn search_with_url_query(
     let mut query: SearchQuery = params.into_inner().into();

     // Tenant token search_rules.
-    if let Some(search_rules) =
-        index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
-    {
+    if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
         add_search_rules(&mut query, search_rules);
     }

@@ -163,7 +160,7 @@ pub async fn search_with_url_query(
 pub async fn search_with_post(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    params: ValidatedJson<SearchQuery, DeserrJsonError>,
+    params: AwebJson<SearchQuery, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -173,9 +170,7 @@ pub async fn search_with_post(
     debug!("search called with params: {:?}", query);

     // Tenant token search_rules.
-    if let Some(search_rules) =
-        index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
-    {
+    if let Some(search_rules) = index_scheduler.filters().get_index_search_rules(&index_uid) {
         add_search_rules(&mut query, search_rules);
     }
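Both handlers keep the same shape after the change: resolve the tenant token's rules for the target index, then fold them into the query with `add_search_rules`. A reduced model of that folding, with invented stand-in types; it only illustrates the intent that a token may narrow what a query can see, not the real `SearchQuery` plumbing.

#[derive(Debug)]
struct SearchQuery {
    filter: Option<String>,
}

struct IndexSearchRules {
    filter: Option<String>,
}

fn add_search_rules(query: &mut SearchQuery, rules: IndexSearchRules) {
    // Combine an existing user filter with the token filter so the
    // token can only restrict results (the real combination logic
    // lives in meilisearch's search module).
    query.filter = match (query.filter.take(), rules.filter) {
        (Some(user), Some(token)) => Some(format!("({user}) AND ({token})")),
        (user, token) => user.or(token),
    };
}

fn main() {
    let mut query = SearchQuery { filter: Some("color = yellow".into()) };
    add_search_rules(&mut query, IndexSearchRules { filter: Some("color = blue".into()) });
    println!("{query:?}");
}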
@@ -1,5 +1,6 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
+use deserr::actix_web::AwebJson;
 use index_scheduler::IndexScheduler;
 use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
@@ -12,7 +13,6 @@ use serde_json::json;
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
-use crate::extractors::json::ValidatedJson;
 use crate::routes::SummarizedTaskView;

 #[macro_export]
@@ -45,7 +45,7 @@ macro_rules! make_setting_route {

             let new_settings = Settings { $attr: Setting::Reset.into(), ..Default::default() };

-            let allow_index_creation = index_scheduler.filters().allow_index_creation;
+            let allow_index_creation = index_scheduler.filters().allow_index_creation();

             let task = KindWithContent::SettingsUpdate {
                 index_uid: index_uid.to_string(),
@@ -68,7 +68,7 @@ macro_rules! make_setting_route {
                 Data<IndexScheduler>,
             >,
             index_uid: actix_web::web::Path<String>,
-            body: $crate::routes::indexes::ValidatedJson<Option<$type>, $err_ty>,
+            body: deserr::actix_web::AwebJson<Option<$type>, $err_ty>,
             req: HttpRequest,
             $analytics_var: web::Data<dyn Analytics>,
         ) -> std::result::Result<HttpResponse, ResponseError> {
@@ -86,7 +86,7 @@ macro_rules! make_setting_route {
                 ..Default::default()
             };

-            let allow_index_creation = index_scheduler.filters().allow_index_creation;
+            let allow_index_creation = index_scheduler.filters().allow_index_creation();

             let task = KindWithContent::SettingsUpdate {
                 index_uid: index_uid.to_string(),
@@ -468,7 +468,7 @@ generate_configure!(
 pub async fn update_all(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SETTINGS_UPDATE }>, Data<IndexScheduler>>,
     index_uid: web::Path<String>,
-    body: ValidatedJson<Settings<Unchecked>, DeserrJsonError>,
+    body: AwebJson<Settings<Unchecked>, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -560,7 +560,7 @@ pub async fn update_all(
         Some(&req),
     );

-    let allow_index_creation = index_scheduler.filters().allow_index_creation;
+    let allow_index_creation = index_scheduler.filters().allow_index_creation();
     let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
     let task = KindWithContent::SettingsUpdate {
         index_uid,
@@ -596,7 +596,7 @@ pub async fn delete_all(

     let new_settings = Settings::cleared().into_unchecked();

-    let allow_index_creation = index_scheduler.filters().allow_index_creation;
+    let allow_index_creation = index_scheduler.filters().allow_index_creation();
     let index_uid = IndexUid::try_from(index_uid.into_inner())?.into_inner();
     let task = KindWithContent::SettingsUpdate {
         index_uid,
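The four identical hunks above change `allow_index_creation` from a public field read into a method call. A toy model of the refactor; the struct internals here are hypothetical, and the point is only that callers now go through an accessor, so the representation can evolve (for instance, be derived from index patterns) without touching call sites.

struct AuthFilter {
    allow_index_creation: bool,
}

impl AuthFilter {
    // Methods and fields live in separate namespaces in Rust, so the
    // accessor may share the field's name.
    fn allow_index_creation(&self) -> bool {
        self.allow_index_creation
    }
}

fn main() {
    let filters = AuthFilter { allow_index_creation: true };
    assert!(filters.allow_index_creation());
}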
@@ -17,6 +17,8 @@ use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;

+const PAGINATION_DEFAULT_LIMIT: usize = 20;
+
 mod api_key;
 mod dump;
 pub mod indexes;
@@ -31,12 +33,10 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
         .service(web::resource("/stats").route(web::get().to(get_stats)))
         .service(web::resource("/version").route(web::get().to(get_version)))
         .service(web::scope("/indexes").configure(indexes::configure))
-        .service(web::scope("/search").configure(multi_search::configure))
+        .service(web::scope("/multi-search").configure(multi_search::configure))
         .service(web::scope("/swap-indexes").configure(swap_indexes::configure));
 }

-const PAGINATION_DEFAULT_LIMIT: usize = 20;
-
 #[derive(Debug, Serialize)]
 #[serde(rename_all = "camelCase")]
 pub struct SummarizedTaskView {
@@ -60,6 +60,7 @@ impl From<Task> for SummarizedTaskView {
         }
     }
 }

 pub struct Pagination {
     pub offset: usize,
     pub limit: usize,
@@ -237,10 +238,9 @@ async fn get_stats(
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
     analytics.publish("Stats Seen".to_string(), json!({ "per_index_uid": false }), Some(&req));
-    let search_rules = &index_scheduler.filters().search_rules;
+    let filters = index_scheduler.filters();

-    let stats =
-        create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), search_rules)?;
+    let stats = create_all_stats((*index_scheduler).clone(), (*auth_controller).clone(), filters)?;

     debug!("returns: {:?}", stats);
     Ok(HttpResponse::Ok().json(stats))
@@ -249,19 +249,19 @@ async fn get_stats(
 pub fn create_all_stats(
     index_scheduler: Data<IndexScheduler>,
     auth_controller: AuthController,
-    search_rules: &meilisearch_auth::SearchRules,
+    filters: &meilisearch_auth::AuthFilter,
 ) -> Result<Stats, ResponseError> {
     let mut last_task: Option<OffsetDateTime> = None;
     let mut indexes = BTreeMap::new();
     let mut database_size = 0;
     let processing_task = index_scheduler.get_tasks_from_authorized_indexes(
         Query { statuses: Some(vec![Status::Processing]), limit: Some(1), ..Query::default() },
-        search_rules.authorized_indexes(),
+        filters,
     )?;
     // accumulate the size of each indexes
     let processing_index = processing_task.first().and_then(|task| task.index_uid());
     for (name, index) in index_scheduler.indexes()? {
-        if !search_rules.is_index_authorized(&name) {
+        if !filters.is_index_authorized(&name) {
             continue;
         }
@@ -1,5 +1,6 @@
 use actix_web::web::{self, Data};
 use actix_web::{HttpRequest, HttpResponse};
+use deserr::actix_web::AwebJson;
 use index_scheduler::IndexScheduler;
 use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
@@ -9,15 +10,14 @@ use serde::Serialize;

 use crate::analytics::{Analytics, MultiSearchAggregator};
 use crate::extractors::authentication::policies::ActionPolicy;
-use crate::extractors::authentication::GuardedData;
-use crate::extractors::json::ValidatedJson;
+use crate::extractors::authentication::{AuthenticationError, GuardedData};
 use crate::extractors::sequential_extractor::SeqHandler;
 use crate::search::{
     add_search_rules, perform_search, SearchQueryWithIndex, SearchResultWithIndex,
 };

 pub fn configure(cfg: &mut web::ServiceConfig) {
-    cfg.service(web::resource("").route(web::post().to(SeqHandler(search_with_post))));
+    cfg.service(web::resource("").route(web::post().to(SeqHandler(multi_search_with_post))));
 }

 #[derive(Serialize)]
@@ -25,15 +25,15 @@ struct SearchResults {
     results: Vec<SearchResultWithIndex>,
 }

-#[derive(Debug, deserr::DeserializeFromValue)]
+#[derive(Debug, deserr::Deserr)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQueries {
     queries: Vec<SearchQueryWithIndex>,
 }

-pub async fn search_with_post(
+pub async fn multi_search_with_post(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
-    params: ValidatedJson<SearchQueries, DeserrJsonError>,
+    params: AwebJson<SearchQueries, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -49,9 +49,13 @@ pub async fn search_with_post(
     {
         debug!("search called with params: {:?}", query);

-        // Tenant token search_rules.
+        // Check index from API key
+        if !index_scheduler.filters().is_index_authorized(&index_uid) {
+            return Err(AuthenticationError::InvalidToken.into());
+        }
+        // Apply search rules from tenant token
         if let Some(search_rules) =
-            index_scheduler.filters().search_rules.get_index_search_rules(&index_uid)
+            index_scheduler.filters().get_index_search_rules(&index_uid)
         {
             add_search_rules(&mut query, search_rules);
         }
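The added lines are the behavioral core of this hunk: with `/multi-search`, the index uid arrives in the request body rather than the URL, so every query is checked against the key's filter and rejected with `InvalidToken` before tenant-token rules are applied. A compact, dependency-free sketch of that per-query gate; the names and error type below are invented stand-ins.

#[derive(Debug)]
enum AuthError {
    InvalidToken,
}

fn authorize_queries(
    authorized: impl Fn(&str) -> bool,
    index_uids: &[&str],
) -> Result<(), AuthError> {
    for &uid in index_uids {
        if !authorized(uid) {
            // Mirrors `AuthenticationError::InvalidToken` in the hunk above.
            return Err(AuthError::InvalidToken);
        }
    }
    Ok(())
}

fn main() {
    let authorized = |uid: &str| uid.starts_with("products_");
    assert!(authorize_queries(&authorized, &["products_1", "products_2"]).is_ok());
    // One unauthorized index fails the whole multi-search request.
    assert!(authorize_queries(&authorized, &["products_1", "test"]).is_err());
}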
@@ -1,6 +1,7 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
-use deserr::DeserializeFromValue;
+use deserr::actix_web::AwebJson;
+use deserr::Deserr;
 use index_scheduler::IndexScheduler;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::deserr_codes::InvalidSwapIndexes;
@@ -14,14 +15,13 @@ use crate::analytics::Analytics;
 use crate::error::MeilisearchHttpError;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::{AuthenticationError, GuardedData};
-use crate::extractors::json::ValidatedJson;
 use crate::extractors::sequential_extractor::SeqHandler;

 pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::post().to(SeqHandler(swap_indexes))));
 }

-#[derive(DeserializeFromValue, Debug, Clone, PartialEq, Eq)]
+#[derive(Deserr, Debug, Clone, PartialEq, Eq)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SwapIndexesPayload {
     #[deserr(error = DeserrJsonError<InvalidSwapIndexes>, missing_field_error = DeserrJsonError::missing_swap_indexes)]
@@ -30,7 +30,7 @@ pub struct SwapIndexesPayload {

 pub async fn swap_indexes(
     index_scheduler: GuardedData<ActionPolicy<{ actions::INDEXES_SWAP }>, Data<IndexScheduler>>,
-    params: ValidatedJson<Vec<SwapIndexesPayload>, DeserrJsonError>,
+    params: AwebJson<Vec<SwapIndexesPayload>, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -42,7 +42,7 @@ pub async fn swap_indexes(
         }),
         Some(&req),
     );
-    let search_rules = &index_scheduler.filters().search_rules;
+    let filters = index_scheduler.filters();

     let mut swaps = vec![];
     for SwapIndexesPayload { indexes } in params.into_iter() {
@@ -53,7 +53,7 @@ pub async fn swap_indexes(
             return Err(MeilisearchHttpError::SwapIndexPayloadWrongLength(indexes).into());
         }
     };
-        if !search_rules.is_index_authorized(lhs) || !search_rules.is_index_authorized(rhs) {
+        if !filters.is_index_authorized(lhs) || !filters.is_index_authorized(rhs) {
             return Err(AuthenticationError::InvalidToken.into());
         }
         swaps.push(IndexSwap { indexes: (lhs.to_string(), rhs.to_string()) });
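The validation in `swap_indexes` has two rules visible in this diff: each payload must name exactly two indexes, and both must pass the key's authorization filter. A condensed model of that check, with simplified stand-in error variants.

#[derive(Debug, PartialEq)]
enum SwapError {
    WrongLength,
    Unauthorized,
}

fn check_swap(
    indexes: &[&str],
    authorized: impl Fn(&str) -> bool,
) -> Result<(String, String), SwapError> {
    match indexes {
        &[lhs, rhs] => {
            if !authorized(lhs) || !authorized(rhs) {
                // The route maps this to a 403 `InvalidToken` response.
                return Err(SwapError::Unauthorized);
            }
            Ok((lhs.to_string(), rhs.to_string()))
        }
        _ => Err(SwapError::WrongLength),
    }
}

fn main() {
    let all = |_: &str| true;
    assert!(check_swap(&["a", "b"], all).is_ok());
    assert_eq!(check_swap(&["a"], all), Err(SwapError::WrongLength));
    assert_eq!(check_swap(&["a", "b"], |_| false), Err(SwapError::Unauthorized));
}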
@@ -1,6 +1,7 @@
 use actix_web::web::Data;
 use actix_web::{web, HttpRequest, HttpResponse};
-use deserr::DeserializeFromValue;
+use deserr::actix_web::AwebQueryParameter;
+use deserr::Deserr;
 use index_scheduler::{IndexScheduler, Query, TaskId};
 use meilisearch_types::deserr::query_params::Param;
 use meilisearch_types::deserr::DeserrQueryParamError;
@@ -23,7 +24,6 @@ use super::SummarizedTaskView;
 use crate::analytics::Analytics;
 use crate::extractors::authentication::policies::*;
 use crate::extractors::authentication::GuardedData;
-use crate::extractors::query_parameters::QueryParameter;
 use crate::extractors::sequential_extractor::SeqHandler;

 const DEFAULT_LIMIT: u32 = 20;
@@ -162,7 +162,7 @@ impl From<Details> for DetailsView {
     }
 }

-#[derive(Debug, DeserializeFromValue)]
+#[derive(Debug, Deserr)]
 #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct TasksFilterQuery {
     #[deserr(default = Param(DEFAULT_LIMIT), error = DeserrQueryParamError<InvalidTaskLimit>)]
@@ -181,19 +181,20 @@ pub struct TasksFilterQuery {
     #[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
     pub index_uids: OptionStarOrList<IndexUid>,

-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
     pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
     pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
     pub after_started_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
     pub before_started_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
     pub after_finished_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
     pub before_finished_at: OptionStarOr<OffsetDateTime>,
 }

 impl TasksFilterQuery {
     fn into_query(self) -> Query {
         Query {
@@ -235,7 +236,7 @@ impl TaskDeletionOrCancelationQuery {
     }
 }

-#[derive(Debug, DeserializeFromValue)]
+#[derive(Debug, Deserr)]
 #[deserr(error = DeserrQueryParamError, rename_all = camelCase, deny_unknown_fields)]
 pub struct TaskDeletionOrCancelationQuery {
     #[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
@@ -249,19 +250,20 @@ pub struct TaskDeletionOrCancelationQuery {
     #[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
     pub index_uids: OptionStarOrList<IndexUid>,

-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
     pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
     pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
     pub after_started_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
     pub before_started_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
     pub after_finished_at: OptionStarOr<OffsetDateTime>,
-    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
+    #[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
     pub before_finished_at: OptionStarOr<OffsetDateTime>,
 }

 impl TaskDeletionOrCancelationQuery {
     fn into_query(self) -> Query {
         Query {
@@ -284,7 +286,7 @@ impl TaskDeletionOrCancelationQuery {

 async fn cancel_tasks(
     index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_CANCEL }>, Data<IndexScheduler>>,
-    params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
+    params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -317,7 +319,7 @@ async fn cancel_tasks(
     let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
         &index_scheduler.read_txn()?,
         &query,
-        &index_scheduler.filters().search_rules.authorized_indexes(),
+        index_scheduler.filters(),
     )?;
     let task_cancelation =
         KindWithContent::TaskCancelation { query: format!("?{}", req.query_string()), tasks };
@@ -330,7 +332,7 @@ async fn cancel_tasks(

 async fn delete_tasks(
     index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_DELETE }>, Data<IndexScheduler>>,
-    params: QueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
+    params: AwebQueryParameter<TaskDeletionOrCancelationQuery, DeserrQueryParamError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -362,7 +364,7 @@ async fn delete_tasks(
     let tasks = index_scheduler.get_task_ids_from_authorized_indexes(
         &index_scheduler.read_txn()?,
         &query,
-        &index_scheduler.filters().search_rules.authorized_indexes(),
+        index_scheduler.filters(),
     )?;
     let task_deletion =
         KindWithContent::TaskDeletion { query: format!("?{}", req.query_string()), tasks };
@@ -383,7 +385,7 @@ pub struct AllTasks {

 async fn get_tasks(
     index_scheduler: GuardedData<ActionPolicy<{ actions::TASKS_GET }>, Data<IndexScheduler>>,
-    params: QueryParameter<TasksFilterQuery, DeserrQueryParamError>,
+    params: AwebQueryParameter<TasksFilterQuery, DeserrQueryParamError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
@@ -396,10 +398,7 @@ async fn get_tasks(
     let query = params.into_query();

     let mut tasks_results: Vec<TaskView> = index_scheduler
-        .get_tasks_from_authorized_indexes(
-            query,
-            index_scheduler.filters().search_rules.authorized_indexes(),
-        )?
+        .get_tasks_from_authorized_indexes(query, index_scheduler.filters())?
         .into_iter()
         .map(|t| TaskView::from_task(&t))
         .collect();
@@ -437,12 +436,8 @@ async fn get_task(

     let query = index_scheduler::Query { uids: Some(vec![task_uid]), ..Query::default() };

-    if let Some(task) = index_scheduler
-        .get_tasks_from_authorized_indexes(
-            query,
-            index_scheduler.filters().search_rules.authorized_indexes(),
-        )?
-        .first()
+    if let Some(task) =
+        index_scheduler.get_tasks_from_authorized_indexes(query, index_scheduler.filters())?.first()
     {
         let task_view = TaskView::from_task(task);
         Ok(HttpResponse::Ok().json(task_view))
@@ -498,7 +493,7 @@ pub fn deserialize_date_before(

 #[cfg(test)]
 mod tests {
-    use deserr::DeserializeFromValue;
+    use deserr::Deserr;
     use meili_snap::snapshot;
     use meilisearch_types::deserr::DeserrQueryParamError;
     use meilisearch_types::error::{Code, ResponseError};
@@ -507,7 +502,7 @@ mod tests {

     fn deserr_query_params<T>(j: &str) -> Result<T, ResponseError>
     where
-        T: DeserializeFromValue<DeserrQueryParamError>,
+        T: Deserr<DeserrQueryParamError>,
     {
         let value = serde_urlencoded::from_str::<serde_json::Value>(j)
             .map_err(|e| ResponseError::from_msg(e.to_string(), Code::BadRequest))?;
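The repeated attribute change in both query structs, `from(...)` to `try_from(...)`, reflects that turning the raw `OptionStarOr<String>` into an `OffsetDateTime` is fallible and must surface `InvalidTaskDateError` rather than being treated as an infallible conversion. A reduced, std-only sketch of the shape of such a conversion; the `Date` type and the single accepted format are invented for the example (the real helpers parse RFC 3339 and date-only formats via the `time` crate and apply after/before-specific handling).

#[derive(Debug)]
struct InvalidTaskDateError(String);

#[derive(Debug, PartialEq)]
struct Date {
    year: i32,
    month: i32,
    day: i32,
}

impl TryFrom<String> for Date {
    type Error = InvalidTaskDateError;

    fn try_from(s: String) -> Result<Self, Self::Error> {
        // Accept only `YYYY-MM-DD` here; anything else is an error the
        // deserializer can report, which is exactly what `try_from` buys.
        let mut parts = s.splitn(3, '-').map(|p| p.parse::<i32>().ok());
        match (parts.next().flatten(), parts.next().flatten(), parts.next().flatten()) {
            (Some(year), Some(month), Some(day)) => Ok(Date { year, month, day }),
            _ => Err(InvalidTaskDateError(s)),
        }
    }
}

fn main() {
    assert!(Date::try_from("2023-01-15".to_string()).is_ok());
    assert!(Date::try_from("yesterday".to_string()).is_err());
}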
@@ -3,7 +3,7 @@ use std::collections::{BTreeMap, BTreeSet, HashSet};
 use std::str::FromStr;
 use std::time::Instant;

-use deserr::DeserializeFromValue;
+use deserr::Deserr;
 use either::Either;
 use meilisearch_auth::IndexSearchRules;
 use meilisearch_types::deserr::DeserrJsonError;
@@ -31,7 +31,7 @@ pub const DEFAULT_CROP_MARKER: fn() -> String = || "…".to_string();
 pub const DEFAULT_HIGHLIGHT_PRE_TAG: fn() -> String = || "<em>".to_string();
 pub const DEFAULT_HIGHLIGHT_POST_TAG: fn() -> String = || "</em>".to_string();

-#[derive(Debug, Clone, Default, PartialEq, Eq, DeserializeFromValue)]
+#[derive(Debug, Clone, Default, PartialEq, Eq, Deserr)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQuery {
     #[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
@@ -80,7 +80,7 @@ impl SearchQuery {
 // This struct contains the fields of `SearchQuery` inline.
 // This is because neither deserr nor serde support `flatten` when using `deny_unknown_fields.
 // The `From<SearchQueryWithIndex>` implementation ensures both structs remain up to date.
-#[derive(Debug, deserr::DeserializeFromValue)]
+#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQueryWithIndex {
     #[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)]
@@ -170,7 +170,7 @@ impl SearchQueryWithIndex {
     }
 }

-#[derive(Debug, Clone, PartialEq, Eq, DeserializeFromValue)]
+#[derive(Debug, Clone, PartialEq, Eq, Deserr)]
 #[deserr(rename_all = camelCase)]
 pub enum MatchingStrategy {
     /// Remove query words from last to first
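The comment above `SearchQueryWithIndex` explains why the struct duplicates the `SearchQuery` fields instead of flattening them: neither deserr nor serde supports `flatten` together with `deny_unknown_fields`. A reduced model of the pattern; the conversion method name here is hypothetical, and the point is that a `From`-style conversion is what keeps the two structs from drifting apart, since adding a field to one without the other stops it compiling.

#[derive(Debug)]
struct SearchQuery {
    q: Option<String>,
    limit: usize,
}

#[derive(Debug)]
struct SearchQueryWithIndex {
    index_uid: String,
    q: Option<String>,
    limit: usize,
}

impl SearchQueryWithIndex {
    // Peel off the uid and rebuild the plain query; destructuring makes
    // a missing or extra field a compile error.
    fn into_index_query(self) -> (String, SearchQuery) {
        let SearchQueryWithIndex { index_uid, q, limit } = self;
        (index_uid, SearchQuery { q, limit })
    }
}

fn main() {
    let q = SearchQueryWithIndex { index_uid: "test".into(), q: Some("glass".into()), limit: 20 };
    let (uid, query) = q.into_index_query();
    println!("{uid}: {query:?}");
}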
@@ -377,7 +377,7 @@ async fn error_add_api_key_invalid_index_uids() {
     meili_snap::snapshot!(code, @"400 Bad Request");
     meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
     {
-      "message": "Invalid value at `.indexes[0]`: `invalid index # / \\name with spaces` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+      "message": "Invalid value at `.indexes[0]`: `invalid index # / \\name with spaces` is not a valid index uid pattern. Index uid patterns can be an integer or a string containing only alphanumeric characters, hyphens (-), underscores (_), and optionally end with a star (*).",
       "code": "invalid_api_key_indexes",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"
@@ -11,6 +11,7 @@ use crate::common::Server;
 pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> =
     Lazy::new(|| {
         let mut authorizations = hashmap! {
+            ("POST", "/multi-search") => hashset!{"search", "*"},
             ("POST", "/indexes/products/search") => hashset!{"search", "*"},
             ("GET", "/indexes/products/search") => hashset!{"search", "*"},
             ("POST", "/indexes/products/documents") => hashset!{"documents.add", "documents.*", "*"},
@@ -77,12 +78,14 @@ static INVALID_RESPONSE: Lazy<Value> = Lazy::new(|| {
     })
 });

+const MASTER_KEY: &str = "MASTER_KEY";
+
 #[actix_rt::test]
 async fn error_access_expired_key() {
     use std::{thread, time};

     let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_api_key(MASTER_KEY);

     let content = json!({
         "indexes": ["products"],
@@ -111,7 +114,7 @@ async fn error_access_expired_key() {
 #[actix_rt::test]
 async fn error_access_unauthorized_index() {
     let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_api_key(MASTER_KEY);

     let content = json!({
         "indexes": ["sales"],
@@ -144,7 +147,7 @@ async fn error_access_unauthorized_action() {

     for ((method, route), action) in AUTHORIZATIONS.iter() {
         // create a new API key letting only the needed action.
-        server.use_api_key("MASTER_KEY");
+        server.use_api_key(MASTER_KEY);

         let content = json!({
             "indexes": ["products"],
@@ -168,7 +171,7 @@ async fn error_access_unauthorized_action() {
 #[actix_rt::test]
 async fn access_authorized_master_key() {
     let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_api_key(MASTER_KEY);

     // master key must have access to all routes.
     for ((method, route), _) in AUTHORIZATIONS.iter() {
@@ -185,7 +188,7 @@ async fn access_authorized_restricted_index() {
     for ((method, route), actions) in AUTHORIZATIONS.iter() {
         for action in actions {
             // create a new API key letting only the needed action.
-            server.use_api_key("MASTER_KEY");
+            server.use_api_key(MASTER_KEY);

             let content = json!({
                 "indexes": ["products"],
@@ -222,7 +225,7 @@ async fn access_authorized_no_index_restriction() {
     for ((method, route), actions) in AUTHORIZATIONS.iter() {
         for action in actions {
             // create a new API key letting only the needed action.
-            server.use_api_key("MASTER_KEY");
+            server.use_api_key(MASTER_KEY);

             let content = json!({
                 "indexes": ["*"],
@@ -255,7 +258,7 @@ async fn access_authorized_no_index_restriction() {
 #[actix_rt::test]
 async fn access_authorized_stats_restricted_index() {
     let mut server = Server::new_auth().await;
-    server.use_admin_key("MASTER_KEY").await;
+    server.use_admin_key(MASTER_KEY).await;

     // create index `test`
     let index = server.index("test");
@@ -295,7 +298,7 @@ async fn access_authorized_stats_restricted_index() {
 #[actix_rt::test]
 async fn access_authorized_stats_no_index_restriction() {
     let mut server = Server::new_auth().await;
-    server.use_admin_key("MASTER_KEY").await;
+    server.use_admin_key(MASTER_KEY).await;

     // create index `test`
     let index = server.index("test");
@@ -335,7 +338,7 @@ async fn access_authorized_stats_no_index_restriction() {
 #[actix_rt::test]
 async fn list_authorized_indexes_restricted_index() {
     let mut server = Server::new_auth().await;
-    server.use_admin_key("MASTER_KEY").await;
+    server.use_admin_key(MASTER_KEY).await;

     // create index `test`
     let index = server.index("test");
@@ -376,7 +379,7 @@ async fn list_authorized_indexes_restricted_index() {
 #[actix_rt::test]
 async fn list_authorized_indexes_no_index_restriction() {
     let mut server = Server::new_auth().await;
-    server.use_admin_key("MASTER_KEY").await;
+    server.use_admin_key(MASTER_KEY).await;

     // create index `test`
     let index = server.index("test");
@@ -414,10 +417,194 @@ async fn list_authorized_indexes_no_index_restriction() {
     assert!(response.iter().any(|index| index["uid"] == "test"));
 }

+#[actix_rt::test]
+async fn access_authorized_index_patterns() {
+    let mut server = Server::new_auth().await;
+    server.use_admin_key(MASTER_KEY).await;
+
+    // create products_1 index
+    let index_1 = server.index("products_1");
+    let (response, code) = index_1.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
+
+    // create products index
+    let index_ = server.index("products");
+    let (response, code) = index_.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
+
+    // create key with all document access on indices with product_* pattern.
+    let content = json!({
+        "indexes": ["products_*"],
+        "actions": ["documents.*"],
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
+    });
+
+    // Register the key
+    let (response, code) = server.add_api_key(content).await;
+    assert_eq!(201, code, "{:?}", &response);
+    assert!(response["key"].is_string());
+
+    // use created key.
+    let key = response["key"].as_str().unwrap();
+    server.use_api_key(key);
+
+    // refer to products_1 and products with modified api key.
+    let index_1 = server.index("products_1");
+    let index_ = server.index("products");
+
+    // try to create a index via add documents route
+    let documents = json!([
+        {
+            "id": 1,
+            "content": "foo",
+        }
+    ]);
+
+    // Adding document to products_1 index. Should succeed with 202
+    let (response, code) = index_1.add_documents(documents.clone(), None).await;
+    assert_eq!(202, code, "{:?}", &response);
+    let task_id = response["taskUid"].as_u64().unwrap();
+
+    // Adding document to products index. Should Fail with 403 -- invalid_api_key
+    let (response, code) = index_.add_documents(documents, None).await;
+    assert_eq!(403, code, "{:?}", &response);
+
+    server.use_api_key(MASTER_KEY);
+
+    // refer to products_1 with modified api key.
+    let index_1 = server.index("products_1");
+
+    index_1.wait_task(task_id).await;
+
+    let (response, code) = index_1.get_task(task_id).await;
+    assert_eq!(200, code, "{:?}", &response);
+    assert_eq!(response["status"], "succeeded");
+}
+
+#[actix_rt::test]
+async fn raise_error_non_authorized_index_patterns() {
+    let mut server = Server::new_auth().await;
+    server.use_admin_key(MASTER_KEY).await;
+
+    // create products_1 index
+    let product_1_index = server.index("products_1");
+    let (response, code) = product_1_index.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
+
+    // create products_2 index
+    let product_2_index = server.index("products_2");
+    let (response, code) = product_2_index.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
+
+    // create test index
+    let test_index = server.index("test");
+    let (response, code) = test_index.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
+
+    // create key with all document access on indices with product_* pattern.
+    let content = json!({
+        "indexes": ["products_*"],
+        "actions": ["documents.*"],
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
+    });
+
+    // Register the key
+    let (response, code) = server.add_api_key(content).await;
+    assert_eq!(201, code, "{:?}", &response);
+    assert!(response["key"].is_string());
+
+    // use created key.
+    let key = response["key"].as_str().unwrap();
+    server.use_api_key(key);
+
+    // refer to products_1 and products_2 with modified api key.
+    let product_1_index = server.index("products_1");
+    let product_2_index = server.index("products_2");
+
+    // refer to test index
+    let test_index = server.index("test");
+
+    // try to create a index via add documents route
+    let documents = json!([
+        {
+            "id": 1,
+            "content": "foo",
+        }
+    ]);
+
+    // Adding document to products_1 index. Should succeed with 202
+    let (response, code) = product_1_index.add_documents(documents.clone(), None).await;
+    assert_eq!(202, code, "{:?}", &response);
+    let task1_id = response["taskUid"].as_u64().unwrap();
+
+    // Adding document to products_2 index. Should succeed with 202
+    let (response, code) = product_2_index.add_documents(documents.clone(), None).await;
+    assert_eq!(202, code, "{:?}", &response);
+    let task2_id = response["taskUid"].as_u64().unwrap();
+
+    // Adding document to test index. Should Fail with 403 -- invalid_api_key
+    let (response, code) = test_index.add_documents(documents, None).await;
+    assert_eq!(403, code, "{:?}", &response);
+
+    server.use_api_key(MASTER_KEY);
+
+    // refer to products_1 with modified api key.
+    let product_1_index = server.index("products_1");
+    // refer to products_2 with modified api key.
+    let product_2_index = server.index("products_2");
+
+    product_1_index.wait_task(task1_id).await;
+    product_2_index.wait_task(task2_id).await;
+
+    let (response, code) = product_1_index.get_task(task1_id).await;
+    assert_eq!(200, code, "{:?}", &response);
+    assert_eq!(response["status"], "succeeded");
+
+    let (response, code) = product_1_index.get_task(task2_id).await;
+    assert_eq!(200, code, "{:?}", &response);
+    assert_eq!(response["status"], "succeeded");
+}
+
+#[actix_rt::test]
+async fn pattern_indexes() {
+    // Create server with master key
+    let mut server = Server::new_auth().await;
+    server.use_admin_key(MASTER_KEY).await;
+
+    // index.* constraints on products_* index pattern
+    let content = json!({
+        "indexes": ["products_*"],
+        "actions": ["indexes.*"],
+        "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(),
+    });
+
+    // Generate and use the api key
+    let (response, code) = server.add_api_key(content).await;
+    assert_eq!(201, code, "{:?}", &response);
+    let key = response["key"].as_str().expect("Key is not string");
+    server.use_api_key(key);
+
+    // Create Index products_1 using generated api key
+    let products_1 = server.index("products_1");
+    let (response, code) = products_1.create(Some("id")).await;
+    assert_eq!(202, code, "{:?}", &response);
+
+    // Fail to create products_* using generated api key
+    let products_1 = server.index("products_*");
+    let (response, code) = products_1.create(Some("id")).await;
+    assert_eq!(400, code, "{:?}", &response);
+
+    // Fail to create test_1 using generated api key
+    let products_1 = server.index("test_1");
+    let (response, code) = products_1.create(Some("id")).await;
+    assert_eq!(403, code, "{:?}", &response);
+}

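The three added tests above pin down the `*`-suffix semantics: a key scoped to `products_*` can create and write `products_1`, gets 403 on `test`, and the pattern string itself is rejected as an index uid with 400. The matching they rely on reduces to a prefix check, sketched here as an illustration rather than the `meilisearch-auth` implementation.

fn pattern_matches(pattern: &str, index_uid: &str) -> bool {
    match pattern.strip_suffix('*') {
        Some(prefix) => index_uid.starts_with(prefix),
        None => pattern == index_uid,
    }
}

fn main() {
    assert!(pattern_matches("products_*", "products_1"));
    assert!(!pattern_matches("products_*", "test"));
    // A pattern without a star is an exact match; note that `products_*`
    // is never a creatable index uid, as the `pattern_indexes` test shows.
    assert!(pattern_matches("products", "products"));
}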
 #[actix_rt::test]
 async fn list_authorized_tasks_restricted_index() {
     let mut server = Server::new_auth().await;
-    server.use_admin_key("MASTER_KEY").await;
+    server.use_admin_key(MASTER_KEY).await;

     // create index `test`
     let index = server.index("test");
@@ -446,7 +633,6 @@ async fn list_authorized_tasks_restricted_index() {

     let (response, code) = server.service.get("/tasks").await;
     assert_eq!(200, code, "{:?}", &response);
-    println!("{}", response);
     let response = response["results"].as_array().unwrap();
     // key should have access on `products` index.
     assert!(response.iter().any(|task| task["indexUid"] == "products"));
@@ -458,7 +644,7 @@ async fn list_authorized_tasks_restricted_index() {
 #[actix_rt::test]
 async fn list_authorized_tasks_no_index_restriction() {
     let mut server = Server::new_auth().await;
-    server.use_admin_key("MASTER_KEY").await;
+    server.use_admin_key(MASTER_KEY).await;

     // create index `test`
     let index = server.index("test");
@@ -499,7 +685,7 @@ async fn list_authorized_tasks_no_index_restriction() {
 #[actix_rt::test]
 async fn error_creating_index_without_action() {
     let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_api_key(MASTER_KEY);

     // create key with access on all indexes.
     let content = json!({
@@ -587,7 +773,7 @@ async fn lazy_create_index() {
     ];

     for content in contents {
-        server.use_api_key("MASTER_KEY");
+        server.use_api_key(MASTER_KEY);
         let (response, code) = server.add_api_key(content).await;
         assert_eq!(201, code, "{:?}", &response);
         assert!(response["key"].is_string());
@@ -643,14 +829,114 @@ async fn lazy_create_index() {
         }
     }
 }

+#[actix_rt::test]
+async fn lazy_create_index_from_pattern() {
+    let mut server = Server::new_auth().await;
+
+    // create key with access on all indexes.
+    let contents = vec![
+        json!({
+            "indexes": ["products_*"],
+            "actions": ["*"],
+            "expiresAt": "2050-11-13T00:00:00Z"
+        }),
+        json!({
+            "indexes": ["products_*"],
+            "actions": ["indexes.*", "documents.*", "settings.*", "tasks.*"],
+            "expiresAt": "2050-11-13T00:00:00Z"
+        }),
+        json!({
+            "indexes": ["products_*"],
+            "actions": ["indexes.create", "documents.add", "settings.update", "tasks.get"],
+            "expiresAt": "2050-11-13T00:00:00Z"
+        }),
+    ];
+
+    for content in contents {
+        server.use_api_key(MASTER_KEY);
+        let (response, code) = server.add_api_key(content).await;
+        assert_eq!(201, code, "{:?}", &response);
+        assert!(response["key"].is_string());
+
+        // use created key.
+        let key = response["key"].as_str().unwrap();
+        server.use_api_key(key);
+
+        // try to create a index via add documents route
+        let index = server.index("products_1");
+        let test = server.index("test");
+        let documents = json!([
+            {
+                "id": 1,
+                "content": "foo",
+            }
+        ]);
+
+        let (response, code) = index.add_documents(documents.clone(), None).await;
+        assert_eq!(202, code, "{:?}", &response);
+        let task_id = response["taskUid"].as_u64().unwrap();
+
+        index.wait_task(task_id).await;
+
+        let (response, code) = index.get_task(task_id).await;
+        assert_eq!(200, code, "{:?}", &response);
+        assert_eq!(response["status"], "succeeded");
+
+        // Fail to create test index
+        let (response, code) = test.add_documents(documents, None).await;
+        assert_eq!(403, code, "{:?}", &response);
+
+        // try to create a index via add settings route
+        let index = server.index("products_2");
+        let settings = json!({ "distinctAttribute": "test"});
+
+        let (response, code) = index.update_settings(settings).await;
+        assert_eq!(202, code, "{:?}", &response);
+        let task_id = response["taskUid"].as_u64().unwrap();
+
+        index.wait_task(task_id).await;
+
+        let (response, code) = index.get_task(task_id).await;
+        assert_eq!(200, code, "{:?}", &response);
+        assert_eq!(response["status"], "succeeded");
+
+        // Fail to create test index
+        let index = server.index("test");
+        let settings = json!({ "distinctAttribute": "test"});
+
+        let (response, code) = index.update_settings(settings).await;
+        assert_eq!(403, code, "{:?}", &response);
+
+        // try to create a index via add specialized settings route
+        let index = server.index("products_3");
+        let (response, code) = index.update_distinct_attribute(json!("test")).await;
+        assert_eq!(202, code, "{:?}", &response);
+        let task_id = response["taskUid"].as_u64().unwrap();
+
+        index.wait_task(task_id).await;
+
+        let (response, code) = index.get_task(task_id).await;
+        assert_eq!(200, code, "{:?}", &response);
+        assert_eq!(response["status"], "succeeded");
+
+        // Fail to create test index
+        let index = server.index("test");
+        let settings = json!({ "distinctAttribute": "test"});
+
+        let (response, code) = index.update_settings(settings).await;
+        assert_eq!(403, code, "{:?}", &response);
+    }
+}

 #[actix_rt::test]
 async fn error_creating_index_without_index() {
     let mut server = Server::new_auth().await;
-    server.use_api_key("MASTER_KEY");
+    server.use_api_key(MASTER_KEY);

     // create key with access on all indexes.
     let content = json!({
-        "indexes": ["unexpected"],
+        "indexes": ["unexpected","products_*"],
         "actions": ["*"],
         "expiresAt": "2050-11-13T00:00:00Z"
     });
@@ -690,4 +976,32 @@ async fn error_creating_index_without_index() {
     let index = server.index("test3");
     let (response, code) = index.create(None).await;
     assert_eq!(403, code, "{:?}", &response);
+
+    // try to create a index via add documents route
+    let index = server.index("products");
+    let documents = json!([
+        {
+            "id": 1,
+            "content": "foo",
+        }
+    ]);
+
+    let (response, code) = index.add_documents(documents, None).await;
+    assert_eq!(403, code, "{:?}", &response);
+
+    // try to create a index via add settings route
+    let index = server.index("products");
+    let settings = json!({ "distinctAttribute": "test"});
+    let (response, code) = index.update_settings(settings).await;
+    assert_eq!(403, code, "{:?}", &response);
+
+    // try to create a index via add specialized settings route
+    let index = server.index("products");
+    let (response, code) = index.update_distinct_attribute(json!("test")).await;
+    assert_eq!(403, code, "{:?}", &response);
+
+    // try to create a index via create index route
+    let index = server.index("products");
+    let (response, code) = index.create(None).await;
+    assert_eq!(403, code, "{:?}", &response);
 }
@@ -120,7 +120,7 @@ async fn create_api_key_bad_indexes() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Invalid value at `.indexes[0]`: `good doggo` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
+      "message": "Invalid value at `.indexes[0]`: `good doggo` is not a valid index uid pattern. Index uid patterns can be an integer or a string containing only alphanumeric characters, hyphens (-), underscores (_), and optionally end with a star (*).",
       "code": "invalid_api_key_indexes",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes"
@@ -138,7 +138,7 @@ async fn create_api_key_bad_expires_at() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Unknown field `expires_at`: expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`",
+      "message": "Unknown field `expires_at`: did you mean `expiresAt`? expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad_request"
@@ -150,7 +150,7 @@ async fn create_api_key_bad_expires_at() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Unknown field `expires_at`: expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`",
+      "message": "Unknown field `expires_at`: did you mean `expiresAt`? expected one of `description`, `name`, `uid`, `actions`, `indexes`, `expiresAt`",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad_request"
@@ -4,6 +4,8 @@ mod errors;
 mod payload;
 mod tenant_token;
+
+mod tenant_token_multi_search;

 use actix_web::http::StatusCode;
 use serde_json::{json, Value};

@@ -82,6 +82,11 @@ static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
             "actions": ["search"],
             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
         }),
+        json!({
+            "indexes": ["sal*", "prod*"],
+            "actions": ["search"],
+            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
+        }),
     ]
 });

@@ -104,6 +109,11 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
             "actions": ["*"],
             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
         }),
+        json!({
+            "indexes": ["prod*", "p*"],
+            "actions": ["*"],
+            "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap()
+        }),
         json!({
             "indexes": ["products"],
             "actions": ["search"],
@@ -245,6 +255,10 @@ async fn search_authorized_simple_token() {
             "searchRules" => json!(["sales"]),
             "exp" => Value::Null
         },
+        hashmap! {
+            "searchRules" => json!(["sa*"]),
+            "exp" => Value::Null
+        },
     ];

     compute_authorized_search!(tenant_tokens, {}, 5);
@@ -351,11 +365,19 @@ async fn filter_search_authorized_filter_token() {
             }),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
+        hashmap! {
+            "searchRules" => json!({
+                "*": {},
+                "sal*": {"filter": ["color = blue"]}
+            }),
+            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
+        },
     ];

     compute_authorized_search!(tenant_tokens, "color = yellow", 1);
 }

 /// Tests that those Tenant Token are incompatible with the REFUSED_KEYS defined above.
 #[actix_rt::test]
 async fn error_search_token_forbidden_parent_key() {
     let tenant_tokens = vec![
@@ -383,6 +405,10 @@ async fn error_search_token_forbidden_parent_key() {
             "searchRules" => json!(["sales"]),
             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
         },
+        hashmap! {
+            "searchRules" => json!(["sali*", "s*", "sales*"]),
+            "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp())
+        },
     ];

     compute_forbidden_search!(tenant_tokens, REFUSED_KEYS);
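The new ACCEPTED_KEYS/REFUSED_KEYS entries and token rule sets exercise one property: a tenant token is honored only when each of its index rules is covered by its parent API key. A toy model of that subsumption check; the prefix logic below is a simplification for illustration, not the `meilisearch-auth` algorithm.

fn pattern_allows(key_pattern: &str, token_pattern: &str) -> bool {
    match (key_pattern.strip_suffix('*'), token_pattern.strip_suffix('*')) {
        // `sal*` covers `sales*` and `sales`, but not the wider `s*`.
        (Some(kp), Some(tp)) => tp.starts_with(kp),
        (Some(kp), None) => token_pattern.starts_with(kp),
        (None, Some(_)) => false, // an exact index never covers a pattern
        (None, None) => key_pattern == token_pattern,
    }
}

fn token_accepted(key_indexes: &[&str], token_rules: &[&str]) -> bool {
    token_rules
        .iter()
        .copied()
        .all(|rule| key_indexes.iter().copied().any(|key| pattern_allows(key, rule)))
}

fn main() {
    // A `sal*` key covers a `sales` rule...
    assert!(token_accepted(&["sal*", "prod*"], &["sales"]));
    // ...but a key without any sales access refuses it,
    assert!(!token_accepted(&["prod*", "p*"], &["sales"]));
    // and `sali*` / `s*` / `sales*` are all wider than a `products` key.
    assert!(!token_accepted(&["products"], &["sali*", "s*", "sales*"]));
}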
meilisearch/tests/auth/tenant_token_multi_search.rs: new file, 1102 lines (diff suppressed because it is too large).
@@ -103,8 +103,8 @@ impl Server {
         Index { uid: uid.as_ref().to_string(), service: &self.service, encoder }
     }

-    pub async fn search(&self, queries: Value) -> (Value, StatusCode) {
-        self.service.post("/search", queries).await
+    pub async fn multi_search(&self, queries: Value) -> (Value, StatusCode) {
+        self.service.post("/multi-search", queries).await
     }

     pub async fn list_indexes_raw(&self, parameters: &str) -> (Value, StatusCode) {

@@ -8,7 +8,7 @@ use crate::common::Server;
 async fn search_empty_list() {
     let server = Server::new().await;

-    let (response, code) = server.search(json!({"queries": []})).await;
+    let (response, code) = server.multi_search(json!({"queries": []})).await;
     snapshot!(code, @"200 OK");
     snapshot!(json_string!(response), @r###"
     {
@@ -21,7 +21,7 @@ async fn search_empty_list() {
 async fn search_json_object() {
     let server = Server::new().await;

-    let (response, code) = server.search(json!({})).await;
+    let (response, code) = server.multi_search(json!({})).await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -37,7 +37,7 @@ async fn search_json_object() {
 async fn search_json_array() {
     let server = Server::new().await;

-    let (response, code) = server.search(json!([])).await;
+    let (response, code) = server.multi_search(json!([])).await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
@@ -59,7 +59,7 @@ async fn simple_search_single_index() {
     index.wait_task(0).await;

     let (response, code) = server
-        .search(json!({"queries": [
+        .multi_search(json!({"queries": [
         {"indexUid" : "test", "q": "glass"},
         {"indexUid": "test", "q": "captain"},
         ]}))
@@ -109,7 +109,7 @@ async fn simple_search_missing_index_uid() {
     index.wait_task(0).await;

     let (response, code) = server
-        .search(json!({"queries": [
+        .multi_search(json!({"queries": [
         {"q": "glass"},
         ]}))
         .await;
@@ -134,7 +134,7 @@ async fn simple_search_illegal_index_uid() {
     index.wait_task(0).await;

     let (response, code) = server
-        .search(json!({"queries": [
+        .multi_search(json!({"queries": [
         {"indexUid": "hé", "q": "glass"},
         ]}))
         .await;
@@ -164,7 +164,7 @@ async fn simple_search_two_indexes() {
     index.wait_task(1).await;

     let (response, code) = server
-        .search(json!({"queries": [
+        .multi_search(json!({"queries": [
         {"indexUid" : "test", "q": "glass"},
         {"indexUid": "nested", "q": "pesti"},
         ]}))
@@ -241,7 +241,7 @@ async fn search_one_index_doesnt_exist() {
     index.wait_task(0).await;

     let (response, code) = server
-        .search(json!({"queries": [
+        .multi_search(json!({"queries": [
         {"indexUid" : "test", "q": "glass"},
         {"indexUid": "nested", "q": "pesti"},
         ]}))
@@ -262,7 +262,7 @@ async fn search_multiple_indexes_dont_exist() {
     let server = Server::new().await;

     let (response, code) = server
-        .search(json!({"queries": [
+        .multi_search(json!({"queries": [
         {"indexUid" : "test", "q": "glass"},
         {"indexUid": "nested", "q": "pesti"},
         ]}))
@@ -294,7 +294,7 @@ async fn search_one_query_error() {
     index.wait_task(1).await;

     let (response, code) = server
-        .search(json!({"queries": [
+        .multi_search(json!({"queries": [
         {"indexUid" : "test", "q": "glass", "facets": ["title"]},
         {"indexUid": "nested", "q": "pesti"},
         ]}))
@@ -326,7 +326,7 @@ async fn search_multiple_query_errors() {
     index.wait_task(1).await;

     let (response, code) = server
-        .search(json!({"queries": [
+        .multi_search(json!({"queries": [
         {"indexUid" : "test", "q": "glass", "facets": ["title"]},
         {"indexUid": "nested", "q": "pesti", "facets": ["doggos"]},
         ]}))
@@ -1,8 +1,15 @@
[package]
name = "milli"
version = "1.0.0"
authors = ["Kerollmops <clement@meilisearch.com>"]
edition = "2018"
publish = false

version.workspace = true
authors.workspace = true
description.workspace = true
homepage.workspace = true
readme.workspace = true
# edition.workspace = true
license.workspace = true

[dependencies]
bimap = { version = "0.6.2", features = ["serde"] }
@@ -12,7 +19,7 @@ byteorder = "1.4.3"
charabia = { version = "0.7.0", default-features = false }
concat-arrays = "0.1.2"
crossbeam-channel = "0.5.6"
deserr = "0.3.0"
deserr = "0.4.1"
either = "1.8.0"
flatten-serde-json = { path = "../flatten-serde-json" }
fst = "0.4.7"
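Note: the manifest above moves per-crate metadata onto keys inherited from the root [workspace.package] table and bumps deserr from 0.3.0 to 0.4.1; the code-side effect of that bump shows up in the settings.rs hunks further down. A small sketch, assuming only standard Cargo behavior: the inherited version is still visible to the crate at compile time.

    // Sketch: with `version.workspace = true`, Cargo still injects the
    // resolved version into the build, so the usual env var keeps working.
    fn milli_version() -> &'static str {
        env!("CARGO_PKG_VERSION") // resolves to the workspace-wide version
    }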
@@ -7,45 +7,31 @@ use serde::{Deserialize, Serialize};
use thiserror::Error;

use crate::error::is_reserved_keyword;
use crate::search::facet::BadGeoError;
use crate::{CriterionError, Error, UserError};

/// This error type is never supposed to be shown to the end user.
/// You must always cast it to a sort error or a criterion error.
#[derive(Debug)]
#[derive(Error, Debug)]
pub enum AscDescError {
    InvalidLatitude,
    InvalidLongitude,
    #[error(transparent)]
    GeoError(BadGeoError),
    #[error("Invalid syntax for the asc/desc parameter: expected expression ending by `:asc` or `:desc`, found `{name}`.")]
    InvalidSyntax { name: String },
    #[error("`{name}` is a reserved keyword and thus can't be used as a asc/desc rule.")]
    ReservedKeyword { name: String },
}

impl fmt::Display for AscDescError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Self::InvalidLatitude => {
                write!(f, "Latitude must be contained between -90 and 90 degrees.",)
            }
            Self::InvalidLongitude => {
                write!(f, "Longitude must be contained between -180 and 180 degrees.",)
            }
            Self::InvalidSyntax { name } => {
                write!(f, "Invalid syntax for the asc/desc parameter: expected expression ending by `:asc` or `:desc`, found `{}`.", name)
            }
            Self::ReservedKeyword { name } => {
                write!(
                    f,
                    "`{}` is a reserved keyword and thus can't be used as a asc/desc rule.",
                    name
                )
            }
        }
impl From<BadGeoError> for AscDescError {
    fn from(geo_error: BadGeoError) -> Self {
        AscDescError::GeoError(geo_error)
    }
}

impl From<AscDescError> for CriterionError {
    fn from(error: AscDescError) -> Self {
        match error {
            AscDescError::InvalidLatitude | AscDescError::InvalidLongitude => {
            AscDescError::GeoError(_) => {
                CriterionError::ReservedNameForSort { name: "_geoPoint".to_string() }
            }
            AscDescError::InvalidSyntax { name } => CriterionError::InvalidName { name },
@@ -85,9 +71,9 @@ impl FromStr for Member {
                    .map_err(|_| AscDescError::ReservedKeyword { name: text.to_string() })
                })?;
                if !(-90.0..=90.0).contains(&lat) {
                    return Err(AscDescError::InvalidLatitude)?;
                    return Err(BadGeoError::Lat(lat))?;
                } else if !(-180.0..=180.0).contains(&lng) {
                    return Err(AscDescError::InvalidLongitude)?;
                    return Err(BadGeoError::Lng(lng))?;
                }
                Ok(Member::Geo([lat, lng]))
            }
@@ -162,10 +148,8 @@ impl FromStr for AscDesc {

#[derive(Error, Debug)]
pub enum SortError {
    #[error("{}", AscDescError::InvalidLatitude)]
    InvalidLatitude,
    #[error("{}", AscDescError::InvalidLongitude)]
    InvalidLongitude,
    #[error(transparent)]
    ParseGeoError { error: BadGeoError },
    #[error("Invalid syntax for the geo parameter: expected expression formated like \
             `_geoPoint(latitude, longitude)` and ending by `:asc` or `:desc`, found `{name}`.")]
    BadGeoPointUsage { name: String },
@@ -184,8 +168,7 @@ pub enum SortError {
impl From<AscDescError> for SortError {
    fn from(error: AscDescError) -> Self {
        match error {
            AscDescError::InvalidLatitude => SortError::InvalidLatitude,
            AscDescError::InvalidLongitude => SortError::InvalidLongitude,
            AscDescError::GeoError(error) => SortError::ParseGeoError { error },
            AscDescError::InvalidSyntax { name } => SortError::InvalidName { name },
            AscDescError::ReservedKeyword { name } if name.starts_with("_geoPoint") => {
                SortError::BadGeoPointUsage { name }
@@ -277,11 +260,11 @@ mod tests {
            ),
            ("_geoPoint(35, 85, 75):asc", ReservedKeyword { name: S("_geoPoint(35, 85, 75)") }),
            ("_geoPoint(18):asc", ReservedKeyword { name: S("_geoPoint(18)") }),
            ("_geoPoint(200, 200):asc", InvalidLatitude),
            ("_geoPoint(90.000001, 0):asc", InvalidLatitude),
            ("_geoPoint(0, -180.000001):desc", InvalidLongitude),
            ("_geoPoint(159.256, 130):asc", InvalidLatitude),
            ("_geoPoint(12, -2021):desc", InvalidLongitude),
            ("_geoPoint(200, 200):asc", GeoError(BadGeoError::Lat(200.))),
            ("_geoPoint(90.000001, 0):asc", GeoError(BadGeoError::Lat(90.000001))),
            ("_geoPoint(0, -180.000001):desc", GeoError(BadGeoError::Lng(-180.000001))),
            ("_geoPoint(159.256, 130):asc", GeoError(BadGeoError::Lat(159.256))),
            ("_geoPoint(12, -2021):desc", GeoError(BadGeoError::Lng(-2021.))),
        ];

        for (req, expected_error) in invalid_req {
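Note: the refactor above replaces the InvalidLatitude/InvalidLongitude variants with a single GeoError(BadGeoError) and lets thiserror derive the messages, which is why the hand-written Display impl can be deleted. A sketch of the resulting conversion chain, using only types and impls shown in this diff:

    // Sketch: an out-of-range latitude becomes a BadGeoError, is wrapped by
    // the From impl above, and is only shown to users after conversion.
    let err = AscDescError::from(BadGeoError::Lat(200.0));
    assert!(matches!(err, AscDescError::GeoError(_)));

    // The #[error(transparent)] attribute forwards BadGeoError's message.
    let sort_err = SortError::from(err);
    assert!(matches!(sort_err, SortError::ParseGeoError { .. }));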
@@ -123,7 +123,7 @@ impl<'t> Criterion for Attribute<'t> {
                None => {
                    return Ok(Some(CriterionResult {
                        query_tree: Some(query_tree),
                        candidates: Some(RoaringBitmap::new()),
                        candidates: Some(allowed_candidates),
                        filtered_candidates: None,
                        initial_candidates: Some(self.initial_candidates.take()),
                    }));
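Note: the single-line change above returns allowed_candidates instead of a freshly built empty bitmap, so the attribute criterion presumably stops discarding the documents it was handed. A tiny sketch of the difference, assuming the roaring crate already used in this file:

    use roaring::RoaringBitmap;

    // Sketch: an empty bitmap tells the next criterion "no documents left",
    // while forwarding the surviving set keeps those documents rankable.
    let allowed_candidates: RoaringBitmap = (1u32..=3).collect();
    assert_eq!(RoaringBitmap::new().len(), 0); // old value: always empty
    assert_eq!(allowed_candidates.len(), 3);   // new value: candidates kept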
@@ -21,18 +21,51 @@ pub struct Filter<'a> {
    condition: FilterCondition<'a>,
}

#[derive(Debug)]
pub enum BadGeoError {
    Lat(f64),
    Lng(f64),
    BoundingBoxTopIsBelowBottom(f64, f64),
}

impl std::error::Error for BadGeoError {}

impl Display for BadGeoError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::BoundingBoxTopIsBelowBottom(top, bottom) => {
                write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`.")
            }
            Self::Lat(lat) => write!(
                f,
                "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ",
                lat
            ),
            Self::Lng(lng) => write!(
                f,
                "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ",
                lng
            ),
        }
    }
}

#[derive(Debug)]
enum FilterError<'a> {
    AttributeNotFilterable { attribute: &'a str, filterable_fields: HashSet<String> },
    BadGeo(&'a str),
    BadGeoLat(f64),
    BadGeoLng(f64),
    BadGeoBoundingBoxTopIsBelowBottom(f64, f64),
    ParseGeoError(BadGeoError),
    ReservedGeo(&'a str),
    Reserved(&'a str),
    TooDeep,
}
impl<'a> std::error::Error for FilterError<'a> {}

impl<'a> From<BadGeoError> for FilterError<'a> {
    fn from(geo_error: BadGeoError) -> Self {
        FilterError::ParseGeoError(geo_error)
    }
}

impl<'a> Display for FilterError<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
@@ -44,7 +77,11 @@ impl<'a> Display for FilterError<'a> {
                        attribute,
                    )
                } else {
                    let filterables_list = filterable_fields.iter().map(AsRef::as_ref).collect::<Vec<&str>>().join(" ");
                    let filterables_list = filterable_fields
                        .iter()
                        .map(AsRef::as_ref)
                        .collect::<Vec<&str>>()
                        .join(" ");

                    write!(
                        f,
@@ -53,20 +90,19 @@ impl<'a> Display for FilterError<'a> {
                        filterables_list,
                    )
                }
            },
            Self::TooDeep => write!(f,
            }
            Self::TooDeep => write!(
                f,
                "Too many filter conditions, can't process more than {} filters.",
                MAX_FILTER_DEPTH
            ),
            Self::ReservedGeo(keyword) => write!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.", keyword),
            Self::Reserved(keyword) => write!(
                f,
                "`{}` is a reserved keyword and thus can't be used as a filter expression.",
                keyword
            ),
            Self::BadGeo(keyword) => write!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance)` or `_geoBoundingBox([latitude, longitude], [latitude, longitude])` built-in rules to filter on `_geo` field coordinates.", keyword),
            Self::BadGeoBoundingBoxTopIsBelowBottom(top, bottom) => write!(f, "The top latitude `{top}` is below the bottom latitude `{bottom}`."),
            Self::BadGeoLat(lat) => write!(f, "Bad latitude `{}`. Latitude must be contained between -90 and 90 degrees. ", lat),
            Self::BadGeoLng(lng) => write!(f, "Bad longitude `{}`. Longitude must be contained between -180 and 180 degrees. ", lng),
            Self::ParseGeoError(error) => write!(f, "{}", error),
        }
    }
}
@@ -298,10 +334,10 @@ impl<'a> Filter<'a> {
        } else {
            match fid.value() {
                attribute @ "_geo" => {
                    Err(fid.as_external_error(FilterError::BadGeo(attribute)))?
                    Err(fid.as_external_error(FilterError::ReservedGeo(attribute)))?
                }
                attribute if attribute.starts_with("_geoPoint(") => {
                    Err(fid.as_external_error(FilterError::BadGeo("_geoPoint")))?
                    Err(fid.as_external_error(FilterError::ReservedGeo("_geoPoint")))?
                }
                attribute @ "_geoDistance" => {
                    Err(fid.as_external_error(FilterError::Reserved(attribute)))?
@@ -353,14 +389,10 @@ impl<'a> Filter<'a> {
                let base_point: [f64; 2] =
                    [point[0].parse_finite_float()?, point[1].parse_finite_float()?];
                if !(-90.0..=90.0).contains(&base_point[0]) {
                    return Err(
                        point[0].as_external_error(FilterError::BadGeoLat(base_point[0]))
                    )?;
                    return Err(point[0].as_external_error(BadGeoError::Lat(base_point[0])))?;
                }
                if !(-180.0..=180.0).contains(&base_point[1]) {
                    return Err(
                        point[1].as_external_error(FilterError::BadGeoLng(base_point[1]))
                    )?;
                    return Err(point[1].as_external_error(BadGeoError::Lng(base_point[1])))?;
                }
                let radius = radius.parse_finite_float()?;
                let rtree = match index.geo_rtree(rtxn)? {
@@ -398,27 +430,26 @@ impl<'a> Filter<'a> {
                    bottom_right_point[1].parse_finite_float()?,
                ];
                if !(-90.0..=90.0).contains(&top_left[0]) {
                    return Err(top_left_point[0]
                        .as_external_error(FilterError::BadGeoLat(top_left[0])))?;
                    return Err(
                        top_left_point[0].as_external_error(BadGeoError::Lat(top_left[0]))
                    )?;
                }
                if !(-180.0..=180.0).contains(&top_left[1]) {
                    return Err(top_left_point[1]
                        .as_external_error(FilterError::BadGeoLng(top_left[1])))?;
                    return Err(
                        top_left_point[1].as_external_error(BadGeoError::Lng(top_left[1]))
                    )?;
                }
                if !(-90.0..=90.0).contains(&bottom_right[0]) {
                    return Err(bottom_right_point[0]
                        .as_external_error(FilterError::BadGeoLat(bottom_right[0])))?;
                        .as_external_error(BadGeoError::Lat(bottom_right[0])))?;
                }
                if !(-180.0..=180.0).contains(&bottom_right[1]) {
                    return Err(bottom_right_point[1]
                        .as_external_error(FilterError::BadGeoLng(bottom_right[1])))?;
                        .as_external_error(BadGeoError::Lng(bottom_right[1])))?;
                }
                if top_left[0] < bottom_right[0] {
                    return Err(bottom_right_point[1].as_external_error(
                        FilterError::BadGeoBoundingBoxTopIsBelowBottom(
                            top_left[0],
                            bottom_right[0],
                        ),
                        BadGeoError::BoundingBoxTopIsBelowBottom(top_left[0], bottom_right[0]),
                    ))?;
                }
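Note: across the hunks above, the three ad-hoc variants BadGeoLat, BadGeoLng, and BadGeoBoundingBoxTopIsBelowBottom collapse into the single ParseGeoError(BadGeoError) variant, and the From impl lets every range check hand over a BadGeoError directly. A sketch of the funnel, usable inside this module since FilterError is private:

    // Sketch: the From impl shown above wraps any geo failure, and Display
    // forwards to BadGeoError's own message (trailing space kept verbatim
    // from the source string).
    let err = FilterError::from(BadGeoError::Lng(-2021.0));
    assert_eq!(
        err.to_string(),
        "Bad longitude `-2021`. Longitude must be contained between -180 and 180 degrees. ",
    );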
@@ -4,7 +4,7 @@ use heed::types::{ByteSlice, DecodeIgnore};
use heed::{BytesDecode, RoTxn};

pub use self::facet_distribution::{FacetDistribution, DEFAULT_VALUES_PER_FACET};
pub use self::filter::Filter;
pub use self::filter::{BadGeoError, Filter};
use crate::heed_codec::facet::{FacetGroupKeyCodec, FacetGroupValueCodec};
use crate::heed_codec::ByteSliceRefCodec;
mod facet_distribution;
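Note: this widened re-export is what enables the new import in the asc_desc.rs hunk above. A sketch of a caller relying on it:

    // Sketch: thanks to the re-export, sibling modules can name the geo error
    // without reaching into the private `filter` submodule.
    use crate::search::facet::BadGeoError;

    fn is_latitude_error(err: &BadGeoError) -> bool {
        matches!(err, BadGeoError::Lat(_))
    }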
@@ -2,7 +2,7 @@ use std::collections::{BTreeSet, HashMap, HashSet};
use std::result::Result as StdResult;

use charabia::{Tokenizer, TokenizerBuilder};
use deserr::{DeserializeError, DeserializeFromValue};
use deserr::{DeserializeError, Deserr};
use itertools::Itertools;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use time::OffsetDateTime;
@@ -23,9 +23,9 @@ pub enum Setting<T> {
    NotSet,
}

impl<T, E> DeserializeFromValue<E> for Setting<T>
impl<T, E> Deserr<E> for Setting<T>
where
    T: DeserializeFromValue<E>,
    T: Deserr<E>,
    E: DeserializeError,
{
    fn deserialize_from_value<V: deserr::IntoValue>(
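Note: this is the code-side counterpart of the deserr = "0.4.1" bump above: 0.4 renames the trait DeserializeFromValue to Deserr while keeping the deserialize_from_value entry point. A sketch of downstream usage; the derive attribute is an assumption about deserr's API, not shown in this diff:

    use deserr::Deserr;

    // Sketch: with the generic impl above, Setting<T> fields can sit inside
    // any type that derives the renamed trait.
    #[derive(Debug, Deserr)]
    struct DisplaySettings {
        displayed_attributes: Setting<Vec<String>>,
    }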
@@ -1,9 +1,16 @@
[package]
name = "permissive-json-pointer"
version = "1.0.0"
edition = "2021"
description = "A permissive json pointer"
readme = "README.md"
publish = false

version.workspace = true
authors.workspace = true
# description.workspace = true
homepage.workspace = true
# readme.workspace = true
edition.workspace = true
license.workspace = true

[dependencies]
serde_json = "1.0"